tests: fixed author for commit messages to be in a proper format.
marcink
r3840:eb39c224 default
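The "proper format" mentioned in the commit message presumably refers to the conventional VCS author string, i.e. a display name followed by an e-mail address in angle brackets. Illustrative only (the exact value used by the fixtures is not shown in this hunk):

    author = 'Test Admin <test_admin@example.com>'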
@@ -1,1218 +1,1221 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
import mock
import pytest

import rhodecode
from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib import helpers as h
from rhodecode.model.changeset_status import ChangesetStatusModel
from rhodecode.model.db import (
    PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
from rhodecode.model.meta import Session
from rhodecode.model.pull_request import PullRequestModel
from rhodecode.model.user import UserModel
from rhodecode.tests import (
    assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)


def route_path(name, params=None, **kwargs):
    import urllib

    base_url = {
        'repo_changelog': '/{repo_name}/changelog',
        'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
        'repo_commits': '/{repo_name}/commits',
        'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
        'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
        'pullrequest_show_all': '/{repo_name}/pull-request',
        'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
        'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
        'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
        'pullrequest_new': '/{repo_name}/pull-request/new',
        'pullrequest_create': '/{repo_name}/pull-request/create',
        'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
        'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
        'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
        'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
        'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
    return base_url
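# Illustrative only: with the mapping above,
#   route_path('pullrequest_merge', repo_name='some-repo', pull_request_id=1)
# resolves to '/some-repo/pull-request/1/merge', and a `params` dict, if given,
# is appended as a urlencoded query string.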


@pytest.mark.usefixtures('app', 'autologin_user')
@pytest.mark.backends("git", "hg")
class TestPullrequestsView(object):

    def test_index(self, backend):
        self.app.get(route_path(
            'pullrequest_new',
            repo_name=backend.repo_name))

    def test_option_menu_create_pull_request_exists(self, backend):
        repo_name = backend.repo_name
        response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))

        create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
            'pullrequest_new', repo_name=repo_name)
        response.mustcontain(create_pr_link)

    def test_create_pr_form_with_raw_commit_id(self, backend):
        repo = backend.repo

        self.app.get(
            route_path('pullrequest_new', repo_name=repo.repo_name,
                       commit=repo.get_commit().raw_id),
            status=200)

    @pytest.mark.parametrize('pr_merge_enabled', [True, False])
    @pytest.mark.parametrize('range_diff', ["0", "1"])
    def test_show(self, pr_util, pr_merge_enabled, range_diff):
        pull_request = pr_util.create_pull_request(
            mergeable=pr_merge_enabled, enable_notifications=False)

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id,
            params={'range-diff': range_diff}))

        for commit_id in pull_request.revisions:
            response.mustcontain(commit_id)

        assert pull_request.target_ref_parts.type in response
        assert pull_request.target_ref_parts.name in response
        target_clone_url = pull_request.target_repo.clone_url()
        assert target_clone_url in response

        assert 'class="pull-request-merge"' in response
        if pr_merge_enabled:
            response.mustcontain('Pull request reviewer approval is pending')
        else:
            response.mustcontain('Server-side pull request merging is disabled.')

        if range_diff == "1":
            response.mustcontain('Turn off: Show the diff as commit range')

    def test_close_status_visibility(self, pr_util, user_util, csrf_token):
        # Logout
        response = self.app.post(
            h.route_path('logout'),
            params={'csrf_token': csrf_token})
        # Login as regular user
        response = self.app.post(h.route_path('login'),
                                 {'username': TEST_USER_REGULAR_LOGIN,
                                  'password': 'test12'})

        pull_request = pr_util.create_pull_request(
            author=TEST_USER_REGULAR_LOGIN)

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        response.mustcontain('Server-side pull request merging is disabled.')

        assert_response = response.assert_response()
        # for a regular user without merge permissions, we don't see it
        assert_response.no_element_exists('#close-pull-request-action')

        user_util.grant_user_permission_to_repo(
            pull_request.target_repo,
            UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
            'repository.write')
        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        response.mustcontain('Server-side pull request merging is disabled.')

        assert_response = response.assert_response()
        # now the regular user has merge permissions, so we see the CLOSE button
        assert_response.one_element_exists('#close-pull-request-action')

    def test_show_invalid_commit_id(self, pr_util):
        # Simulating invalid revisions which will cause a lookup error
        pull_request = pr_util.create_pull_request()
        pull_request.revisions = ['invalid']
        Session().add(pull_request)
        Session().commit()

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        for commit_id in pull_request.revisions:
            response.mustcontain(commit_id)

    def test_show_invalid_source_reference(self, pr_util):
        pull_request = pr_util.create_pull_request()
        pull_request.source_ref = 'branch:b:invalid'
        Session().add(pull_request)
        Session().commit()

        self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

    def test_edit_title_description(self, pr_util, csrf_token):
        pull_request = pr_util.create_pull_request()
        pull_request_id = pull_request.pull_request_id

        response = self.app.post(
            route_path('pullrequest_update',
                       repo_name=pull_request.target_repo.repo_name,
                       pull_request_id=pull_request_id),
            params={
                'edit_pull_request': 'true',
                'title': 'New title',
                'description': 'New description',
                'csrf_token': csrf_token})

        assert_session_flash(
            response, u'Pull request title & description updated.',
            category='success')

        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.title == 'New title'
        assert pull_request.description == 'New description'

    def test_edit_title_description_closed(self, pr_util, csrf_token):
        pull_request = pr_util.create_pull_request()
        pull_request_id = pull_request.pull_request_id
        repo_name = pull_request.target_repo.repo_name
        pr_util.close()

        response = self.app.post(
            route_path('pullrequest_update',
                       repo_name=repo_name, pull_request_id=pull_request_id),
            params={
                'edit_pull_request': 'true',
                'title': 'New title',
                'description': 'New description',
                'csrf_token': csrf_token}, status=200)
        assert_session_flash(
            response, u'Cannot update closed pull requests.',
            category='error')

    def test_update_invalid_source_reference(self, pr_util, csrf_token):
        from rhodecode.lib.vcs.backends.base import UpdateFailureReason

        pull_request = pr_util.create_pull_request()
        pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
        Session().add(pull_request)
        Session().commit()

        pull_request_id = pull_request.pull_request_id

        response = self.app.post(
            route_path('pullrequest_update',
                       repo_name=pull_request.target_repo.repo_name,
                       pull_request_id=pull_request_id),
            params={'update_commits': 'true', 'csrf_token': csrf_token})

        expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
            UpdateFailureReason.MISSING_SOURCE_REF])
        assert_session_flash(response, expected_msg, category='error')

    def test_missing_target_reference(self, pr_util, csrf_token):
        from rhodecode.lib.vcs.backends.base import MergeFailureReason
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        unicode_reference = u'branch:invalid-branch:invalid-commit-id'
        pull_request.target_ref = unicode_reference
        Session().add(pull_request)
        Session().commit()

        pull_request_id = pull_request.pull_request_id
        pull_request_url = route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.repo_name,
            pull_request_id=pull_request_id)

        response = self.app.get(pull_request_url)
        target_ref_id = 'invalid-branch'
        merge_resp = MergeResponse(
            True, True, '', MergeFailureReason.MISSING_TARGET_REF,
            metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
        response.assert_response().element_contains(
            'span[data-role="merge-message"]', merge_resp.merge_status_message)

    def test_comment_and_close_pull_request_custom_message_approved(
            self, pr_util, csrf_token, xhr_header):

        pull_request = pr_util.create_pull_request(approved=True)
        pull_request_id = pull_request.pull_request_id
        author = pull_request.user_id
        repo = pull_request.target_repo.repo_id

        self.app.post(
            route_path('pullrequest_comment_create',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request_id),
            params={
                'close_pull_request': '1',
                'text': 'Closing a PR',
                'csrf_token': csrf_token},
            extra_environ=xhr_header,)

        journal = UserLog.query()\
            .filter(UserLog.user_id == author)\
            .filter(UserLog.repository_id == repo) \
            .order_by('user_log_id') \
            .all()
        assert journal[-1].action == 'repo.pull_request.close'

        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.is_closed()

        status = ChangesetStatusModel().get_status(
            pull_request.source_repo, pull_request=pull_request)
        assert status == ChangesetStatus.STATUS_APPROVED
        comments = ChangesetComment().query() \
            .filter(ChangesetComment.pull_request == pull_request) \
            .order_by(ChangesetComment.comment_id.asc())\
            .all()
        assert comments[-1].text == 'Closing a PR'

    def test_comment_force_close_pull_request_rejected(
            self, pr_util, csrf_token, xhr_header):
        pull_request = pr_util.create_pull_request()
        pull_request_id = pull_request.pull_request_id
        PullRequestModel().update_reviewers(
            pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
            pull_request.author)
        author = pull_request.user_id
        repo = pull_request.target_repo.repo_id

        self.app.post(
            route_path('pullrequest_comment_create',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request_id),
            params={
                'close_pull_request': '1',
                'csrf_token': csrf_token},
            extra_environ=xhr_header)

        pull_request = PullRequest.get(pull_request_id)

        journal = UserLog.query()\
            .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
            .order_by('user_log_id') \
            .all()
        assert journal[-1].action == 'repo.pull_request.close'

        # check only the latest status, not the review status
        status = ChangesetStatusModel().get_status(
            pull_request.source_repo, pull_request=pull_request)
        assert status == ChangesetStatus.STATUS_REJECTED

    def test_comment_and_close_pull_request(
            self, pr_util, csrf_token, xhr_header):
        pull_request = pr_util.create_pull_request()
        pull_request_id = pull_request.pull_request_id

        response = self.app.post(
            route_path('pullrequest_comment_create',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request.pull_request_id),
            params={
                'close_pull_request': 'true',
                'csrf_token': csrf_token},
            extra_environ=xhr_header)

        assert response.json

        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.is_closed()

        # check only the latest status, not the review status
        status = ChangesetStatusModel().get_status(
            pull_request.source_repo, pull_request=pull_request)
        assert status == ChangesetStatus.STATUS_REJECTED

    def test_create_pull_request(self, backend, csrf_token):
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'change2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change2'])

        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change2']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '1'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('revisions', commit_ids['change2']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)
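        # Illustrative note: the '__start__'/'__end__' tuples above are
        # peppercorn-style sequence/mapping delimiters, which the create view
        # presumably uses to parse this flat list of form fields back into
        # nested structures (the review_members sequence of reviewer mappings
        # and the revisions sequence).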
399
399
400 location = response.headers['Location']
400 location = response.headers['Location']
401 pull_request_id = location.rsplit('/', 1)[1]
401 pull_request_id = location.rsplit('/', 1)[1]
402 assert pull_request_id != 'new'
402 assert pull_request_id != 'new'
403 pull_request = PullRequest.get(int(pull_request_id))
403 pull_request = PullRequest.get(int(pull_request_id))
404
404
405 # check that we have now both revisions
405 # check that we have now both revisions
406 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
406 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
407 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
407 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
408 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
408 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
409 assert pull_request.target_ref == expected_target_ref
409 assert pull_request.target_ref == expected_target_ref
410
410
411 def test_reviewer_notifications(self, backend, csrf_token):
411 def test_reviewer_notifications(self, backend, csrf_token):
412 # We have to use the app.post for this test so it will create the
412 # We have to use the app.post for this test so it will create the
413 # notifications properly with the new PR
413 # notifications properly with the new PR
414 commits = [
414 commits = [
415 {'message': 'ancestor',
415 {'message': 'ancestor',
416 'added': [FileNode('file_A', content='content_of_ancestor')]},
416 'added': [FileNode('file_A', content='content_of_ancestor')]},
417 {'message': 'change',
417 {'message': 'change',
418 'added': [FileNode('file_a', content='content_of_change')]},
418 'added': [FileNode('file_a', content='content_of_change')]},
419 {'message': 'change-child'},
419 {'message': 'change-child'},
420 {'message': 'ancestor-child', 'parents': ['ancestor'],
420 {'message': 'ancestor-child', 'parents': ['ancestor'],
421 'added': [
421 'added': [
422 FileNode('file_B', content='content_of_ancestor_child')]},
422 FileNode('file_B', content='content_of_ancestor_child')]},
423 {'message': 'ancestor-child-2'},
423 {'message': 'ancestor-child-2'},
424 ]
424 ]
425 commit_ids = backend.create_master_repo(commits)
425 commit_ids = backend.create_master_repo(commits)
426 target = backend.create_repo(heads=['ancestor-child'])
426 target = backend.create_repo(heads=['ancestor-child'])
427 source = backend.create_repo(heads=['change'])
427 source = backend.create_repo(heads=['change'])
428
428
429 response = self.app.post(
429 response = self.app.post(
430 route_path('pullrequest_create', repo_name=source.repo_name),
430 route_path('pullrequest_create', repo_name=source.repo_name),
431 [
431 [
432 ('source_repo', source.repo_name),
432 ('source_repo', source.repo_name),
433 ('source_ref', 'branch:default:' + commit_ids['change']),
433 ('source_ref', 'branch:default:' + commit_ids['change']),
434 ('target_repo', target.repo_name),
434 ('target_repo', target.repo_name),
435 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
435 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
436 ('common_ancestor', commit_ids['ancestor']),
436 ('common_ancestor', commit_ids['ancestor']),
437 ('pullrequest_title', 'Title'),
437 ('pullrequest_title', 'Title'),
438 ('pullrequest_desc', 'Description'),
438 ('pullrequest_desc', 'Description'),
439 ('description_renderer', 'markdown'),
439 ('description_renderer', 'markdown'),
440 ('__start__', 'review_members:sequence'),
440 ('__start__', 'review_members:sequence'),
441 ('__start__', 'reviewer:mapping'),
441 ('__start__', 'reviewer:mapping'),
442 ('user_id', '2'),
442 ('user_id', '2'),
443 ('__start__', 'reasons:sequence'),
443 ('__start__', 'reasons:sequence'),
444 ('reason', 'Some reason'),
444 ('reason', 'Some reason'),
445 ('__end__', 'reasons:sequence'),
445 ('__end__', 'reasons:sequence'),
446 ('__start__', 'rules:sequence'),
446 ('__start__', 'rules:sequence'),
447 ('__end__', 'rules:sequence'),
447 ('__end__', 'rules:sequence'),
448 ('mandatory', 'False'),
448 ('mandatory', 'False'),
449 ('__end__', 'reviewer:mapping'),
449 ('__end__', 'reviewer:mapping'),
450 ('__end__', 'review_members:sequence'),
450 ('__end__', 'review_members:sequence'),
451 ('__start__', 'revisions:sequence'),
451 ('__start__', 'revisions:sequence'),
452 ('revisions', commit_ids['change']),
452 ('revisions', commit_ids['change']),
453 ('__end__', 'revisions:sequence'),
453 ('__end__', 'revisions:sequence'),
454 ('user', ''),
454 ('user', ''),
455 ('csrf_token', csrf_token),
455 ('csrf_token', csrf_token),
456 ],
456 ],
457 status=302)
457 status=302)
458
458
459 location = response.headers['Location']
459 location = response.headers['Location']
460
460
461 pull_request_id = location.rsplit('/', 1)[1]
461 pull_request_id = location.rsplit('/', 1)[1]
462 assert pull_request_id != 'new'
462 assert pull_request_id != 'new'
463 pull_request = PullRequest.get(int(pull_request_id))
463 pull_request = PullRequest.get(int(pull_request_id))
464
464
465 # Check that a notification was made
465 # Check that a notification was made
466 notifications = Notification.query()\
466 notifications = Notification.query()\
467 .filter(Notification.created_by == pull_request.author.user_id,
467 .filter(Notification.created_by == pull_request.author.user_id,
468 Notification.type_ == Notification.TYPE_PULL_REQUEST,
468 Notification.type_ == Notification.TYPE_PULL_REQUEST,
469 Notification.subject.contains(
469 Notification.subject.contains(
470 "wants you to review pull request #%s" % pull_request_id))
470 "wants you to review pull request #%s" % pull_request_id))
471 assert len(notifications.all()) == 1
471 assert len(notifications.all()) == 1
472
472
473 # Change reviewers and check that a notification was made
473 # Change reviewers and check that a notification was made
474 PullRequestModel().update_reviewers(
474 PullRequestModel().update_reviewers(
475 pull_request.pull_request_id, [(1, [], False, [])],
475 pull_request.pull_request_id, [(1, [], False, [])],
476 pull_request.author)
476 pull_request.author)
477 assert len(notifications.all()) == 2
477 assert len(notifications.all()) == 2
478
478
479 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
479 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
480 csrf_token):
480 csrf_token):
481 commits = [
481 commits = [
482 {'message': 'ancestor',
482 {'message': 'ancestor',
483 'added': [FileNode('file_A', content='content_of_ancestor')]},
483 'added': [FileNode('file_A', content='content_of_ancestor')]},
484 {'message': 'change',
484 {'message': 'change',
485 'added': [FileNode('file_a', content='content_of_change')]},
485 'added': [FileNode('file_a', content='content_of_change')]},
486 {'message': 'change-child'},
486 {'message': 'change-child'},
487 {'message': 'ancestor-child', 'parents': ['ancestor'],
487 {'message': 'ancestor-child', 'parents': ['ancestor'],
488 'added': [
488 'added': [
489 FileNode('file_B', content='content_of_ancestor_child')]},
489 FileNode('file_B', content='content_of_ancestor_child')]},
490 {'message': 'ancestor-child-2'},
490 {'message': 'ancestor-child-2'},
491 ]
491 ]
492 commit_ids = backend.create_master_repo(commits)
492 commit_ids = backend.create_master_repo(commits)
493 target = backend.create_repo(heads=['ancestor-child'])
493 target = backend.create_repo(heads=['ancestor-child'])
494 source = backend.create_repo(heads=['change'])
494 source = backend.create_repo(heads=['change'])
495
495
496 response = self.app.post(
496 response = self.app.post(
497 route_path('pullrequest_create', repo_name=source.repo_name),
497 route_path('pullrequest_create', repo_name=source.repo_name),
498 [
498 [
499 ('source_repo', source.repo_name),
499 ('source_repo', source.repo_name),
500 ('source_ref', 'branch:default:' + commit_ids['change']),
500 ('source_ref', 'branch:default:' + commit_ids['change']),
501 ('target_repo', target.repo_name),
501 ('target_repo', target.repo_name),
502 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
502 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
503 ('common_ancestor', commit_ids['ancestor']),
503 ('common_ancestor', commit_ids['ancestor']),
504 ('pullrequest_title', 'Title'),
504 ('pullrequest_title', 'Title'),
505 ('pullrequest_desc', 'Description'),
505 ('pullrequest_desc', 'Description'),
506 ('description_renderer', 'markdown'),
506 ('description_renderer', 'markdown'),
507 ('__start__', 'review_members:sequence'),
507 ('__start__', 'review_members:sequence'),
508 ('__start__', 'reviewer:mapping'),
508 ('__start__', 'reviewer:mapping'),
509 ('user_id', '1'),
509 ('user_id', '1'),
510 ('__start__', 'reasons:sequence'),
510 ('__start__', 'reasons:sequence'),
511 ('reason', 'Some reason'),
511 ('reason', 'Some reason'),
512 ('__end__', 'reasons:sequence'),
512 ('__end__', 'reasons:sequence'),
513 ('__start__', 'rules:sequence'),
513 ('__start__', 'rules:sequence'),
514 ('__end__', 'rules:sequence'),
514 ('__end__', 'rules:sequence'),
515 ('mandatory', 'False'),
515 ('mandatory', 'False'),
516 ('__end__', 'reviewer:mapping'),
516 ('__end__', 'reviewer:mapping'),
517 ('__end__', 'review_members:sequence'),
517 ('__end__', 'review_members:sequence'),
518 ('__start__', 'revisions:sequence'),
518 ('__start__', 'revisions:sequence'),
519 ('revisions', commit_ids['change']),
519 ('revisions', commit_ids['change']),
520 ('__end__', 'revisions:sequence'),
520 ('__end__', 'revisions:sequence'),
521 ('user', ''),
521 ('user', ''),
522 ('csrf_token', csrf_token),
522 ('csrf_token', csrf_token),
523 ],
523 ],
524 status=302)
524 status=302)
525
525
526 location = response.headers['Location']
526 location = response.headers['Location']
527
527
528 pull_request_id = location.rsplit('/', 1)[1]
528 pull_request_id = location.rsplit('/', 1)[1]
529 assert pull_request_id != 'new'
529 assert pull_request_id != 'new'
530 pull_request = PullRequest.get(int(pull_request_id))
530 pull_request = PullRequest.get(int(pull_request_id))
531
531
532 # target_ref has to point to the ancestor's commit_id in order to
532 # target_ref has to point to the ancestor's commit_id in order to
533 # show the correct diff
533 # show the correct diff
534 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
534 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
535 assert pull_request.target_ref == expected_target_ref
535 assert pull_request.target_ref == expected_target_ref
536
536
537 # Check generated diff contents
537 # Check generated diff contents
538 response = response.follow()
538 response = response.follow()
539 assert 'content_of_ancestor' not in response.body
539 assert 'content_of_ancestor' not in response.body
540 assert 'content_of_ancestor-child' not in response.body
540 assert 'content_of_ancestor-child' not in response.body
541 assert 'content_of_change' in response.body
541 assert 'content_of_change' in response.body
542
542
543 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
543 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
544 # Clear any previous calls to rcextensions
544 # Clear any previous calls to rcextensions
545 rhodecode.EXTENSIONS.calls.clear()
545 rhodecode.EXTENSIONS.calls.clear()
546
546
547 pull_request = pr_util.create_pull_request(
547 pull_request = pr_util.create_pull_request(
548 approved=True, mergeable=True)
548 approved=True, mergeable=True)
549 pull_request_id = pull_request.pull_request_id
549 pull_request_id = pull_request.pull_request_id
550 repo_name = pull_request.target_repo.scm_instance().name,
550 repo_name = pull_request.target_repo.scm_instance().name,
551
551
552 response = self.app.post(
552 response = self.app.post(
553 route_path('pullrequest_merge',
553 route_path('pullrequest_merge',
554 repo_name=str(repo_name[0]),
554 repo_name=str(repo_name[0]),
555 pull_request_id=pull_request_id),
555 pull_request_id=pull_request_id),
556 params={'csrf_token': csrf_token}).follow()
556 params={'csrf_token': csrf_token}).follow()
557
557
558 pull_request = PullRequest.get(pull_request_id)
558 pull_request = PullRequest.get(pull_request_id)
559
559
560 assert response.status_int == 200
560 assert response.status_int == 200
561 assert pull_request.is_closed()
561 assert pull_request.is_closed()
562 assert_pull_request_status(
562 assert_pull_request_status(
563 pull_request, ChangesetStatus.STATUS_APPROVED)
563 pull_request, ChangesetStatus.STATUS_APPROVED)
564
564
565 # Check the relevant log entries were added
565 # Check the relevant log entries were added
566 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
566 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
567 actions = [log.action for log in user_logs]
567 actions = [log.action for log in user_logs]
568 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
568 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
569 expected_actions = [
569 expected_actions = [
570 u'repo.pull_request.close',
570 u'repo.pull_request.close',
571 u'repo.pull_request.merge',
571 u'repo.pull_request.merge',
572 u'repo.pull_request.comment.create'
572 u'repo.pull_request.comment.create'
573 ]
573 ]
574 assert actions == expected_actions
574 assert actions == expected_actions
575
575
576 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
576 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
577 actions = [log for log in user_logs]
577 actions = [log for log in user_logs]
578 assert actions[-1].action == 'user.push'
578 assert actions[-1].action == 'user.push'
579 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
579 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
580
580
581 # Check post_push rcextension was really executed
581 # Check post_push rcextension was really executed
582 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
582 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
583 assert len(push_calls) == 1
583 assert len(push_calls) == 1
584 unused_last_call_args, last_call_kwargs = push_calls[0]
584 unused_last_call_args, last_call_kwargs = push_calls[0]
585 assert last_call_kwargs['action'] == 'push'
585 assert last_call_kwargs['action'] == 'push'
586 assert last_call_kwargs['commit_ids'] == pr_commit_ids
586 assert last_call_kwargs['commit_ids'] == pr_commit_ids
587
587
588 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
588 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
589 pull_request = pr_util.create_pull_request(mergeable=False)
589 pull_request = pr_util.create_pull_request(mergeable=False)
590 pull_request_id = pull_request.pull_request_id
590 pull_request_id = pull_request.pull_request_id
591 pull_request = PullRequest.get(pull_request_id)
591 pull_request = PullRequest.get(pull_request_id)
592
592
593 response = self.app.post(
593 response = self.app.post(
594 route_path('pullrequest_merge',
594 route_path('pullrequest_merge',
595 repo_name=pull_request.target_repo.scm_instance().name,
595 repo_name=pull_request.target_repo.scm_instance().name,
596 pull_request_id=pull_request.pull_request_id),
596 pull_request_id=pull_request.pull_request_id),
597 params={'csrf_token': csrf_token}).follow()
597 params={'csrf_token': csrf_token}).follow()
598
598
599 assert response.status_int == 200
599 assert response.status_int == 200
600 response.mustcontain(
600 response.mustcontain(
601 'Merge is not currently possible because of below failed checks.')
601 'Merge is not currently possible because of below failed checks.')
602 response.mustcontain('Server-side pull request merging is disabled.')
602 response.mustcontain('Server-side pull request merging is disabled.')
603
603
604 @pytest.mark.skip_backends('svn')
604 @pytest.mark.skip_backends('svn')
605 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
605 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
606 pull_request = pr_util.create_pull_request(mergeable=True)
606 pull_request = pr_util.create_pull_request(mergeable=True)
607 pull_request_id = pull_request.pull_request_id
607 pull_request_id = pull_request.pull_request_id
608 repo_name = pull_request.target_repo.scm_instance().name
608 repo_name = pull_request.target_repo.scm_instance().name
609
609
610 response = self.app.post(
610 response = self.app.post(
611 route_path('pullrequest_merge',
611 route_path('pullrequest_merge',
612 repo_name=repo_name, pull_request_id=pull_request_id),
612 repo_name=repo_name, pull_request_id=pull_request_id),
613 params={'csrf_token': csrf_token}).follow()
613 params={'csrf_token': csrf_token}).follow()
614
614
615 assert response.status_int == 200
615 assert response.status_int == 200
616
616
617 response.mustcontain(
617 response.mustcontain(
618 'Merge is not currently possible because of below failed checks.')
618 'Merge is not currently possible because of below failed checks.')
619 response.mustcontain('Pull request reviewer approval is pending.')
619 response.mustcontain('Pull request reviewer approval is pending.')
620
620
621 def test_merge_pull_request_renders_failure_reason(
621 def test_merge_pull_request_renders_failure_reason(
622 self, user_regular, csrf_token, pr_util):
622 self, user_regular, csrf_token, pr_util):
623 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
623 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
624 pull_request_id = pull_request.pull_request_id
624 pull_request_id = pull_request.pull_request_id
625 repo_name = pull_request.target_repo.scm_instance().name
625 repo_name = pull_request.target_repo.scm_instance().name
626
626
627 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
627 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
628 MergeFailureReason.PUSH_FAILED,
628 MergeFailureReason.PUSH_FAILED,
629 metadata={'target': 'shadow repo',
629 metadata={'target': 'shadow repo',
630 'merge_commit': 'xxx'})
630 'merge_commit': 'xxx'})
631 model_patcher = mock.patch.multiple(
631 model_patcher = mock.patch.multiple(
632 PullRequestModel,
632 PullRequestModel,
633 merge_repo=mock.Mock(return_value=merge_resp),
633 merge_repo=mock.Mock(return_value=merge_resp),
634 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
634 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
635
635
636 with model_patcher:
636 with model_patcher:
637 response = self.app.post(
637 response = self.app.post(
638 route_path('pullrequest_merge',
638 route_path('pullrequest_merge',
639 repo_name=repo_name,
639 repo_name=repo_name,
640 pull_request_id=pull_request_id),
640 pull_request_id=pull_request_id),
641 params={'csrf_token': csrf_token}, status=302)
641 params={'csrf_token': csrf_token}, status=302)
642
642
643 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
643 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
644 metadata={'target': 'shadow repo',
644 metadata={'target': 'shadow repo',
645 'merge_commit': 'xxx'})
645 'merge_commit': 'xxx'})
646 assert_session_flash(response, merge_resp.merge_status_message)
646 assert_session_flash(response, merge_resp.merge_status_message)
647
647
648 def test_update_source_revision(self, backend, csrf_token):
648 def test_update_source_revision(self, backend, csrf_token):
649 commits = [
649 commits = [
650 {'message': 'ancestor'},
650 {'message': 'ancestor'},
651 {'message': 'change'},
651 {'message': 'change'},
652 {'message': 'change-2'},
652 {'message': 'change-2'},
653 ]
653 ]
654 commit_ids = backend.create_master_repo(commits)
654 commit_ids = backend.create_master_repo(commits)
655 target = backend.create_repo(heads=['ancestor'])
655 target = backend.create_repo(heads=['ancestor'])
656 source = backend.create_repo(heads=['change'])
656 source = backend.create_repo(heads=['change'])
657
657
658 # create pr from a in source to A in target
658 # create pr from a in source to A in target
659 pull_request = PullRequest()
659 pull_request = PullRequest()
660
660
661 pull_request.source_repo = source
661 pull_request.source_repo = source
662 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
662 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
663 branch=backend.default_branch_name, commit_id=commit_ids['change'])
663 branch=backend.default_branch_name, commit_id=commit_ids['change'])
664
664
665 pull_request.target_repo = target
665 pull_request.target_repo = target
666 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
666 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
667 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
667 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
668
668
669 pull_request.revisions = [commit_ids['change']]
669 pull_request.revisions = [commit_ids['change']]
670 pull_request.title = u"Test"
670 pull_request.title = u"Test"
671 pull_request.description = u"Description"
671 pull_request.description = u"Description"
672 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
672 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
673 pull_request.pull_request_state = PullRequest.STATE_CREATED
673 pull_request.pull_request_state = PullRequest.STATE_CREATED
674 Session().add(pull_request)
674 Session().add(pull_request)
675 Session().commit()
675 Session().commit()
676 pull_request_id = pull_request.pull_request_id
676 pull_request_id = pull_request.pull_request_id
677
677
678 # source has ancestor - change - change-2
678 # source has ancestor - change - change-2
679 backend.pull_heads(source, heads=['change-2'])
679 backend.pull_heads(source, heads=['change-2'])
680
680
681 # update PR
681 # update PR
682 self.app.post(
682 self.app.post(
683 route_path('pullrequest_update',
683 route_path('pullrequest_update',
684 repo_name=target.repo_name, pull_request_id=pull_request_id),
684 repo_name=target.repo_name, pull_request_id=pull_request_id),
685 params={'update_commits': 'true', 'csrf_token': csrf_token})
685 params={'update_commits': 'true', 'csrf_token': csrf_token})
686
686
687 response = self.app.get(
687 response = self.app.get(
688 route_path('pullrequest_show',
688 route_path('pullrequest_show',
689 repo_name=target.repo_name,
689 repo_name=target.repo_name,
690 pull_request_id=pull_request.pull_request_id))
690 pull_request_id=pull_request.pull_request_id))
691
691
692 assert response.status_int == 200
692 assert response.status_int == 200
693 assert 'Pull request updated to' in response.body
693 assert 'Pull request updated to' in response.body
694 assert 'with 1 added, 0 removed commits.' in response.body
694 assert 'with 1 added, 0 removed commits.' in response.body
695
695
696 # check that we have now both revisions
696 # check that we have now both revisions
697 pull_request = PullRequest.get(pull_request_id)
697 pull_request = PullRequest.get(pull_request_id)
698 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
698 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
699
699
700 def test_update_target_revision(self, backend, csrf_token):
700 def test_update_target_revision(self, backend, csrf_token):
701 commits = [
701 commits = [
702 {'message': 'ancestor'},
702 {'message': 'ancestor'},
703 {'message': 'change'},
703 {'message': 'change'},
704 {'message': 'ancestor-new', 'parents': ['ancestor']},
704 {'message': 'ancestor-new', 'parents': ['ancestor']},
705 {'message': 'change-rebased'},
705 {'message': 'change-rebased'},
706 ]
706 ]
707 commit_ids = backend.create_master_repo(commits)
707 commit_ids = backend.create_master_repo(commits)
708 target = backend.create_repo(heads=['ancestor'])
708 target = backend.create_repo(heads=['ancestor'])
709 source = backend.create_repo(heads=['change'])
709 source = backend.create_repo(heads=['change'])
710
710
711 # create pr from a in source to A in target
711 # create pr from a in source to A in target
712 pull_request = PullRequest()
712 pull_request = PullRequest()
713
713
714 pull_request.source_repo = source
714 pull_request.source_repo = source
715 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
715 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
716 branch=backend.default_branch_name, commit_id=commit_ids['change'])
716 branch=backend.default_branch_name, commit_id=commit_ids['change'])
717
717
718 pull_request.target_repo = target
718 pull_request.target_repo = target
719 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
719 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
720 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
720 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
721
721
722 pull_request.revisions = [commit_ids['change']]
722 pull_request.revisions = [commit_ids['change']]
723 pull_request.title = u"Test"
723 pull_request.title = u"Test"
724 pull_request.description = u"Description"
724 pull_request.description = u"Description"
725 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
725 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
726 pull_request.pull_request_state = PullRequest.STATE_CREATED
726 pull_request.pull_request_state = PullRequest.STATE_CREATED
727
727
728 Session().add(pull_request)
728 Session().add(pull_request)
729 Session().commit()
729 Session().commit()
730 pull_request_id = pull_request.pull_request_id
730 pull_request_id = pull_request.pull_request_id
731
731
732 # target has ancestor - ancestor-new
732 # target has ancestor - ancestor-new
733 # source has ancestor - ancestor-new - change-rebased
733 # source has ancestor - ancestor-new - change-rebased
734 backend.pull_heads(target, heads=['ancestor-new'])
734 backend.pull_heads(target, heads=['ancestor-new'])
735 backend.pull_heads(source, heads=['change-rebased'])
735 backend.pull_heads(source, heads=['change-rebased'])
736
736
737 # update PR
737 # update PR
738 self.app.post(
738 self.app.post(
739 route_path('pullrequest_update',
739 route_path('pullrequest_update',
740 repo_name=target.repo_name,
740 repo_name=target.repo_name,
741 pull_request_id=pull_request_id),
741 pull_request_id=pull_request_id),
742 params={'update_commits': 'true', 'csrf_token': csrf_token},
742 params={'update_commits': 'true', 'csrf_token': csrf_token},
743 status=200)
743 status=200)
744
744
745 # check that the revisions and the target ref were updated
745 # check that the revisions and the target ref were updated
746 pull_request = PullRequest.get(pull_request_id)
746 pull_request = PullRequest.get(pull_request_id)
747 assert pull_request.revisions == [commit_ids['change-rebased']]
747 assert pull_request.revisions == [commit_ids['change-rebased']]
748 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
748 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
749 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
749 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
750
750
751 response = self.app.get(
751 response = self.app.get(
752 route_path('pullrequest_show',
752 route_path('pullrequest_show',
753 repo_name=target.repo_name,
753 repo_name=target.repo_name,
754 pull_request_id=pull_request.pull_request_id))
754 pull_request_id=pull_request.pull_request_id))
755 assert response.status_int == 200
755 assert response.status_int == 200
756 assert 'Pull request updated to' in response.body
756 assert 'Pull request updated to' in response.body
757 assert 'with 1 added, 1 removed commits.' in response.body
757 assert 'with 1 added, 1 removed commits.' in response.body
758
758
759 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
759 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
760 backend = backend_git
760 backend = backend_git
761 commits = [
761 commits = [
762 {'message': 'master-commit-1'},
762 {'message': 'master-commit-1'},
763 {'message': 'master-commit-2-change-1'},
763 {'message': 'master-commit-2-change-1'},
764 {'message': 'master-commit-3-change-2'},
764 {'message': 'master-commit-3-change-2'},
765
765
766 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
766 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
767 {'message': 'feat-commit-2'},
767 {'message': 'feat-commit-2'},
768 ]
768 ]
769 commit_ids = backend.create_master_repo(commits)
769 commit_ids = backend.create_master_repo(commits)
770 target = backend.create_repo(heads=['master-commit-3-change-2'])
770 target = backend.create_repo(heads=['master-commit-3-change-2'])
771 source = backend.create_repo(heads=['feat-commit-2'])
771 source = backend.create_repo(heads=['feat-commit-2'])
772
772
773 # create a pull request from source into target
773 # create a pull request from source into target
774 pull_request = PullRequest()
774 pull_request = PullRequest()
775 pull_request.source_repo = source
775 pull_request.source_repo = source
776
776
777 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
777 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
778 branch=backend.default_branch_name,
778 branch=backend.default_branch_name,
779 commit_id=commit_ids['master-commit-3-change-2'])
779 commit_id=commit_ids['master-commit-3-change-2'])
780
780
781 pull_request.target_repo = target
781 pull_request.target_repo = target
782 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
782 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
783 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
783 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
784
784
785 pull_request.revisions = [
785 pull_request.revisions = [
786 commit_ids['feat-commit-1'],
786 commit_ids['feat-commit-1'],
787 commit_ids['feat-commit-2']
787 commit_ids['feat-commit-2']
788 ]
788 ]
789 pull_request.title = u"Test"
789 pull_request.title = u"Test"
790 pull_request.description = u"Description"
790 pull_request.description = u"Description"
791 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
791 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
792 pull_request.pull_request_state = PullRequest.STATE_CREATED
792 pull_request.pull_request_state = PullRequest.STATE_CREATED
793 Session().add(pull_request)
793 Session().add(pull_request)
794 Session().commit()
794 Session().commit()
795 pull_request_id = pull_request.pull_request_id
795 pull_request_id = pull_request.pull_request_id
796
796
797 # PR is created, now we simulate a force-push into target,
797 # PR is created, now we simulate a force-push into target,
798 # that drops the last 2 commits
798 # that drops the last 2 commits
799 vcsrepo = target.scm_instance()
799 vcsrepo = target.scm_instance()
800 vcsrepo.config.clear_section('hooks')
800 vcsrepo.config.clear_section('hooks')
801 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
801 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
802
802
803 # update PR
803 # update PR
804 self.app.post(
804 self.app.post(
805 route_path('pullrequest_update',
805 route_path('pullrequest_update',
806 repo_name=target.repo_name,
806 repo_name=target.repo_name,
807 pull_request_id=pull_request_id),
807 pull_request_id=pull_request_id),
808 params={'update_commits': 'true', 'csrf_token': csrf_token},
808 params={'update_commits': 'true', 'csrf_token': csrf_token},
809 status=200)
809 status=200)
810
810
811 response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
811 response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
812 assert response.status_int == 200
812 assert response.status_int == 200
813 response.mustcontain('Pull request updated to')
813 response.mustcontain('Pull request updated to')
814 response.mustcontain('with 0 added, 0 removed commits.')
814 response.mustcontain('with 0 added, 0 removed commits.')
815
815
816 def test_update_of_ancestor_reference(self, backend, csrf_token):
816 def test_update_of_ancestor_reference(self, backend, csrf_token):
817 commits = [
817 commits = [
818 {'message': 'ancestor'},
818 {'message': 'ancestor'},
819 {'message': 'change'},
819 {'message': 'change'},
820 {'message': 'change-2'},
820 {'message': 'change-2'},
821 {'message': 'ancestor-new', 'parents': ['ancestor']},
821 {'message': 'ancestor-new', 'parents': ['ancestor']},
822 {'message': 'change-rebased'},
822 {'message': 'change-rebased'},
823 ]
823 ]
824 commit_ids = backend.create_master_repo(commits)
824 commit_ids = backend.create_master_repo(commits)
825 target = backend.create_repo(heads=['ancestor'])
825 target = backend.create_repo(heads=['ancestor'])
826 source = backend.create_repo(heads=['change'])
826 source = backend.create_repo(heads=['change'])
827
827
828 # create a pull request from source into target
828 # create a pull request from source into target
829 pull_request = PullRequest()
829 pull_request = PullRequest()
830 pull_request.source_repo = source
830 pull_request.source_repo = source
831
831
832 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
832 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
833 branch=backend.default_branch_name, commit_id=commit_ids['change'])
833 branch=backend.default_branch_name, commit_id=commit_ids['change'])
834 pull_request.target_repo = target
834 pull_request.target_repo = target
835 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
835 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
836 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
836 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
837 pull_request.revisions = [commit_ids['change']]
837 pull_request.revisions = [commit_ids['change']]
838 pull_request.title = u"Test"
838 pull_request.title = u"Test"
839 pull_request.description = u"Description"
839 pull_request.description = u"Description"
840 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
840 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
841 pull_request.pull_request_state = PullRequest.STATE_CREATED
841 pull_request.pull_request_state = PullRequest.STATE_CREATED
842 Session().add(pull_request)
842 Session().add(pull_request)
843 Session().commit()
843 Session().commit()
844 pull_request_id = pull_request.pull_request_id
844 pull_request_id = pull_request.pull_request_id
845
845
846 # target has ancestor - ancestor-new
846 # target has ancestor - ancestor-new
847 # source has ancestor - ancestor-new - change-rebased
847 # source has ancestor - ancestor-new - change-rebased
848 backend.pull_heads(target, heads=['ancestor-new'])
848 backend.pull_heads(target, heads=['ancestor-new'])
849 backend.pull_heads(source, heads=['change-rebased'])
849 backend.pull_heads(source, heads=['change-rebased'])
850
850
851 # update PR
851 # update PR
852 self.app.post(
852 self.app.post(
853 route_path('pullrequest_update',
853 route_path('pullrequest_update',
854 repo_name=target.repo_name, pull_request_id=pull_request_id),
854 repo_name=target.repo_name, pull_request_id=pull_request_id),
855 params={'update_commits': 'true', 'csrf_token': csrf_token},
855 params={'update_commits': 'true', 'csrf_token': csrf_token},
856 status=200)
856 status=200)
857
857
858 # Expect the target reference to be updated correctly
858 # Expect the target reference to be updated correctly
859 pull_request = PullRequest.get(pull_request_id)
859 pull_request = PullRequest.get(pull_request_id)
860 assert pull_request.revisions == [commit_ids['change-rebased']]
860 assert pull_request.revisions == [commit_ids['change-rebased']]
861 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
861 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
862 branch=backend.default_branch_name,
862 branch=backend.default_branch_name,
863 commit_id=commit_ids['ancestor-new'])
863 commit_id=commit_ids['ancestor-new'])
864 assert pull_request.target_ref == expected_target_ref
864 assert pull_request.target_ref == expected_target_ref
865
865
866 def test_remove_pull_request_branch(self, backend_git, csrf_token):
866 def test_remove_pull_request_branch(self, backend_git, csrf_token):
867 branch_name = 'development'
867 branch_name = 'development'
868 commits = [
868 commits = [
869 {'message': 'initial-commit'},
869 {'message': 'initial-commit'},
870 {'message': 'old-feature'},
870 {'message': 'old-feature'},
871 {'message': 'new-feature', 'branch': branch_name},
871 {'message': 'new-feature', 'branch': branch_name},
872 ]
872 ]
873 repo = backend_git.create_repo(commits)
873 repo = backend_git.create_repo(commits)
874 repo_name = repo.repo_name
874 commit_ids = backend_git.commit_ids
875 commit_ids = backend_git.commit_ids
875
876
876 pull_request = PullRequest()
877 pull_request = PullRequest()
877 pull_request.source_repo = repo
878 pull_request.source_repo = repo
878 pull_request.target_repo = repo
879 pull_request.target_repo = repo
879 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
880 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
880 branch=branch_name, commit_id=commit_ids['new-feature'])
881 branch=branch_name, commit_id=commit_ids['new-feature'])
881 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
882 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
882 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
883 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
883 pull_request.revisions = [commit_ids['new-feature']]
884 pull_request.revisions = [commit_ids['new-feature']]
884 pull_request.title = u"Test"
885 pull_request.title = u"Test"
885 pull_request.description = u"Description"
886 pull_request.description = u"Description"
886 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
887 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
887 pull_request.pull_request_state = PullRequest.STATE_CREATED
888 pull_request.pull_request_state = PullRequest.STATE_CREATED
888 Session().add(pull_request)
889 Session().add(pull_request)
889 Session().commit()
890 Session().commit()
890
891
892 pull_request_id = pull_request.pull_request_id
893
891 vcs = repo.scm_instance()
894 vcs = repo.scm_instance()
892 vcs.remove_ref('refs/heads/{}'.format(branch_name))
895 vcs.remove_ref('refs/heads/{}'.format(branch_name))
893
896
894 response = self.app.get(route_path(
897 response = self.app.get(route_path(
895 'pullrequest_show',
898 'pullrequest_show',
896 repo_name=repo.repo_name,
899 repo_name=repo_name,
897 pull_request_id=pull_request.pull_request_id))
900 pull_request_id=pull_request_id))
898
901
899 assert response.status_int == 200
902 assert response.status_int == 200
900
903
901 response.assert_response().element_contains(
904 response.assert_response().element_contains(
902 '#changeset_compare_view_content .alert strong',
905 '#changeset_compare_view_content .alert strong',
903 'Missing commits')
906 'Missing commits')
904 response.assert_response().element_contains(
907 response.assert_response().element_contains(
905 '#changeset_compare_view_content .alert',
908 '#changeset_compare_view_content .alert',
906 'This pull request cannot be displayed, because one or more'
909 'This pull request cannot be displayed, because one or more'
907 ' commits no longer exist in the source repository.')
910 ' commits no longer exist in the source repository.')
908
911
909 def test_strip_commits_from_pull_request(
912 def test_strip_commits_from_pull_request(
910 self, backend, pr_util, csrf_token):
913 self, backend, pr_util, csrf_token):
911 commits = [
914 commits = [
912 {'message': 'initial-commit'},
915 {'message': 'initial-commit'},
913 {'message': 'old-feature'},
916 {'message': 'old-feature'},
914 {'message': 'new-feature', 'parents': ['initial-commit']},
917 {'message': 'new-feature', 'parents': ['initial-commit']},
915 ]
918 ]
916 pull_request = pr_util.create_pull_request(
919 pull_request = pr_util.create_pull_request(
917 commits, target_head='initial-commit', source_head='new-feature',
920 commits, target_head='initial-commit', source_head='new-feature',
918 revisions=['new-feature'])
921 revisions=['new-feature'])
919
922
920 vcs = pr_util.source_repository.scm_instance()
923 vcs = pr_util.source_repository.scm_instance()
921 if backend.alias == 'git':
924 if backend.alias == 'git':
922 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
925 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
923 else:
926 else:
924 vcs.strip(pr_util.commit_ids['new-feature'])
927 vcs.strip(pr_util.commit_ids['new-feature'])
925
928
926 response = self.app.get(route_path(
929 response = self.app.get(route_path(
927 'pullrequest_show',
930 'pullrequest_show',
928 repo_name=pr_util.target_repository.repo_name,
931 repo_name=pr_util.target_repository.repo_name,
929 pull_request_id=pull_request.pull_request_id))
932 pull_request_id=pull_request.pull_request_id))
930
933
931 assert response.status_int == 200
934 assert response.status_int == 200
932
935
933 response.assert_response().element_contains(
936 response.assert_response().element_contains(
934 '#changeset_compare_view_content .alert strong',
937 '#changeset_compare_view_content .alert strong',
935 'Missing commits')
938 'Missing commits')
936 response.assert_response().element_contains(
939 response.assert_response().element_contains(
937 '#changeset_compare_view_content .alert',
940 '#changeset_compare_view_content .alert',
938 'This pull request cannot be displayed, because one or more'
941 'This pull request cannot be displayed, because one or more'
939 ' commits no longer exist in the source repository.')
942 ' commits no longer exist in the source repository.')
940 response.assert_response().element_contains(
943 response.assert_response().element_contains(
941 '#update_commits',
944 '#update_commits',
942 'Update commits')
945 'Update commits')
943
946
944 def test_strip_commits_and_update(
947 def test_strip_commits_and_update(
945 self, backend, pr_util, csrf_token):
948 self, backend, pr_util, csrf_token):
946 commits = [
949 commits = [
947 {'message': 'initial-commit'},
950 {'message': 'initial-commit'},
948 {'message': 'old-feature'},
951 {'message': 'old-feature'},
949 {'message': 'new-feature', 'parents': ['old-feature']},
952 {'message': 'new-feature', 'parents': ['old-feature']},
950 ]
953 ]
951 pull_request = pr_util.create_pull_request(
954 pull_request = pr_util.create_pull_request(
952 commits, target_head='old-feature', source_head='new-feature',
955 commits, target_head='old-feature', source_head='new-feature',
953 revisions=['new-feature'], mergeable=True)
956 revisions=['new-feature'], mergeable=True)
954
957
955 vcs = pr_util.source_repository.scm_instance()
958 vcs = pr_util.source_repository.scm_instance()
956 if backend.alias == 'git':
959 if backend.alias == 'git':
957 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
960 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
958 else:
961 else:
959 vcs.strip(pr_util.commit_ids['new-feature'])
962 vcs.strip(pr_util.commit_ids['new-feature'])
960
963
961 response = self.app.post(
964 response = self.app.post(
962 route_path('pullrequest_update',
965 route_path('pullrequest_update',
963 repo_name=pull_request.target_repo.repo_name,
966 repo_name=pull_request.target_repo.repo_name,
964 pull_request_id=pull_request.pull_request_id),
967 pull_request_id=pull_request.pull_request_id),
965 params={'update_commits': 'true',
968 params={'update_commits': 'true',
966 'csrf_token': csrf_token})
969 'csrf_token': csrf_token})
967
970
968 assert response.status_int == 200
971 assert response.status_int == 200
969 assert response.body == 'true'
972 assert response.body == 'true'
970
973
971 # Make sure that after update, it won't raise 500 errors
974 # Make sure that after update, it won't raise 500 errors
972 response = self.app.get(route_path(
975 response = self.app.get(route_path(
973 'pullrequest_show',
976 'pullrequest_show',
974 repo_name=pr_util.target_repository.repo_name,
977 repo_name=pr_util.target_repository.repo_name,
975 pull_request_id=pull_request.pull_request_id))
978 pull_request_id=pull_request.pull_request_id))
976
979
977 assert response.status_int == 200
980 assert response.status_int == 200
978 response.assert_response().element_contains(
981 response.assert_response().element_contains(
979 '#changeset_compare_view_content .alert strong',
982 '#changeset_compare_view_content .alert strong',
980 'Missing commits')
983 'Missing commits')
981
984
982 def test_branch_is_a_link(self, pr_util):
985 def test_branch_is_a_link(self, pr_util):
983 pull_request = pr_util.create_pull_request()
986 pull_request = pr_util.create_pull_request()
984 pull_request.source_ref = 'branch:origin:1234567890abcdef'
987 pull_request.source_ref = 'branch:origin:1234567890abcdef'
985 pull_request.target_ref = 'branch:target:abcdef1234567890'
988 pull_request.target_ref = 'branch:target:abcdef1234567890'
986 Session().add(pull_request)
989 Session().add(pull_request)
987 Session().commit()
990 Session().commit()
988
991
989 response = self.app.get(route_path(
992 response = self.app.get(route_path(
990 'pullrequest_show',
993 'pullrequest_show',
991 repo_name=pull_request.target_repo.scm_instance().name,
994 repo_name=pull_request.target_repo.scm_instance().name,
992 pull_request_id=pull_request.pull_request_id))
995 pull_request_id=pull_request.pull_request_id))
993 assert response.status_int == 200
996 assert response.status_int == 200
994
997
995 origin = response.assert_response().get_element('.pr-origininfo .tag')
998 origin = response.assert_response().get_element('.pr-origininfo .tag')
996 origin_children = origin.getchildren()
999 origin_children = origin.getchildren()
997 assert len(origin_children) == 1
1000 assert len(origin_children) == 1
998 target = response.assert_response().get_element('.pr-targetinfo .tag')
1001 target = response.assert_response().get_element('.pr-targetinfo .tag')
999 target_children = target.getchildren()
1002 target_children = target.getchildren()
1000 assert len(target_children) == 1
1003 assert len(target_children) == 1
1001
1004
1002 expected_origin_link = route_path(
1005 expected_origin_link = route_path(
1003 'repo_commits',
1006 'repo_commits',
1004 repo_name=pull_request.source_repo.scm_instance().name,
1007 repo_name=pull_request.source_repo.scm_instance().name,
1005 params=dict(branch='origin'))
1008 params=dict(branch='origin'))
1006 expected_target_link = route_path(
1009 expected_target_link = route_path(
1007 'repo_commits',
1010 'repo_commits',
1008 repo_name=pull_request.target_repo.scm_instance().name,
1011 repo_name=pull_request.target_repo.scm_instance().name,
1009 params=dict(branch='target'))
1012 params=dict(branch='target'))
1010 assert origin_children[0].attrib['href'] == expected_origin_link
1013 assert origin_children[0].attrib['href'] == expected_origin_link
1011 assert origin_children[0].text == 'branch: origin'
1014 assert origin_children[0].text == 'branch: origin'
1012 assert target_children[0].attrib['href'] == expected_target_link
1015 assert target_children[0].attrib['href'] == expected_target_link
1013 assert target_children[0].text == 'branch: target'
1016 assert target_children[0].text == 'branch: target'
1014
1017
1015 def test_bookmark_is_not_a_link(self, pr_util):
1018 def test_bookmark_is_not_a_link(self, pr_util):
1016 pull_request = pr_util.create_pull_request()
1019 pull_request = pr_util.create_pull_request()
1017 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1020 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1018 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1021 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1019 Session().add(pull_request)
1022 Session().add(pull_request)
1020 Session().commit()
1023 Session().commit()
1021
1024
1022 response = self.app.get(route_path(
1025 response = self.app.get(route_path(
1023 'pullrequest_show',
1026 'pullrequest_show',
1024 repo_name=pull_request.target_repo.scm_instance().name,
1027 repo_name=pull_request.target_repo.scm_instance().name,
1025 pull_request_id=pull_request.pull_request_id))
1028 pull_request_id=pull_request.pull_request_id))
1026 assert response.status_int == 200
1029 assert response.status_int == 200
1027
1030
1028 origin = response.assert_response().get_element('.pr-origininfo .tag')
1031 origin = response.assert_response().get_element('.pr-origininfo .tag')
1029 assert origin.text.strip() == 'bookmark: origin'
1032 assert origin.text.strip() == 'bookmark: origin'
1030 assert origin.getchildren() == []
1033 assert origin.getchildren() == []
1031
1034
1032 target = response.assert_response().get_element('.pr-targetinfo .tag')
1035 target = response.assert_response().get_element('.pr-targetinfo .tag')
1033 assert target.text.strip() == 'bookmark: target'
1036 assert target.text.strip() == 'bookmark: target'
1034 assert target.getchildren() == []
1037 assert target.getchildren() == []
1035
1038
1036 def test_tag_is_not_a_link(self, pr_util):
1039 def test_tag_is_not_a_link(self, pr_util):
1037 pull_request = pr_util.create_pull_request()
1040 pull_request = pr_util.create_pull_request()
1038 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1041 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1039 pull_request.target_ref = 'tag:target:abcdef1234567890'
1042 pull_request.target_ref = 'tag:target:abcdef1234567890'
1040 Session().add(pull_request)
1043 Session().add(pull_request)
1041 Session().commit()
1044 Session().commit()
1042
1045
1043 response = self.app.get(route_path(
1046 response = self.app.get(route_path(
1044 'pullrequest_show',
1047 'pullrequest_show',
1045 repo_name=pull_request.target_repo.scm_instance().name,
1048 repo_name=pull_request.target_repo.scm_instance().name,
1046 pull_request_id=pull_request.pull_request_id))
1049 pull_request_id=pull_request.pull_request_id))
1047 assert response.status_int == 200
1050 assert response.status_int == 200
1048
1051
1049 origin = response.assert_response().get_element('.pr-origininfo .tag')
1052 origin = response.assert_response().get_element('.pr-origininfo .tag')
1050 assert origin.text.strip() == 'tag: origin'
1053 assert origin.text.strip() == 'tag: origin'
1051 assert origin.getchildren() == []
1054 assert origin.getchildren() == []
1052
1055
1053 target = response.assert_response().get_element('.pr-targetinfo .tag')
1056 target = response.assert_response().get_element('.pr-targetinfo .tag')
1054 assert target.text.strip() == 'tag: target'
1057 assert target.text.strip() == 'tag: target'
1055 assert target.getchildren() == []
1058 assert target.getchildren() == []
1056
1059
1057 @pytest.mark.parametrize('mergeable', [True, False])
1060 @pytest.mark.parametrize('mergeable', [True, False])
1058 def test_shadow_repository_link(
1061 def test_shadow_repository_link(
1059 self, mergeable, pr_util, http_host_only_stub):
1062 self, mergeable, pr_util, http_host_only_stub):
1060 """
1063 """
1061 Check that the pull request summary page displays a link to the shadow
1064 Check that the pull request summary page displays a link to the shadow
1062 repository if the pull request is mergeable. If it is not mergeable
1065 repository if the pull request is mergeable. If it is not mergeable
1063 the link should not be displayed.
1066 the link should not be displayed.
1064 """
1067 """
1065 pull_request = pr_util.create_pull_request(
1068 pull_request = pr_util.create_pull_request(
1066 mergeable=mergeable, enable_notifications=False)
1069 mergeable=mergeable, enable_notifications=False)
1067 target_repo = pull_request.target_repo.scm_instance()
1070 target_repo = pull_request.target_repo.scm_instance()
1068 pr_id = pull_request.pull_request_id
1071 pr_id = pull_request.pull_request_id
1069 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1072 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1070 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1073 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1071
1074
1072 response = self.app.get(route_path(
1075 response = self.app.get(route_path(
1073 'pullrequest_show',
1076 'pullrequest_show',
1074 repo_name=target_repo.name,
1077 repo_name=target_repo.name,
1075 pull_request_id=pr_id))
1078 pull_request_id=pr_id))
1076
1079
1077 if mergeable:
1080 if mergeable:
1078 response.assert_response().element_value_contains(
1081 response.assert_response().element_value_contains(
1079 'input.pr-mergeinfo', shadow_url)
1082 'input.pr-mergeinfo', shadow_url)
1080 response.assert_response().element_value_contains(
1083 response.assert_response().element_value_contains(
1081 'input.pr-mergeinfo ', 'pr-merge')
1084 'input.pr-mergeinfo ', 'pr-merge')
1082 else:
1085 else:
1083 response.assert_response().no_element_exists('.pr-mergeinfo')
1086 response.assert_response().no_element_exists('.pr-mergeinfo')
1084
1087
1085
1088
1086 @pytest.mark.usefixtures('app')
1089 @pytest.mark.usefixtures('app')
1087 @pytest.mark.backends("git", "hg")
1090 @pytest.mark.backends("git", "hg")
1088 class TestPullrequestsControllerDelete(object):
1091 class TestPullrequestsControllerDelete(object):
1089 def test_pull_request_delete_button_permissions_admin(
1092 def test_pull_request_delete_button_permissions_admin(
1090 self, autologin_user, user_admin, pr_util):
1093 self, autologin_user, user_admin, pr_util):
1091 pull_request = pr_util.create_pull_request(
1094 pull_request = pr_util.create_pull_request(
1092 author=user_admin.username, enable_notifications=False)
1095 author=user_admin.username, enable_notifications=False)
1093
1096
1094 response = self.app.get(route_path(
1097 response = self.app.get(route_path(
1095 'pullrequest_show',
1098 'pullrequest_show',
1096 repo_name=pull_request.target_repo.scm_instance().name,
1099 repo_name=pull_request.target_repo.scm_instance().name,
1097 pull_request_id=pull_request.pull_request_id))
1100 pull_request_id=pull_request.pull_request_id))
1098
1101
1099 response.mustcontain('id="delete_pullrequest"')
1102 response.mustcontain('id="delete_pullrequest"')
1100 response.mustcontain('Confirm to delete this pull request')
1103 response.mustcontain('Confirm to delete this pull request')
1101
1104
1102 def test_pull_request_delete_button_permissions_owner(
1105 def test_pull_request_delete_button_permissions_owner(
1103 self, autologin_regular_user, user_regular, pr_util):
1106 self, autologin_regular_user, user_regular, pr_util):
1104 pull_request = pr_util.create_pull_request(
1107 pull_request = pr_util.create_pull_request(
1105 author=user_regular.username, enable_notifications=False)
1108 author=user_regular.username, enable_notifications=False)
1106
1109
1107 response = self.app.get(route_path(
1110 response = self.app.get(route_path(
1108 'pullrequest_show',
1111 'pullrequest_show',
1109 repo_name=pull_request.target_repo.scm_instance().name,
1112 repo_name=pull_request.target_repo.scm_instance().name,
1110 pull_request_id=pull_request.pull_request_id))
1113 pull_request_id=pull_request.pull_request_id))
1111
1114
1112 response.mustcontain('id="delete_pullrequest"')
1115 response.mustcontain('id="delete_pullrequest"')
1113 response.mustcontain('Confirm to delete this pull request')
1116 response.mustcontain('Confirm to delete this pull request')
1114
1117
1115 def test_pull_request_delete_button_permissions_forbidden(
1118 def test_pull_request_delete_button_permissions_forbidden(
1116 self, autologin_regular_user, user_regular, user_admin, pr_util):
1119 self, autologin_regular_user, user_regular, user_admin, pr_util):
1117 pull_request = pr_util.create_pull_request(
1120 pull_request = pr_util.create_pull_request(
1118 author=user_admin.username, enable_notifications=False)
1121 author=user_admin.username, enable_notifications=False)
1119
1122
1120 response = self.app.get(route_path(
1123 response = self.app.get(route_path(
1121 'pullrequest_show',
1124 'pullrequest_show',
1122 repo_name=pull_request.target_repo.scm_instance().name,
1125 repo_name=pull_request.target_repo.scm_instance().name,
1123 pull_request_id=pull_request.pull_request_id))
1126 pull_request_id=pull_request.pull_request_id))
1124 response.mustcontain(no=['id="delete_pullrequest"'])
1127 response.mustcontain(no=['id="delete_pullrequest"'])
1125 response.mustcontain(no=['Confirm to delete this pull request'])
1128 response.mustcontain(no=['Confirm to delete this pull request'])
1126
1129
1127 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1130 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1128 self, autologin_regular_user, user_regular, user_admin, pr_util,
1131 self, autologin_regular_user, user_regular, user_admin, pr_util,
1129 user_util):
1132 user_util):
1130
1133
1131 pull_request = pr_util.create_pull_request(
1134 pull_request = pr_util.create_pull_request(
1132 author=user_admin.username, enable_notifications=False)
1135 author=user_admin.username, enable_notifications=False)
1133
1136
1134 user_util.grant_user_permission_to_repo(
1137 user_util.grant_user_permission_to_repo(
1135 pull_request.target_repo, user_regular,
1138 pull_request.target_repo, user_regular,
1136 'repository.write')
1139 'repository.write')
1137
1140
1138 response = self.app.get(route_path(
1141 response = self.app.get(route_path(
1139 'pullrequest_show',
1142 'pullrequest_show',
1140 repo_name=pull_request.target_repo.scm_instance().name,
1143 repo_name=pull_request.target_repo.scm_instance().name,
1141 pull_request_id=pull_request.pull_request_id))
1144 pull_request_id=pull_request.pull_request_id))
1142
1145
1143 response.mustcontain('id="open_edit_pullrequest"')
1146 response.mustcontain('id="open_edit_pullrequest"')
1144 response.mustcontain('id="delete_pullrequest"')
1147 response.mustcontain('id="delete_pullrequest"')
1145 response.mustcontain(no=['Confirm to delete this pull request'])
1148 response.mustcontain(no=['Confirm to delete this pull request'])
1146
1149
1147 def test_delete_comment_returns_404_if_comment_does_not_exist(
1150 def test_delete_comment_returns_404_if_comment_does_not_exist(
1148 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1151 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1149
1152
1150 pull_request = pr_util.create_pull_request(
1153 pull_request = pr_util.create_pull_request(
1151 author=user_admin.username, enable_notifications=False)
1154 author=user_admin.username, enable_notifications=False)
1152
1155
1153 self.app.post(
1156 self.app.post(
1154 route_path(
1157 route_path(
1155 'pullrequest_comment_delete',
1158 'pullrequest_comment_delete',
1156 repo_name=pull_request.target_repo.scm_instance().name,
1159 repo_name=pull_request.target_repo.scm_instance().name,
1157 pull_request_id=pull_request.pull_request_id,
1160 pull_request_id=pull_request.pull_request_id,
1158 comment_id=1024404),
1161 comment_id=1024404),
1159 extra_environ=xhr_header,
1162 extra_environ=xhr_header,
1160 params={'csrf_token': csrf_token},
1163 params={'csrf_token': csrf_token},
1161 status=404
1164 status=404
1162 )
1165 )
1163
1166
1164 def test_delete_comment(
1167 def test_delete_comment(
1165 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1168 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1166
1169
1167 pull_request = pr_util.create_pull_request(
1170 pull_request = pr_util.create_pull_request(
1168 author=user_admin.username, enable_notifications=False)
1171 author=user_admin.username, enable_notifications=False)
1169 comment = pr_util.create_comment()
1172 comment = pr_util.create_comment()
1170 comment_id = comment.comment_id
1173 comment_id = comment.comment_id
1171
1174
1172 response = self.app.post(
1175 response = self.app.post(
1173 route_path(
1176 route_path(
1174 'pullrequest_comment_delete',
1177 'pullrequest_comment_delete',
1175 repo_name=pull_request.target_repo.scm_instance().name,
1178 repo_name=pull_request.target_repo.scm_instance().name,
1176 pull_request_id=pull_request.pull_request_id,
1179 pull_request_id=pull_request.pull_request_id,
1177 comment_id=comment_id),
1180 comment_id=comment_id),
1178 extra_environ=xhr_header,
1181 extra_environ=xhr_header,
1179 params={'csrf_token': csrf_token},
1182 params={'csrf_token': csrf_token},
1180 status=200
1183 status=200
1181 )
1184 )
1182 assert response.body == 'true'
1185 assert response.body == 'true'
1183
1186
1184 @pytest.mark.parametrize('url_type', [
1187 @pytest.mark.parametrize('url_type', [
1185 'pullrequest_new',
1188 'pullrequest_new',
1186 'pullrequest_create',
1189 'pullrequest_create',
1187 'pullrequest_update',
1190 'pullrequest_update',
1188 'pullrequest_merge',
1191 'pullrequest_merge',
1189 ])
1192 ])
1190 def test_pull_request_is_forbidden_on_archived_repo(
1193 def test_pull_request_is_forbidden_on_archived_repo(
1191 self, autologin_user, backend, xhr_header, user_util, url_type):
1194 self, autologin_user, backend, xhr_header, user_util, url_type):
1192
1195
1193 # create a temporary repo
1196 # create a temporary repo
1194 source = user_util.create_repo(repo_type=backend.alias)
1197 source = user_util.create_repo(repo_type=backend.alias)
1195 repo_name = source.repo_name
1198 repo_name = source.repo_name
1196 repo = Repository.get_by_repo_name(repo_name)
1199 repo = Repository.get_by_repo_name(repo_name)
1197 repo.archived = True
1200 repo.archived = True
1198 Session().commit()
1201 Session().commit()
1199
1202
1200 response = self.app.get(
1203 response = self.app.get(
1201 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1204 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1202
1205
1203 msg = 'Action not supported for archived repository.'
1206 msg = 'Action not supported for archived repository.'
1204 assert_session_flash(response, msg)
1207 assert_session_flash(response, msg)
1205
1208
1206
1209
1207 def assert_pull_request_status(pull_request, expected_status):
1210 def assert_pull_request_status(pull_request, expected_status):
1208 status = ChangesetStatusModel().calculated_review_status(
1211 status = ChangesetStatusModel().calculated_review_status(
1209 pull_request=pull_request)
1212 pull_request=pull_request)
1210 assert status == expected_status
1213 assert status == expected_status
1211
1214
1212
1215
1213 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1216 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1214 @pytest.mark.usefixtures("autologin_user")
1217 @pytest.mark.usefixtures("autologin_user")
1215 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1218 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1216 response = app.get(
1219 response = app.get(
1217 route_path(route, repo_name=backend_svn.repo_name), status=404)
1220 route_path(route, repo_name=backend_svn.repo_name), status=404)
1218
1221
@@ -1,1902 +1,1902 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
23 import hashlib
24 import os
24 import os
25 import re
25 import re
26 import pprint
26 import pprint
27 import shutil
27 import shutil
28 import socket
28 import socket
29 import subprocess32
29 import subprocess32
30 import time
30 import time
31 import uuid
31 import uuid
32 import dateutil.tz
32 import dateutil.tz
33 import functools
33 import functools
34
34
35 import mock
35 import mock
36 import pyramid.testing
36 import pyramid.testing
37 import pytest
37 import pytest
38 import colander
38 import colander
39 import requests
39 import requests
40 import pyramid.paster
40 import pyramid.paster
41
41
42 import rhodecode
42 import rhodecode
43 from rhodecode.lib.utils2 import AttributeDict
43 from rhodecode.lib.utils2 import AttributeDict
44 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.db import (
46 from rhodecode.model.db import (
47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 from rhodecode.model.meta import Session
49 from rhodecode.model.meta import Session
50 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.pull_request import PullRequestModel
51 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo import RepoModel
52 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.repo_group import RepoGroupModel
53 from rhodecode.model.user import UserModel
53 from rhodecode.model.user import UserModel
54 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
55 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.user_group import UserGroupModel
56 from rhodecode.model.integration import IntegrationModel
56 from rhodecode.model.integration import IntegrationModel
57 from rhodecode.integrations import integration_type_registry
57 from rhodecode.integrations import integration_type_registry
58 from rhodecode.integrations.types.base import IntegrationTypeBase
58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 from rhodecode.lib.utils import repo2db_mapper
59 from rhodecode.lib.utils import repo2db_mapper
60 from rhodecode.lib.vcs import create_vcsserver_proxy
60 from rhodecode.lib.vcs import create_vcsserver_proxy
61 from rhodecode.lib.vcs.backends import get_backend
61 from rhodecode.lib.vcs.backends import get_backend
62 from rhodecode.lib.vcs.nodes import FileNode
62 from rhodecode.lib.vcs.nodes import FileNode
63 from rhodecode.tests import (
63 from rhodecode.tests import (
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 TEST_USER_REGULAR_PASS)
66 TEST_USER_REGULAR_PASS)
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 from rhodecode.tests.fixture import Fixture
68 from rhodecode.tests.fixture import Fixture
69 from rhodecode.config import utils as config_utils
69 from rhodecode.config import utils as config_utils
70
70
71 def _split_comma(value):
71 def _split_comma(value):
72 return value.split(',')
72 return value.split(',')
73
73
74
74
75 def pytest_addoption(parser):
75 def pytest_addoption(parser):
76 parser.addoption(
76 parser.addoption(
77 '--keep-tmp-path', action='store_true',
77 '--keep-tmp-path', action='store_true',
78 help="Keep the test temporary directories")
78 help="Keep the test temporary directories")
79 parser.addoption(
79 parser.addoption(
80 '--backends', action='store', type=_split_comma,
80 '--backends', action='store', type=_split_comma,
81 default=['git', 'hg', 'svn'],
81 default=['git', 'hg', 'svn'],
82 help="Select which backends to test for backend specific tests.")
82 help="Select which backends to test for backend specific tests.")
83 parser.addoption(
83 parser.addoption(
84 '--dbs', action='store', type=_split_comma,
84 '--dbs', action='store', type=_split_comma,
85 default=['sqlite'],
85 default=['sqlite'],
86 help="Select which database to test for database specific tests. "
86 help="Select which database to test for database specific tests. "
87 "Possible options are sqlite,postgres,mysql")
87 "Possible options are sqlite,postgres,mysql")
88 parser.addoption(
88 parser.addoption(
89 '--appenlight', '--ae', action='store_true',
89 '--appenlight', '--ae', action='store_true',
90 help="Track statistics in appenlight.")
90 help="Track statistics in appenlight.")
91 parser.addoption(
91 parser.addoption(
92 '--appenlight-api-key', '--ae-key',
92 '--appenlight-api-key', '--ae-key',
93 help="API key for Appenlight.")
93 help="API key for Appenlight.")
94 parser.addoption(
94 parser.addoption(
95 '--appenlight-url', '--ae-url',
95 '--appenlight-url', '--ae-url',
96 default="https://ae.rhodecode.com",
96 default="https://ae.rhodecode.com",
97 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
97 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
98 parser.addoption(
98 parser.addoption(
99 '--sqlite-connection-string', action='store',
99 '--sqlite-connection-string', action='store',
100 default='', help="Connection string for the dbs tests with SQLite")
100 default='', help="Connection string for the dbs tests with SQLite")
101 parser.addoption(
101 parser.addoption(
102 '--postgres-connection-string', action='store',
102 '--postgres-connection-string', action='store',
103 default='', help="Connection string for the dbs tests with Postgres")
103 default='', help="Connection string for the dbs tests with Postgres")
104 parser.addoption(
104 parser.addoption(
105 '--mysql-connection-string', action='store',
105 '--mysql-connection-string', action='store',
106 default='', help="Connection string for the dbs tests with MySQL")
106 default='', help="Connection string for the dbs tests with MySQL")
107 parser.addoption(
107 parser.addoption(
108 '--repeat', type=int, default=100,
108 '--repeat', type=int, default=100,
109 help="Number of repetitions in performance tests.")
109 help="Number of repetitions in performance tests.")
110
110
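# Illustrative invocations of the command line options registered above; the
# test path and the connection string are assumptions for illustration only,
# not values taken from this file:
#
#   pytest --backends=git,hg rhodecode/tests
#   pytest --dbs=postgres --postgres-connection-string=postgresql://user:pw@localhost/rc_test
#   pytest --keep-tmp-path --repeat=10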
111
111
112 def pytest_configure(config):
112 def pytest_configure(config):
113 from rhodecode.config import patches
113 from rhodecode.config import patches
114
114
115
115
116 def pytest_collection_modifyitems(session, config, items):
116 def pytest_collection_modifyitems(session, config, items):
117 # drop items marked with __test__ = False (nose's nottest), kept for the nose-to-pytest transition
117 # drop items marked with __test__ = False (nose's nottest), kept for the nose-to-pytest transition
118 remaining = [
118 remaining = [
119 i for i in items if getattr(i.obj, '__test__', True)]
119 i for i in items if getattr(i.obj, '__test__', True)]
120 items[:] = remaining
120 items[:] = remaining
121
121
122
122
123 def pytest_generate_tests(metafunc):
123 def pytest_generate_tests(metafunc):
124 # Support test generation based on --backend parameter
124 # Support test generation based on --backend parameter
125 if 'backend_alias' in metafunc.fixturenames:
125 if 'backend_alias' in metafunc.fixturenames:
126 backends = get_backends_from_metafunc(metafunc)
126 backends = get_backends_from_metafunc(metafunc)
127 scope = None
127 scope = None
128 if not backends:
128 if not backends:
129 pytest.skip("Not enabled for any of selected backends")
129 pytest.skip("Not enabled for any of selected backends")
130 metafunc.parametrize('backend_alias', backends, scope=scope)
130 metafunc.parametrize('backend_alias', backends, scope=scope)
131 elif hasattr(metafunc.function, 'backends'):
131 elif hasattr(metafunc.function, 'backends'):
132 backends = get_backends_from_metafunc(metafunc)
132 backends = get_backends_from_metafunc(metafunc)
133 if not backends:
133 if not backends:
134 pytest.skip("Not enabled for any of selected backends")
134 pytest.skip("Not enabled for any of selected backends")
135
135
136
136
137 def get_backends_from_metafunc(metafunc):
137 def get_backends_from_metafunc(metafunc):
138 requested_backends = set(metafunc.config.getoption('--backends'))
138 requested_backends = set(metafunc.config.getoption('--backends'))
139 if hasattr(metafunc.function, 'backends'):
139 if hasattr(metafunc.function, 'backends'):
140 # Supported backends by this test function, created from
140 # Supported backends by this test function, created from
141 # pytest.mark.backends
141 # pytest.mark.backends
142 backends = metafunc.definition.get_closest_marker('backends').args
142 backends = metafunc.definition.get_closest_marker('backends').args
143 elif hasattr(metafunc.cls, 'backend_alias'):
143 elif hasattr(metafunc.cls, 'backend_alias'):
144 # Support class attribute "backend_alias", this is mainly
144 # Support class attribute "backend_alias", this is mainly
145 # for legacy reasons for tests not yet using pytest.mark.backends
145 # for legacy reasons for tests not yet using pytest.mark.backends
146 backends = [metafunc.cls.backend_alias]
146 backends = [metafunc.cls.backend_alias]
147 else:
147 else:
148 backends = metafunc.config.getoption('--backends')
148 backends = metafunc.config.getoption('--backends')
149 return requested_backends.intersection(backends)
149 return requested_backends.intersection(backends)
150
150
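# A minimal sketch of how a test opts into specific backends via the
# "backends" marker consumed above; the test body below is an illustrative
# assumption, only the marker name and the backend fixture come from this code:
#
#   @pytest.mark.backends("git", "hg")
#   def test_something_backend_specific(backend):
#       assert backend.alias in ('git', 'hg')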
151
151
152 @pytest.fixture(scope='session', autouse=True)
152 @pytest.fixture(scope='session', autouse=True)
153 def activate_example_rcextensions(request):
153 def activate_example_rcextensions(request):
154 """
154 """
155 Patch in an example rcextensions module which verifies passed in kwargs.
155 Patch in an example rcextensions module which verifies passed in kwargs.
156 """
156 """
157 from rhodecode.config import rcextensions
157 from rhodecode.config import rcextensions
158
158
159 old_extensions = rhodecode.EXTENSIONS
159 old_extensions = rhodecode.EXTENSIONS
160 rhodecode.EXTENSIONS = rcextensions
160 rhodecode.EXTENSIONS = rcextensions
161 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
161 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
162
162
163 @request.addfinalizer
163 @request.addfinalizer
164 def cleanup():
164 def cleanup():
165 rhodecode.EXTENSIONS = old_extensions
165 rhodecode.EXTENSIONS = old_extensions
166
166
167
167
168 @pytest.fixture
168 @pytest.fixture
169 def capture_rcextensions():
169 def capture_rcextensions():
170 """
170 """
171 Returns the recorded calls to entry points in rcextensions.
171 Returns the recorded calls to entry points in rcextensions.
172 """
172 """
173 calls = rhodecode.EXTENSIONS.calls
173 calls = rhodecode.EXTENSIONS.calls
174 calls.clear()
174 calls.clear()
175 # Note: at this moment it is still an empty dict, but it will be
175 # Note: at this moment it is still an empty dict, but it will be
176 # filled during the test run; since it is a reference, this is
176 # filled during the test run; since it is a reference, this is
177 # enough to make it work.
177 # enough to make it work.
178 return calls
178 return calls
179
179
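# A minimal usage sketch for the fixture above; the action that fires an
# rcextensions entry point is left abstract and is an assumption:
#
#   def test_extension_hooks_recorded(capture_rcextensions):
#       # ... perform an action that triggers an rcextensions hook ...
#       assert capture_rcextensions, 'expected at least one recorded call'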
180
180
181 @pytest.fixture(scope='session')
181 @pytest.fixture(scope='session')
182 def http_environ_session():
182 def http_environ_session():
183 """
183 """
184 Allow using "http_environ" in session scope.
184 Allow using "http_environ" in session scope.
185 """
185 """
186 return plain_http_environ()
186 return plain_http_environ()
187
187
188
188
189 def plain_http_host_stub():
189 def plain_http_host_stub():
190 """
190 """
191 Value of HTTP_HOST in the test run.
191 Value of HTTP_HOST in the test run.
192 """
192 """
193 return 'example.com:80'
193 return 'example.com:80'
194
194
195
195
196 @pytest.fixture
196 @pytest.fixture
197 def http_host_stub():
197 def http_host_stub():
198 """
198 """
199 Value of HTTP_HOST in the test run.
199 Value of HTTP_HOST in the test run.
200 """
200 """
201 return plain_http_host_stub()
201 return plain_http_host_stub()
202
202
203
203
204 def plain_http_host_only_stub():
204 def plain_http_host_only_stub():
205 """
205 """
206 Host part of HTTP_HOST (without the port) in the test run.
206 Host part of HTTP_HOST (without the port) in the test run.
207 """
207 """
208 return plain_http_host_stub().split(':')[0]
208 return plain_http_host_stub().split(':')[0]
209
209
210
210
211 @pytest.fixture
211 @pytest.fixture
212 def http_host_only_stub():
212 def http_host_only_stub():
213 """
213 """
214 Host part of HTTP_HOST (without the port) in the test run.
214 Host part of HTTP_HOST (without the port) in the test run.
215 """
215 """
216 return plain_http_host_only_stub()
216 return plain_http_host_only_stub()
217
217
218
218
219 def plain_http_environ():
219 def plain_http_environ():
220 """
220 """
221 HTTP extra environ keys.
221 HTTP extra environ keys.
222
222
223 Used by the test application as well as for setting up the pylons
223 Used by the test application as well as for setting up the pylons
224 environment. In the case of the fixture "app" it should be possible
224 environment. In the case of the fixture "app" it should be possible
225 to override this for a specific test case.
225 to override this for a specific test case.
226 """
226 """
227 return {
227 return {
228 'SERVER_NAME': plain_http_host_only_stub(),
228 'SERVER_NAME': plain_http_host_only_stub(),
229 'SERVER_PORT': plain_http_host_stub().split(':')[1],
229 'SERVER_PORT': plain_http_host_stub().split(':')[1],
230 'HTTP_HOST': plain_http_host_stub(),
230 'HTTP_HOST': plain_http_host_stub(),
231 'HTTP_USER_AGENT': 'rc-test-agent',
231 'HTTP_USER_AGENT': 'rc-test-agent',
232 'REQUEST_METHOD': 'GET'
232 'REQUEST_METHOD': 'GET'
233 }
233 }
234
234
235
235
236 @pytest.fixture
236 @pytest.fixture
237 def http_environ():
237 def http_environ():
238 """
238 """
239 HTTP extra environ keys.
239 HTTP extra environ keys.
240
240
241 Used by the test application as well as for setting up the pylons
241 Used by the test application as well as for setting up the pylons
242 environment. In the case of the fixture "app" it should be possible
242 environment. In the case of the fixture "app" it should be possible
243 to override this for a specific test case.
243 to override this for a specific test case.
244 """
244 """
245 return plain_http_environ()
245 return plain_http_environ()
246
246
247
247
248 @pytest.fixture(scope='session')
248 @pytest.fixture(scope='session')
249 def baseapp(ini_config, vcsserver, http_environ_session):
249 def baseapp(ini_config, vcsserver, http_environ_session):
250 from rhodecode.lib.pyramid_utils import get_app_config
250 from rhodecode.lib.pyramid_utils import get_app_config
251 from rhodecode.config.middleware import make_pyramid_app
251 from rhodecode.config.middleware import make_pyramid_app
252
252
253 print("Using the RhodeCode configuration: {}".format(ini_config))
253 print("Using the RhodeCode configuration: {}".format(ini_config))
254 pyramid.paster.setup_logging(ini_config)
254 pyramid.paster.setup_logging(ini_config)
255
255
256 settings = get_app_config(ini_config)
256 settings = get_app_config(ini_config)
257 app = make_pyramid_app({'__file__': ini_config}, **settings)
257 app = make_pyramid_app({'__file__': ini_config}, **settings)
258
258
259 return app
259 return app
260
260
261
261
262 @pytest.fixture(scope='function')
262 @pytest.fixture(scope='function')
263 def app(request, config_stub, baseapp, http_environ):
263 def app(request, config_stub, baseapp, http_environ):
264 app = CustomTestApp(
264 app = CustomTestApp(
265 baseapp,
265 baseapp,
266 extra_environ=http_environ)
266 extra_environ=http_environ)
267 if request.cls:
267 if request.cls:
268 request.cls.app = app
268 request.cls.app = app
269 return app
269 return app
270
270
271
271
272 @pytest.fixture(scope='session')
272 @pytest.fixture(scope='session')
273 def app_settings(baseapp, ini_config):
273 def app_settings(baseapp, ini_config):
274 """
274 """
275 Settings dictionary used to create the app.
275 Settings dictionary used to create the app.
276
276
277 Parses the ini file and passes the result through the sanitize and apply
277 Parses the ini file and passes the result through the sanitize and apply
278 defaults mechanism in `rhodecode.config.middleware`.
278 defaults mechanism in `rhodecode.config.middleware`.
279 """
279 """
280 return baseapp.config.get_settings()
280 return baseapp.config.get_settings()
281
281
282
282
283 @pytest.fixture(scope='session')
283 @pytest.fixture(scope='session')
284 def db_connection(ini_settings):
284 def db_connection(ini_settings):
285 # Initialize the database connection.
285 # Initialize the database connection.
286 config_utils.initialize_database(ini_settings)
286 config_utils.initialize_database(ini_settings)
287
287
288
288
289 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
289 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
290
290
291
291
292 def _autologin_user(app, *args):
292 def _autologin_user(app, *args):
293 session = login_user_session(app, *args)
293 session = login_user_session(app, *args)
294 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
294 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
295 return LoginData(csrf_token, session['rhodecode_user'])
295 return LoginData(csrf_token, session['rhodecode_user'])
296
296
297
297
298 @pytest.fixture
298 @pytest.fixture
299 def autologin_user(app):
299 def autologin_user(app):
300 """
300 """
301 Utility fixture which makes sure that the admin user is logged in
301 Utility fixture which makes sure that the admin user is logged in
302 """
302 """
303 return _autologin_user(app)
303 return _autologin_user(app)
304
304
305
305
306 @pytest.fixture
306 @pytest.fixture
307 def autologin_regular_user(app):
307 def autologin_regular_user(app):
308 """
308 """
309 Utility fixture which makes sure that the regular user is logged in
309 Utility fixture which makes sure that the regular user is logged in
310 """
310 """
311 return _autologin_user(
311 return _autologin_user(
312 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
312 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
313
313
314
314
315 @pytest.fixture(scope='function')
315 @pytest.fixture(scope='function')
316 def csrf_token(request, autologin_user):
316 def csrf_token(request, autologin_user):
317 return autologin_user.csrf_token
317 return autologin_user.csrf_token
318
318
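# Illustrative sketch: form POSTs in functional tests typically need to send
# this token along (the URL below is only a placeholder)::
#
#     def test_form_post(app, csrf_token):
#         app.post('/some/url', params={'csrf_token': csrf_token})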
319
319
320 @pytest.fixture(scope='function')
320 @pytest.fixture(scope='function')
321 def xhr_header(request):
321 def xhr_header(request):
322 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
322 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
323
323
324
324
325 @pytest.fixture
325 @pytest.fixture
326 def real_crypto_backend(monkeypatch):
326 def real_crypto_backend(monkeypatch):
327 """
327 """
328 Switch the production crypto backend on for this test.
328 Switch the production crypto backend on for this test.
329
329
330 During the test run the crypto backend is replaced with a faster
330 During the test run the crypto backend is replaced with a faster
331 implementation based on the MD5 algorithm.
331 implementation based on the MD5 algorithm.
332 """
332 """
333 monkeypatch.setattr(rhodecode, 'is_test', False)
333 monkeypatch.setattr(rhodecode, 'is_test', False)
334
334
335
335
336 @pytest.fixture(scope='class')
336 @pytest.fixture(scope='class')
337 def index_location(request, baseapp):
337 def index_location(request, baseapp):
338 index_location = baseapp.config.get_settings()['search.location']
338 index_location = baseapp.config.get_settings()['search.location']
339 if request.cls:
339 if request.cls:
340 request.cls.index_location = index_location
340 request.cls.index_location = index_location
341 return index_location
341 return index_location
342
342
343
343
344 @pytest.fixture(scope='session', autouse=True)
344 @pytest.fixture(scope='session', autouse=True)
345 def tests_tmp_path(request):
345 def tests_tmp_path(request):
346 """
346 """
347 Create a temporary directory to be used during the test session.
347 Create a temporary directory to be used during the test session.
348 """
348 """
349 if not os.path.exists(TESTS_TMP_PATH):
349 if not os.path.exists(TESTS_TMP_PATH):
350 os.makedirs(TESTS_TMP_PATH)
350 os.makedirs(TESTS_TMP_PATH)
351
351
352 if not request.config.getoption('--keep-tmp-path'):
352 if not request.config.getoption('--keep-tmp-path'):
353 @request.addfinalizer
353 @request.addfinalizer
354 def remove_tmp_path():
354 def remove_tmp_path():
355 shutil.rmtree(TESTS_TMP_PATH)
355 shutil.rmtree(TESTS_TMP_PATH)
356
356
357 return TESTS_TMP_PATH
357 return TESTS_TMP_PATH
358
358
359
359
360 @pytest.fixture
360 @pytest.fixture
361 def test_repo_group(request):
361 def test_repo_group(request):
362 """
362 """
363 Create a temporary repository group, and destroy it automatically
363 Create a temporary repository group, and destroy it automatically
364 after usage.
364 after usage.
365 """
365 """
366 fixture = Fixture()
366 fixture = Fixture()
367 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
367 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
368 repo_group = fixture.create_repo_group(repogroupid)
368 repo_group = fixture.create_repo_group(repogroupid)
369
369
370 def _cleanup():
370 def _cleanup():
371 fixture.destroy_repo_group(repogroupid)
371 fixture.destroy_repo_group(repogroupid)
372
372
373 request.addfinalizer(_cleanup)
373 request.addfinalizer(_cleanup)
374 return repo_group
374 return repo_group
375
375
376
376
377 @pytest.fixture
377 @pytest.fixture
378 def test_user_group(request):
378 def test_user_group(request):
379 """
379 """
380 Create a temporary user group, and destroy it automatically
380 Create a temporary user group, and destroy it automatically
381 after usage.
381 after usage.
382 """
382 """
383 fixture = Fixture()
383 fixture = Fixture()
384 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
384 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
385 user_group = fixture.create_user_group(usergroupid)
385 user_group = fixture.create_user_group(usergroupid)
386
386
387 def _cleanup():
387 def _cleanup():
388 fixture.destroy_user_group(user_group)
388 fixture.destroy_user_group(user_group)
389
389
390 request.addfinalizer(_cleanup)
390 request.addfinalizer(_cleanup)
391 return user_group
391 return user_group
392
392
393
393
394 @pytest.fixture(scope='session')
394 @pytest.fixture(scope='session')
395 def test_repo(request):
395 def test_repo(request):
396 container = TestRepoContainer()
396 container = TestRepoContainer()
397 request.addfinalizer(container._cleanup)
397 request.addfinalizer(container._cleanup)
398 return container
398 return container
399
399
400
400
401 class TestRepoContainer(object):
401 class TestRepoContainer(object):
402 """
402 """
403 Container for test repositories which are used read-only.
403 Container for test repositories which are used read-only.
404
404
405 Repositories will be created on demand and re-used during the lifetime
405 Repositories will be created on demand and re-used during the lifetime
406 of this object.
406 of this object.
407
407
408 Usage to get the svn test repository "minimal"::
408 Usage to get the svn test repository "minimal"::
409
409
410 test_repo = TestRepoContainer()
410 test_repo = TestRepoContainer()
411 repo = test_repo('minimal', 'svn')
411 repo = test_repo('minimal', 'svn')
412
412
413 """
413 """
414
414
415 dump_extractors = {
415 dump_extractors = {
416 'git': utils.extract_git_repo_from_dump,
416 'git': utils.extract_git_repo_from_dump,
417 'hg': utils.extract_hg_repo_from_dump,
417 'hg': utils.extract_hg_repo_from_dump,
418 'svn': utils.extract_svn_repo_from_dump,
418 'svn': utils.extract_svn_repo_from_dump,
419 }
419 }
420
420
421 def __init__(self):
421 def __init__(self):
422 self._cleanup_repos = []
422 self._cleanup_repos = []
423 self._fixture = Fixture()
423 self._fixture = Fixture()
424 self._repos = {}
424 self._repos = {}
425
425
426 def __call__(self, dump_name, backend_alias, config=None):
426 def __call__(self, dump_name, backend_alias, config=None):
427 key = (dump_name, backend_alias)
427 key = (dump_name, backend_alias)
428 if key not in self._repos:
428 if key not in self._repos:
429 repo = self._create_repo(dump_name, backend_alias, config)
429 repo = self._create_repo(dump_name, backend_alias, config)
430 self._repos[key] = repo.repo_id
430 self._repos[key] = repo.repo_id
431 return Repository.get(self._repos[key])
431 return Repository.get(self._repos[key])
432
432
433 def _create_repo(self, dump_name, backend_alias, config):
433 def _create_repo(self, dump_name, backend_alias, config):
434 repo_name = '%s-%s' % (backend_alias, dump_name)
434 repo_name = '%s-%s' % (backend_alias, dump_name)
435 backend_class = get_backend(backend_alias)
435 backend_class = get_backend(backend_alias)
436 dump_extractor = self.dump_extractors[backend_alias]
436 dump_extractor = self.dump_extractors[backend_alias]
437 repo_path = dump_extractor(dump_name, repo_name)
437 repo_path = dump_extractor(dump_name, repo_name)
438
438
439 vcs_repo = backend_class(repo_path, config=config)
439 vcs_repo = backend_class(repo_path, config=config)
440 repo2db_mapper({repo_name: vcs_repo})
440 repo2db_mapper({repo_name: vcs_repo})
441
441
442 repo = RepoModel().get_by_repo_name(repo_name)
442 repo = RepoModel().get_by_repo_name(repo_name)
443 self._cleanup_repos.append(repo_name)
443 self._cleanup_repos.append(repo_name)
444 return repo
444 return repo
445
445
446 def _cleanup(self):
446 def _cleanup(self):
447 for repo_name in reversed(self._cleanup_repos):
447 for repo_name in reversed(self._cleanup_repos):
448 self._fixture.destroy_repo(repo_name)
448 self._fixture.destroy_repo(repo_name)
449
449
450
450
451 def backend_base(request, backend_alias, baseapp, test_repo):
451 def backend_base(request, backend_alias, baseapp, test_repo):
452 if backend_alias not in request.config.getoption('--backends'):
452 if backend_alias not in request.config.getoption('--backends'):
453 pytest.skip("Backend %s not selected." % (backend_alias, ))
453 pytest.skip("Backend %s not selected." % (backend_alias, ))
454
454
455 utils.check_xfail_backends(request.node, backend_alias)
455 utils.check_xfail_backends(request.node, backend_alias)
456 utils.check_skip_backends(request.node, backend_alias)
456 utils.check_skip_backends(request.node, backend_alias)
457
457
458 repo_name = 'vcs_test_%s' % (backend_alias, )
458 repo_name = 'vcs_test_%s' % (backend_alias, )
459 backend = Backend(
459 backend = Backend(
460 alias=backend_alias,
460 alias=backend_alias,
461 repo_name=repo_name,
461 repo_name=repo_name,
462 test_name=request.node.name,
462 test_name=request.node.name,
463 test_repo_container=test_repo)
463 test_repo_container=test_repo)
464 request.addfinalizer(backend.cleanup)
464 request.addfinalizer(backend.cleanup)
465 return backend
465 return backend
466
466
467
467
468 @pytest.fixture
468 @pytest.fixture
469 def backend(request, backend_alias, baseapp, test_repo):
469 def backend(request, backend_alias, baseapp, test_repo):
470 """
470 """
471 Parametrized fixture which represents a single backend implementation.
471 Parametrized fixture which represents a single backend implementation.
472
472
473 It respects the option `--backends` to focus the test run on specific
473 It respects the option `--backends` to focus the test run on specific
474 backend implementations.
474 backend implementations.
475
475
476 It also supports `pytest.mark.xfail_backends` to mark tests as failing
476 It also supports `pytest.mark.xfail_backends` to mark tests as failing
477 for specific backends. This is intended as a utility for incremental
477 for specific backends. This is intended as a utility for incremental
478 development of a new backend implementation.
478 development of a new backend implementation.
479 """
479 """
480 return backend_base(request, backend_alias, baseapp, test_repo)
480 return backend_base(request, backend_alias, baseapp, test_repo)
481
481
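# Illustrative sketch of consuming the parametrized `backend` fixture; the
# test runs once per backend selected via `--backends`, and the marker usage
# below (argument form assumed) relies on `pytest.mark.xfail_backends`::
#
#     @pytest.mark.xfail_backends('svn')
#     def test_repo_gets_commits(backend):
#         repo = backend.create_repo(number_of_commits=2)
#         assert repo.scm_instance().count() == 2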
482
482
483 @pytest.fixture
483 @pytest.fixture
484 def backend_git(request, baseapp, test_repo):
484 def backend_git(request, baseapp, test_repo):
485 return backend_base(request, 'git', baseapp, test_repo)
485 return backend_base(request, 'git', baseapp, test_repo)
486
486
487
487
488 @pytest.fixture
488 @pytest.fixture
489 def backend_hg(request, baseapp, test_repo):
489 def backend_hg(request, baseapp, test_repo):
490 return backend_base(request, 'hg', baseapp, test_repo)
490 return backend_base(request, 'hg', baseapp, test_repo)
491
491
492
492
493 @pytest.fixture
493 @pytest.fixture
494 def backend_svn(request, baseapp, test_repo):
494 def backend_svn(request, baseapp, test_repo):
495 return backend_base(request, 'svn', baseapp, test_repo)
495 return backend_base(request, 'svn', baseapp, test_repo)
496
496
497
497
498 @pytest.fixture
498 @pytest.fixture
499 def backend_random(backend_git):
499 def backend_random(backend_git):
500 """
500 """
501 Use this to express that your tests need "a backend".
501 Use this to express that your tests need "a backend".
502
502
503 A few of our tests need a backend, so that we can run the code. This
503 A few of our tests need a backend, so that we can run the code. This
504 fixture is intended to be used for such cases. It will pick one of the
504 fixture is intended to be used for such cases. It will pick one of the
505 backends and run the tests.
505 backends and run the tests.
506
506
507 The fixture `backend` would run the test multiple times for each
507 The fixture `backend` would run the test multiple times for each
508 available backend, which is a pure waste of time if the test is
508 available backend, which is a pure waste of time if the test is
509 independent of the backend type.
509 independent of the backend type.
510 """
510 """
511 # TODO: johbo: Change this to pick a random backend
511 # TODO: johbo: Change this to pick a random backend
512 return backend_git
512 return backend_git
513
513
514
514
515 @pytest.fixture
515 @pytest.fixture
516 def backend_stub(backend_git):
516 def backend_stub(backend_git):
517 """
517 """
518 Use this to express that your tests need a backend stub
518 Use this to express that your tests need a backend stub
519
519
520 TODO: mikhail: Implement a real stub logic instead of returning
520 TODO: mikhail: Implement a real stub logic instead of returning
521 a git backend
521 a git backend
522 """
522 """
523 return backend_git
523 return backend_git
524
524
525
525
526 @pytest.fixture
526 @pytest.fixture
527 def repo_stub(backend_stub):
527 def repo_stub(backend_stub):
528 """
528 """
529 Use this to express that your tests need a repository stub
529 Use this to express that your tests need a repository stub
530 """
530 """
531 return backend_stub.create_repo()
531 return backend_stub.create_repo()
532
532
533
533
534 class Backend(object):
534 class Backend(object):
535 """
535 """
536 Represents the test configuration for one supported backend
536 Represents the test configuration for one supported backend
537
537
538 Provides easy access to different test repositories based on
538 Provides easy access to different test repositories based on
539 `__getitem__`. Such repositories will only be created once per test
539 `__getitem__`. Such repositories will only be created once per test
540 session.
540 session.
541 """
541 """
542
542
543 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
543 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
544 _master_repo = None
544 _master_repo = None
545 _commit_ids = {}
545 _commit_ids = {}
546
546
547 def __init__(self, alias, repo_name, test_name, test_repo_container):
547 def __init__(self, alias, repo_name, test_name, test_repo_container):
548 self.alias = alias
548 self.alias = alias
549 self.repo_name = repo_name
549 self.repo_name = repo_name
550 self._cleanup_repos = []
550 self._cleanup_repos = []
551 self._test_name = test_name
551 self._test_name = test_name
552 self._test_repo_container = test_repo_container
552 self._test_repo_container = test_repo_container
553 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
553 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
554 # Fixture will survive in the end.
554 # Fixture will survive in the end.
555 self._fixture = Fixture()
555 self._fixture = Fixture()
556
556
557 def __getitem__(self, key):
557 def __getitem__(self, key):
558 return self._test_repo_container(key, self.alias)
558 return self._test_repo_container(key, self.alias)
559
559
560 def create_test_repo(self, key, config=None):
560 def create_test_repo(self, key, config=None):
561 return self._test_repo_container(key, self.alias, config)
561 return self._test_repo_container(key, self.alias, config)
562
562
563 @property
563 @property
564 def repo(self):
564 def repo(self):
565 """
565 """
566 Returns the "current" repository. This is the vcs_test repo or the
566 Returns the "current" repository. This is the vcs_test repo or the
567 last repo which has been created with `create_repo`.
567 last repo which has been created with `create_repo`.
568 """
568 """
569 from rhodecode.model.db import Repository
569 from rhodecode.model.db import Repository
570 return Repository.get_by_repo_name(self.repo_name)
570 return Repository.get_by_repo_name(self.repo_name)
571
571
572 @property
572 @property
573 def default_branch_name(self):
573 def default_branch_name(self):
574 VcsRepository = get_backend(self.alias)
574 VcsRepository = get_backend(self.alias)
575 return VcsRepository.DEFAULT_BRANCH_NAME
575 return VcsRepository.DEFAULT_BRANCH_NAME
576
576
577 @property
577 @property
578 def default_head_id(self):
578 def default_head_id(self):
579 """
579 """
580 Returns the default head id of the underlying backend.
580 Returns the default head id of the underlying backend.
581
581
582 This will be the default branch name in case the backend has a
582 This will be the default branch name in case the backend has a
583 default branch. Otherwise it will point to a valid head
583 default branch. Otherwise it will point to a valid head
584 which can serve as the base to create a new commit on top of it.
584 which can serve as the base to create a new commit on top of it.
585 """
585 """
586 vcsrepo = self.repo.scm_instance()
586 vcsrepo = self.repo.scm_instance()
587 head_id = (
587 head_id = (
588 vcsrepo.DEFAULT_BRANCH_NAME or
588 vcsrepo.DEFAULT_BRANCH_NAME or
589 vcsrepo.commit_ids[-1])
589 vcsrepo.commit_ids[-1])
590 return head_id
590 return head_id
591
591
592 @property
592 @property
593 def commit_ids(self):
593 def commit_ids(self):
594 """
594 """
595 Returns the commit ids of the last created repository as a map of commit message to raw_id
595 Returns the commit ids of the last created repository as a map of commit message to raw_id
596 """
596 """
597 return self._commit_ids
597 return self._commit_ids
598
598
599 def create_master_repo(self, commits):
599 def create_master_repo(self, commits):
600 """
600 """
601 Create a repository and remember it as a template.
601 Create a repository and remember it as a template.
602
602
603 This makes it easy to create derived repositories to construct
603 This makes it easy to create derived repositories to construct
604 more complex scenarios for diff, compare and pull requests.
604 more complex scenarios for diff, compare and pull requests.
605
605
606 Returns a commit map which maps from commit message to raw_id.
606 Returns a commit map which maps from commit message to raw_id.
607 """
607 """
608 self._master_repo = self.create_repo(commits=commits)
608 self._master_repo = self.create_repo(commits=commits)
609 return self._commit_ids
609 return self._commit_ids
610
610
611 def create_repo(
611 def create_repo(
612 self, commits=None, number_of_commits=0, heads=None,
612 self, commits=None, number_of_commits=0, heads=None,
613 name_suffix=u'', bare=False, **kwargs):
613 name_suffix=u'', bare=False, **kwargs):
614 """
614 """
615 Create a repository and record it for later cleanup.
615 Create a repository and record it for later cleanup.
616
616
617 :param commits: Optional. A sequence of dict instances.
617 :param commits: Optional. A sequence of dict instances.
618 Will add a commit per entry to the new repository.
618 Will add a commit per entry to the new repository.
619 :param number_of_commits: Optional. If set to a number, this number of
619 :param number_of_commits: Optional. If set to a number, this number of
620 commits will be added to the new repository.
620 commits will be added to the new repository.
621 :param heads: Optional. Can be set to a sequence of commit
621 :param heads: Optional. Can be set to a sequence of commit
622 names which shall be pulled in from the master repository.
622 names which shall be pulled in from the master repository.
623 :param name_suffix: adds special suffix to generated repo name
623 :param name_suffix: adds special suffix to generated repo name
624 :param bare: set a repo as bare (no checkout)
624 :param bare: set a repo as bare (no checkout)
625 """
625 """
626 self.repo_name = self._next_repo_name() + name_suffix
626 self.repo_name = self._next_repo_name() + name_suffix
627 repo = self._fixture.create_repo(
627 repo = self._fixture.create_repo(
628 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
628 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
629 self._cleanup_repos.append(repo.repo_name)
629 self._cleanup_repos.append(repo.repo_name)
630
630
631 commits = commits or [
631 commits = commits or [
632 {'message': 'Commit %s of %s' % (x, self.repo_name)}
632 {'message': 'Commit %s of %s' % (x, self.repo_name)}
633 for x in range(number_of_commits)]
633 for x in range(number_of_commits)]
634 vcs_repo = repo.scm_instance()
634 vcs_repo = repo.scm_instance()
635 vcs_repo.count()
635 vcs_repo.count()
636 self._add_commits_to_repo(vcs_repo, commits)
636 self._add_commits_to_repo(vcs_repo, commits)
637 if heads:
637 if heads:
638 self.pull_heads(repo, heads)
638 self.pull_heads(repo, heads)
639
639
640 return repo
640 return repo
641
641
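# Illustrative sketch of the `commits` structure accepted by `create_repo`
# above; the keys mirror what `_add_commits_to_repo` at module level
# understands ('message', 'added', 'changed', 'removed', 'parents', ...)::
#
#     backend.create_repo(commits=[
#         {'message': 'Add a', 'added': [FileNode('a.txt', content='a\n')]},
#         {'message': 'Change a', 'changed': [FileNode('a.txt', content='b\n')]},
#     ])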
642 def pull_heads(self, repo, heads):
642 def pull_heads(self, repo, heads):
643 """
643 """
644 Make sure that repo contains all commits mentioned in `heads`
644 Make sure that repo contains all commits mentioned in `heads`
645 """
645 """
646 vcsmaster = self._master_repo.scm_instance()
646 vcsmaster = self._master_repo.scm_instance()
647 vcsrepo = repo.scm_instance()
647 vcsrepo = repo.scm_instance()
648 vcsrepo.config.clear_section('hooks')
648 vcsrepo.config.clear_section('hooks')
649 commit_ids = [self._commit_ids[h] for h in heads]
649 commit_ids = [self._commit_ids[h] for h in heads]
650 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
650 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
651
651
652 def create_fork(self):
652 def create_fork(self):
653 repo_to_fork = self.repo_name
653 repo_to_fork = self.repo_name
654 self.repo_name = self._next_repo_name()
654 self.repo_name = self._next_repo_name()
655 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
655 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
656 self._cleanup_repos.append(self.repo_name)
656 self._cleanup_repos.append(self.repo_name)
657 return repo
657 return repo
658
658
659 def new_repo_name(self, suffix=u''):
659 def new_repo_name(self, suffix=u''):
660 self.repo_name = self._next_repo_name() + suffix
660 self.repo_name = self._next_repo_name() + suffix
661 self._cleanup_repos.append(self.repo_name)
661 self._cleanup_repos.append(self.repo_name)
662 return self.repo_name
662 return self.repo_name
663
663
664 def _next_repo_name(self):
664 def _next_repo_name(self):
665 return u"%s_%s" % (
665 return u"%s_%s" % (
666 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
666 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
667
667
668 def ensure_file(self, filename, content='Test content\n'):
668 def ensure_file(self, filename, content='Test content\n'):
669 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
669 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
670 commits = [
670 commits = [
671 {'added': [
671 {'added': [
672 FileNode(filename, content=content),
672 FileNode(filename, content=content),
673 ]},
673 ]},
674 ]
674 ]
675 self._add_commits_to_repo(self.repo.scm_instance(), commits)
675 self._add_commits_to_repo(self.repo.scm_instance(), commits)
676
676
677 def enable_downloads(self):
677 def enable_downloads(self):
678 repo = self.repo
678 repo = self.repo
679 repo.enable_downloads = True
679 repo.enable_downloads = True
680 Session().add(repo)
680 Session().add(repo)
681 Session().commit()
681 Session().commit()
682
682
683 def cleanup(self):
683 def cleanup(self):
684 for repo_name in reversed(self._cleanup_repos):
684 for repo_name in reversed(self._cleanup_repos):
685 self._fixture.destroy_repo(repo_name)
685 self._fixture.destroy_repo(repo_name)
686
686
687 def _add_commits_to_repo(self, repo, commits):
687 def _add_commits_to_repo(self, repo, commits):
688 commit_ids = _add_commits_to_repo(repo, commits)
688 commit_ids = _add_commits_to_repo(repo, commits)
689 if not commit_ids:
689 if not commit_ids:
690 return
690 return
691 self._commit_ids = commit_ids
691 self._commit_ids = commit_ids
692
692
693 # Creating refs for Git to allow fetching them from remote repository
693 # Creating refs for Git to allow fetching them from remote repository
694 if self.alias == 'git':
694 if self.alias == 'git':
695 refs = {}
695 refs = {}
696 for message in self._commit_ids:
696 for message in self._commit_ids:
697 # TODO: mikhail: do more special chars replacements
697 # TODO: mikhail: do more special chars replacements
698 ref_name = 'refs/test-refs/{}'.format(
698 ref_name = 'refs/test-refs/{}'.format(
699 message.replace(' ', ''))
699 message.replace(' ', ''))
700 refs[ref_name] = self._commit_ids[message]
700 refs[ref_name] = self._commit_ids[message]
701 self._create_refs(repo, refs)
701 self._create_refs(repo, refs)
702
702
703 def _create_refs(self, repo, refs):
703 def _create_refs(self, repo, refs):
704 for ref_name in refs:
704 for ref_name in refs:
705 repo.set_refs(ref_name, refs[ref_name])
705 repo.set_refs(ref_name, refs[ref_name])
706
706
707
707
708 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
708 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
709 if backend_alias not in request.config.getoption('--backends'):
709 if backend_alias not in request.config.getoption('--backends'):
710 pytest.skip("Backend %s not selected." % (backend_alias, ))
710 pytest.skip("Backend %s not selected." % (backend_alias, ))
711
711
712 utils.check_xfail_backends(request.node, backend_alias)
712 utils.check_xfail_backends(request.node, backend_alias)
713 utils.check_skip_backends(request.node, backend_alias)
713 utils.check_skip_backends(request.node, backend_alias)
714
714
715 repo_name = 'vcs_test_%s' % (backend_alias, )
715 repo_name = 'vcs_test_%s' % (backend_alias, )
716 repo_path = os.path.join(tests_tmp_path, repo_name)
716 repo_path = os.path.join(tests_tmp_path, repo_name)
717 backend = VcsBackend(
717 backend = VcsBackend(
718 alias=backend_alias,
718 alias=backend_alias,
719 repo_path=repo_path,
719 repo_path=repo_path,
720 test_name=request.node.name,
720 test_name=request.node.name,
721 test_repo_container=test_repo)
721 test_repo_container=test_repo)
722 request.addfinalizer(backend.cleanup)
722 request.addfinalizer(backend.cleanup)
723 return backend
723 return backend
724
724
725
725
726 @pytest.fixture
726 @pytest.fixture
727 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
727 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
728 """
728 """
729 Parametrized fixture which represents a single vcs backend implementation.
729 Parametrized fixture which represents a single vcs backend implementation.
730
730
731 See the fixture `backend` for more details. This one implements the same
731 See the fixture `backend` for more details. This one implements the same
732 concept, but on the vcs level, so it does not provide model instances etc.
732 concept, but on the vcs level, so it does not provide model instances etc.
733
733
734 Parameters are generated dynamically, see :func:`pytest_generate_tests`
734 Parameters are generated dynamically, see :func:`pytest_generate_tests`
735 for how this works.
735 for how this works.
736 """
736 """
737 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
737 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
738
738
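# Illustrative sketch for the vcs-level fixture; it hands out plain vcs
# repository objects instead of model instances::
#
#     def test_adds_a_file(vcsbackend):
#         repo = vcsbackend.create_repo(number_of_commits=1)
#         vcsbackend.add_file(repo, 'docs/readme.txt', 'content\n')
#         # the repository now contains an extra commit adding that file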
739
739
740 @pytest.fixture
740 @pytest.fixture
741 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
741 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
742 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
742 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
743
743
744
744
745 @pytest.fixture
745 @pytest.fixture
746 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
746 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
747 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
747 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
748
748
749
749
750 @pytest.fixture
750 @pytest.fixture
751 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
751 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
752 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
752 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
753
753
754
754
755 @pytest.fixture
755 @pytest.fixture
756 def vcsbackend_stub(vcsbackend_git):
756 def vcsbackend_stub(vcsbackend_git):
757 """
757 """
758 Use this to express that your test just needs a stub of a vcsbackend.
758 Use this to express that your test just needs a stub of a vcsbackend.
759
759
760 Plan is to eventually implement an in-memory stub to speed tests up.
760 Plan is to eventually implement an in-memory stub to speed tests up.
761 """
761 """
762 return vcsbackend_git
762 return vcsbackend_git
763
763
764
764
765 class VcsBackend(object):
765 class VcsBackend(object):
766 """
766 """
767 Represents the test configuration for one supported vcs backend.
767 Represents the test configuration for one supported vcs backend.
768 """
768 """
769
769
770 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
770 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
771
771
772 def __init__(self, alias, repo_path, test_name, test_repo_container):
772 def __init__(self, alias, repo_path, test_name, test_repo_container):
773 self.alias = alias
773 self.alias = alias
774 self._repo_path = repo_path
774 self._repo_path = repo_path
775 self._cleanup_repos = []
775 self._cleanup_repos = []
776 self._test_name = test_name
776 self._test_name = test_name
777 self._test_repo_container = test_repo_container
777 self._test_repo_container = test_repo_container
778
778
779 def __getitem__(self, key):
779 def __getitem__(self, key):
780 return self._test_repo_container(key, self.alias).scm_instance()
780 return self._test_repo_container(key, self.alias).scm_instance()
781
781
782 @property
782 @property
783 def repo(self):
783 def repo(self):
784 """
784 """
785 Returns the "current" repository. This is the vcs_test repo or the last
785 Returns the "current" repository. This is the vcs_test repo or the last
786 repo which has been created.
786 repo which has been created.
787 """
787 """
788 Repository = get_backend(self.alias)
788 Repository = get_backend(self.alias)
789 return Repository(self._repo_path)
789 return Repository(self._repo_path)
790
790
791 @property
791 @property
792 def backend(self):
792 def backend(self):
793 """
793 """
794 Returns the backend implementation class.
794 Returns the backend implementation class.
795 """
795 """
796 return get_backend(self.alias)
796 return get_backend(self.alias)
797
797
798 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
798 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
799 bare=False):
799 bare=False):
800 repo_name = self._next_repo_name()
800 repo_name = self._next_repo_name()
801 self._repo_path = get_new_dir(repo_name)
801 self._repo_path = get_new_dir(repo_name)
802 repo_class = get_backend(self.alias)
802 repo_class = get_backend(self.alias)
803 src_url = None
803 src_url = None
804 if _clone_repo:
804 if _clone_repo:
805 src_url = _clone_repo.path
805 src_url = _clone_repo.path
806 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
806 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
807 self._cleanup_repos.append(repo)
807 self._cleanup_repos.append(repo)
808
808
809 commits = commits or [
809 commits = commits or [
810 {'message': 'Commit %s of %s' % (x, repo_name)}
810 {'message': 'Commit %s of %s' % (x, repo_name)}
811 for x in xrange(number_of_commits)]
811 for x in xrange(number_of_commits)]
812 _add_commits_to_repo(repo, commits)
812 _add_commits_to_repo(repo, commits)
813 return repo
813 return repo
814
814
815 def clone_repo(self, repo):
815 def clone_repo(self, repo):
816 return self.create_repo(_clone_repo=repo)
816 return self.create_repo(_clone_repo=repo)
817
817
818 def cleanup(self):
818 def cleanup(self):
819 for repo in self._cleanup_repos:
819 for repo in self._cleanup_repos:
820 shutil.rmtree(repo.path)
820 shutil.rmtree(repo.path)
821
821
822 def new_repo_path(self):
822 def new_repo_path(self):
823 repo_name = self._next_repo_name()
823 repo_name = self._next_repo_name()
824 self._repo_path = get_new_dir(repo_name)
824 self._repo_path = get_new_dir(repo_name)
825 return self._repo_path
825 return self._repo_path
826
826
827 def _next_repo_name(self):
827 def _next_repo_name(self):
828 return "%s_%s" % (
828 return "%s_%s" % (
829 self.invalid_repo_name.sub('_', self._test_name),
829 self.invalid_repo_name.sub('_', self._test_name),
830 len(self._cleanup_repos))
830 len(self._cleanup_repos))
831
831
832 def add_file(self, repo, filename, content='Test content\n'):
832 def add_file(self, repo, filename, content='Test content\n'):
833 imc = repo.in_memory_commit
833 imc = repo.in_memory_commit
834 imc.add(FileNode(filename, content=content))
834 imc.add(FileNode(filename, content=content))
835 imc.commit(
835 imc.commit(
836 message=u'Automatic commit from vcsbackend fixture',
836 message=u'Automatic commit from vcsbackend fixture',
837 author=u'Automatic')
837 author=u'Automatic <automatic@rhodecode.com>')
838
838
839 def ensure_file(self, filename, content='Test content\n'):
839 def ensure_file(self, filename, content='Test content\n'):
840 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
840 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
841 self.add_file(self.repo, filename, content)
841 self.add_file(self.repo, filename, content)
842
842
843
843
844 def _add_commits_to_repo(vcs_repo, commits):
844 def _add_commits_to_repo(vcs_repo, commits):
845 commit_ids = {}
845 commit_ids = {}
846 if not commits:
846 if not commits:
847 return commit_ids
847 return commit_ids
848
848
849 imc = vcs_repo.in_memory_commit
849 imc = vcs_repo.in_memory_commit
850 commit = None
850 commit = None
851
851
852 for idx, commit in enumerate(commits):
852 for idx, commit in enumerate(commits):
853 message = unicode(commit.get('message', 'Commit %s' % idx))
853 message = unicode(commit.get('message', 'Commit %s' % idx))
854
854
855 for node in commit.get('added', []):
855 for node in commit.get('added', []):
856 imc.add(FileNode(node.path, content=node.content))
856 imc.add(FileNode(node.path, content=node.content))
857 for node in commit.get('changed', []):
857 for node in commit.get('changed', []):
858 imc.change(FileNode(node.path, content=node.content))
858 imc.change(FileNode(node.path, content=node.content))
859 for node in commit.get('removed', []):
859 for node in commit.get('removed', []):
860 imc.remove(FileNode(node.path))
860 imc.remove(FileNode(node.path))
861
861
862 parents = [
862 parents = [
863 vcs_repo.get_commit(commit_id=commit_ids[p])
863 vcs_repo.get_commit(commit_id=commit_ids[p])
864 for p in commit.get('parents', [])]
864 for p in commit.get('parents', [])]
865
865
866 operations = ('added', 'changed', 'removed')
866 operations = ('added', 'changed', 'removed')
867 if not any((commit.get(o) for o in operations)):
867 if not any((commit.get(o) for o in operations)):
868 imc.add(FileNode('file_%s' % idx, content=message))
868 imc.add(FileNode('file_%s' % idx, content=message))
869
869
870 commit = imc.commit(
870 commit = imc.commit(
871 message=message,
871 message=message,
872 author=unicode(commit.get('author', 'Automatic')),
872 author=unicode(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
873 date=commit.get('date'),
873 date=commit.get('date'),
874 branch=commit.get('branch'),
874 branch=commit.get('branch'),
875 parents=parents)
875 parents=parents)
876
876
877 commit_ids[commit.message] = commit.raw_id
877 commit_ids[commit.message] = commit.raw_id
878
878
879 return commit_ids
879 return commit_ids
880
880
881
881
882 @pytest.fixture
882 @pytest.fixture
883 def reposerver(request):
883 def reposerver(request):
884 """
884 """
885 Allows serving a backend repository
885 Allows serving a backend repository
886 """
886 """
887
887
888 repo_server = RepoServer()
888 repo_server = RepoServer()
889 request.addfinalizer(repo_server.cleanup)
889 request.addfinalizer(repo_server.cleanup)
890 return repo_server
890 return repo_server
891
891
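# Illustrative sketch: serving a Subversion test repository via svnserve;
# only 'svn' repositories are supported by RepoServer below::
#
#     def test_serves_svn_repo(reposerver, vcsbackend_svn):
#         repo = vcsbackend_svn.create_repo(number_of_commits=1)
#         reposerver.serve(repo)
#         # reposerver.url now points at 'svn://localhost'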
892
892
893 class RepoServer(object):
893 class RepoServer(object):
894 """
894 """
895 Utility to serve a local repository for the duration of a test case.
895 Utility to serve a local repository for the duration of a test case.
896
896
897 Supports only Subversion so far.
897 Supports only Subversion so far.
898 """
898 """
899
899
900 url = None
900 url = None
901
901
902 def __init__(self):
902 def __init__(self):
903 self._cleanup_servers = []
903 self._cleanup_servers = []
904
904
905 def serve(self, vcsrepo):
905 def serve(self, vcsrepo):
906 if vcsrepo.alias != 'svn':
906 if vcsrepo.alias != 'svn':
907 raise TypeError("Backend %s not supported" % vcsrepo.alias)
907 raise TypeError("Backend %s not supported" % vcsrepo.alias)
908
908
909 proc = subprocess32.Popen(
909 proc = subprocess32.Popen(
910 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
910 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
911 '--root', vcsrepo.path])
911 '--root', vcsrepo.path])
912 self._cleanup_servers.append(proc)
912 self._cleanup_servers.append(proc)
913 self.url = 'svn://localhost'
913 self.url = 'svn://localhost'
914
914
915 def cleanup(self):
915 def cleanup(self):
916 for proc in self._cleanup_servers:
916 for proc in self._cleanup_servers:
917 proc.terminate()
917 proc.terminate()
918
918
919
919
920 @pytest.fixture
920 @pytest.fixture
921 def pr_util(backend, request, config_stub):
921 def pr_util(backend, request, config_stub):
922 """
922 """
923 Utility for tests of models and for functional tests around pull requests.
923 Utility for tests of models and for functional tests around pull requests.
924
924
925 It gives an instance of :class:`PRTestUtility` which provides various
925 It gives an instance of :class:`PRTestUtility` which provides various
926 utility methods around one pull request.
926 utility methods around one pull request.
927
927
928 This fixture uses `backend` and inherits its parameterization.
928 This fixture uses `backend` and inherits its parameterization.
929 """
929 """
930
930
931 util = PRTestUtility(backend)
931 util = PRTestUtility(backend)
932 request.addfinalizer(util.cleanup)
932 request.addfinalizer(util.cleanup)
933
933
934 return util
934 return util
935
935
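# Illustrative sketch: with no arguments the utility builds the default
# three-commit master repo (c1..c3) and a pull request from c2 onto c1::
#
#     def test_pull_request_defaults(pr_util):
#         pull_request = pr_util.create_pull_request(mergeable=True)
#         assert pull_request.revisions == [pr_util.commit_ids['c2']]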
936
936
937 class PRTestUtility(object):
937 class PRTestUtility(object):
938
938
939 pull_request = None
939 pull_request = None
940 pull_request_id = None
940 pull_request_id = None
941 mergeable_patcher = None
941 mergeable_patcher = None
942 mergeable_mock = None
942 mergeable_mock = None
943 notification_patcher = None
943 notification_patcher = None
944
944
945 def __init__(self, backend):
945 def __init__(self, backend):
946 self.backend = backend
946 self.backend = backend
947
947
948 def create_pull_request(
948 def create_pull_request(
949 self, commits=None, target_head=None, source_head=None,
949 self, commits=None, target_head=None, source_head=None,
950 revisions=None, approved=False, author=None, mergeable=False,
950 revisions=None, approved=False, author=None, mergeable=False,
951 enable_notifications=True, name_suffix=u'', reviewers=None,
951 enable_notifications=True, name_suffix=u'', reviewers=None,
952 title=u"Test", description=u"Description"):
952 title=u"Test", description=u"Description"):
953 self.set_mergeable(mergeable)
953 self.set_mergeable(mergeable)
954 if not enable_notifications:
954 if not enable_notifications:
955 # mock notification side effect
955 # mock notification side effect
956 self.notification_patcher = mock.patch(
956 self.notification_patcher = mock.patch(
957 'rhodecode.model.notification.NotificationModel.create')
957 'rhodecode.model.notification.NotificationModel.create')
958 self.notification_patcher.start()
958 self.notification_patcher.start()
959
959
960 if not self.pull_request:
960 if not self.pull_request:
961 if not commits:
961 if not commits:
962 commits = [
962 commits = [
963 {'message': 'c1'},
963 {'message': 'c1'},
964 {'message': 'c2'},
964 {'message': 'c2'},
965 {'message': 'c3'},
965 {'message': 'c3'},
966 ]
966 ]
967 target_head = 'c1'
967 target_head = 'c1'
968 source_head = 'c2'
968 source_head = 'c2'
969 revisions = ['c2']
969 revisions = ['c2']
970
970
971 self.commit_ids = self.backend.create_master_repo(commits)
971 self.commit_ids = self.backend.create_master_repo(commits)
972 self.target_repository = self.backend.create_repo(
972 self.target_repository = self.backend.create_repo(
973 heads=[target_head], name_suffix=name_suffix)
973 heads=[target_head], name_suffix=name_suffix)
974 self.source_repository = self.backend.create_repo(
974 self.source_repository = self.backend.create_repo(
975 heads=[source_head], name_suffix=name_suffix)
975 heads=[source_head], name_suffix=name_suffix)
976 self.author = author or UserModel().get_by_username(
976 self.author = author or UserModel().get_by_username(
977 TEST_USER_ADMIN_LOGIN)
977 TEST_USER_ADMIN_LOGIN)
978
978
979 model = PullRequestModel()
979 model = PullRequestModel()
980 self.create_parameters = {
980 self.create_parameters = {
981 'created_by': self.author,
981 'created_by': self.author,
982 'source_repo': self.source_repository.repo_name,
982 'source_repo': self.source_repository.repo_name,
983 'source_ref': self._default_branch_reference(source_head),
983 'source_ref': self._default_branch_reference(source_head),
984 'target_repo': self.target_repository.repo_name,
984 'target_repo': self.target_repository.repo_name,
985 'target_ref': self._default_branch_reference(target_head),
985 'target_ref': self._default_branch_reference(target_head),
986 'revisions': [self.commit_ids[r] for r in revisions],
986 'revisions': [self.commit_ids[r] for r in revisions],
987 'reviewers': reviewers or self._get_reviewers(),
987 'reviewers': reviewers or self._get_reviewers(),
988 'title': title,
988 'title': title,
989 'description': description,
989 'description': description,
990 }
990 }
991 self.pull_request = model.create(**self.create_parameters)
991 self.pull_request = model.create(**self.create_parameters)
992 assert model.get_versions(self.pull_request) == []
992 assert model.get_versions(self.pull_request) == []
993
993
994 self.pull_request_id = self.pull_request.pull_request_id
994 self.pull_request_id = self.pull_request.pull_request_id
995
995
996 if approved:
996 if approved:
997 self.approve()
997 self.approve()
998
998
999 Session().add(self.pull_request)
999 Session().add(self.pull_request)
1000 Session().commit()
1000 Session().commit()
1001
1001
1002 return self.pull_request
1002 return self.pull_request
1003
1003
1004 def approve(self):
1004 def approve(self):
1005 self.create_status_votes(
1005 self.create_status_votes(
1006 ChangesetStatus.STATUS_APPROVED,
1006 ChangesetStatus.STATUS_APPROVED,
1007 *self.pull_request.reviewers)
1007 *self.pull_request.reviewers)
1008
1008
1009 def close(self):
1009 def close(self):
1010 PullRequestModel().close_pull_request(self.pull_request, self.author)
1010 PullRequestModel().close_pull_request(self.pull_request, self.author)
1011
1011
1012 def _default_branch_reference(self, commit_message):
1012 def _default_branch_reference(self, commit_message):
1013 reference = '%s:%s:%s' % (
1013 reference = '%s:%s:%s' % (
1014 'branch',
1014 'branch',
1015 self.backend.default_branch_name,
1015 self.backend.default_branch_name,
1016 self.commit_ids[commit_message])
1016 self.commit_ids[commit_message])
1017 return reference
1017 return reference
1018
1018
1019 def _get_reviewers(self):
1019 def _get_reviewers(self):
1020 return [
1020 return [
1021 (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
1021 (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
1022 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
1022 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
1023 ]
1023 ]
1024
1024
1025 def update_source_repository(self, head=None):
1025 def update_source_repository(self, head=None):
1026 heads = [head or 'c3']
1026 heads = [head or 'c3']
1027 self.backend.pull_heads(self.source_repository, heads=heads)
1027 self.backend.pull_heads(self.source_repository, heads=heads)
1028
1028
1029 def add_one_commit(self, head=None):
1029 def add_one_commit(self, head=None):
1030 self.update_source_repository(head=head)
1030 self.update_source_repository(head=head)
1031 old_commit_ids = set(self.pull_request.revisions)
1031 old_commit_ids = set(self.pull_request.revisions)
1032 PullRequestModel().update_commits(self.pull_request)
1032 PullRequestModel().update_commits(self.pull_request)
1033 commit_ids = set(self.pull_request.revisions)
1033 commit_ids = set(self.pull_request.revisions)
1034 new_commit_ids = commit_ids - old_commit_ids
1034 new_commit_ids = commit_ids - old_commit_ids
1035 assert len(new_commit_ids) == 1
1035 assert len(new_commit_ids) == 1
1036 return new_commit_ids.pop()
1036 return new_commit_ids.pop()
1037
1037
1038 def remove_one_commit(self):
1038 def remove_one_commit(self):
1039 assert len(self.pull_request.revisions) == 2
1039 assert len(self.pull_request.revisions) == 2
1040 source_vcs = self.source_repository.scm_instance()
1040 source_vcs = self.source_repository.scm_instance()
1041 removed_commit_id = source_vcs.commit_ids[-1]
1041 removed_commit_id = source_vcs.commit_ids[-1]
1042
1042
1043 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1043 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1044 # remove the if once that's sorted out.
1044 # remove the if once that's sorted out.
1045 if self.backend.alias == "git":
1045 if self.backend.alias == "git":
1046 kwargs = {'branch_name': self.backend.default_branch_name}
1046 kwargs = {'branch_name': self.backend.default_branch_name}
1047 else:
1047 else:
1048 kwargs = {}
1048 kwargs = {}
1049 source_vcs.strip(removed_commit_id, **kwargs)
1049 source_vcs.strip(removed_commit_id, **kwargs)
1050
1050
1051 PullRequestModel().update_commits(self.pull_request)
1051 PullRequestModel().update_commits(self.pull_request)
1052 assert len(self.pull_request.revisions) == 1
1052 assert len(self.pull_request.revisions) == 1
1053 return removed_commit_id
1053 return removed_commit_id
1054
1054
1055 def create_comment(self, linked_to=None):
1055 def create_comment(self, linked_to=None):
1056 comment = CommentsModel().create(
1056 comment = CommentsModel().create(
1057 text=u"Test comment",
1057 text=u"Test comment",
1058 repo=self.target_repository.repo_name,
1058 repo=self.target_repository.repo_name,
1059 user=self.author,
1059 user=self.author,
1060 pull_request=self.pull_request)
1060 pull_request=self.pull_request)
1061 assert comment.pull_request_version_id is None
1061 assert comment.pull_request_version_id is None
1062
1062
1063 if linked_to:
1063 if linked_to:
1064 PullRequestModel()._link_comments_to_version(linked_to)
1064 PullRequestModel()._link_comments_to_version(linked_to)
1065
1065
1066 return comment
1066 return comment
1067
1067
1068 def create_inline_comment(
1068 def create_inline_comment(
1069 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1069 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1070 comment = CommentsModel().create(
1070 comment = CommentsModel().create(
1071 text=u"Test comment",
1071 text=u"Test comment",
1072 repo=self.target_repository.repo_name,
1072 repo=self.target_repository.repo_name,
1073 user=self.author,
1073 user=self.author,
1074 line_no=line_no,
1074 line_no=line_no,
1075 f_path=file_path,
1075 f_path=file_path,
1076 pull_request=self.pull_request)
1076 pull_request=self.pull_request)
1077 assert comment.pull_request_version_id is None
1077 assert comment.pull_request_version_id is None
1078
1078
1079 if linked_to:
1079 if linked_to:
1080 PullRequestModel()._link_comments_to_version(linked_to)
1080 PullRequestModel()._link_comments_to_version(linked_to)
1081
1081
1082 return comment
1082 return comment
1083
1083
1084 def create_version_of_pull_request(self):
1084 def create_version_of_pull_request(self):
1085 pull_request = self.create_pull_request()
1085 pull_request = self.create_pull_request()
1086 version = PullRequestModel()._create_version_from_snapshot(
1086 version = PullRequestModel()._create_version_from_snapshot(
1087 pull_request)
1087 pull_request)
1088 return version
1088 return version
1089
1089
1090 def create_status_votes(self, status, *reviewers):
1090 def create_status_votes(self, status, *reviewers):
1091 for reviewer in reviewers:
1091 for reviewer in reviewers:
1092 ChangesetStatusModel().set_status(
1092 ChangesetStatusModel().set_status(
1093 repo=self.pull_request.target_repo,
1093 repo=self.pull_request.target_repo,
1094 status=status,
1094 status=status,
1095 user=reviewer.user_id,
1095 user=reviewer.user_id,
1096 pull_request=self.pull_request)
1096 pull_request=self.pull_request)
1097
1097
1098 def set_mergeable(self, value):
1098 def set_mergeable(self, value):
1099 if not self.mergeable_patcher:
1099 if not self.mergeable_patcher:
1100 self.mergeable_patcher = mock.patch.object(
1100 self.mergeable_patcher = mock.patch.object(
1101 VcsSettingsModel, 'get_general_settings')
1101 VcsSettingsModel, 'get_general_settings')
1102 self.mergeable_mock = self.mergeable_patcher.start()
1102 self.mergeable_mock = self.mergeable_patcher.start()
1103 self.mergeable_mock.return_value = {
1103 self.mergeable_mock.return_value = {
1104 'rhodecode_pr_merge_enabled': value}
1104 'rhodecode_pr_merge_enabled': value}
1105
1105
1106 def cleanup(self):
1106 def cleanup(self):
1107 # In case the source repository is already cleaned up, the pull
1107 # In case the source repository is already cleaned up, the pull
1108 # request will already be deleted.
1108 # request will already be deleted.
1109 pull_request = PullRequest().get(self.pull_request_id)
1109 pull_request = PullRequest().get(self.pull_request_id)
1110 if pull_request:
1110 if pull_request:
1111 PullRequestModel().delete(pull_request, pull_request.author)
1111 PullRequestModel().delete(pull_request, pull_request.author)
1112 Session().commit()
1112 Session().commit()
1113
1113
1114 if self.notification_patcher:
1114 if self.notification_patcher:
1115 self.notification_patcher.stop()
1115 self.notification_patcher.stop()
1116
1116
1117 if self.mergeable_patcher:
1117 if self.mergeable_patcher:
1118 self.mergeable_patcher.stop()
1118 self.mergeable_patcher.stop()
1119
1119
1120
1120
1121 @pytest.fixture
1121 @pytest.fixture
1122 def user_admin(baseapp):
1122 def user_admin(baseapp):
1123 """
1123 """
1124 Provides the default admin test user as an instance of `db.User`.
1124 Provides the default admin test user as an instance of `db.User`.
1125 """
1125 """
1126 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1126 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1127 return user
1127 return user
1128
1128
1129
1129
1130 @pytest.fixture
1130 @pytest.fixture
1131 def user_regular(baseapp):
1131 def user_regular(baseapp):
1132 """
1132 """
1133 Provides the default regular test user as an instance of `db.User`.
1133 Provides the default regular test user as an instance of `db.User`.
1134 """
1134 """
1135 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1135 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1136 return user
1136 return user
1137
1137
1138
1138
1139 @pytest.fixture
1139 @pytest.fixture
1140 def user_util(request, db_connection):
1140 def user_util(request, db_connection):
1141 """
1141 """
1142 Provides a wired instance of `UserUtility` with integrated cleanup.
1142 Provides a wired instance of `UserUtility` with integrated cleanup.
1143 """
1143 """
1144 utility = UserUtility(test_name=request.node.name)
1144 utility = UserUtility(test_name=request.node.name)
1145 request.addfinalizer(utility.cleanup)
1145 request.addfinalizer(utility.cleanup)
1146 return utility
1146 return utility
1147
1147
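# Illustrative sketch: creating a user plus a repository and wiring up a
# permission; everything registered here is cleaned up automatically
# ('repository.read' is assumed to be a valid permission name)::
#
#     def test_grants_read_access(user_util):
#         user = user_util.create_user()
#         repo = user_util.create_repo(repo_type='git')
#         user_util.grant_user_permission_to_repo(repo, user, 'repository.read')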
1148
1148
1149 # TODO: johbo: Split this up into utilities per domain or something similar
1149 # TODO: johbo: Split this up into utilities per domain or something similar
1150 class UserUtility(object):
1150 class UserUtility(object):
1151
1151
1152 def __init__(self, test_name="test"):
1152 def __init__(self, test_name="test"):
1153 self._test_name = self._sanitize_name(test_name)
1153 self._test_name = self._sanitize_name(test_name)
1154 self.fixture = Fixture()
1154 self.fixture = Fixture()
1155 self.repo_group_ids = []
1155 self.repo_group_ids = []
1156 self.repos_ids = []
1156 self.repos_ids = []
1157 self.user_ids = []
1157 self.user_ids = []
1158 self.user_group_ids = []
1158 self.user_group_ids = []
1159 self.user_repo_permission_ids = []
1159 self.user_repo_permission_ids = []
1160 self.user_group_repo_permission_ids = []
1160 self.user_group_repo_permission_ids = []
1161 self.user_repo_group_permission_ids = []
1161 self.user_repo_group_permission_ids = []
1162 self.user_group_repo_group_permission_ids = []
1162 self.user_group_repo_group_permission_ids = []
1163 self.user_user_group_permission_ids = []
1163 self.user_user_group_permission_ids = []
1164 self.user_group_user_group_permission_ids = []
1164 self.user_group_user_group_permission_ids = []
1165 self.user_permissions = []
1165 self.user_permissions = []
1166
1166
1167 def _sanitize_name(self, name):
1167 def _sanitize_name(self, name):
1168 for char in ['[', ']']:
1168 for char in ['[', ']']:
1169 name = name.replace(char, '_')
1169 name = name.replace(char, '_')
1170 return name
1170 return name
1171
1171
1172 def create_repo_group(
1172 def create_repo_group(
1173 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1173 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1174 group_name = "{prefix}_repogroup_{count}".format(
1174 group_name = "{prefix}_repogroup_{count}".format(
1175 prefix=self._test_name,
1175 prefix=self._test_name,
1176 count=len(self.repo_group_ids))
1176 count=len(self.repo_group_ids))
1177 repo_group = self.fixture.create_repo_group(
1177 repo_group = self.fixture.create_repo_group(
1178 group_name, cur_user=owner)
1178 group_name, cur_user=owner)
1179 if auto_cleanup:
1179 if auto_cleanup:
1180 self.repo_group_ids.append(repo_group.group_id)
1180 self.repo_group_ids.append(repo_group.group_id)
1181 return repo_group
1181 return repo_group
1182
1182
1183 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1183 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1184 auto_cleanup=True, repo_type='hg', bare=False):
1184 auto_cleanup=True, repo_type='hg', bare=False):
1185 repo_name = "{prefix}_repository_{count}".format(
1185 repo_name = "{prefix}_repository_{count}".format(
1186 prefix=self._test_name,
1186 prefix=self._test_name,
1187 count=len(self.repos_ids))
1187 count=len(self.repos_ids))
1188
1188
1189 repository = self.fixture.create_repo(
1189 repository = self.fixture.create_repo(
1190 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1190 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1191 if auto_cleanup:
1191 if auto_cleanup:
1192 self.repos_ids.append(repository.repo_id)
1192 self.repos_ids.append(repository.repo_id)
1193 return repository
1193 return repository
1194
1194
1195 def create_user(self, auto_cleanup=True, **kwargs):
1195 def create_user(self, auto_cleanup=True, **kwargs):
1196 user_name = "{prefix}_user_{count}".format(
1196 user_name = "{prefix}_user_{count}".format(
1197 prefix=self._test_name,
1197 prefix=self._test_name,
1198 count=len(self.user_ids))
1198 count=len(self.user_ids))
1199 user = self.fixture.create_user(user_name, **kwargs)
1199 user = self.fixture.create_user(user_name, **kwargs)
1200 if auto_cleanup:
1200 if auto_cleanup:
1201 self.user_ids.append(user.user_id)
1201 self.user_ids.append(user.user_id)
1202 return user
1202 return user
1203
1203
1204 def create_additional_user_email(self, user, email):
1204 def create_additional_user_email(self, user, email):
1205 uem = self.fixture.create_additional_user_email(user=user, email=email)
1205 uem = self.fixture.create_additional_user_email(user=user, email=email)
1206 return uem
1206 return uem
1207
1207
1208 def create_user_with_group(self):
1208 def create_user_with_group(self):
1209 user = self.create_user()
1209 user = self.create_user()
1210 user_group = self.create_user_group(members=[user])
1210 user_group = self.create_user_group(members=[user])
1211 return user, user_group
1211 return user, user_group
1212
1212
1213 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1213 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1214 auto_cleanup=True, **kwargs):
1214 auto_cleanup=True, **kwargs):
1215 group_name = "{prefix}_usergroup_{count}".format(
1215 group_name = "{prefix}_usergroup_{count}".format(
1216 prefix=self._test_name,
1216 prefix=self._test_name,
1217 count=len(self.user_group_ids))
1217 count=len(self.user_group_ids))
1218 user_group = self.fixture.create_user_group(
1218 user_group = self.fixture.create_user_group(
1219 group_name, cur_user=owner, **kwargs)
1219 group_name, cur_user=owner, **kwargs)
1220
1220
1221 if auto_cleanup:
1221 if auto_cleanup:
1222 self.user_group_ids.append(user_group.users_group_id)
1222 self.user_group_ids.append(user_group.users_group_id)
1223 if members:
1223 if members:
1224 for user in members:
1224 for user in members:
1225 UserGroupModel().add_user_to_group(user_group, user)
1225 UserGroupModel().add_user_to_group(user_group, user)
1226 return user_group
1226 return user_group
1227
1227
1228 def grant_user_permission(self, user_name, permission_name):
1228 def grant_user_permission(self, user_name, permission_name):
1229 self.inherit_default_user_permissions(user_name, False)
1229 self.inherit_default_user_permissions(user_name, False)
1230 self.user_permissions.append((user_name, permission_name))
1230 self.user_permissions.append((user_name, permission_name))
1231
1231
1232 def grant_user_permission_to_repo_group(
1232 def grant_user_permission_to_repo_group(
1233 self, repo_group, user, permission_name):
1233 self, repo_group, user, permission_name):
1234 permission = RepoGroupModel().grant_user_permission(
1234 permission = RepoGroupModel().grant_user_permission(
1235 repo_group, user, permission_name)
1235 repo_group, user, permission_name)
1236 self.user_repo_group_permission_ids.append(
1236 self.user_repo_group_permission_ids.append(
1237 (repo_group.group_id, user.user_id))
1237 (repo_group.group_id, user.user_id))
1238 return permission
1238 return permission
1239
1239
1240 def grant_user_group_permission_to_repo_group(
1240 def grant_user_group_permission_to_repo_group(
1241 self, repo_group, user_group, permission_name):
1241 self, repo_group, user_group, permission_name):
1242 permission = RepoGroupModel().grant_user_group_permission(
1242 permission = RepoGroupModel().grant_user_group_permission(
1243 repo_group, user_group, permission_name)
1243 repo_group, user_group, permission_name)
1244 self.user_group_repo_group_permission_ids.append(
1244 self.user_group_repo_group_permission_ids.append(
1245 (repo_group.group_id, user_group.users_group_id))
1245 (repo_group.group_id, user_group.users_group_id))
1246 return permission
1246 return permission
1247
1247
1248 def grant_user_permission_to_repo(
1248 def grant_user_permission_to_repo(
1249 self, repo, user, permission_name):
1249 self, repo, user, permission_name):
1250 permission = RepoModel().grant_user_permission(
1250 permission = RepoModel().grant_user_permission(
1251 repo, user, permission_name)
1251 repo, user, permission_name)
1252 self.user_repo_permission_ids.append(
1252 self.user_repo_permission_ids.append(
1253 (repo.repo_id, user.user_id))
1253 (repo.repo_id, user.user_id))
1254 return permission
1254 return permission
1255
1255
1256 def grant_user_group_permission_to_repo(
1256 def grant_user_group_permission_to_repo(
1257 self, repo, user_group, permission_name):
1257 self, repo, user_group, permission_name):
1258 permission = RepoModel().grant_user_group_permission(
1258 permission = RepoModel().grant_user_group_permission(
1259 repo, user_group, permission_name)
1259 repo, user_group, permission_name)
1260 self.user_group_repo_permission_ids.append(
1260 self.user_group_repo_permission_ids.append(
1261 (repo.repo_id, user_group.users_group_id))
1261 (repo.repo_id, user_group.users_group_id))
1262 return permission
1262 return permission
1263
1263
1264 def grant_user_permission_to_user_group(
1264 def grant_user_permission_to_user_group(
1265 self, target_user_group, user, permission_name):
1265 self, target_user_group, user, permission_name):
1266 permission = UserGroupModel().grant_user_permission(
1266 permission = UserGroupModel().grant_user_permission(
1267 target_user_group, user, permission_name)
1267 target_user_group, user, permission_name)
1268 self.user_user_group_permission_ids.append(
1268 self.user_user_group_permission_ids.append(
1269 (target_user_group.users_group_id, user.user_id))
1269 (target_user_group.users_group_id, user.user_id))
1270 return permission
1270 return permission
1271
1271
1272 def grant_user_group_permission_to_user_group(
1272 def grant_user_group_permission_to_user_group(
1273 self, target_user_group, user_group, permission_name):
1273 self, target_user_group, user_group, permission_name):
1274 permission = UserGroupModel().grant_user_group_permission(
1274 permission = UserGroupModel().grant_user_group_permission(
1275 target_user_group, user_group, permission_name)
1275 target_user_group, user_group, permission_name)
1276 self.user_group_user_group_permission_ids.append(
1276 self.user_group_user_group_permission_ids.append(
1277 (target_user_group.users_group_id, user_group.users_group_id))
1277 (target_user_group.users_group_id, user_group.users_group_id))
1278 return permission
1278 return permission
1279
1279
1280 def revoke_user_permission(self, user_name, permission_name):
1280 def revoke_user_permission(self, user_name, permission_name):
1281 self.inherit_default_user_permissions(user_name, True)
1281 self.inherit_default_user_permissions(user_name, True)
1282 UserModel().revoke_perm(user_name, permission_name)
1282 UserModel().revoke_perm(user_name, permission_name)
1283
1283
1284 def inherit_default_user_permissions(self, user_name, value):
1284 def inherit_default_user_permissions(self, user_name, value):
1285 user = UserModel().get_by_username(user_name)
1285 user = UserModel().get_by_username(user_name)
1286 user.inherit_default_permissions = value
1286 user.inherit_default_permissions = value
1287 Session().add(user)
1287 Session().add(user)
1288 Session().commit()
1288 Session().commit()
1289
1289
1290 def cleanup(self):
1290 def cleanup(self):
1291 self._cleanup_permissions()
1291 self._cleanup_permissions()
1292 self._cleanup_repos()
1292 self._cleanup_repos()
1293 self._cleanup_repo_groups()
1293 self._cleanup_repo_groups()
1294 self._cleanup_user_groups()
1294 self._cleanup_user_groups()
1295 self._cleanup_users()
1295 self._cleanup_users()
1296
1296
1297 def _cleanup_permissions(self):
1297 def _cleanup_permissions(self):
1298 if self.user_permissions:
1298 if self.user_permissions:
1299 for user_name, permission_name in self.user_permissions:
1299 for user_name, permission_name in self.user_permissions:
1300 self.revoke_user_permission(user_name, permission_name)
1300 self.revoke_user_permission(user_name, permission_name)
1301
1301
1302 for permission in self.user_repo_permission_ids:
1302 for permission in self.user_repo_permission_ids:
1303 RepoModel().revoke_user_permission(*permission)
1303 RepoModel().revoke_user_permission(*permission)
1304
1304
1305 for permission in self.user_group_repo_permission_ids:
1305 for permission in self.user_group_repo_permission_ids:
1306 RepoModel().revoke_user_group_permission(*permission)
1306 RepoModel().revoke_user_group_permission(*permission)
1307
1307
1308 for permission in self.user_repo_group_permission_ids:
1308 for permission in self.user_repo_group_permission_ids:
1309 RepoGroupModel().revoke_user_permission(*permission)
1309 RepoGroupModel().revoke_user_permission(*permission)
1310
1310
1311 for permission in self.user_group_repo_group_permission_ids:
1311 for permission in self.user_group_repo_group_permission_ids:
1312 RepoGroupModel().revoke_user_group_permission(*permission)
1312 RepoGroupModel().revoke_user_group_permission(*permission)
1313
1313
1314 for permission in self.user_user_group_permission_ids:
1314 for permission in self.user_user_group_permission_ids:
1315 UserGroupModel().revoke_user_permission(*permission)
1315 UserGroupModel().revoke_user_permission(*permission)
1316
1316
1317 for permission in self.user_group_user_group_permission_ids:
1317 for permission in self.user_group_user_group_permission_ids:
1318 UserGroupModel().revoke_user_group_permission(*permission)
1318 UserGroupModel().revoke_user_group_permission(*permission)
1319
1319
1320 def _cleanup_repo_groups(self):
1320 def _cleanup_repo_groups(self):
1321 def _repo_group_compare(first_group_id, second_group_id):
1321 def _repo_group_compare(first_group_id, second_group_id):
1322 """
1322 """
1323 Gives higher priority to the groups with the most complex paths
1323 Gives higher priority to the groups with the most complex paths
1324 """
1324 """
1325 first_group = RepoGroup.get(first_group_id)
1325 first_group = RepoGroup.get(first_group_id)
1326 second_group = RepoGroup.get(second_group_id)
1326 second_group = RepoGroup.get(second_group_id)
1327 first_group_parts = (
1327 first_group_parts = (
1328 len(first_group.group_name.split('/')) if first_group else 0)
1328 len(first_group.group_name.split('/')) if first_group else 0)
1329 second_group_parts = (
1329 second_group_parts = (
1330 len(second_group.group_name.split('/')) if second_group else 0)
1330 len(second_group.group_name.split('/')) if second_group else 0)
1331 return cmp(second_group_parts, first_group_parts)
1331 return cmp(second_group_parts, first_group_parts)
1332
1332
1333 sorted_repo_group_ids = sorted(
1333 sorted_repo_group_ids = sorted(
1334 self.repo_group_ids, cmp=_repo_group_compare)
1334 self.repo_group_ids, cmp=_repo_group_compare)
1335 for repo_group_id in sorted_repo_group_ids:
1335 for repo_group_id in sorted_repo_group_ids:
1336 self.fixture.destroy_repo_group(repo_group_id)
1336 self.fixture.destroy_repo_group(repo_group_id)
1337
1337
1338 def _cleanup_repos(self):
1338 def _cleanup_repos(self):
1339 sorted_repos_ids = sorted(self.repos_ids)
1339 sorted_repos_ids = sorted(self.repos_ids)
1340 for repo_id in sorted_repos_ids:
1340 for repo_id in sorted_repos_ids:
1341 self.fixture.destroy_repo(repo_id)
1341 self.fixture.destroy_repo(repo_id)
1342
1342
1343 def _cleanup_user_groups(self):
1343 def _cleanup_user_groups(self):
1344 def _user_group_compare(first_group_id, second_group_id):
1344 def _user_group_compare(first_group_id, second_group_id):
1345 """
1345 """
1346 Gives higher priority to the groups with the most complex paths
1346 Gives higher priority to the groups with the most complex paths
1347 """
1347 """
1348 first_group = UserGroup.get(first_group_id)
1348 first_group = UserGroup.get(first_group_id)
1349 second_group = UserGroup.get(second_group_id)
1349 second_group = UserGroup.get(second_group_id)
1350 first_group_parts = (
1350 first_group_parts = (
1351 len(first_group.users_group_name.split('/'))
1351 len(first_group.users_group_name.split('/'))
1352 if first_group else 0)
1352 if first_group else 0)
1353 second_group_parts = (
1353 second_group_parts = (
1354 len(second_group.users_group_name.split('/'))
1354 len(second_group.users_group_name.split('/'))
1355 if second_group else 0)
1355 if second_group else 0)
1356 return cmp(second_group_parts, first_group_parts)
1356 return cmp(second_group_parts, first_group_parts)
1357
1357
1358 sorted_user_group_ids = sorted(
1358 sorted_user_group_ids = sorted(
1359 self.user_group_ids, cmp=_user_group_compare)
1359 self.user_group_ids, cmp=_user_group_compare)
1360 for user_group_id in sorted_user_group_ids:
1360 for user_group_id in sorted_user_group_ids:
1361 self.fixture.destroy_user_group(user_group_id)
1361 self.fixture.destroy_user_group(user_group_id)
1362
1362
1363 def _cleanup_users(self):
1363 def _cleanup_users(self):
1364 for user_id in self.user_ids:
1364 for user_id in self.user_ids:
1365 self.fixture.destroy_user(user_id)
1365 self.fixture.destroy_user(user_id)
1366
1366
1367
1367
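# A minimal usage sketch of the utility above, assuming it is exposed to
# tests as a `user_util` fixture (matching the pattern of the other *_util
# fixtures further down in this file); the permission name follows
# RhodeCode's 'repository.<level>' scheme:
def test_user_util_usage_sketch(user_util):
    repo = user_util.create_repo(repo_type='git')
    user = user_util.create_user()
    user_util.grant_user_permission_to_repo(repo, user, 'repository.write')
    # ids created with auto_cleanup=True are destroyed again in cleanup()
    assert repo.repo_id in user_util.repos_ids
    assert user.user_id in user_util.user_ids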
1368 # TODO: Think about moving this into a pytest-pyro package and making it a
1368 # TODO: Think about moving this into a pytest-pyro package and making it a
1369 # pytest plugin
1369 # pytest plugin
1370 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1370 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1371 def pytest_runtest_makereport(item, call):
1371 def pytest_runtest_makereport(item, call):
1372 """
1372 """
1373 Add the remote traceback to the report if the exception carries this information.
1373 Add the remote traceback to the report if the exception carries this information.
1374
1374
1375 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1375 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1376 to the exception instance.
1376 to the exception instance.
1377 """
1377 """
1378 outcome = yield
1378 outcome = yield
1379 report = outcome.get_result()
1379 report = outcome.get_result()
1380 if call.excinfo:
1380 if call.excinfo:
1381 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1381 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1382
1382
1383
1383
1384 def _add_vcsserver_remote_traceback(report, exc):
1384 def _add_vcsserver_remote_traceback(report, exc):
1385 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1385 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1386
1386
1387 if vcsserver_traceback:
1387 if vcsserver_traceback:
1388 section = 'VCSServer remote traceback ' + report.when
1388 section = 'VCSServer remote traceback ' + report.when
1389 report.sections.append((section, vcsserver_traceback))
1389 report.sections.append((section, vcsserver_traceback))
1390
1390
1391
1391
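# A short sketch of where the attribute checked above comes from; the real
# producer lives in the VCSServer client code, so this is only illustrative:
#
#     try:
#         remote_vcs_call()
#     except Exception as exc:
#         exc._vcs_server_traceback = remote_traceback_text
#         raise
#
# When such an exception escapes a test, the hook adds a
# "VCSServer remote traceback <when>" section to the pytest report.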
1392 @pytest.fixture(scope='session')
1392 @pytest.fixture(scope='session')
1393 def testrun():
1393 def testrun():
1394 return {
1394 return {
1395 'uuid': uuid.uuid4(),
1395 'uuid': uuid.uuid4(),
1396 'start': datetime.datetime.utcnow().isoformat(),
1396 'start': datetime.datetime.utcnow().isoformat(),
1397 'timestamp': int(time.time()),
1397 'timestamp': int(time.time()),
1398 }
1398 }
1399
1399
1400
1400
1401 @pytest.fixture(autouse=True)
1401 @pytest.fixture(autouse=True)
1402 def collect_appenlight_stats(request, testrun):
1402 def collect_appenlight_stats(request, testrun):
1403 """
1403 """
1404 This fixture reports memory consumption of single tests.
1404 This fixture reports memory consumption of single tests.
1405 
1405 
1406 It gathers data based on `psutil` and sends it to Appenlight. The option
1406 It gathers data based on `psutil` and sends it to Appenlight. The option
1407 ``--appenlight`` has to be used to enable this fixture and the API key for
1407 ``--appenlight`` has to be used to enable this fixture and the API key for
1408 your application has to be provided in ``--appenlight-api-key``.
1408 your application has to be provided in ``--appenlight-api-key``.
1409 """
1409 """
1410 try:
1410 try:
1411 # cygwin does not yet have psutil support.
1411 # cygwin does not yet have psutil support.
1412 import psutil
1412 import psutil
1413 except ImportError:
1413 except ImportError:
1414 return
1414 return
1415
1415
1416 if not request.config.getoption('--appenlight'):
1416 if not request.config.getoption('--appenlight'):
1417 return
1417 return
1418 else:
1418 else:
1419 # Only request the baseapp fixture if appenlight tracking is
1419 # Only request the baseapp fixture if appenlight tracking is
1420 # enabled. This will speed up a test run of unit tests by 2 to 3
1420 # enabled. This will speed up a test run of unit tests by 2 to 3
1421 # seconds if appenlight is not enabled.
1421 # seconds if appenlight is not enabled.
1422 baseapp = request.getfuncargvalue("baseapp")
1422 baseapp = request.getfuncargvalue("baseapp")
1423 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1423 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1424 client = AppenlightClient(
1424 client = AppenlightClient(
1425 url=url,
1425 url=url,
1426 api_key=request.config.getoption('--appenlight-api-key'),
1426 api_key=request.config.getoption('--appenlight-api-key'),
1427 namespace=request.node.nodeid,
1427 namespace=request.node.nodeid,
1428 request=str(testrun['uuid']),
1428 request=str(testrun['uuid']),
1429 testrun=testrun)
1429 testrun=testrun)
1430
1430
1431 client.collect({
1431 client.collect({
1432 'message': "Starting",
1432 'message': "Starting",
1433 })
1433 })
1434
1434
1435 server_and_port = baseapp.config.get_settings()['vcs.server']
1435 server_and_port = baseapp.config.get_settings()['vcs.server']
1436 protocol = baseapp.config.get_settings()['vcs.server.protocol']
1436 protocol = baseapp.config.get_settings()['vcs.server.protocol']
1437 server = create_vcsserver_proxy(server_and_port, protocol)
1437 server = create_vcsserver_proxy(server_and_port, protocol)
1438 with server:
1438 with server:
1439 vcs_pid = server.get_pid()
1439 vcs_pid = server.get_pid()
1440 server.run_gc()
1440 server.run_gc()
1441 vcs_process = psutil.Process(vcs_pid)
1441 vcs_process = psutil.Process(vcs_pid)
1442 mem = vcs_process.memory_info()
1442 mem = vcs_process.memory_info()
1443 client.tag_before('vcsserver.rss', mem.rss)
1443 client.tag_before('vcsserver.rss', mem.rss)
1444 client.tag_before('vcsserver.vms', mem.vms)
1444 client.tag_before('vcsserver.vms', mem.vms)
1445
1445
1446 test_process = psutil.Process()
1446 test_process = psutil.Process()
1447 mem = test_process.memory_info()
1447 mem = test_process.memory_info()
1448 client.tag_before('test.rss', mem.rss)
1448 client.tag_before('test.rss', mem.rss)
1449 client.tag_before('test.vms', mem.vms)
1449 client.tag_before('test.vms', mem.vms)
1450
1450
1451 client.tag_before('time', time.time())
1451 client.tag_before('time', time.time())
1452
1452
1453 @request.addfinalizer
1453 @request.addfinalizer
1454 def send_stats():
1454 def send_stats():
1455 client.tag_after('time', time.time())
1455 client.tag_after('time', time.time())
1456 with server:
1456 with server:
1457 gc_stats = server.run_gc()
1457 gc_stats = server.run_gc()
1458 for tag, value in gc_stats.items():
1458 for tag, value in gc_stats.items():
1459 client.tag_after(tag, value)
1459 client.tag_after(tag, value)
1460 mem = vcs_process.memory_info()
1460 mem = vcs_process.memory_info()
1461 client.tag_after('vcsserver.rss', mem.rss)
1461 client.tag_after('vcsserver.rss', mem.rss)
1462 client.tag_after('vcsserver.vms', mem.vms)
1462 client.tag_after('vcsserver.vms', mem.vms)
1463
1463
1464 mem = test_process.memory_info()
1464 mem = test_process.memory_info()
1465 client.tag_after('test.rss', mem.rss)
1465 client.tag_after('test.rss', mem.rss)
1466 client.tag_after('test.vms', mem.vms)
1466 client.tag_after('test.vms', mem.vms)
1467
1467
1468 client.collect({
1468 client.collect({
1469 'message': "Finished",
1469 'message': "Finished",
1470 })
1470 })
1471 client.send_stats()
1471 client.send_stats()
1472
1472
1473 return client
1473 return client
1474
1474
1475
1475
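# Given the option names read above, a run with Appenlight reporting enabled
# would look roughly like this (sketch; the options themselves are registered
# elsewhere in the test suite's pytest configuration):
#
#     py.test --appenlight \
#         --appenlight-url=https://appenlight.example.com \
#         --appenlight-api-key=<your-api-key>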
1476 class AppenlightClient():
1476 class AppenlightClient():
1477
1477
1478 url_template = '{url}?protocol_version=0.5'
1478 url_template = '{url}?protocol_version=0.5'
1479
1479
1480 def __init__(
1480 def __init__(
1481 self, url, api_key, add_server=True, add_timestamp=True,
1481 self, url, api_key, add_server=True, add_timestamp=True,
1482 namespace=None, request=None, testrun=None):
1482 namespace=None, request=None, testrun=None):
1483 self.url = self.url_template.format(url=url)
1483 self.url = self.url_template.format(url=url)
1484 self.api_key = api_key
1484 self.api_key = api_key
1485 self.add_server = add_server
1485 self.add_server = add_server
1486 self.add_timestamp = add_timestamp
1486 self.add_timestamp = add_timestamp
1487 self.namespace = namespace
1487 self.namespace = namespace
1488 self.request = request
1488 self.request = request
1489 self.server = socket.getfqdn(socket.gethostname())
1489 self.server = socket.getfqdn(socket.gethostname())
1490 self.tags_before = {}
1490 self.tags_before = {}
1491 self.tags_after = {}
1491 self.tags_after = {}
1492 self.stats = []
1492 self.stats = []
1493 self.testrun = testrun or {}
1493 self.testrun = testrun or {}
1494
1494
1495 def tag_before(self, tag, value):
1495 def tag_before(self, tag, value):
1496 self.tags_before[tag] = value
1496 self.tags_before[tag] = value
1497
1497
1498 def tag_after(self, tag, value):
1498 def tag_after(self, tag, value):
1499 self.tags_after[tag] = value
1499 self.tags_after[tag] = value
1500
1500
1501 def collect(self, data):
1501 def collect(self, data):
1502 if self.add_server:
1502 if self.add_server:
1503 data.setdefault('server', self.server)
1503 data.setdefault('server', self.server)
1504 if self.add_timestamp:
1504 if self.add_timestamp:
1505 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1505 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1506 if self.namespace:
1506 if self.namespace:
1507 data.setdefault('namespace', self.namespace)
1507 data.setdefault('namespace', self.namespace)
1508 if self.request:
1508 if self.request:
1509 data.setdefault('request', self.request)
1509 data.setdefault('request', self.request)
1510 self.stats.append(data)
1510 self.stats.append(data)
1511
1511
1512 def send_stats(self):
1512 def send_stats(self):
1513 tags = [
1513 tags = [
1514 ('testrun', self.request),
1514 ('testrun', self.request),
1515 ('testrun.start', self.testrun['start']),
1515 ('testrun.start', self.testrun['start']),
1516 ('testrun.timestamp', self.testrun['timestamp']),
1516 ('testrun.timestamp', self.testrun['timestamp']),
1517 ('test', self.namespace),
1517 ('test', self.namespace),
1518 ]
1518 ]
1519 for key, value in self.tags_before.items():
1519 for key, value in self.tags_before.items():
1520 tags.append((key + '.before', value))
1520 tags.append((key + '.before', value))
1521 try:
1521 try:
1522 delta = self.tags_after[key] - value
1522 delta = self.tags_after[key] - value
1523 tags.append((key + '.delta', delta))
1523 tags.append((key + '.delta', delta))
1524 except Exception:
1524 except Exception:
1525 pass
1525 pass
1526 for key, value in self.tags_after.items():
1526 for key, value in self.tags_after.items():
1527 tags.append((key + '.after', value))
1527 tags.append((key + '.after', value))
1528 self.collect({
1528 self.collect({
1529 'message': "Collected tags",
1529 'message': "Collected tags",
1530 'tags': tags,
1530 'tags': tags,
1531 })
1531 })
1532
1532
1533 response = requests.post(
1533 response = requests.post(
1534 self.url,
1534 self.url,
1535 headers={
1535 headers={
1536 'X-appenlight-api-key': self.api_key},
1536 'X-appenlight-api-key': self.api_key},
1537 json=self.stats,
1537 json=self.stats,
1538 )
1538 )
1539
1539
1540 if response.status_code != 200:
1540 if response.status_code != 200:
1541 pprint.pprint(self.stats)
1541 pprint.pprint(self.stats)
1542 print(response.headers)
1542 print(response.headers)
1543 print(response.text)
1543 print(response.text)
1544 raise Exception('Sending to appenlight failed')
1544 raise Exception('Sending to appenlight failed')
1545
1545
1546
1546
1547 @pytest.fixture
1547 @pytest.fixture
1548 def gist_util(request, db_connection):
1548 def gist_util(request, db_connection):
1549 """
1549 """
1550 Provides a wired instance of `GistUtility` with integrated cleanup.
1550 Provides a wired instance of `GistUtility` with integrated cleanup.
1551 """
1551 """
1552 utility = GistUtility()
1552 utility = GistUtility()
1553 request.addfinalizer(utility.cleanup)
1553 request.addfinalizer(utility.cleanup)
1554 return utility
1554 return utility
1555
1555
1556
1556
1557 class GistUtility(object):
1557 class GistUtility(object):
1558 def __init__(self):
1558 def __init__(self):
1559 self.fixture = Fixture()
1559 self.fixture = Fixture()
1560 self.gist_ids = []
1560 self.gist_ids = []
1561
1561
1562 def create_gist(self, **kwargs):
1562 def create_gist(self, **kwargs):
1563 gist = self.fixture.create_gist(**kwargs)
1563 gist = self.fixture.create_gist(**kwargs)
1564 self.gist_ids.append(gist.gist_id)
1564 self.gist_ids.append(gist.gist_id)
1565 return gist
1565 return gist
1566
1566
1567 def cleanup(self):
1567 def cleanup(self):
1568 for id_ in self.gist_ids:
1568 for id_ in self.gist_ids:
1569 self.fixture.destroy_gists(str(id_))
1569 self.fixture.destroy_gists(str(id_))
1570
1570
1571
1571
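# A minimal usage sketch of the gist_util fixture above; any keyword
# arguments would be passed straight through to Fixture.create_gist:
def test_gist_util_usage_sketch(gist_util):
    gist = gist_util.create_gist()
    # the id is recorded so that GistUtility.cleanup() destroys it afterwards
    assert gist.gist_id in gist_util.gist_ids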
1572 @pytest.fixture
1572 @pytest.fixture
1573 def enabled_backends(request):
1573 def enabled_backends(request):
1574 backends = request.config.option.backends
1574 backends = request.config.option.backends
1575 return backends[:]
1575 return backends[:]
1576
1576
1577
1577
1578 @pytest.fixture
1578 @pytest.fixture
1579 def settings_util(request, db_connection):
1579 def settings_util(request, db_connection):
1580 """
1580 """
1581 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1581 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1582 """
1582 """
1583 utility = SettingsUtility()
1583 utility = SettingsUtility()
1584 request.addfinalizer(utility.cleanup)
1584 request.addfinalizer(utility.cleanup)
1585 return utility
1585 return utility
1586
1586
1587
1587
1588 class SettingsUtility(object):
1588 class SettingsUtility(object):
1589 def __init__(self):
1589 def __init__(self):
1590 self.rhodecode_ui_ids = []
1590 self.rhodecode_ui_ids = []
1591 self.rhodecode_setting_ids = []
1591 self.rhodecode_setting_ids = []
1592 self.repo_rhodecode_ui_ids = []
1592 self.repo_rhodecode_ui_ids = []
1593 self.repo_rhodecode_setting_ids = []
1593 self.repo_rhodecode_setting_ids = []
1594
1594
1595 def create_repo_rhodecode_ui(
1595 def create_repo_rhodecode_ui(
1596 self, repo, section, value, key=None, active=True, cleanup=True):
1596 self, repo, section, value, key=None, active=True, cleanup=True):
1597 key = key or hashlib.sha1(
1597 key = key or hashlib.sha1(
1598 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1598 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1599
1599
1600 setting = RepoRhodeCodeUi()
1600 setting = RepoRhodeCodeUi()
1601 setting.repository_id = repo.repo_id
1601 setting.repository_id = repo.repo_id
1602 setting.ui_section = section
1602 setting.ui_section = section
1603 setting.ui_value = value
1603 setting.ui_value = value
1604 setting.ui_key = key
1604 setting.ui_key = key
1605 setting.ui_active = active
1605 setting.ui_active = active
1606 Session().add(setting)
1606 Session().add(setting)
1607 Session().commit()
1607 Session().commit()
1608
1608
1609 if cleanup:
1609 if cleanup:
1610 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1610 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1611 return setting
1611 return setting
1612
1612
1613 def create_rhodecode_ui(
1613 def create_rhodecode_ui(
1614 self, section, value, key=None, active=True, cleanup=True):
1614 self, section, value, key=None, active=True, cleanup=True):
1615 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1615 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1616
1616
1617 setting = RhodeCodeUi()
1617 setting = RhodeCodeUi()
1618 setting.ui_section = section
1618 setting.ui_section = section
1619 setting.ui_value = value
1619 setting.ui_value = value
1620 setting.ui_key = key
1620 setting.ui_key = key
1621 setting.ui_active = active
1621 setting.ui_active = active
1622 Session().add(setting)
1622 Session().add(setting)
1623 Session().commit()
1623 Session().commit()
1624
1624
1625 if cleanup:
1625 if cleanup:
1626 self.rhodecode_ui_ids.append(setting.ui_id)
1626 self.rhodecode_ui_ids.append(setting.ui_id)
1627 return setting
1627 return setting
1628
1628
1629 def create_repo_rhodecode_setting(
1629 def create_repo_rhodecode_setting(
1630 self, repo, name, value, type_, cleanup=True):
1630 self, repo, name, value, type_, cleanup=True):
1631 setting = RepoRhodeCodeSetting(
1631 setting = RepoRhodeCodeSetting(
1632 repo.repo_id, key=name, val=value, type=type_)
1632 repo.repo_id, key=name, val=value, type=type_)
1633 Session().add(setting)
1633 Session().add(setting)
1634 Session().commit()
1634 Session().commit()
1635
1635
1636 if cleanup:
1636 if cleanup:
1637 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1637 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1638 return setting
1638 return setting
1639
1639
1640 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1640 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1641 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1641 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1642 Session().add(setting)
1642 Session().add(setting)
1643 Session().commit()
1643 Session().commit()
1644
1644
1645 if cleanup:
1645 if cleanup:
1646 self.rhodecode_setting_ids.append(setting.app_settings_id)
1646 self.rhodecode_setting_ids.append(setting.app_settings_id)
1647
1647
1648 return setting
1648 return setting
1649
1649
1650 def cleanup(self):
1650 def cleanup(self):
1651 for id_ in self.rhodecode_ui_ids:
1651 for id_ in self.rhodecode_ui_ids:
1652 setting = RhodeCodeUi.get(id_)
1652 setting = RhodeCodeUi.get(id_)
1653 Session().delete(setting)
1653 Session().delete(setting)
1654
1654
1655 for id_ in self.rhodecode_setting_ids:
1655 for id_ in self.rhodecode_setting_ids:
1656 setting = RhodeCodeSetting.get(id_)
1656 setting = RhodeCodeSetting.get(id_)
1657 Session().delete(setting)
1657 Session().delete(setting)
1658
1658
1659 for id_ in self.repo_rhodecode_ui_ids:
1659 for id_ in self.repo_rhodecode_ui_ids:
1660 setting = RepoRhodeCodeUi.get(id_)
1660 setting = RepoRhodeCodeUi.get(id_)
1661 Session().delete(setting)
1661 Session().delete(setting)
1662
1662
1663 for id_ in self.repo_rhodecode_setting_ids:
1663 for id_ in self.repo_rhodecode_setting_ids:
1664 setting = RepoRhodeCodeSetting.get(id_)
1664 setting = RepoRhodeCodeSetting.get(id_)
1665 Session().delete(setting)
1665 Session().delete(setting)
1666
1666
1667 Session().commit()
1667 Session().commit()
1668
1668
1669
1669
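# A minimal usage sketch of the settings_util fixture above; the key, value
# and type used here are placeholders:
def test_settings_util_usage_sketch(settings_util):
    setting = settings_util.create_rhodecode_setting(
        'example_key', 'example_value', 'unicode', cleanup=True)
    # cleanup=True records the id so SettingsUtility.cleanup() deletes it
    assert setting.app_settings_id in settings_util.rhodecode_setting_ids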
1670 @pytest.fixture
1670 @pytest.fixture
1671 def no_notifications(request):
1671 def no_notifications(request):
1672 notification_patcher = mock.patch(
1672 notification_patcher = mock.patch(
1673 'rhodecode.model.notification.NotificationModel.create')
1673 'rhodecode.model.notification.NotificationModel.create')
1674 notification_patcher.start()
1674 notification_patcher.start()
1675 request.addfinalizer(notification_patcher.stop)
1675 request.addfinalizer(notification_patcher.stop)
1676
1676
1677
1677
1678 @pytest.fixture(scope='session')
1678 @pytest.fixture(scope='session')
1679 def repeat(request):
1679 def repeat(request):
1680 """
1680 """
1681 The number of repetitions is based on this fixture.
1681 The number of repetitions is based on this fixture.
1682
1682
1683 Slower calls may divide it by 10 or 100. It is chosen so that the
1683 Slower calls may divide it by 10 or 100. It is chosen so that the
1684 tests are not too slow in our default test suite.
1684 tests are not too slow in our default test suite.
1685 """
1685 """
1686 return request.config.getoption('--repeat')
1686 return request.config.getoption('--repeat')
1687
1687
1688
1688
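# A minimal sketch of how the repeat fixture above is consumed; the body of
# the loop is a placeholder for the operation being exercised:
def test_repeat_usage_sketch(repeat):
    # slower operations typically divide the configured count by 10 or 100
    iterations = max(repeat // 10, 1)
    for _ in range(iterations):
        pass  # placeholder for the repeated call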
1689 @pytest.fixture
1689 @pytest.fixture
1690 def rhodecode_fixtures():
1690 def rhodecode_fixtures():
1691 return Fixture()
1691 return Fixture()
1692
1692
1693
1693
1694 @pytest.fixture
1694 @pytest.fixture
1695 def context_stub():
1695 def context_stub():
1696 """
1696 """
1697 Stub context object.
1697 Stub context object.
1698 """
1698 """
1699 context = pyramid.testing.DummyResource()
1699 context = pyramid.testing.DummyResource()
1700 return context
1700 return context
1701
1701
1702
1702
1703 @pytest.fixture
1703 @pytest.fixture
1704 def request_stub():
1704 def request_stub():
1705 """
1705 """
1706 Stub request object.
1706 Stub request object.
1707 """
1707 """
1708 from rhodecode.lib.base import bootstrap_request
1708 from rhodecode.lib.base import bootstrap_request
1709 request = bootstrap_request(scheme='https')
1709 request = bootstrap_request(scheme='https')
1710 return request
1710 return request
1711
1711
1712
1712
1713 @pytest.fixture
1713 @pytest.fixture
1714 def config_stub(request, request_stub):
1714 def config_stub(request, request_stub):
1715 """
1715 """
1716 Set up pyramid.testing and return the Configurator.
1716 Set up pyramid.testing and return the Configurator.
1717 """
1717 """
1718 from rhodecode.lib.base import bootstrap_config
1718 from rhodecode.lib.base import bootstrap_config
1719 config = bootstrap_config(request=request_stub)
1719 config = bootstrap_config(request=request_stub)
1720
1720
1721 @request.addfinalizer
1721 @request.addfinalizer
1722 def cleanup():
1722 def cleanup():
1723 pyramid.testing.tearDown()
1723 pyramid.testing.tearDown()
1724
1724
1725 return config
1725 return config
1726
1726
1727
1727
1728 @pytest.fixture
1728 @pytest.fixture
1729 def StubIntegrationType():
1729 def StubIntegrationType():
1730 class _StubIntegrationType(IntegrationTypeBase):
1730 class _StubIntegrationType(IntegrationTypeBase):
1731 """ Test integration type class """
1731 """ Test integration type class """
1732
1732
1733 key = 'test'
1733 key = 'test'
1734 display_name = 'Test integration type'
1734 display_name = 'Test integration type'
1735 description = 'A test integration type for testing'
1735 description = 'A test integration type for testing'
1736
1736
1737 @classmethod
1737 @classmethod
1738 def icon(cls):
1738 def icon(cls):
1739 return 'test_icon_html_image'
1739 return 'test_icon_html_image'
1740
1740
1741 def __init__(self, settings):
1741 def __init__(self, settings):
1742 super(_StubIntegrationType, self).__init__(settings)
1742 super(_StubIntegrationType, self).__init__(settings)
1743 self.sent_events = [] # for testing
1743 self.sent_events = [] # for testing
1744
1744
1745 def send_event(self, event):
1745 def send_event(self, event):
1746 self.sent_events.append(event)
1746 self.sent_events.append(event)
1747
1747
1748 def settings_schema(self):
1748 def settings_schema(self):
1749 class SettingsSchema(colander.Schema):
1749 class SettingsSchema(colander.Schema):
1750 test_string_field = colander.SchemaNode(
1750 test_string_field = colander.SchemaNode(
1751 colander.String(),
1751 colander.String(),
1752 missing=colander.required,
1752 missing=colander.required,
1753 title='test string field',
1753 title='test string field',
1754 )
1754 )
1755 test_int_field = colander.SchemaNode(
1755 test_int_field = colander.SchemaNode(
1756 colander.Int(),
1756 colander.Int(),
1757 title='some integer setting',
1757 title='some integer setting',
1758 )
1758 )
1759 return SettingsSchema()
1759 return SettingsSchema()
1760
1760
1761
1761
1762 integration_type_registry.register_integration_type(_StubIntegrationType)
1762 integration_type_registry.register_integration_type(_StubIntegrationType)
1763 return _StubIntegrationType
1763 return _StubIntegrationType
1764
1764
1765 @pytest.fixture
1765 @pytest.fixture
1766 def stub_integration_settings():
1766 def stub_integration_settings():
1767 return {
1767 return {
1768 'test_string_field': 'some data',
1768 'test_string_field': 'some data',
1769 'test_int_field': 100,
1769 'test_int_field': 100,
1770 }
1770 }
1771
1771
1772
1772
1773 @pytest.fixture
1773 @pytest.fixture
1774 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1774 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1775 stub_integration_settings):
1775 stub_integration_settings):
1776 integration = IntegrationModel().create(
1776 integration = IntegrationModel().create(
1777 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1777 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1778 name='test repo integration',
1778 name='test repo integration',
1779 repo=repo_stub, repo_group=None, child_repos_only=None)
1779 repo=repo_stub, repo_group=None, child_repos_only=None)
1780
1780
1781 @request.addfinalizer
1781 @request.addfinalizer
1782 def cleanup():
1782 def cleanup():
1783 IntegrationModel().delete(integration)
1783 IntegrationModel().delete(integration)
1784
1784
1785 return integration
1785 return integration
1786
1786
1787
1787
1788 @pytest.fixture
1788 @pytest.fixture
1789 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1789 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1790 stub_integration_settings):
1790 stub_integration_settings):
1791 integration = IntegrationModel().create(
1791 integration = IntegrationModel().create(
1792 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1792 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1793 name='test repogroup integration',
1793 name='test repogroup integration',
1794 repo=None, repo_group=test_repo_group, child_repos_only=True)
1794 repo=None, repo_group=test_repo_group, child_repos_only=True)
1795
1795
1796 @request.addfinalizer
1796 @request.addfinalizer
1797 def cleanup():
1797 def cleanup():
1798 IntegrationModel().delete(integration)
1798 IntegrationModel().delete(integration)
1799
1799
1800 return integration
1800 return integration
1801
1801
1802
1802
1803 @pytest.fixture
1803 @pytest.fixture
1804 def repogroup_recursive_integration_stub(request, test_repo_group,
1804 def repogroup_recursive_integration_stub(request, test_repo_group,
1805 StubIntegrationType, stub_integration_settings):
1805 StubIntegrationType, stub_integration_settings):
1806 integration = IntegrationModel().create(
1806 integration = IntegrationModel().create(
1807 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1807 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1808 name='test recursive repogroup integration',
1808 name='test recursive repogroup integration',
1809 repo=None, repo_group=test_repo_group, child_repos_only=False)
1809 repo=None, repo_group=test_repo_group, child_repos_only=False)
1810
1810
1811 @request.addfinalizer
1811 @request.addfinalizer
1812 def cleanup():
1812 def cleanup():
1813 IntegrationModel().delete(integration)
1813 IntegrationModel().delete(integration)
1814
1814
1815 return integration
1815 return integration
1816
1816
1817
1817
1818 @pytest.fixture
1818 @pytest.fixture
1819 def global_integration_stub(request, StubIntegrationType,
1819 def global_integration_stub(request, StubIntegrationType,
1820 stub_integration_settings):
1820 stub_integration_settings):
1821 integration = IntegrationModel().create(
1821 integration = IntegrationModel().create(
1822 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1822 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1823 name='test global integration',
1823 name='test global integration',
1824 repo=None, repo_group=None, child_repos_only=None)
1824 repo=None, repo_group=None, child_repos_only=None)
1825
1825
1826 @request.addfinalizer
1826 @request.addfinalizer
1827 def cleanup():
1827 def cleanup():
1828 IntegrationModel().delete(integration)
1828 IntegrationModel().delete(integration)
1829
1829
1830 return integration
1830 return integration
1831
1831
1832
1832
1833 @pytest.fixture
1833 @pytest.fixture
1834 def root_repos_integration_stub(request, StubIntegrationType,
1834 def root_repos_integration_stub(request, StubIntegrationType,
1835 stub_integration_settings):
1835 stub_integration_settings):
1836 integration = IntegrationModel().create(
1836 integration = IntegrationModel().create(
1837 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1837 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1838 name='test root repos integration',
1838 name='test root repos integration',
1839 repo=None, repo_group=None, child_repos_only=True)
1839 repo=None, repo_group=None, child_repos_only=True)
1840
1840
1841 @request.addfinalizer
1841 @request.addfinalizer
1842 def cleanup():
1842 def cleanup():
1843 IntegrationModel().delete(integration)
1843 IntegrationModel().delete(integration)
1844
1844
1845 return integration
1845 return integration
1846
1846
1847
1847
1848 @pytest.fixture
1848 @pytest.fixture
1849 def local_dt_to_utc():
1849 def local_dt_to_utc():
1850 def _factory(dt):
1850 def _factory(dt):
1851 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1851 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1852 dateutil.tz.tzutc()).replace(tzinfo=None)
1852 dateutil.tz.tzutc()).replace(tzinfo=None)
1853 return _factory
1853 return _factory
1854
1854
1855
1855
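# A minimal usage sketch of the converter above: it turns a naive local
# datetime into a naive UTC datetime.
def test_local_dt_to_utc_usage_sketch(local_dt_to_utc):
    import datetime
    local_noon = datetime.datetime(2019, 1, 1, 12, 0, 0)
    utc_naive = local_dt_to_utc(local_noon)
    assert utc_naive.tzinfo is None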
1856 @pytest.fixture
1856 @pytest.fixture
1857 def disable_anonymous_user(request, baseapp):
1857 def disable_anonymous_user(request, baseapp):
1858 set_anonymous_access(False)
1858 set_anonymous_access(False)
1859
1859
1860 @request.addfinalizer
1860 @request.addfinalizer
1861 def cleanup():
1861 def cleanup():
1862 set_anonymous_access(True)
1862 set_anonymous_access(True)
1863
1863
1864
1864
1865 @pytest.fixture(scope='module')
1865 @pytest.fixture(scope='module')
1866 def rc_fixture(request):
1866 def rc_fixture(request):
1867 return Fixture()
1867 return Fixture()
1868
1868
1869
1869
1870 @pytest.fixture
1870 @pytest.fixture
1871 def repo_groups(request):
1871 def repo_groups(request):
1872 fixture = Fixture()
1872 fixture = Fixture()
1873
1873
1874 session = Session()
1874 session = Session()
1875 zombie_group = fixture.create_repo_group('zombie')
1875 zombie_group = fixture.create_repo_group('zombie')
1876 parent_group = fixture.create_repo_group('parent')
1876 parent_group = fixture.create_repo_group('parent')
1877 child_group = fixture.create_repo_group('parent/child')
1877 child_group = fixture.create_repo_group('parent/child')
1878 groups_in_db = session.query(RepoGroup).all()
1878 groups_in_db = session.query(RepoGroup).all()
1879 assert len(groups_in_db) == 3
1879 assert len(groups_in_db) == 3
1880 assert child_group.group_parent_id == parent_group.group_id
1880 assert child_group.group_parent_id == parent_group.group_id
1881
1881
1882 @request.addfinalizer
1882 @request.addfinalizer
1883 def cleanup():
1883 def cleanup():
1884 fixture.destroy_repo_group(zombie_group)
1884 fixture.destroy_repo_group(zombie_group)
1885 fixture.destroy_repo_group(child_group)
1885 fixture.destroy_repo_group(child_group)
1886 fixture.destroy_repo_group(parent_group)
1886 fixture.destroy_repo_group(parent_group)
1887
1887
1888 return zombie_group, parent_group, child_group
1888 return zombie_group, parent_group, child_group
1889
1889
1890
1890
1891 @pytest.fixture(scope="session")
1891 @pytest.fixture(scope="session")
1892 def tmp_path_factory(request):
1892 def tmp_path_factory(request):
1893 """Return a :class:`_pytest.tmpdir.TempPathFactory` instance for the test session.
1893 """Return a :class:`_pytest.tmpdir.TempPathFactory` instance for the test session.
1894 """
1894 """
1895
1895
1896 class TempPathFactory:
1896 class TempPathFactory:
1897
1897
1898 def mktemp(self, basename):
1898 def mktemp(self, basename):
1899 import tempfile
1899 import tempfile
1900 return tempfile.mktemp(basename)
1900 return tempfile.mktemp(basename)
1901
1901
1902 return TempPathFactory()
1902 return TempPathFactory()
@@ -1,468 +1,468 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import threading
21 import threading
22 import time
22 import time
23 import logging
23 import logging
24 import os.path
24 import os.path
25 import subprocess32
25 import subprocess32
26 import tempfile
26 import tempfile
27 import urllib2
27 import urllib2
28 from lxml.html import fromstring, tostring
28 from lxml.html import fromstring, tostring
29 from lxml.cssselect import CSSSelector
29 from lxml.cssselect import CSSSelector
30 from urlparse import urlparse, parse_qsl
30 from urlparse import urlparse, parse_qsl
31 from urllib import unquote_plus
31 from urllib import unquote_plus
32 import webob
32 import webob
33
33
34 from webtest.app import TestResponse, TestApp, string_types
34 from webtest.app import TestResponse, TestApp, string_types
35 from webtest.compat import print_stderr
35 from webtest.compat import print_stderr
36
36
37 import pytest
37 import pytest
38 import rc_testdata
38 import rc_testdata
39
39
40 from rhodecode.model.db import User, Repository
40 from rhodecode.model.db import User, Repository
41 from rhodecode.model.meta import Session
41 from rhodecode.model.meta import Session
42 from rhodecode.model.scm import ScmModel
42 from rhodecode.model.scm import ScmModel
43 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
43 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
44 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 from rhodecode.tests import login_user_session
45 from rhodecode.tests import login_user_session
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 class CustomTestResponse(TestResponse):
50 class CustomTestResponse(TestResponse):
51
51
52 def _save_output(self, out):
52 def _save_output(self, out):
53 f = tempfile.NamedTemporaryFile(delete=False, prefix='rc-test-', suffix='.html')
53 f = tempfile.NamedTemporaryFile(delete=False, prefix='rc-test-', suffix='.html')
54 f.write(out)
54 f.write(out)
55 return f.name
55 return f.name
56
56
57 def mustcontain(self, *strings, **kw):
57 def mustcontain(self, *strings, **kw):
58 """
58 """
59 Assert that the response contains all of the strings passed
59 Assert that the response contains all of the strings passed
60 in as arguments.
60 in as arguments.
61
61
62 Equivalent to::
62 Equivalent to::
63
63
64 assert string in res
64 assert string in res
65 """
65 """
66 print_body = kw.pop('print_body', False)
66 print_body = kw.pop('print_body', False)
67 if 'no' in kw:
67 if 'no' in kw:
68 no = kw['no']
68 no = kw['no']
69 del kw['no']
69 del kw['no']
70 if isinstance(no, string_types):
70 if isinstance(no, string_types):
71 no = [no]
71 no = [no]
72 else:
72 else:
73 no = []
73 no = []
74 if kw:
74 if kw:
75 raise TypeError(
75 raise TypeError(
76 "The only keyword argument allowed is 'no' got %s" % kw)
76 "The only keyword argument allowed is 'no' got %s" % kw)
77
77
78 f = self._save_output(str(self))
78 f = self._save_output(str(self))
79
79
80 for s in strings:
80 for s in strings:
81 if s not in self:
81 if s not in self:
82 print_stderr("Actual response (no %r):" % s)
82 print_stderr("Actual response (no %r):" % s)
83 print_stderr("body output saved as `%s`" % f)
83 print_stderr("body output saved as `%s`" % f)
84 if print_body:
84 if print_body:
85 print_stderr(str(self))
85 print_stderr(str(self))
86 raise IndexError(
86 raise IndexError(
87 "Body does not contain string %r, body output saved as %s" % (s, f))
87 "Body does not contain string %r, body output saved as %s" % (s, f))
88
88
89 for no_s in no:
89 for no_s in no:
90 if no_s in self:
90 if no_s in self:
91 print_stderr("Actual response (has %r)" % no_s)
91 print_stderr("Actual response (has %r)" % no_s)
92 print_stderr("body output saved as `%s`" % f)
92 print_stderr("body output saved as `%s`" % f)
93 if print_body:
93 if print_body:
94 print_stderr(str(self))
94 print_stderr(str(self))
95 raise IndexError(
95 raise IndexError(
96 "Body contains bad string %r, body output saved as %s" % (no_s, f))
96 "Body contains bad string %r, body output saved as %s" % (no_s, f))
97
97
98 def assert_response(self):
98 def assert_response(self):
99 return AssertResponse(self)
99 return AssertResponse(self)
100
100
101 def get_session_from_response(self):
101 def get_session_from_response(self):
102 """
102 """
103 This returns the session from a response object.
103 This returns the session from a response object.
104 """
104 """
105 from rhodecode.lib.rc_beaker import session_factory_from_settings
105 from rhodecode.lib.rc_beaker import session_factory_from_settings
106 session = session_factory_from_settings(self.test_app._pyramid_settings)
106 session = session_factory_from_settings(self.test_app._pyramid_settings)
107 return session(self.request)
107 return session(self.request)
108
108
109
109
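# A minimal usage sketch of the response helpers above; `app` stands for a
# CustomTestApp instance wired up by the test suite and the URL is a
# placeholder:
#
#     response = app.get('/some/url')
#     response.mustcontain('expected text', no='text that must not appear')
#     response.assert_response().element_contains('h1', 'expected title')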
110 class TestRequest(webob.BaseRequest):
110 class TestRequest(webob.BaseRequest):
111
111
112 # for py.test
112 # for py.test
113 disabled = True
113 disabled = True
114 ResponseClass = CustomTestResponse
114 ResponseClass = CustomTestResponse
115
115
116 def add_response_callback(self, callback):
116 def add_response_callback(self, callback):
117 pass
117 pass
118
118
119
119
120 class CustomTestApp(TestApp):
120 class CustomTestApp(TestApp):
121 """
121 """
122 Custom app that makes mustcontain more useful and adds special helper methods
122 Custom app that makes mustcontain more useful and adds special helper methods
123 """
123 """
124 RequestClass = TestRequest
124 RequestClass = TestRequest
125 rc_login_data = {}
125 rc_login_data = {}
126 rc_current_session = None
126 rc_current_session = None
127
127
128 def login(self, username=None, password=None):
128 def login(self, username=None, password=None):
129 from rhodecode.lib import auth
129 from rhodecode.lib import auth
130
130
131 if username and password:
131 if username and password:
132 session = login_user_session(self, username, password)
132 session = login_user_session(self, username, password)
133 else:
133 else:
134 session = login_user_session(self)
134 session = login_user_session(self)
135
135
136 self.rc_login_data['csrf_token'] = auth.get_csrf_token(session)
136 self.rc_login_data['csrf_token'] = auth.get_csrf_token(session)
137 self.rc_current_session = session
137 self.rc_current_session = session
138 return session['rhodecode_user']
138 return session['rhodecode_user']
139
139
140 @property
140 @property
141 def csrf_token(self):
141 def csrf_token(self):
142 return self.rc_login_data['csrf_token']
142 return self.rc_login_data['csrf_token']
143
143
144 @property
144 @property
145 def _pyramid_registry(self):
145 def _pyramid_registry(self):
146 return self.app.config.registry
146 return self.app.config.registry
147
147
148 @property
148 @property
149 def _pyramid_settings(self):
149 def _pyramid_settings(self):
150 return self._pyramid_registry.settings
150 return self._pyramid_registry.settings
151
151
152
152
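# A minimal sketch of driving CustomTestApp above in a test; the URL is a
# placeholder and login() without arguments falls back to the default
# test user:
#
#     rhodecode_user = app.login()
#     response = app.post(
#         '/some/url', params={'csrf_token': app.csrf_token})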
153 def set_anonymous_access(enabled):
153 def set_anonymous_access(enabled):
154 """(Dis)allows anonymous access depending on parameter `enabled`"""
154 """(Dis)allows anonymous access depending on parameter `enabled`"""
155 user = User.get_default_user()
155 user = User.get_default_user()
156 user.active = enabled
156 user.active = enabled
157 Session().add(user)
157 Session().add(user)
158 Session().commit()
158 Session().commit()
159 time.sleep(1.5) # must sleep for cache (1s to expire)
159 time.sleep(1.5) # must sleep for cache (1s to expire)
160 log.info('anonymous access is now: %s', enabled)
160 log.info('anonymous access is now: %s', enabled)
161 assert enabled == User.get_default_user().active, (
161 assert enabled == User.get_default_user().active, (
162 'Cannot set anonymous access')
162 'Cannot set anonymous access')
163
163
164
164
165 def check_xfail_backends(node, backend_alias):
165 def check_xfail_backends(node, backend_alias):
166 # Using "xfail_backends" here intentionally, since this marks work
166 # Using "xfail_backends" here intentionally, since this marks work
167 # which is "to be done" soon.
167 # which is "to be done" soon.
168 skip_marker = node.get_closest_marker('xfail_backends')
168 skip_marker = node.get_closest_marker('xfail_backends')
169 if skip_marker and backend_alias in skip_marker.args:
169 if skip_marker and backend_alias in skip_marker.args:
170 msg = "Support for backend %s to be developed." % (backend_alias, )
170 msg = "Support for backend %s to be developed." % (backend_alias, )
171 msg = skip_marker.kwargs.get('reason', msg)
171 msg = skip_marker.kwargs.get('reason', msg)
172 pytest.xfail(msg)
172 pytest.xfail(msg)
173
173
174
174
175 def check_skip_backends(node, backend_alias):
175 def check_skip_backends(node, backend_alias):
176 # Using "skip_backends" here intentionally, since this marks work which is
176 # Using "skip_backends" here intentionally, since this marks work which is
177 # not supported.
177 # not supported.
178 skip_marker = node.get_closest_marker('skip_backends')
178 skip_marker = node.get_closest_marker('skip_backends')
179 if skip_marker and backend_alias in skip_marker.args:
179 if skip_marker and backend_alias in skip_marker.args:
180 msg = "Feature not supported for backend %s." % (backend_alias, )
180 msg = "Feature not supported for backend %s." % (backend_alias, )
181 msg = skip_marker.kwargs.get('reason', msg)
181 msg = skip_marker.kwargs.get('reason', msg)
182 pytest.skip(msg)
182 pytest.skip(msg)
183
183
184
184
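# The markers consumed by the two helpers above are applied to tests roughly
# like this (sketch; the `backend` fixture providing the alias is defined
# elsewhere in the test suite):
#
#     @pytest.mark.skip_backends('svn', reason='not supported for svn')
#     @pytest.mark.xfail_backends('hg', reason='support to be developed')
#     def test_something(backend):
#         ...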
185 def extract_git_repo_from_dump(dump_name, repo_name):
185 def extract_git_repo_from_dump(dump_name, repo_name):
186 """Create git repo `repo_name` from dump `dump_name`."""
186 """Create git repo `repo_name` from dump `dump_name`."""
187 repos_path = ScmModel().repos_path
187 repos_path = ScmModel().repos_path
188 target_path = os.path.join(repos_path, repo_name)
188 target_path = os.path.join(repos_path, repo_name)
189 rc_testdata.extract_git_dump(dump_name, target_path)
189 rc_testdata.extract_git_dump(dump_name, target_path)
190 return target_path
190 return target_path
191
191
192
192
193 def extract_hg_repo_from_dump(dump_name, repo_name):
193 def extract_hg_repo_from_dump(dump_name, repo_name):
194 """Create hg repo `repo_name` from dump `dump_name`."""
194 """Create hg repo `repo_name` from dump `dump_name`."""
195 repos_path = ScmModel().repos_path
195 repos_path = ScmModel().repos_path
196 target_path = os.path.join(repos_path, repo_name)
196 target_path = os.path.join(repos_path, repo_name)
197 rc_testdata.extract_hg_dump(dump_name, target_path)
197 rc_testdata.extract_hg_dump(dump_name, target_path)
198 return target_path
198 return target_path
199
199
200
200
201 def extract_svn_repo_from_dump(dump_name, repo_name):
201 def extract_svn_repo_from_dump(dump_name, repo_name):
202 """Create a svn repo `repo_name` from dump `dump_name`."""
202 """Create a svn repo `repo_name` from dump `dump_name`."""
203 repos_path = ScmModel().repos_path
203 repos_path = ScmModel().repos_path
204 target_path = os.path.join(repos_path, repo_name)
204 target_path = os.path.join(repos_path, repo_name)
205 SubversionRepository(target_path, create=True)
205 SubversionRepository(target_path, create=True)
206 _load_svn_dump_into_repo(dump_name, target_path)
206 _load_svn_dump_into_repo(dump_name, target_path)
207 return target_path
207 return target_path
208
208
209
209
210 def assert_message_in_log(log_records, message, levelno, module):
210 def assert_message_in_log(log_records, message, levelno, module):
211 messages = [
211 messages = [
212 r.message for r in log_records
212 r.message for r in log_records
213 if r.module == module and r.levelno == levelno
213 if r.module == module and r.levelno == levelno
214 ]
214 ]
215 assert message in messages
215 assert message in messages
216
216
217
217
218 def _load_svn_dump_into_repo(dump_name, repo_path):
218 def _load_svn_dump_into_repo(dump_name, repo_path):
219 """
219 """
220 Utility to populate an svn repository with a named dump
220 Utility to populate an svn repository with a named dump
221
221
222 Currently the dumps are in rc_testdata. They might later be
222 Currently the dumps are in rc_testdata. They might later be
223 integrated into the main repository once they stabilize.
223 integrated into the main repository once they stabilize.
224 """
224 """
225 dump = rc_testdata.load_svn_dump(dump_name)
225 dump = rc_testdata.load_svn_dump(dump_name)
226 load_dump = subprocess32.Popen(
226 load_dump = subprocess32.Popen(
227 ['svnadmin', 'load', repo_path],
227 ['svnadmin', 'load', repo_path],
228 stdin=subprocess32.PIPE, stdout=subprocess32.PIPE,
228 stdin=subprocess32.PIPE, stdout=subprocess32.PIPE,
229 stderr=subprocess32.PIPE)
229 stderr=subprocess32.PIPE)
230 out, err = load_dump.communicate(dump)
230 out, err = load_dump.communicate(dump)
231 if load_dump.returncode != 0:
231 if load_dump.returncode != 0:
232 log.error("Output of load_dump command: %s", out)
232 log.error("Output of load_dump command: %s", out)
233 log.error("Error output of load_dump command: %s", err)
233 log.error("Error output of load_dump command: %s", err)
234 raise Exception(
234 raise Exception(
235 'Failed to load dump "%s" into repository at path "%s".'
235 'Failed to load dump "%s" into repository at path "%s".'
236 % (dump_name, repo_path))
236 % (dump_name, repo_path))
237
237
238
238
239 class AssertResponse(object):
239 class AssertResponse(object):
240 """
240 """
241 Utility that helps to assert things about a given HTML response.
241 Utility that helps to assert things about a given HTML response.
242 """
242 """
243
243
244 def __init__(self, response):
244 def __init__(self, response):
245 self.response = response
245 self.response = response
246
246
247 def get_imports(self):
247 def get_imports(self):
248 return fromstring, tostring, CSSSelector
248 return fromstring, tostring, CSSSelector
249
249
250 def one_element_exists(self, css_selector):
250 def one_element_exists(self, css_selector):
251 self.get_element(css_selector)
251 self.get_element(css_selector)
252
252
253 def no_element_exists(self, css_selector):
253 def no_element_exists(self, css_selector):
254 assert not self._get_elements(css_selector)
254 assert not self._get_elements(css_selector)
255
255
256 def element_equals_to(self, css_selector, expected_content):
256 def element_equals_to(self, css_selector, expected_content):
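        # Note: despite the name, this asserts containment in the serialized
        # element rather than strict equality.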
257 element = self.get_element(css_selector)
257 element = self.get_element(css_selector)
258 element_text = self._element_to_string(element)
258 element_text = self._element_to_string(element)
259 assert expected_content in element_text
259 assert expected_content in element_text
260
260
261 def element_contains(self, css_selector, expected_content):
261 def element_contains(self, css_selector, expected_content):
262 element = self.get_element(css_selector)
262 element = self.get_element(css_selector)
263 assert expected_content in element.text_content()
263 assert expected_content in element.text_content()
264
264
265 def element_value_contains(self, css_selector, expected_content):
265 def element_value_contains(self, css_selector, expected_content):
266 element = self.get_element(css_selector)
266 element = self.get_element(css_selector)
267 assert expected_content in element.value
267 assert expected_content in element.value
268
268
269 def contains_one_link(self, link_text, href):
269 def contains_one_link(self, link_text, href):
270 fromstring, tostring, CSSSelector = self.get_imports()
270 fromstring, tostring, CSSSelector = self.get_imports()
271 doc = fromstring(self.response.body)
271 doc = fromstring(self.response.body)
272 sel = CSSSelector('a[href]')
272 sel = CSSSelector('a[href]')
273 elements = [
273 elements = [
274 e for e in sel(doc) if e.text_content().strip() == link_text]
274 e for e in sel(doc) if e.text_content().strip() == link_text]
275 assert len(elements) == 1, "Did not find link or found multiple links"
275 assert len(elements) == 1, "Did not find link or found multiple links"
276 self._ensure_url_equal(elements[0].attrib.get('href'), href)
276 self._ensure_url_equal(elements[0].attrib.get('href'), href)
277
277
278 def contains_one_anchor(self, anchor_id):
278 def contains_one_anchor(self, anchor_id):
279 fromstring, tostring, CSSSelector = self.get_imports()
279 fromstring, tostring, CSSSelector = self.get_imports()
280 doc = fromstring(self.response.body)
280 doc = fromstring(self.response.body)
281 sel = CSSSelector('#' + anchor_id)
281 sel = CSSSelector('#' + anchor_id)
282 elements = sel(doc)
282 elements = sel(doc)
283 assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)
283 assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)
284
284
285 def _ensure_url_equal(self, found, expected):
285 def _ensure_url_equal(self, found, expected):
286 assert _Url(found) == _Url(expected)
286 assert _Url(found) == _Url(expected)
287
287
288 def get_element(self, css_selector):
288 def get_element(self, css_selector):
289 elements = self._get_elements(css_selector)
289 elements = self._get_elements(css_selector)
290 assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
290 assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
291 return elements[0]
291 return elements[0]
292
292
293 def get_elements(self, css_selector):
293 def get_elements(self, css_selector):
294 return self._get_elements(css_selector)
294 return self._get_elements(css_selector)
295
295
296 def _get_elements(self, css_selector):
296 def _get_elements(self, css_selector):
297 fromstring, tostring, CSSSelector = self.get_imports()
297 fromstring, tostring, CSSSelector = self.get_imports()
298 doc = fromstring(self.response.body)
298 doc = fromstring(self.response.body)
299 sel = CSSSelector(css_selector)
299 sel = CSSSelector(css_selector)
300 elements = sel(doc)
300 elements = sel(doc)
301 return elements
301 return elements
302
302
303 def _element_to_string(self, element):
303 def _element_to_string(self, element):
304 fromstring, tostring, CSSSelector = self.get_imports()
304 fromstring, tostring, CSSSelector = self.get_imports()
305 return tostring(element)
305 return tostring(element)
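# Usage sketch (selectors and expected content are illustrative; `response` is
# assumed to be a WebTest response object, as used throughout these tests):
def _example_assert_response(response):
    assert_response = AssertResponse(response)
    assert_response.one_element_exists('#content')
    assert_response.element_contains('.title', 'my-repo')
    assert_response.no_element_exists('.error-message')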
306
306
307
307
308 class _Url(object):
308 class _Url(object):
309 """
309 """
310 A url object that can be compared with other url objects
310 A url object that can be compared with other url objects
311 without regard to the vagaries of encoding, escaping, and ordering
311 without regard to the vagaries of encoding, escaping, and ordering
312 of parameters in query strings.
312 of parameters in query strings.
313
313
314 Inspired by
314 Inspired by
315 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
315 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
316 """
316 """
317
317
318 def __init__(self, url):
318 def __init__(self, url):
319 parts = urlparse(url)
319 parts = urlparse(url)
320 _query = frozenset(parse_qsl(parts.query))
320 _query = frozenset(parse_qsl(parts.query))
321 _path = unquote_plus(parts.path)
321 _path = unquote_plus(parts.path)
322 parts = parts._replace(query=_query, path=_path)
322 parts = parts._replace(query=_query, path=_path)
323 self.parts = parts
323 self.parts = parts
324
324
325 def __eq__(self, other):
325 def __eq__(self, other):
326 return self.parts == other.parts
326 return self.parts == other.parts
327
327
328 def __hash__(self):
328 def __hash__(self):
329 return hash(self.parts)
329 return hash(self.parts)
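# Usage sketch: query parameter order and escaping do not affect equality.
def _example_url_comparison():
    assert _Url('http://example.com/a?x=1&y=2') == _Url('http://example.com/a?y=2&x=1')
    assert hash(_Url('http://example.com/a')) == hash(_Url('http://example.com/a'))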
330
330
331
331
332 def run_test_concurrently(times, raise_catched_exc=True):
332 def run_test_concurrently(times, raise_catched_exc=True):
333 """
333 """
334 Add this decorator to small pieces of code that you want to test
334 Add this decorator to small pieces of code that you want to test
335 concurrently
335 concurrently
336
336
337 ex:
337 ex:
338
338
339 @run_test_concurrently(25)
339 @run_test_concurrently(25)
340 def my_test_function():
340 def my_test_function():
341 ...
341 ...
342 """
342 """
343 def test_concurrently_decorator(test_func):
343 def test_concurrently_decorator(test_func):
344 def wrapper(*args, **kwargs):
344 def wrapper(*args, **kwargs):
345 exceptions = []
345 exceptions = []
346
346
347 def call_test_func():
347 def call_test_func():
348 try:
348 try:
349 test_func(*args, **kwargs)
349 test_func(*args, **kwargs)
350 except Exception as e:
350 except Exception as e:
351 exceptions.append(e)
351 exceptions.append(e)
352 if raise_catched_exc:
352 if raise_catched_exc:
353 raise
353 raise
354 threads = []
354 threads = []
355 for i in range(times):
355 for i in range(times):
356 threads.append(threading.Thread(target=call_test_func))
356 threads.append(threading.Thread(target=call_test_func))
357 for t in threads:
357 for t in threads:
358 t.start()
358 t.start()
359 for t in threads:
359 for t in threads:
360 t.join()
360 t.join()
361 if exceptions:
361 if exceptions:
362 raise Exception(
362 raise Exception(
363 'test_concurrently intercepted %s exceptions: %s' % (
363 'test_concurrently intercepted %s exceptions: %s' % (
364 len(exceptions), exceptions))
364 len(exceptions), exceptions))
365 return wrapper
365 return wrapper
366 return test_concurrently_decorator
366 return test_concurrently_decorator
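# Usage sketch: run the decorated check from 25 threads at once and re-raise
# any exception collected in a worker thread. The decorated function below is
# illustrative only.
@run_test_concurrently(25, raise_catched_exc=True)
def _example_concurrent_check():
    assert 1 + 1 == 2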
367
367
368
368
369 def wait_for_url(url, timeout=10):
369 def wait_for_url(url, timeout=10):
370 """
370 """
371 Wait until URL becomes reachable.
371 Wait until URL becomes reachable.
372
372
373 It polls the URL until the timeout is reached or the URL becomes reachable.
373 It polls the URL until the timeout is reached or the URL becomes reachable.
374 It will call `pytest.fail` in case the URL is not reachable.
374 It will call `pytest.fail` in case the URL is not reachable.
375 """
375 """
376 timeout = time.time() + timeout
376 timeout = time.time() + timeout
377 last = 0
377 last = 0
378 wait = 0.1
378 wait = 0.1
379
379
380 while timeout > last:
380 while timeout > last:
381 last = time.time()
381 last = time.time()
382 if is_url_reachable(url):
382 if is_url_reachable(url):
383 break
383 break
384 elif (last + wait) > time.time():
384 elif (last + wait) > time.time():
385 # Go to sleep because not enough time has passed since last check.
385 # Go to sleep because not enough time has passed since last check.
386 time.sleep(wait)
386 time.sleep(wait)
387 else:
387 else:
388 pytest.fail("Timeout while waiting for URL {}".format(url))
388 pytest.fail("Timeout while waiting for URL {}".format(url))
389
389
390
390
391 def is_url_reachable(url):
391 def is_url_reachable(url):
392 try:
392 try:
393 urllib2.urlopen(url)
393 urllib2.urlopen(url)
394 except urllib2.URLError:
394 except urllib2.URLError:
395 log.exception('URL Reach error')
395 log.exception('URL Reach error')
396 return False
396 return False
397 return True
397 return True
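# Usage sketch (the URL is illustrative, not an actual endpoint of the test
# environment): block until a freshly started test HTTP server answers, or
# fail the test after 30 seconds.
def _example_wait_for_service():
    wait_for_url('http://127.0.0.1:9090/_admin/ops/ping', timeout=30)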
398
398
399
399
400 def repo_on_filesystem(repo_name):
400 def repo_on_filesystem(repo_name):
401 from rhodecode.lib import vcs
401 from rhodecode.lib import vcs
402 from rhodecode.tests import TESTS_TMP_PATH
402 from rhodecode.tests import TESTS_TMP_PATH
403 repo = vcs.get_vcs_instance(
403 repo = vcs.get_vcs_instance(
404 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
404 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
405 return repo is not None
405 return repo is not None
406
406
407
407
408 def commit_change(
408 def commit_change(
409 repo, filename, content, message, vcs_type, parent=None, newfile=False):
409 repo, filename, content, message, vcs_type, parent=None, newfile=False):
410 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
410 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
411
411
412 repo = Repository.get_by_repo_name(repo)
412 repo = Repository.get_by_repo_name(repo)
413 _commit = parent
413 _commit = parent
414 if not parent:
414 if not parent:
415 _commit = EmptyCommit(alias=vcs_type)
415 _commit = EmptyCommit(alias=vcs_type)
416
416
417 if newfile:
417 if newfile:
418 nodes = {
418 nodes = {
419 filename: {
419 filename: {
420 'content': content
420 'content': content
421 }
421 }
422 }
422 }
423 commit = ScmModel().create_nodes(
423 commit = ScmModel().create_nodes(
424 user=TEST_USER_ADMIN_LOGIN, repo=repo,
424 user=TEST_USER_ADMIN_LOGIN, repo=repo,
425 message=message,
425 message=message,
426 nodes=nodes,
426 nodes=nodes,
427 parent_commit=_commit,
427 parent_commit=_commit,
428 author=TEST_USER_ADMIN_LOGIN,
428 author='{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN),
429 )
429 )
430 else:
430 else:
431 commit = ScmModel().commit_change(
431 commit = ScmModel().commit_change(
432 repo=repo.scm_instance(), repo_name=repo.repo_name,
432 repo=repo.scm_instance(), repo_name=repo.repo_name,
433 commit=parent, user=TEST_USER_ADMIN_LOGIN,
433 commit=parent, user=TEST_USER_ADMIN_LOGIN,
434 author=TEST_USER_ADMIN_LOGIN,
434 author='{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN),
435 message=message,
435 message=message,
436 content=content,
436 content=content,
437 f_path=filename
437 f_path=filename
438 )
438 )
439 return commit
439 return commit
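# Usage sketch (repository, file and message values are illustrative): create
# a new file in a single commit on top of the current tip.
def _example_commit_change():
    return commit_change(
        'vcs_test_git', filename='docs/readme.txt', content='Docs\n',
        message='Add readme', vcs_type='git', newfile=True)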
440
440
441
441
442 def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None):
442 def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None):
443 if not default:
443 if not default:
444 raise ValueError('Permission for default user must be given')
444 raise ValueError('Permission for default user must be given')
445 form_data = [(
445 form_data = [(
446 'csrf_token', csrf_token
446 'csrf_token', csrf_token
447 )]
447 )]
448 # add default
448 # add default
449 form_data.extend([
449 form_data.extend([
450 ('u_perm_1', default)
450 ('u_perm_1', default)
451 ])
451 ])
452
452
453 if grant:
453 if grant:
454 for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1):
454 for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1):
455 form_data.extend([
455 form_data.extend([
456 ('perm_new_member_perm_new{}'.format(cnt), perm),
456 ('perm_new_member_perm_new{}'.format(cnt), perm),
457 ('perm_new_member_id_new{}'.format(cnt), obj_id),
457 ('perm_new_member_id_new{}'.format(cnt), obj_id),
458 ('perm_new_member_name_new{}'.format(cnt), obj_name),
458 ('perm_new_member_name_new{}'.format(cnt), obj_name),
459 ('perm_new_member_type_new{}'.format(cnt), obj_type),
459 ('perm_new_member_type_new{}'.format(cnt), obj_type),
460
460
461 ])
461 ])
462 if revoke:
462 if revoke:
463 for obj_id, obj_type in revoke:
463 for obj_id, obj_type in revoke:
464 form_data.extend([
464 form_data.extend([
465 ('perm_del_member_id_{}'.format(obj_id), obj_id),
465 ('perm_del_member_id_{}'.format(obj_id), obj_id),
466 ('perm_del_member_type_{}'.format(obj_id), obj_type),
466 ('perm_del_member_type_{}'.format(obj_id), obj_type),
467 ])
467 ])
468 return form_data
468 return form_data
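# Usage sketch (object ids, names and permission strings are illustrative):
def _example_permission_form_data(csrf_token):
    return permission_update_data_generator(
        csrf_token,
        default='repository.read',
        grant=[(42, 'repository.write', 'john', 'user')],
        revoke=[(7, 'user_group')])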
@@ -1,147 +1,147 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22
22
23 import pytest
23 import pytest
24
24
25 from rhodecode.lib.vcs.nodes import FileNode
25 from rhodecode.lib.vcs.nodes import FileNode
26 from rhodecode.tests.vcs.conftest import BackendTestMixin
26 from rhodecode.tests.vcs.conftest import BackendTestMixin
27
27
28
28
29 @pytest.mark.usefixtures("vcs_repository_support")
29 @pytest.mark.usefixtures("vcs_repository_support")
30 class TestBranches(BackendTestMixin):
30 class TestBranches(BackendTestMixin):
31
31
32 def test_empty_repository_has_no_branches(self, vcsbackend):
32 def test_empty_repository_has_no_branches(self, vcsbackend):
33 empty_repo = vcsbackend.create_repo()
33 empty_repo = vcsbackend.create_repo()
34 assert empty_repo.branches == {}
34 assert empty_repo.branches == {}
35
35
36 def test_branches_all(self, vcsbackend):
36 def test_branches_all(self, vcsbackend):
37 branch_count = {
37 branch_count = {
38 'git': 1,
38 'git': 1,
39 'hg': 1,
39 'hg': 1,
40 'svn': 0,
40 'svn': 0,
41 }
41 }
42 assert len(self.repo.branches_all) == branch_count[vcsbackend.alias]
42 assert len(self.repo.branches_all) == branch_count[vcsbackend.alias]
43
43
44 def test_closed_branches(self):
44 def test_closed_branches(self):
45 assert len(self.repo.branches_closed) == 0
45 assert len(self.repo.branches_closed) == 0
46
46
47 def test_simple(self, local_dt_to_utc):
47 def test_simple(self, local_dt_to_utc):
48 tip = self.repo.get_commit()
48 tip = self.repo.get_commit()
49 assert tip.message == 'Changes...'
49 assert tip.message == 'Changes...'
50 assert tip.date == local_dt_to_utc(datetime.datetime(2010, 1, 1, 21))
50 assert tip.date == local_dt_to_utc(datetime.datetime(2010, 1, 1, 21))
51
51
52 @pytest.mark.backends("git", "hg")
52 @pytest.mark.backends("git", "hg")
53 def test_new_branch(self):
53 def test_new_branch(self):
54 # This check must not be removed to ensure the 'branches' LazyProperty
54 # This check must not be removed to ensure the 'branches' LazyProperty
55 # gets hit *before* the new 'foobar' branch got created:
55 # gets hit *before* the new 'foobar' branch got created:
56 assert 'foobar' not in self.repo.branches
56 assert 'foobar' not in self.repo.branches
57 self.imc.add(FileNode(
57 self.imc.add(FileNode(
58 'docs/index.txt',
58 'docs/index.txt',
59 content='Documentation\n'))
59 content='Documentation\n'))
60 foobar_tip = self.imc.commit(
60 foobar_tip = self.imc.commit(
61 message=u'New branch: foobar',
61 message=u'New branch: foobar',
62 author=u'joe',
62 author=u'joe <joe@rhodecode.com>',
63 branch='foobar',
63 branch='foobar',
64 )
64 )
65 assert 'foobar' in self.repo.branches
65 assert 'foobar' in self.repo.branches
66 assert foobar_tip.branch == 'foobar'
66 assert foobar_tip.branch == 'foobar'
67
67
68 @pytest.mark.backends("git", "hg")
68 @pytest.mark.backends("git", "hg")
69 def test_new_head(self):
69 def test_new_head(self):
70 tip = self.repo.get_commit()
70 tip = self.repo.get_commit()
71 self.imc.add(FileNode(
71 self.imc.add(FileNode(
72 'docs/index.txt',
72 'docs/index.txt',
73 content='Documentation\n'))
73 content='Documentation\n'))
74 foobar_tip = self.imc.commit(
74 foobar_tip = self.imc.commit(
75 message=u'New branch: foobar',
75 message=u'New branch: foobar',
76 author=u'joe',
76 author=u'joe <joe@rhodecode.com>',
77 branch='foobar',
77 branch='foobar',
78 parents=[tip],
78 parents=[tip],
79 )
79 )
80 self.imc.change(FileNode(
80 self.imc.change(FileNode(
81 'docs/index.txt',
81 'docs/index.txt',
82 content='Documentation\nand more...\n'))
82 content='Documentation\nand more...\n'))
83 newtip = self.imc.commit(
83 newtip = self.imc.commit(
84 message=u'At default branch',
84 message=u'At default branch',
85 author=u'joe',
85 author=u'joe <joe@rhodecode.com>',
86 branch=foobar_tip.branch,
86 branch=foobar_tip.branch,
87 parents=[foobar_tip],
87 parents=[foobar_tip],
88 )
88 )
89
89
90 newest_tip = self.imc.commit(
90 newest_tip = self.imc.commit(
91 message=u'Merged with %s' % foobar_tip.raw_id,
91 message=u'Merged with %s' % foobar_tip.raw_id,
92 author=u'joe',
92 author=u'joe <joe@rhodecode.com>',
93 branch=self.backend_class.DEFAULT_BRANCH_NAME,
93 branch=self.backend_class.DEFAULT_BRANCH_NAME,
94 parents=[newtip, foobar_tip],
94 parents=[newtip, foobar_tip],
95 )
95 )
96
96
97 assert newest_tip.branch == \
97 assert newest_tip.branch == \
98 self.backend_class.DEFAULT_BRANCH_NAME
98 self.backend_class.DEFAULT_BRANCH_NAME
99
99
100 @pytest.mark.backends("git", "hg")
100 @pytest.mark.backends("git", "hg")
101 def test_branch_with_slash_in_name(self):
101 def test_branch_with_slash_in_name(self):
102 self.imc.add(FileNode('extrafile', content='Some data\n'))
102 self.imc.add(FileNode('extrafile', content='Some data\n'))
103 self.imc.commit(
103 self.imc.commit(
104 u'Branch with a slash!', author=u'joe',
104 u'Branch with a slash!', author=u'joe <joe@rhodecode.com>',
105 branch='issue/123')
105 branch='issue/123')
106 assert 'issue/123' in self.repo.branches
106 assert 'issue/123' in self.repo.branches
107
107
108 @pytest.mark.backends("git", "hg")
108 @pytest.mark.backends("git", "hg")
109 def test_branch_with_slash_in_name_and_similar_without(self):
109 def test_branch_with_slash_in_name_and_similar_without(self):
110 self.imc.add(FileNode('extrafile', content='Some data\n'))
110 self.imc.add(FileNode('extrafile', content='Some data\n'))
111 self.imc.commit(
111 self.imc.commit(
112 u'Branch with a slash!', author=u'joe',
112 u'Branch with a slash!', author=u'joe <joe@rhodecode.com>',
113 branch='issue/123')
113 branch='issue/123')
114 self.imc.add(FileNode('extrafile II', content='Some data\n'))
114 self.imc.add(FileNode('extrafile II', content='Some data\n'))
115 self.imc.commit(
115 self.imc.commit(
116 u'Branch without a slash...', author=u'joe',
116 u'Branch without a slash...', author=u'joe <joe@rhodecode.com>',
117 branch='123')
117 branch='123')
118 assert 'issue/123' in self.repo.branches
118 assert 'issue/123' in self.repo.branches
119 assert '123' in self.repo.branches
119 assert '123' in self.repo.branches
120
120
121
121
122 class TestSvnBranches(object):
122 class TestSvnBranches(object):
123
123
124 def test_empty_repository_has_no_tags_and_branches(self, vcsbackend_svn):
124 def test_empty_repository_has_no_tags_and_branches(self, vcsbackend_svn):
125 empty_repo = vcsbackend_svn.create_repo()
125 empty_repo = vcsbackend_svn.create_repo()
126 assert empty_repo.branches == {}
126 assert empty_repo.branches == {}
127 assert empty_repo.tags == {}
127 assert empty_repo.tags == {}
128
128
129 def test_missing_structure_has_no_tags_and_branches(self, vcsbackend_svn):
129 def test_missing_structure_has_no_tags_and_branches(self, vcsbackend_svn):
130 repo = vcsbackend_svn.create_repo(number_of_commits=1)
130 repo = vcsbackend_svn.create_repo(number_of_commits=1)
131 assert repo.branches == {}
131 assert repo.branches == {}
132 assert repo.tags == {}
132 assert repo.tags == {}
133
133
134 def test_discovers_ordered_branches(self, vcsbackend_svn):
134 def test_discovers_ordered_branches(self, vcsbackend_svn):
135 repo = vcsbackend_svn['svn-simple-layout']
135 repo = vcsbackend_svn['svn-simple-layout']
136 expected_branches = [
136 expected_branches = [
137 'branches/add-docs',
137 'branches/add-docs',
138 'branches/argparse',
138 'branches/argparse',
139 'trunk',
139 'trunk',
140 ]
140 ]
141 assert repo.branches.keys() == expected_branches
141 assert repo.branches.keys() == expected_branches
142
142
143 def test_discovers_ordered_tags(self, vcsbackend_svn):
143 def test_discovers_ordered_tags(self, vcsbackend_svn):
144 repo = vcsbackend_svn['svn-simple-layout']
144 repo = vcsbackend_svn['svn-simple-layout']
145 expected_tags = [
145 expected_tags = [
146 'tags/v0.1', 'tags/v0.2', 'tags/v0.3', 'tags/v0.5']
146 'tags/v0.1', 'tags/v0.2', 'tags/v0.3', 'tags/v0.5']
147 assert repo.tags.keys() == expected_tags
147 assert repo.tags.keys() == expected_tags
@@ -1,596 +1,596 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22 import time
22 import time
23
23
24 import pytest
24 import pytest
25
25
26 from rhodecode.lib.vcs.backends.base import (
26 from rhodecode.lib.vcs.backends.base import (
27 CollectionGenerator, FILEMODE_DEFAULT, EmptyCommit)
27 CollectionGenerator, FILEMODE_DEFAULT, EmptyCommit)
28 from rhodecode.lib.vcs.exceptions import (
28 from rhodecode.lib.vcs.exceptions import (
29 BranchDoesNotExistError, CommitDoesNotExistError,
29 BranchDoesNotExistError, CommitDoesNotExistError,
30 RepositoryError, EmptyRepositoryError)
30 RepositoryError, EmptyRepositoryError)
31 from rhodecode.lib.vcs.nodes import (
31 from rhodecode.lib.vcs.nodes import (
32 FileNode, AddedFileNodesGenerator,
32 FileNode, AddedFileNodesGenerator,
33 ChangedFileNodesGenerator, RemovedFileNodesGenerator)
33 ChangedFileNodesGenerator, RemovedFileNodesGenerator)
34 from rhodecode.tests import get_new_dir
34 from rhodecode.tests import get_new_dir
35 from rhodecode.tests.vcs.conftest import BackendTestMixin
35 from rhodecode.tests.vcs.conftest import BackendTestMixin
36
36
37
37
38 class TestBaseChangeset(object):
38 class TestBaseChangeset(object):
39
39
40 def test_is_deprecated(self):
40 def test_is_deprecated(self):
41 from rhodecode.lib.vcs.backends.base import BaseChangeset
41 from rhodecode.lib.vcs.backends.base import BaseChangeset
42 pytest.deprecated_call(BaseChangeset)
42 pytest.deprecated_call(BaseChangeset)
43
43
44
44
45 class TestEmptyCommit(object):
45 class TestEmptyCommit(object):
46
46
47 def test_branch_without_alias_returns_none(self):
47 def test_branch_without_alias_returns_none(self):
48 commit = EmptyCommit()
48 commit = EmptyCommit()
49 assert commit.branch is None
49 assert commit.branch is None
50
50
51
51
52 @pytest.mark.usefixtures("vcs_repository_support")
52 @pytest.mark.usefixtures("vcs_repository_support")
53 class TestCommitsInNonEmptyRepo(BackendTestMixin):
53 class TestCommitsInNonEmptyRepo(BackendTestMixin):
54 recreate_repo_per_test = True
54 recreate_repo_per_test = True
55
55
56 @classmethod
56 @classmethod
57 def _get_commits(cls):
57 def _get_commits(cls):
58 start_date = datetime.datetime(2010, 1, 1, 20)
58 start_date = datetime.datetime(2010, 1, 1, 20)
59 for x in xrange(5):
59 for x in xrange(5):
60 yield {
60 yield {
61 'message': 'Commit %d' % x,
61 'message': 'Commit %d' % x,
62 'author': 'Joe Doe <joe.doe@example.com>',
62 'author': 'Joe Doe <joe.doe@example.com>',
63 'date': start_date + datetime.timedelta(hours=12 * x),
63 'date': start_date + datetime.timedelta(hours=12 * x),
64 'added': [
64 'added': [
65 FileNode('file_%d.txt' % x, content='Foobar %d' % x),
65 FileNode('file_%d.txt' % x, content='Foobar %d' % x),
66 ],
66 ],
67 }
67 }
68
68
69 def test_walk_returns_empty_list_in_case_of_file(self):
69 def test_walk_returns_empty_list_in_case_of_file(self):
70 result = list(self.tip.walk('file_0.txt'))
70 result = list(self.tip.walk('file_0.txt'))
71 assert result == []
71 assert result == []
72
72
73 @pytest.mark.backends("git", "hg")
73 @pytest.mark.backends("git", "hg")
74 def test_new_branch(self):
74 def test_new_branch(self):
75 self.imc.add(FileNode('docs/index.txt',
75 self.imc.add(FileNode('docs/index.txt',
76 content='Documentation\n'))
76 content='Documentation\n'))
77 foobar_tip = self.imc.commit(
77 foobar_tip = self.imc.commit(
78 message=u'New branch: foobar',
78 message=u'New branch: foobar',
79 author=u'joe',
79 author=u'joe <joe@rhodecode.com>',
80 branch='foobar',
80 branch='foobar',
81 )
81 )
82 assert 'foobar' in self.repo.branches
82 assert 'foobar' in self.repo.branches
83 assert foobar_tip.branch == 'foobar'
83 assert foobar_tip.branch == 'foobar'
84 # 'foobar' should be the only branch that contains the new commit
84 # 'foobar' should be the only branch that contains the new commit
85 branch = self.repo.branches.values()
85 branch = self.repo.branches.values()
86 assert branch[0] != branch[1]
86 assert branch[0] != branch[1]
87
87
88 @pytest.mark.backends("git", "hg")
88 @pytest.mark.backends("git", "hg")
89 def test_new_head_in_default_branch(self):
89 def test_new_head_in_default_branch(self):
90 tip = self.repo.get_commit()
90 tip = self.repo.get_commit()
91 self.imc.add(FileNode('docs/index.txt',
91 self.imc.add(FileNode('docs/index.txt',
92 content='Documentation\n'))
92 content='Documentation\n'))
93 foobar_tip = self.imc.commit(
93 foobar_tip = self.imc.commit(
94 message=u'New branch: foobar',
94 message=u'New branch: foobar',
95 author=u'joe',
95 author=u'joe <joe@rhodecode.com>',
96 branch='foobar',
96 branch='foobar',
97 parents=[tip],
97 parents=[tip],
98 )
98 )
99 self.imc.change(FileNode('docs/index.txt',
99 self.imc.change(FileNode('docs/index.txt',
100 content='Documentation\nand more...\n'))
100 content='Documentation\nand more...\n'))
101 newtip = self.imc.commit(
101 newtip = self.imc.commit(
102 message=u'At default branch',
102 message=u'At default branch',
103 author=u'joe',
103 author=u'joe <joe@rhodecode.com>',
104 branch=foobar_tip.branch,
104 branch=foobar_tip.branch,
105 parents=[foobar_tip],
105 parents=[foobar_tip],
106 )
106 )
107
107
108 newest_tip = self.imc.commit(
108 newest_tip = self.imc.commit(
109 message=u'Merged with %s' % foobar_tip.raw_id,
109 message=u'Merged with %s' % foobar_tip.raw_id,
110 author=u'joe',
110 author=u'joe <joe@rhodecode.com>',
111 branch=self.backend_class.DEFAULT_BRANCH_NAME,
111 branch=self.backend_class.DEFAULT_BRANCH_NAME,
112 parents=[newtip, foobar_tip],
112 parents=[newtip, foobar_tip],
113 )
113 )
114
114
115 assert newest_tip.branch == self.backend_class.DEFAULT_BRANCH_NAME
115 assert newest_tip.branch == self.backend_class.DEFAULT_BRANCH_NAME
116
116
117 @pytest.mark.backends("git", "hg")
117 @pytest.mark.backends("git", "hg")
118 def test_get_commits_respects_branch_name(self):
118 def test_get_commits_respects_branch_name(self):
119 """
119 """
120 * e1930d0 (HEAD, master) Back in default branch
120 * e1930d0 (HEAD, master) Back in default branch
121 | * e1930d0 (docs) New Branch: docs2
121 | * e1930d0 (docs) New Branch: docs2
122 | * dcc14fa New branch: docs
122 | * dcc14fa New branch: docs
123 |/
123 |/
124 * e63c41a Initial commit
124 * e63c41a Initial commit
125 ...
125 ...
126 * 624d3db Commit 0
126 * 624d3db Commit 0
127
127
128 :return:
128 :return:
129 """
129 """
130 DEFAULT_BRANCH = self.repo.DEFAULT_BRANCH_NAME
130 DEFAULT_BRANCH = self.repo.DEFAULT_BRANCH_NAME
131 TEST_BRANCH = 'docs'
131 TEST_BRANCH = 'docs'
132 org_tip = self.repo.get_commit()
132 org_tip = self.repo.get_commit()
133
133
134 self.imc.add(FileNode('readme.txt', content='Document\n'))
134 self.imc.add(FileNode('readme.txt', content='Document\n'))
135 initial = self.imc.commit(
135 initial = self.imc.commit(
136 message=u'Initial commit',
136 message=u'Initial commit',
137 author=u'joe',
137 author=u'joe <joe@rhodecode.com>',
138 parents=[org_tip],
138 parents=[org_tip],
139 branch=DEFAULT_BRANCH,)
139 branch=DEFAULT_BRANCH,)
140
140
141 self.imc.add(FileNode('newdoc.txt', content='foobar\n'))
141 self.imc.add(FileNode('newdoc.txt', content='foobar\n'))
142 docs_branch_commit1 = self.imc.commit(
142 docs_branch_commit1 = self.imc.commit(
143 message=u'New branch: docs',
143 message=u'New branch: docs',
144 author=u'joe',
144 author=u'joe <joe@rhodecode.com>',
145 parents=[initial],
145 parents=[initial],
146 branch=TEST_BRANCH,)
146 branch=TEST_BRANCH,)
147
147
148 self.imc.add(FileNode('newdoc2.txt', content='foobar2\n'))
148 self.imc.add(FileNode('newdoc2.txt', content='foobar2\n'))
149 docs_branch_commit2 = self.imc.commit(
149 docs_branch_commit2 = self.imc.commit(
150 message=u'New branch: docs2',
150 message=u'New branch: docs2',
151 author=u'joe',
151 author=u'joe <joe@rhodecode.com>',
152 parents=[docs_branch_commit1],
152 parents=[docs_branch_commit1],
153 branch=TEST_BRANCH,)
153 branch=TEST_BRANCH,)
154
154
155 self.imc.add(FileNode('newfile', content='hello world\n'))
155 self.imc.add(FileNode('newfile', content='hello world\n'))
156 self.imc.commit(
156 self.imc.commit(
157 message=u'Back in default branch',
157 message=u'Back in default branch',
158 author=u'joe',
158 author=u'joe <joe@rhodecode.com>',
159 parents=[initial],
159 parents=[initial],
160 branch=DEFAULT_BRANCH,)
160 branch=DEFAULT_BRANCH,)
161
161
162 default_branch_commits = self.repo.get_commits(branch_name=DEFAULT_BRANCH)
162 default_branch_commits = self.repo.get_commits(branch_name=DEFAULT_BRANCH)
163 assert docs_branch_commit1 not in list(default_branch_commits)
163 assert docs_branch_commit1 not in list(default_branch_commits)
164 assert docs_branch_commit2 not in list(default_branch_commits)
164 assert docs_branch_commit2 not in list(default_branch_commits)
165
165
166 docs_branch_commits = self.repo.get_commits(
166 docs_branch_commits = self.repo.get_commits(
167 start_id=self.repo.commit_ids[0], end_id=self.repo.commit_ids[-1],
167 start_id=self.repo.commit_ids[0], end_id=self.repo.commit_ids[-1],
168 branch_name=TEST_BRANCH)
168 branch_name=TEST_BRANCH)
169 assert docs_branch_commit1 in list(docs_branch_commits)
169 assert docs_branch_commit1 in list(docs_branch_commits)
170 assert docs_branch_commit2 in list(docs_branch_commits)
170 assert docs_branch_commit2 in list(docs_branch_commits)
171
171
172 @pytest.mark.backends("svn")
172 @pytest.mark.backends("svn")
173 def test_get_commits_respects_branch_name_svn(self, vcsbackend_svn):
173 def test_get_commits_respects_branch_name_svn(self, vcsbackend_svn):
174 repo = vcsbackend_svn['svn-simple-layout']
174 repo = vcsbackend_svn['svn-simple-layout']
175 commits = repo.get_commits(branch_name='trunk')
175 commits = repo.get_commits(branch_name='trunk')
176 commit_indexes = [c.idx for c in commits]
176 commit_indexes = [c.idx for c in commits]
177 assert commit_indexes == [1, 2, 3, 7, 12, 15]
177 assert commit_indexes == [1, 2, 3, 7, 12, 15]
178
178
179 def test_get_commit_by_index(self):
179 def test_get_commit_by_index(self):
180 for idx in [1, 2, 3, 4]:
180 for idx in [1, 2, 3, 4]:
181 assert idx == self.repo.get_commit(commit_idx=idx).idx
181 assert idx == self.repo.get_commit(commit_idx=idx).idx
182
182
183 def test_get_commit_by_branch(self):
183 def test_get_commit_by_branch(self):
184 for branch, commit_id in self.repo.branches.iteritems():
184 for branch, commit_id in self.repo.branches.iteritems():
185 assert commit_id == self.repo.get_commit(branch).raw_id
185 assert commit_id == self.repo.get_commit(branch).raw_id
186
186
187 def test_get_commit_by_tag(self):
187 def test_get_commit_by_tag(self):
188 for tag, commit_id in self.repo.tags.iteritems():
188 for tag, commit_id in self.repo.tags.iteritems():
189 assert commit_id == self.repo.get_commit(tag).raw_id
189 assert commit_id == self.repo.get_commit(tag).raw_id
190
190
191 def test_get_commit_parents(self):
191 def test_get_commit_parents(self):
192 repo = self.repo
192 repo = self.repo
193 for test_idx in [1, 2, 3]:
193 for test_idx in [1, 2, 3]:
194 commit = repo.get_commit(commit_idx=test_idx - 1)
194 commit = repo.get_commit(commit_idx=test_idx - 1)
195 assert [commit] == repo.get_commit(commit_idx=test_idx).parents
195 assert [commit] == repo.get_commit(commit_idx=test_idx).parents
196
196
197 def test_get_commit_children(self):
197 def test_get_commit_children(self):
198 repo = self.repo
198 repo = self.repo
199 for test_idx in [1, 2, 3]:
199 for test_idx in [1, 2, 3]:
200 commit = repo.get_commit(commit_idx=test_idx + 1)
200 commit = repo.get_commit(commit_idx=test_idx + 1)
201 assert [commit] == repo.get_commit(commit_idx=test_idx).children
201 assert [commit] == repo.get_commit(commit_idx=test_idx).children
202
202
203
203
204 @pytest.mark.usefixtures("vcs_repository_support")
204 @pytest.mark.usefixtures("vcs_repository_support")
205 class TestCommits(BackendTestMixin):
205 class TestCommits(BackendTestMixin):
206 recreate_repo_per_test = False
206 recreate_repo_per_test = False
207
207
208 @classmethod
208 @classmethod
209 def _get_commits(cls):
209 def _get_commits(cls):
210 start_date = datetime.datetime(2010, 1, 1, 20)
210 start_date = datetime.datetime(2010, 1, 1, 20)
211 for x in xrange(5):
211 for x in xrange(5):
212 yield {
212 yield {
213 'message': u'Commit %d' % x,
213 'message': u'Commit %d' % x,
214 'author': u'Joe Doe <joe.doe@example.com>',
214 'author': u'Joe Doe <joe.doe@example.com>',
215 'date': start_date + datetime.timedelta(hours=12 * x),
215 'date': start_date + datetime.timedelta(hours=12 * x),
216 'added': [
216 'added': [
217 FileNode('file_%d.txt' % x, content='Foobar %d' % x),
217 FileNode('file_%d.txt' % x, content='Foobar %d' % x),
218 ],
218 ],
219 }
219 }
220
220
221 def test_simple(self):
221 def test_simple(self):
222 tip = self.repo.get_commit()
222 tip = self.repo.get_commit()
223 assert tip.date == datetime.datetime(2010, 1, 3, 20)
223 assert tip.date == datetime.datetime(2010, 1, 3, 20)
224
224
225 def test_simple_serialized_commit(self):
225 def test_simple_serialized_commit(self):
226 tip = self.repo.get_commit()
226 tip = self.repo.get_commit()
227 # json.dumps(tip) uses .__json__() method
227 # json.dumps(tip) uses .__json__() method
228 data = tip.__json__()
228 data = tip.__json__()
229 assert 'branch' in data
229 assert 'branch' in data
230 assert data['revision']
230 assert data['revision']
231
231
232 def test_retrieve_tip(self):
232 def test_retrieve_tip(self):
233 tip = self.repo.get_commit('tip')
233 tip = self.repo.get_commit('tip')
234 assert tip == self.repo.get_commit()
234 assert tip == self.repo.get_commit()
235
235
236 def test_invalid(self):
236 def test_invalid(self):
237 with pytest.raises(CommitDoesNotExistError):
237 with pytest.raises(CommitDoesNotExistError):
238 self.repo.get_commit(commit_idx=123456789)
238 self.repo.get_commit(commit_idx=123456789)
239
239
240 def test_idx(self):
240 def test_idx(self):
241 commit = self.repo[0]
241 commit = self.repo[0]
242 assert commit.idx == 0
242 assert commit.idx == 0
243
243
244 def test_negative_idx(self):
244 def test_negative_idx(self):
245 commit = self.repo.get_commit(commit_idx=-1)
245 commit = self.repo.get_commit(commit_idx=-1)
246 assert commit.idx >= 0
246 assert commit.idx >= 0
247
247
248 def test_revision_is_deprecated(self):
248 def test_revision_is_deprecated(self):
249 def get_revision(commit):
249 def get_revision(commit):
250 return commit.revision
250 return commit.revision
251
251
252 commit = self.repo[0]
252 commit = self.repo[0]
253 pytest.deprecated_call(get_revision, commit)
253 pytest.deprecated_call(get_revision, commit)
254
254
255 def test_size(self):
255 def test_size(self):
256 tip = self.repo.get_commit()
256 tip = self.repo.get_commit()
257 size = 5 * len('Foobar N') # Size of 5 files
257 size = 5 * len('Foobar N') # Size of 5 files
258 assert tip.size == size
258 assert tip.size == size
259
259
260 def test_size_at_commit(self):
260 def test_size_at_commit(self):
261 tip = self.repo.get_commit()
261 tip = self.repo.get_commit()
262 size = 5 * len('Foobar N') # Size of 5 files
262 size = 5 * len('Foobar N') # Size of 5 files
263 assert self.repo.size_at_commit(tip.raw_id) == size
263 assert self.repo.size_at_commit(tip.raw_id) == size
264
264
265 def test_size_at_first_commit(self):
265 def test_size_at_first_commit(self):
266 commit = self.repo[0]
266 commit = self.repo[0]
267 size = len('Foobar N') # Size of 1 file
267 size = len('Foobar N') # Size of 1 file
268 assert self.repo.size_at_commit(commit.raw_id) == size
268 assert self.repo.size_at_commit(commit.raw_id) == size
269
269
270 def test_author(self):
270 def test_author(self):
271 tip = self.repo.get_commit()
271 tip = self.repo.get_commit()
272 assert_text_equal(tip.author, u'Joe Doe <joe.doe@example.com>')
272 assert_text_equal(tip.author, u'Joe Doe <joe.doe@example.com>')
273
273
274 def test_author_name(self):
274 def test_author_name(self):
275 tip = self.repo.get_commit()
275 tip = self.repo.get_commit()
276 assert_text_equal(tip.author_name, u'Joe Doe')
276 assert_text_equal(tip.author_name, u'Joe Doe')
277
277
278 def test_author_email(self):
278 def test_author_email(self):
279 tip = self.repo.get_commit()
279 tip = self.repo.get_commit()
280 assert_text_equal(tip.author_email, u'joe.doe@example.com')
280 assert_text_equal(tip.author_email, u'joe.doe@example.com')
281
281
282 def test_message(self):
282 def test_message(self):
283 tip = self.repo.get_commit()
283 tip = self.repo.get_commit()
284 assert_text_equal(tip.message, u'Commit 4')
284 assert_text_equal(tip.message, u'Commit 4')
285
285
286 def test_diff(self):
286 def test_diff(self):
287 tip = self.repo.get_commit()
287 tip = self.repo.get_commit()
288 diff = tip.diff()
288 diff = tip.diff()
289 assert "+Foobar 4" in diff.raw
289 assert "+Foobar 4" in diff.raw
290
290
291 def test_prev(self):
291 def test_prev(self):
292 tip = self.repo.get_commit()
292 tip = self.repo.get_commit()
293 prev_commit = tip.prev()
293 prev_commit = tip.prev()
294 assert prev_commit.message == 'Commit 3'
294 assert prev_commit.message == 'Commit 3'
295
295
296 def test_prev_raises_on_first_commit(self):
296 def test_prev_raises_on_first_commit(self):
297 commit = self.repo.get_commit(commit_idx=0)
297 commit = self.repo.get_commit(commit_idx=0)
298 with pytest.raises(CommitDoesNotExistError):
298 with pytest.raises(CommitDoesNotExistError):
299 commit.prev()
299 commit.prev()
300
300
301 def test_prev_works_on_second_commit_issue_183(self):
301 def test_prev_works_on_second_commit_issue_183(self):
302 commit = self.repo.get_commit(commit_idx=1)
302 commit = self.repo.get_commit(commit_idx=1)
303 prev_commit = commit.prev()
303 prev_commit = commit.prev()
304 assert prev_commit.idx == 0
304 assert prev_commit.idx == 0
305
305
306 def test_next(self):
306 def test_next(self):
307 commit = self.repo.get_commit(commit_idx=2)
307 commit = self.repo.get_commit(commit_idx=2)
308 next_commit = commit.next()
308 next_commit = commit.next()
309 assert next_commit.message == 'Commit 3'
309 assert next_commit.message == 'Commit 3'
310
310
311 def test_next_raises_on_tip(self):
311 def test_next_raises_on_tip(self):
312 commit = self.repo.get_commit()
312 commit = self.repo.get_commit()
313 with pytest.raises(CommitDoesNotExistError):
313 with pytest.raises(CommitDoesNotExistError):
314 commit.next()
314 commit.next()
315
315
316 def test_get_path_commit(self):
316 def test_get_path_commit(self):
317 commit = self.repo.get_commit()
317 commit = self.repo.get_commit()
318 commit.get_path_commit('file_4.txt')
318 commit.get_path_commit('file_4.txt')
319 assert commit.message == 'Commit 4'
319 assert commit.message == 'Commit 4'
320
320
321 def test_get_filenodes_generator(self):
321 def test_get_filenodes_generator(self):
322 tip = self.repo.get_commit()
322 tip = self.repo.get_commit()
323 filepaths = [node.path for node in tip.get_filenodes_generator()]
323 filepaths = [node.path for node in tip.get_filenodes_generator()]
324 assert filepaths == ['file_%d.txt' % x for x in xrange(5)]
324 assert filepaths == ['file_%d.txt' % x for x in xrange(5)]
325
325
326 def test_get_file_annotate(self):
326 def test_get_file_annotate(self):
327 file_added_commit = self.repo.get_commit(commit_idx=3)
327 file_added_commit = self.repo.get_commit(commit_idx=3)
328 annotations = list(file_added_commit.get_file_annotate('file_3.txt'))
328 annotations = list(file_added_commit.get_file_annotate('file_3.txt'))
329
329
330 line_no, commit_id, commit_loader, line = annotations[0]
330 line_no, commit_id, commit_loader, line = annotations[0]
331
331
332 assert line_no == 1
332 assert line_no == 1
333 assert commit_id == file_added_commit.raw_id
333 assert commit_id == file_added_commit.raw_id
334 assert commit_loader() == file_added_commit
334 assert commit_loader() == file_added_commit
335 assert 'Foobar 3' in line
335 assert 'Foobar 3' in line
336
336
337 def test_get_file_annotate_does_not_exist(self):
337 def test_get_file_annotate_does_not_exist(self):
338 file_added_commit = self.repo.get_commit(commit_idx=2)
338 file_added_commit = self.repo.get_commit(commit_idx=2)
339 # TODO: Should use a specific exception class here?
339 # TODO: Should use a specific exception class here?
340 with pytest.raises(Exception):
340 with pytest.raises(Exception):
341 list(file_added_commit.get_file_annotate('file_3.txt'))
341 list(file_added_commit.get_file_annotate('file_3.txt'))
342
342
343 def test_get_file_annotate_tip(self):
343 def test_get_file_annotate_tip(self):
344 tip = self.repo.get_commit()
344 tip = self.repo.get_commit()
345 commit = self.repo.get_commit(commit_idx=3)
345 commit = self.repo.get_commit(commit_idx=3)
346 expected_values = list(commit.get_file_annotate('file_3.txt'))
346 expected_values = list(commit.get_file_annotate('file_3.txt'))
347 annotations = list(tip.get_file_annotate('file_3.txt'))
347 annotations = list(tip.get_file_annotate('file_3.txt'))
348
348
349 # Note: Skip index 2 because the loader function is not the same
349 # Note: Skip index 2 because the loader function is not the same
350 for idx in (0, 1, 3):
350 for idx in (0, 1, 3):
351 assert annotations[0][idx] == expected_values[0][idx]
351 assert annotations[0][idx] == expected_values[0][idx]
352
352
353 def test_get_commits_is_ordered_by_date(self):
353 def test_get_commits_is_ordered_by_date(self):
354 commits = self.repo.get_commits()
354 commits = self.repo.get_commits()
355 assert isinstance(commits, CollectionGenerator)
355 assert isinstance(commits, CollectionGenerator)
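        # The tautological check below presumably just exercises __len__ on the
        # lazy collection before it is materialized.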
356 assert len(commits) == 0 or len(commits) != 0
356 assert len(commits) == 0 or len(commits) != 0
357 commits = list(commits)
357 commits = list(commits)
358 ordered_by_date = sorted(commits, key=lambda commit: commit.date)
358 ordered_by_date = sorted(commits, key=lambda commit: commit.date)
359 assert commits == ordered_by_date
359 assert commits == ordered_by_date
360
360
361 def test_get_commits_respects_start(self):
361 def test_get_commits_respects_start(self):
362 second_id = self.repo.commit_ids[1]
362 second_id = self.repo.commit_ids[1]
363 commits = self.repo.get_commits(start_id=second_id)
363 commits = self.repo.get_commits(start_id=second_id)
364 assert isinstance(commits, CollectionGenerator)
364 assert isinstance(commits, CollectionGenerator)
365 commits = list(commits)
365 commits = list(commits)
366 assert len(commits) == 4
366 assert len(commits) == 4
367
367
368 def test_get_commits_includes_start_commit(self):
368 def test_get_commits_includes_start_commit(self):
369 second_id = self.repo.commit_ids[1]
369 second_id = self.repo.commit_ids[1]
370 commits = self.repo.get_commits(start_id=second_id)
370 commits = self.repo.get_commits(start_id=second_id)
371 assert isinstance(commits, CollectionGenerator)
371 assert isinstance(commits, CollectionGenerator)
372 commits = list(commits)
372 commits = list(commits)
373 assert commits[0].raw_id == second_id
373 assert commits[0].raw_id == second_id
374
374
375 def test_get_commits_respects_end(self):
375 def test_get_commits_respects_end(self):
376 second_id = self.repo.commit_ids[1]
376 second_id = self.repo.commit_ids[1]
377 commits = self.repo.get_commits(end_id=second_id)
377 commits = self.repo.get_commits(end_id=second_id)
378 assert isinstance(commits, CollectionGenerator)
378 assert isinstance(commits, CollectionGenerator)
379 commits = list(commits)
379 commits = list(commits)
380 assert commits[-1].raw_id == second_id
380 assert commits[-1].raw_id == second_id
381 assert len(commits) == 2
381 assert len(commits) == 2
382
382
383 def test_get_commits_respects_both_start_and_end(self):
383 def test_get_commits_respects_both_start_and_end(self):
384 second_id = self.repo.commit_ids[1]
384 second_id = self.repo.commit_ids[1]
385 third_id = self.repo.commit_ids[2]
385 third_id = self.repo.commit_ids[2]
386 commits = self.repo.get_commits(start_id=second_id, end_id=third_id)
386 commits = self.repo.get_commits(start_id=second_id, end_id=third_id)
387 assert isinstance(commits, CollectionGenerator)
387 assert isinstance(commits, CollectionGenerator)
388 commits = list(commits)
388 commits = list(commits)
389 assert len(commits) == 2
389 assert len(commits) == 2
390
390
391 def test_get_commits_on_empty_repo_raises_EmptyRepository_error(self):
391 def test_get_commits_on_empty_repo_raises_EmptyRepository_error(self):
392 repo_path = get_new_dir(str(time.time()))
392 repo_path = get_new_dir(str(time.time()))
393 repo = self.Backend(repo_path, create=True)
393 repo = self.Backend(repo_path, create=True)
394
394
395 with pytest.raises(EmptyRepositoryError):
395 with pytest.raises(EmptyRepositoryError):
396 list(repo.get_commits(start_id='foobar'))
396 list(repo.get_commits(start_id='foobar'))
397
397
398 def test_get_commits_respects_hidden(self):
398 def test_get_commits_respects_hidden(self):
399 commits = self.repo.get_commits(show_hidden=True)
399 commits = self.repo.get_commits(show_hidden=True)
400 assert isinstance(commits, CollectionGenerator)
400 assert isinstance(commits, CollectionGenerator)
401 assert len(commits) == 5
401 assert len(commits) == 5
402
402
403 def test_get_commits_includes_end_commit(self):
403 def test_get_commits_includes_end_commit(self):
404 second_id = self.repo.commit_ids[1]
404 second_id = self.repo.commit_ids[1]
405 commits = self.repo.get_commits(end_id=second_id)
405 commits = self.repo.get_commits(end_id=second_id)
406 assert isinstance(commits, CollectionGenerator)
406 assert isinstance(commits, CollectionGenerator)
407 assert len(commits) == 2
407 assert len(commits) == 2
408 commits = list(commits)
408 commits = list(commits)
409 assert commits[-1].raw_id == second_id
409 assert commits[-1].raw_id == second_id
410
410
411 def test_get_commits_respects_start_date(self):
411 def test_get_commits_respects_start_date(self):
412 start_date = datetime.datetime(2010, 1, 2)
412 start_date = datetime.datetime(2010, 1, 2)
413 commits = self.repo.get_commits(start_date=start_date)
413 commits = self.repo.get_commits(start_date=start_date)
414 assert isinstance(commits, CollectionGenerator)
414 assert isinstance(commits, CollectionGenerator)
415 # Should be 4 commits after 2010-01-02 00:00:00
415 # Should be 4 commits after 2010-01-02 00:00:00
416 assert len(commits) == 4
416 assert len(commits) == 4
417 for c in commits:
417 for c in commits:
418 assert c.date >= start_date
418 assert c.date >= start_date
419
419
420 def test_get_commits_respects_start_date_with_branch(self):
420 def test_get_commits_respects_start_date_with_branch(self):
421 start_date = datetime.datetime(2010, 1, 2)
421 start_date = datetime.datetime(2010, 1, 2)
422 commits = self.repo.get_commits(
422 commits = self.repo.get_commits(
423 start_date=start_date, branch_name=self.repo.DEFAULT_BRANCH_NAME)
423 start_date=start_date, branch_name=self.repo.DEFAULT_BRANCH_NAME)
424 assert isinstance(commits, CollectionGenerator)
424 assert isinstance(commits, CollectionGenerator)
425 # Should be 4 commits after 2010-01-02 00:00:00
425 # Should be 4 commits after 2010-01-02 00:00:00
426 assert len(commits) == 4
426 assert len(commits) == 4
427 for c in commits:
427 for c in commits:
428 assert c.date >= start_date
428 assert c.date >= start_date
429
429
430 def test_get_commits_respects_start_date_and_end_date(self):
430 def test_get_commits_respects_start_date_and_end_date(self):
431 start_date = datetime.datetime(2010, 1, 2)
431 start_date = datetime.datetime(2010, 1, 2)
432 end_date = datetime.datetime(2010, 1, 3)
432 end_date = datetime.datetime(2010, 1, 3)
433 commits = self.repo.get_commits(start_date=start_date,
433 commits = self.repo.get_commits(start_date=start_date,
434 end_date=end_date)
434 end_date=end_date)
435 assert isinstance(commits, CollectionGenerator)
435 assert isinstance(commits, CollectionGenerator)
436 assert len(commits) == 2
436 assert len(commits) == 2
437 for c in commits:
437 for c in commits:
438 assert c.date >= start_date
438 assert c.date >= start_date
439 assert c.date <= end_date
439 assert c.date <= end_date
440
440
441 def test_get_commits_respects_end_date(self):
441 def test_get_commits_respects_end_date(self):
442 end_date = datetime.datetime(2010, 1, 2)
442 end_date = datetime.datetime(2010, 1, 2)
443 commits = self.repo.get_commits(end_date=end_date)
443 commits = self.repo.get_commits(end_date=end_date)
444 assert isinstance(commits, CollectionGenerator)
444 assert isinstance(commits, CollectionGenerator)
445 assert len(commits) == 1
445 assert len(commits) == 1
446 for c in commits:
446 for c in commits:
447 assert c.date <= end_date
447 assert c.date <= end_date
448
448
449 def test_get_commits_respects_reverse(self):
449 def test_get_commits_respects_reverse(self):
450 commits = self.repo.get_commits()  # reverse is no longer supported
450 commits = self.repo.get_commits()  # reverse is no longer supported
451 assert isinstance(commits, CollectionGenerator)
451 assert isinstance(commits, CollectionGenerator)
452 assert len(commits) == 5
452 assert len(commits) == 5
453 commit_ids = reversed([c.raw_id for c in commits])
453 commit_ids = reversed([c.raw_id for c in commits])
454 assert list(commit_ids) == list(reversed(self.repo.commit_ids))
454 assert list(commit_ids) == list(reversed(self.repo.commit_ids))
455
455
456 def test_get_commits_slice_generator(self):
456 def test_get_commits_slice_generator(self):
457 commits = self.repo.get_commits(
457 commits = self.repo.get_commits(
458 branch_name=self.repo.DEFAULT_BRANCH_NAME)
458 branch_name=self.repo.DEFAULT_BRANCH_NAME)
459 assert isinstance(commits, CollectionGenerator)
459 assert isinstance(commits, CollectionGenerator)
460 commit_slice = list(commits[1:3])
460 commit_slice = list(commits[1:3])
461 assert len(commit_slice) == 2
461 assert len(commit_slice) == 2
462
462
463 def test_get_commits_raise_commitdoesnotexist_for_wrong_start(self):
463 def test_get_commits_raise_commitdoesnotexist_for_wrong_start(self):
464 with pytest.raises(CommitDoesNotExistError):
464 with pytest.raises(CommitDoesNotExistError):
465 list(self.repo.get_commits(start_id='foobar'))
465 list(self.repo.get_commits(start_id='foobar'))
466
466
467 def test_get_commits_raise_commitdoesnotexist_for_wrong_end(self):
467 def test_get_commits_raise_commitdoesnotexist_for_wrong_end(self):
468 with pytest.raises(CommitDoesNotExistError):
468 with pytest.raises(CommitDoesNotExistError):
469 list(self.repo.get_commits(end_id='foobar'))
469 list(self.repo.get_commits(end_id='foobar'))
470
470
471 def test_get_commits_raise_branchdoesnotexist_for_wrong_branch_name(self):
471 def test_get_commits_raise_branchdoesnotexist_for_wrong_branch_name(self):
472 with pytest.raises(BranchDoesNotExistError):
472 with pytest.raises(BranchDoesNotExistError):
473 list(self.repo.get_commits(branch_name='foobar'))
473 list(self.repo.get_commits(branch_name='foobar'))
474
474
475 def test_get_commits_raise_repositoryerror_for_wrong_start_end(self):
475 def test_get_commits_raise_repositoryerror_for_wrong_start_end(self):
476 start_id = self.repo.commit_ids[-1]
476 start_id = self.repo.commit_ids[-1]
477 end_id = self.repo.commit_ids[0]
477 end_id = self.repo.commit_ids[0]
478 with pytest.raises(RepositoryError):
478 with pytest.raises(RepositoryError):
479 list(self.repo.get_commits(start_id=start_id, end_id=end_id))
479 list(self.repo.get_commits(start_id=start_id, end_id=end_id))
480
480
481 def test_get_commits_raises_for_numerical_ids(self):
481 def test_get_commits_raises_for_numerical_ids(self):
482 with pytest.raises(TypeError):
482 with pytest.raises(TypeError):
483 self.repo.get_commits(start_id=1, end_id=2)
483 self.repo.get_commits(start_id=1, end_id=2)
484
484
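For orientation, a minimal sketch of the start_id/end_id contract the tests above rely on (illustrative only; repo stands for any repository instance such as self.repo):

    start, end = repo.commit_ids[0], repo.commit_ids[-1]
    commits = list(repo.get_commits(start_id=start, end_id=end))  # oldest first
    # swapping start and end raises RepositoryError, unknown hashes raise
    # CommitDoesNotExistError, and integer ids raise TypeError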
485 def test_commit_equality(self):
485 def test_commit_equality(self):
486 commit1 = self.repo.get_commit(self.repo.commit_ids[0])
486 commit1 = self.repo.get_commit(self.repo.commit_ids[0])
487 commit2 = self.repo.get_commit(self.repo.commit_ids[1])
487 commit2 = self.repo.get_commit(self.repo.commit_ids[1])
488
488
489 assert commit1 == commit1
489 assert commit1 == commit1
490 assert commit2 == commit2
490 assert commit2 == commit2
491 assert commit1 != commit2
491 assert commit1 != commit2
492 assert commit2 != commit1
492 assert commit2 != commit1
493 assert commit1 != None
493 assert commit1 != None
494 assert None != commit1
494 assert None != commit1
495 assert 1 != commit1
495 assert 1 != commit1
496 assert 'string' != commit1
496 assert 'string' != commit1
497
497
498
498
499 @pytest.mark.parametrize("filename, expected", [
499 @pytest.mark.parametrize("filename, expected", [
500 ("README.rst", False),
500 ("README.rst", False),
501 ("README", True),
501 ("README", True),
502 ])
502 ])
503 def test_commit_is_link(vcsbackend, filename, expected):
503 def test_commit_is_link(vcsbackend, filename, expected):
504 commit = vcsbackend.repo.get_commit()
504 commit = vcsbackend.repo.get_commit()
505 link_status = commit.is_link(filename)
505 link_status = commit.is_link(filename)
506 assert link_status is expected
506 assert link_status is expected
507
507
508
508
509 @pytest.mark.usefixtures("vcs_repository_support")
509 @pytest.mark.usefixtures("vcs_repository_support")
510 class TestCommitsChanges(BackendTestMixin):
510 class TestCommitsChanges(BackendTestMixin):
511 recreate_repo_per_test = False
511 recreate_repo_per_test = False
512
512
513 @classmethod
513 @classmethod
514 def _get_commits(cls):
514 def _get_commits(cls):
515 return [
515 return [
516 {
516 {
517 'message': u'Initial',
517 'message': u'Initial',
518 'author': u'Joe Doe <joe.doe@example.com>',
518 'author': u'Joe Doe <joe.doe@example.com>',
519 'date': datetime.datetime(2010, 1, 1, 20),
519 'date': datetime.datetime(2010, 1, 1, 20),
520 'added': [
520 'added': [
521 FileNode('foo/bar', content='foo'),
521 FileNode('foo/bar', content='foo'),
522 FileNode('foo/bał', content='foo'),
522 FileNode('foo/bał', content='foo'),
523 FileNode('foobar', content='foo'),
523 FileNode('foobar', content='foo'),
524 FileNode('qwe', content='foo'),
524 FileNode('qwe', content='foo'),
525 ],
525 ],
526 },
526 },
527 {
527 {
528 'message': u'Massive changes',
528 'message': u'Massive changes',
529 'author': u'Joe Doe <joe.doe@example.com>',
529 'author': u'Joe Doe <joe.doe@example.com>',
530 'date': datetime.datetime(2010, 1, 1, 22),
530 'date': datetime.datetime(2010, 1, 1, 22),
531 'added': [FileNode('fallout', content='War never changes')],
531 'added': [FileNode('fallout', content='War never changes')],
532 'changed': [
532 'changed': [
533 FileNode('foo/bar', content='baz'),
533 FileNode('foo/bar', content='baz'),
534 FileNode('foobar', content='baz'),
534 FileNode('foobar', content='baz'),
535 ],
535 ],
536 'removed': [FileNode('qwe')],
536 'removed': [FileNode('qwe')],
537 },
537 },
538 ]
538 ]
539
539
540 def test_initial_commit(self, local_dt_to_utc):
540 def test_initial_commit(self, local_dt_to_utc):
541 commit = self.repo.get_commit(commit_idx=0)
541 commit = self.repo.get_commit(commit_idx=0)
542 assert set(commit.added) == set([
542 assert set(commit.added) == set([
543 commit.get_node('foo/bar'),
543 commit.get_node('foo/bar'),
544 commit.get_node('foo/bał'),
544 commit.get_node('foo/bał'),
545 commit.get_node('foobar'),
545 commit.get_node('foobar'),
546 commit.get_node('qwe'),
546 commit.get_node('qwe'),
547 ])
547 ])
548 assert set(commit.changed) == set()
548 assert set(commit.changed) == set()
549 assert set(commit.removed) == set()
549 assert set(commit.removed) == set()
550 assert set(commit.affected_files) == set(
550 assert set(commit.affected_files) == set(
551 ['foo/bar', 'foo/bał', 'foobar', 'qwe'])
551 ['foo/bar', 'foo/bał', 'foobar', 'qwe'])
552 assert commit.date == local_dt_to_utc(
552 assert commit.date == local_dt_to_utc(
553 datetime.datetime(2010, 1, 1, 20, 0))
553 datetime.datetime(2010, 1, 1, 20, 0))
554
554
555 def test_head_added(self):
555 def test_head_added(self):
556 commit = self.repo.get_commit()
556 commit = self.repo.get_commit()
557 assert isinstance(commit.added, AddedFileNodesGenerator)
557 assert isinstance(commit.added, AddedFileNodesGenerator)
558 assert set(commit.added) == set([commit.get_node('fallout')])
558 assert set(commit.added) == set([commit.get_node('fallout')])
559 assert isinstance(commit.changed, ChangedFileNodesGenerator)
559 assert isinstance(commit.changed, ChangedFileNodesGenerator)
560 assert set(commit.changed) == set([
560 assert set(commit.changed) == set([
561 commit.get_node('foo/bar'),
561 commit.get_node('foo/bar'),
562 commit.get_node('foobar'),
562 commit.get_node('foobar'),
563 ])
563 ])
564 assert isinstance(commit.removed, RemovedFileNodesGenerator)
564 assert isinstance(commit.removed, RemovedFileNodesGenerator)
565 assert len(commit.removed) == 1
565 assert len(commit.removed) == 1
566 assert list(commit.removed)[0].path == 'qwe'
566 assert list(commit.removed)[0].path == 'qwe'
567
567
568 def test_get_filemode(self):
568 def test_get_filemode(self):
569 commit = self.repo.get_commit()
569 commit = self.repo.get_commit()
570 assert FILEMODE_DEFAULT == commit.get_file_mode('foo/bar')
570 assert FILEMODE_DEFAULT == commit.get_file_mode('foo/bar')
571
571
572 def test_get_filemode_non_ascii(self):
572 def test_get_filemode_non_ascii(self):
573 commit = self.repo.get_commit()
573 commit = self.repo.get_commit()
574 assert FILEMODE_DEFAULT == commit.get_file_mode('foo/bał')
574 assert FILEMODE_DEFAULT == commit.get_file_mode('foo/bał')
575 assert FILEMODE_DEFAULT == commit.get_file_mode(u'foo/bał')
575 assert FILEMODE_DEFAULT == commit.get_file_mode(u'foo/bał')
576
576
577 def test_get_path_history(self):
577 def test_get_path_history(self):
578 commit = self.repo.get_commit()
578 commit = self.repo.get_commit()
579 history = commit.get_path_history('foo/bar')
579 history = commit.get_path_history('foo/bar')
580 assert len(history) == 2
580 assert len(history) == 2
581
581
582 def test_get_path_history_with_limit(self):
582 def test_get_path_history_with_limit(self):
583 commit = self.repo.get_commit()
583 commit = self.repo.get_commit()
584 history = commit.get_path_history('foo/bar', limit=1)
584 history = commit.get_path_history('foo/bar', limit=1)
585 assert len(history) == 1
585 assert len(history) == 1
586
586
587 def test_get_path_history_first_commit(self):
587 def test_get_path_history_first_commit(self):
588 commit = self.repo[0]
588 commit = self.repo[0]
589 history = commit.get_path_history('foo/bar')
589 history = commit.get_path_history('foo/bar')
590 assert len(history) == 1
590 assert len(history) == 1
591
591
592
592
593 def assert_text_equal(expected, given):
593 def assert_text_equal(expected, given):
594 assert expected == given
594 assert expected == given
595 assert isinstance(expected, unicode)
595 assert isinstance(expected, unicode)
596 assert isinstance(given, unicode)
596 assert isinstance(given, unicode)
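As a quick recap of the change-set accessors exercised by TestCommitsChanges above, a short illustrative sketch (not part of the test module; commit stands for any backend commit object):

    commit = repo.get_commit()
    added = [node.path for node in commit.added]      # AddedFileNodesGenerator
    changed = [node.path for node in commit.changed]  # ChangedFileNodesGenerator
    removed = [node.path for node in commit.removed]  # RemovedFileNodesGenerator
    touched = set(commit.affected_files)              # plain path strings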
@@ -1,1188 +1,1188 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.lib.utils import make_db_config
26 from rhodecode.lib.utils import make_db_config
27 from rhodecode.lib.vcs import backends
27 from rhodecode.lib.vcs import backends
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 Reference, MergeResponse, MergeFailureReason)
29 Reference, MergeResponse, MergeFailureReason)
30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
31 from rhodecode.lib.vcs.exceptions import (
31 from rhodecode.lib.vcs.exceptions import (
32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
35
35
36
36
37 pytestmark = pytest.mark.backends("hg")
37 pytestmark = pytest.mark.backends("hg")
38
38
39
39
40 def repo_path_generator():
40 def repo_path_generator():
41 """
41 """
42 Return a different path to be used for cloning repos.
42 Return a different path to be used for cloning repos.
43 """
43 """
44 i = 0
44 i = 0
45 while True:
45 while True:
46 i += 1
46 i += 1
47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
48
48
49 REPO_PATH_GENERATOR = repo_path_generator()
49 REPO_PATH_GENERATOR = repo_path_generator()
50
50
51
51
52 @pytest.fixture(scope='class', autouse=True)
52 @pytest.fixture(scope='class', autouse=True)
53 def repo(request, baseapp):
53 def repo(request, baseapp):
54 repo = MercurialRepository(TEST_HG_REPO)
54 repo = MercurialRepository(TEST_HG_REPO)
55 if request.cls:
55 if request.cls:
56 request.cls.repo = repo
56 request.cls.repo = repo
57 return repo
57 return repo
58
58
59
59
60 class TestMercurialRepository:
60 class TestMercurialRepository:
61
61
62 # pylint: disable=protected-access
62 # pylint: disable=protected-access
63
63
64 def get_clone_repo(self):
64 def get_clone_repo(self):
65 """
65 """
66 Return a clone of the base repo.
66 Return a clone of the base repo.
67 """
67 """
68 clone_path = next(REPO_PATH_GENERATOR)
68 clone_path = next(REPO_PATH_GENERATOR)
69 repo_clone = MercurialRepository(
69 repo_clone = MercurialRepository(
70 clone_path, create=True, src_url=self.repo.path)
70 clone_path, create=True, src_url=self.repo.path)
71
71
72 return repo_clone
72 return repo_clone
73
73
74 def get_empty_repo(self):
74 def get_empty_repo(self):
75 """
75 """
76 Return an empty repo.
76 Return an empty repo.
77 """
77 """
78 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
78 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
79
79
80 def test_wrong_repo_path(self):
80 def test_wrong_repo_path(self):
81 wrong_repo_path = '/tmp/errorrepo_hg'
81 wrong_repo_path = '/tmp/errorrepo_hg'
82 with pytest.raises(RepositoryError):
82 with pytest.raises(RepositoryError):
83 MercurialRepository(wrong_repo_path)
83 MercurialRepository(wrong_repo_path)
84
84
85 def test_unicode_path_repo(self):
85 def test_unicode_path_repo(self):
86 with pytest.raises(VCSError):
86 with pytest.raises(VCSError):
87 MercurialRepository(u'iShouldFail')
87 MercurialRepository(u'iShouldFail')
88
88
89 def test_unicode_commit_id(self):
89 def test_unicode_commit_id(self):
90 with pytest.raises(CommitDoesNotExistError):
90 with pytest.raises(CommitDoesNotExistError):
91 self.repo.get_commit(u'unicode-commit-id')
91 self.repo.get_commit(u'unicode-commit-id')
92 with pytest.raises(CommitDoesNotExistError):
92 with pytest.raises(CommitDoesNotExistError):
93 self.repo.get_commit(u'unícøde-spéçial-chäråcter-commit-id')
93 self.repo.get_commit(u'unícøde-spéçial-chäråcter-commit-id')
94
94
95 def test_unicode_bookmark(self):
95 def test_unicode_bookmark(self):
96 self.repo.bookmark(u'unicode-bookmark')
96 self.repo.bookmark(u'unicode-bookmark')
97 self.repo.bookmark(u'unícøde-spéçial-chäråcter-bookmark')
97 self.repo.bookmark(u'unícøde-spéçial-chäråcter-bookmark')
98
98
99 def test_unicode_branch(self):
99 def test_unicode_branch(self):
100 with pytest.raises(KeyError):
100 with pytest.raises(KeyError):
101 self.repo.branches[u'unicode-branch']
101 self.repo.branches[u'unicode-branch']
102 with pytest.raises(KeyError):
102 with pytest.raises(KeyError):
103 self.repo.branches[u'unícøde-spéçial-chäråcter-branch']
103 self.repo.branches[u'unícøde-spéçial-chäråcter-branch']
104
104
105 def test_repo_clone(self):
105 def test_repo_clone(self):
106 if os.path.exists(TEST_HG_REPO_CLONE):
106 if os.path.exists(TEST_HG_REPO_CLONE):
107 self.fail(
107 self.fail(
108 'Cannot test mercurial clone repo as location %s already '
108 'Cannot test mercurial clone repo as location %s already '
109 'exists. You should manually remove it first.'
109 'exists. You should manually remove it first.'
110 % TEST_HG_REPO_CLONE)
110 % TEST_HG_REPO_CLONE)
111
111
112 repo = MercurialRepository(TEST_HG_REPO)
112 repo = MercurialRepository(TEST_HG_REPO)
113 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
113 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
114 src_url=TEST_HG_REPO)
114 src_url=TEST_HG_REPO)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116 # Checking hashes of commits should be enough
116 # Checking hashes of commits should be enough
117 for commit in repo.get_commits():
117 for commit in repo.get_commits():
118 raw_id = commit.raw_id
118 raw_id = commit.raw_id
119 assert raw_id == repo_clone.get_commit(raw_id).raw_id
119 assert raw_id == repo_clone.get_commit(raw_id).raw_id
120
120
121 def test_repo_clone_with_update(self):
121 def test_repo_clone_with_update(self):
122 repo = MercurialRepository(TEST_HG_REPO)
122 repo = MercurialRepository(TEST_HG_REPO)
123 repo_clone = MercurialRepository(
123 repo_clone = MercurialRepository(
124 TEST_HG_REPO_CLONE + '_w_update',
124 TEST_HG_REPO_CLONE + '_w_update',
125 src_url=TEST_HG_REPO, do_workspace_checkout=True)
125 src_url=TEST_HG_REPO, do_workspace_checkout=True)
126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127
127
128 # check if current workdir was updated
128 # check if current workdir was updated
129 assert os.path.isfile(
129 assert os.path.isfile(
130 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
130 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
131
131
132 def test_repo_clone_without_update(self):
132 def test_repo_clone_without_update(self):
133 repo = MercurialRepository(TEST_HG_REPO)
133 repo = MercurialRepository(TEST_HG_REPO)
134 repo_clone = MercurialRepository(
134 repo_clone = MercurialRepository(
135 TEST_HG_REPO_CLONE + '_wo_update',
135 TEST_HG_REPO_CLONE + '_wo_update',
136 src_url=TEST_HG_REPO, do_workspace_checkout=False)
136 src_url=TEST_HG_REPO, do_workspace_checkout=False)
137 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
137 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
138 assert not os.path.isfile(
138 assert not os.path.isfile(
139 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
139 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
140
140
141 def test_commit_ids(self):
141 def test_commit_ids(self):
142 # there are 21 commits at bitbucket now
142 # there are 21 commits at bitbucket now
143 # so we can assume they will remain available from now on
143 # so we can assume they will remain available from now on
144 subset = set([
144 subset = set([
145 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
145 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
146 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
146 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
147 '6cba7170863a2411822803fa77a0a264f1310b35',
147 '6cba7170863a2411822803fa77a0a264f1310b35',
148 '56349e29c2af3ac913b28bde9a2c6154436e615b',
148 '56349e29c2af3ac913b28bde9a2c6154436e615b',
149 '2dda4e345facb0ccff1a191052dd1606dba6781d',
149 '2dda4e345facb0ccff1a191052dd1606dba6781d',
150 '6fff84722075f1607a30f436523403845f84cd9e',
150 '6fff84722075f1607a30f436523403845f84cd9e',
151 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
151 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
152 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
152 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
153 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
153 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
154 'be90031137367893f1c406e0a8683010fd115b79',
154 'be90031137367893f1c406e0a8683010fd115b79',
155 'db8e58be770518cbb2b1cdfa69146e47cd481481',
155 'db8e58be770518cbb2b1cdfa69146e47cd481481',
156 '84478366594b424af694a6c784cb991a16b87c21',
156 '84478366594b424af694a6c784cb991a16b87c21',
157 '17f8e105dddb9f339600389c6dc7175d395a535c',
157 '17f8e105dddb9f339600389c6dc7175d395a535c',
158 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
158 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
159 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
159 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
160 '786facd2c61deb9cf91e9534735124fb8fc11842',
160 '786facd2c61deb9cf91e9534735124fb8fc11842',
161 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
161 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
162 'aa6a0de05b7612707db567078e130a6cd114a9a7',
162 'aa6a0de05b7612707db567078e130a6cd114a9a7',
163 'eada5a770da98ab0dd7325e29d00e0714f228d09'
163 'eada5a770da98ab0dd7325e29d00e0714f228d09'
164 ])
164 ])
165 assert subset.issubset(set(self.repo.commit_ids))
165 assert subset.issubset(set(self.repo.commit_ids))
166
166
167 # check if we have the proper order of commits
167 # check if we have the proper order of commits
168 org = [
168 org = [
169 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
169 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
170 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
170 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
171 '6cba7170863a2411822803fa77a0a264f1310b35',
171 '6cba7170863a2411822803fa77a0a264f1310b35',
172 '56349e29c2af3ac913b28bde9a2c6154436e615b',
172 '56349e29c2af3ac913b28bde9a2c6154436e615b',
173 '2dda4e345facb0ccff1a191052dd1606dba6781d',
173 '2dda4e345facb0ccff1a191052dd1606dba6781d',
174 '6fff84722075f1607a30f436523403845f84cd9e',
174 '6fff84722075f1607a30f436523403845f84cd9e',
175 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
175 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
176 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
176 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
177 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
177 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
178 'be90031137367893f1c406e0a8683010fd115b79',
178 'be90031137367893f1c406e0a8683010fd115b79',
179 'db8e58be770518cbb2b1cdfa69146e47cd481481',
179 'db8e58be770518cbb2b1cdfa69146e47cd481481',
180 '84478366594b424af694a6c784cb991a16b87c21',
180 '84478366594b424af694a6c784cb991a16b87c21',
181 '17f8e105dddb9f339600389c6dc7175d395a535c',
181 '17f8e105dddb9f339600389c6dc7175d395a535c',
182 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
182 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
183 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
183 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
184 '786facd2c61deb9cf91e9534735124fb8fc11842',
184 '786facd2c61deb9cf91e9534735124fb8fc11842',
185 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
185 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
186 'aa6a0de05b7612707db567078e130a6cd114a9a7',
186 'aa6a0de05b7612707db567078e130a6cd114a9a7',
187 'eada5a770da98ab0dd7325e29d00e0714f228d09',
187 'eada5a770da98ab0dd7325e29d00e0714f228d09',
188 '2c1885c735575ca478bf9e17b0029dca68824458',
188 '2c1885c735575ca478bf9e17b0029dca68824458',
189 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
189 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
190 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
190 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
191 '4fb8326d78e5120da2c7468dcf7098997be385da',
191 '4fb8326d78e5120da2c7468dcf7098997be385da',
192 '62b4a097164940bd66030c4db51687f3ec035eed',
192 '62b4a097164940bd66030c4db51687f3ec035eed',
193 '536c1a19428381cfea92ac44985304f6a8049569',
193 '536c1a19428381cfea92ac44985304f6a8049569',
194 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
194 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
195 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
195 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
196 'f8940bcb890a98c4702319fbe36db75ea309b475',
196 'f8940bcb890a98c4702319fbe36db75ea309b475',
197 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
197 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
198 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
198 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
199 'ee87846a61c12153b51543bf860e1026c6d3dcba',
199 'ee87846a61c12153b51543bf860e1026c6d3dcba',
200 ]
200 ]
201 assert org == self.repo.commit_ids[:31]
201 assert org == self.repo.commit_ids[:31]
202
202
203 def test_iter_slice(self):
203 def test_iter_slice(self):
204 sliced = list(self.repo[:10])
204 sliced = list(self.repo[:10])
205 itered = list(self.repo)[:10]
205 itered = list(self.repo)[:10]
206 assert sliced == itered
206 assert sliced == itered
207
207
208 def test_slicing(self):
208 def test_slicing(self):
209 # expected slice sizes: 4, 1, 5, 10, 95
209 # expected slice sizes: 4, 1, 5, 10, 95
210 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
210 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
211 (10, 20, 10), (5, 100, 95)]:
211 (10, 20, 10), (5, 100, 95)]:
212 indexes = list(self.repo[sfrom:sto])
212 indexes = list(self.repo[sfrom:sto])
213 assert len(indexes) == size
213 assert len(indexes) == size
214 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
214 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
215 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
215 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
216
216
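The indexing behaviour asserted by test_iter_slice and test_slicing can be condensed into the following sketch (illustrative only):

    repo = MercurialRepository(TEST_HG_REPO)
    first_ten = list(repo[:10])          # same commits as list(repo)[:10]
    assert first_ten[0] == repo.get_commit(commit_idx=0)
    assert first_ten[-1] == repo.get_commit(commit_idx=9)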
217 def test_branches(self):
217 def test_branches(self):
218 # TODO: Need more tests here
218 # TODO: Need more tests here
219
219
220 # active branches
220 # active branches
221 assert 'default' in self.repo.branches
221 assert 'default' in self.repo.branches
222 assert 'stable' in self.repo.branches
222 assert 'stable' in self.repo.branches
223
223
224 # closed
224 # closed
225 assert 'git' in self.repo._get_branches(closed=True)
225 assert 'git' in self.repo._get_branches(closed=True)
226 assert 'web' in self.repo._get_branches(closed=True)
226 assert 'web' in self.repo._get_branches(closed=True)
227
227
228 for name, id in self.repo.branches.items():
228 for name, id in self.repo.branches.items():
229 assert isinstance(self.repo.get_commit(id), MercurialCommit)
229 assert isinstance(self.repo.get_commit(id), MercurialCommit)
230
230
231 def test_tip_in_tags(self):
231 def test_tip_in_tags(self):
232 # tip is always a tag
232 # tip is always a tag
233 assert 'tip' in self.repo.tags
233 assert 'tip' in self.repo.tags
234
234
235 def test_tip_commit_in_tags(self):
235 def test_tip_commit_in_tags(self):
236 tip = self.repo.get_commit()
236 tip = self.repo.get_commit()
237 assert self.repo.tags['tip'] == tip.raw_id
237 assert self.repo.tags['tip'] == tip.raw_id
238
238
239 def test_initial_commit(self):
239 def test_initial_commit(self):
240 init_commit = self.repo.get_commit(commit_idx=0)
240 init_commit = self.repo.get_commit(commit_idx=0)
241 init_author = init_commit.author
241 init_author = init_commit.author
242
242
243 assert init_commit.message == 'initial import'
243 assert init_commit.message == 'initial import'
244 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
244 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
245 assert init_author == init_commit.committer
245 assert init_author == init_commit.committer
246 assert sorted(init_commit._file_paths) == sorted([
246 assert sorted(init_commit._file_paths) == sorted([
247 'vcs/__init__.py',
247 'vcs/__init__.py',
248 'vcs/backends/BaseRepository.py',
248 'vcs/backends/BaseRepository.py',
249 'vcs/backends/__init__.py',
249 'vcs/backends/__init__.py',
250 ])
250 ])
251 assert sorted(init_commit._dir_paths) == sorted(
251 assert sorted(init_commit._dir_paths) == sorted(
252 ['', 'vcs', 'vcs/backends'])
252 ['', 'vcs', 'vcs/backends'])
253
253
254 assert init_commit._dir_paths + init_commit._file_paths == \
254 assert init_commit._dir_paths + init_commit._file_paths == \
255 init_commit._paths
255 init_commit._paths
256
256
257 with pytest.raises(NodeDoesNotExistError):
257 with pytest.raises(NodeDoesNotExistError):
258 init_commit.get_node(path='foobar')
258 init_commit.get_node(path='foobar')
259
259
260 node = init_commit.get_node('vcs/')
260 node = init_commit.get_node('vcs/')
261 assert hasattr(node, 'kind')
261 assert hasattr(node, 'kind')
262 assert node.kind == NodeKind.DIR
262 assert node.kind == NodeKind.DIR
263
263
264 node = init_commit.get_node('vcs')
264 node = init_commit.get_node('vcs')
265 assert hasattr(node, 'kind')
265 assert hasattr(node, 'kind')
266 assert node.kind == NodeKind.DIR
266 assert node.kind == NodeKind.DIR
267
267
268 node = init_commit.get_node('vcs/__init__.py')
268 node = init_commit.get_node('vcs/__init__.py')
269 assert hasattr(node, 'kind')
269 assert hasattr(node, 'kind')
270 assert node.kind == NodeKind.FILE
270 assert node.kind == NodeKind.FILE
271
271
272 def test_not_existing_commit(self):
272 def test_not_existing_commit(self):
273 # rawid
273 # rawid
274 with pytest.raises(RepositoryError):
274 with pytest.raises(RepositoryError):
275 self.repo.get_commit('abcd' * 10)
275 self.repo.get_commit('abcd' * 10)
276 # shortid
276 # shortid
277 with pytest.raises(RepositoryError):
277 with pytest.raises(RepositoryError):
278 self.repo.get_commit('erro' * 4)
278 self.repo.get_commit('erro' * 4)
279 # numeric
279 # numeric
280 with pytest.raises(RepositoryError):
280 with pytest.raises(RepositoryError):
281 self.repo.get_commit(commit_idx=self.repo.count() + 1)
281 self.repo.get_commit(commit_idx=self.repo.count() + 1)
282
282
283 # Small chance we ever get to this one
283 # Small chance we ever get to this one
284 idx = pow(2, 30)
284 idx = pow(2, 30)
285 with pytest.raises(RepositoryError):
285 with pytest.raises(RepositoryError):
286 self.repo.get_commit(commit_idx=idx)
286 self.repo.get_commit(commit_idx=idx)
287
287
288 def test_commit10(self):
288 def test_commit10(self):
289 commit10 = self.repo.get_commit(commit_idx=10)
289 commit10 = self.repo.get_commit(commit_idx=10)
290 README = """===
290 README = """===
291 VCS
291 VCS
292 ===
292 ===
293
293
294 Various Version Control System management abstraction layer for Python.
294 Various Version Control System management abstraction layer for Python.
295
295
296 Introduction
296 Introduction
297 ------------
297 ------------
298
298
299 TODO: To be written...
299 TODO: To be written...
300
300
301 """
301 """
302 node = commit10.get_node('README.rst')
302 node = commit10.get_node('README.rst')
303 assert node.kind == NodeKind.FILE
303 assert node.kind == NodeKind.FILE
304 assert node.content == README
304 assert node.content == README
305
305
306 def test_local_clone(self):
306 def test_local_clone(self):
307 clone_path = next(REPO_PATH_GENERATOR)
307 clone_path = next(REPO_PATH_GENERATOR)
308 self.repo._local_clone(clone_path)
308 self.repo._local_clone(clone_path)
309 repo_clone = MercurialRepository(clone_path)
309 repo_clone = MercurialRepository(clone_path)
310
310
311 assert self.repo.commit_ids == repo_clone.commit_ids
311 assert self.repo.commit_ids == repo_clone.commit_ids
312
312
313 def test_local_clone_fails_if_target_exists(self):
313 def test_local_clone_fails_if_target_exists(self):
314 with pytest.raises(RepositoryError):
314 with pytest.raises(RepositoryError):
315 self.repo._local_clone(self.repo.path)
315 self.repo._local_clone(self.repo.path)
316
316
317 def test_update(self):
317 def test_update(self):
318 repo_clone = self.get_clone_repo()
318 repo_clone = self.get_clone_repo()
319 branches = repo_clone.branches
319 branches = repo_clone.branches
320
320
321 repo_clone._update('default')
321 repo_clone._update('default')
322 assert branches['default'] == repo_clone._identify()
322 assert branches['default'] == repo_clone._identify()
323 repo_clone._update('stable')
323 repo_clone._update('stable')
324 assert branches['stable'] == repo_clone._identify()
324 assert branches['stable'] == repo_clone._identify()
325
325
326 def test_local_pull_branch(self):
326 def test_local_pull_branch(self):
327 target_repo = self.get_empty_repo()
327 target_repo = self.get_empty_repo()
328 source_repo = self.get_clone_repo()
328 source_repo = self.get_clone_repo()
329
329
330 default = Reference(
330 default = Reference(
331 'branch', 'default', source_repo.branches['default'])
331 'branch', 'default', source_repo.branches['default'])
332 target_repo._local_pull(source_repo.path, default)
332 target_repo._local_pull(source_repo.path, default)
333 target_repo = MercurialRepository(target_repo.path)
333 target_repo = MercurialRepository(target_repo.path)
334 assert (target_repo.branches['default'] ==
334 assert (target_repo.branches['default'] ==
335 source_repo.branches['default'])
335 source_repo.branches['default'])
336
336
337 stable = Reference('branch', 'stable', source_repo.branches['stable'])
337 stable = Reference('branch', 'stable', source_repo.branches['stable'])
338 target_repo._local_pull(source_repo.path, stable)
338 target_repo._local_pull(source_repo.path, stable)
339 target_repo = MercurialRepository(target_repo.path)
339 target_repo = MercurialRepository(target_repo.path)
340 assert target_repo.branches['stable'] == source_repo.branches['stable']
340 assert target_repo.branches['stable'] == source_repo.branches['stable']
341
341
342 def test_local_pull_bookmark(self):
342 def test_local_pull_bookmark(self):
343 target_repo = self.get_empty_repo()
343 target_repo = self.get_empty_repo()
344 source_repo = self.get_clone_repo()
344 source_repo = self.get_clone_repo()
345
345
346 commits = list(source_repo.get_commits(branch_name='default'))
346 commits = list(source_repo.get_commits(branch_name='default'))
347 foo1_id = commits[-5].raw_id
347 foo1_id = commits[-5].raw_id
348 foo1 = Reference('book', 'foo1', foo1_id)
348 foo1 = Reference('book', 'foo1', foo1_id)
349 source_repo._update(foo1_id)
349 source_repo._update(foo1_id)
350 source_repo.bookmark('foo1')
350 source_repo.bookmark('foo1')
351
351
352 foo2_id = commits[-3].raw_id
352 foo2_id = commits[-3].raw_id
353 foo2 = Reference('book', 'foo2', foo2_id)
353 foo2 = Reference('book', 'foo2', foo2_id)
354 source_repo._update(foo2_id)
354 source_repo._update(foo2_id)
355 source_repo.bookmark('foo2')
355 source_repo.bookmark('foo2')
356
356
357 target_repo._local_pull(source_repo.path, foo1)
357 target_repo._local_pull(source_repo.path, foo1)
358 target_repo = MercurialRepository(target_repo.path)
358 target_repo = MercurialRepository(target_repo.path)
359 assert target_repo.branches['default'] == commits[-5].raw_id
359 assert target_repo.branches['default'] == commits[-5].raw_id
360
360
361 target_repo._local_pull(source_repo.path, foo2)
361 target_repo._local_pull(source_repo.path, foo2)
362 target_repo = MercurialRepository(target_repo.path)
362 target_repo = MercurialRepository(target_repo.path)
363 assert target_repo.branches['default'] == commits[-3].raw_id
363 assert target_repo.branches['default'] == commits[-3].raw_id
364
364
365 def test_local_pull_commit(self):
365 def test_local_pull_commit(self):
366 target_repo = self.get_empty_repo()
366 target_repo = self.get_empty_repo()
367 source_repo = self.get_clone_repo()
367 source_repo = self.get_clone_repo()
368
368
369 commits = list(source_repo.get_commits(branch_name='default'))
369 commits = list(source_repo.get_commits(branch_name='default'))
370 commit_id = commits[-5].raw_id
370 commit_id = commits[-5].raw_id
371 commit = Reference('rev', commit_id, commit_id)
371 commit = Reference('rev', commit_id, commit_id)
372 target_repo._local_pull(source_repo.path, commit)
372 target_repo._local_pull(source_repo.path, commit)
373 target_repo = MercurialRepository(target_repo.path)
373 target_repo = MercurialRepository(target_repo.path)
374 assert target_repo.branches['default'] == commit_id
374 assert target_repo.branches['default'] == commit_id
375
375
376 commit_id = commits[-3].raw_id
376 commit_id = commits[-3].raw_id
377 commit = Reference('rev', commit_id, commit_id)
377 commit = Reference('rev', commit_id, commit_id)
378 target_repo._local_pull(source_repo.path, commit)
378 target_repo._local_pull(source_repo.path, commit)
379 target_repo = MercurialRepository(target_repo.path)
379 target_repo = MercurialRepository(target_repo.path)
380 assert target_repo.branches['default'] == commit_id
380 assert target_repo.branches['default'] == commit_id
381
381
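All of the local-pull tests above build the same lightweight Reference value; a sketch of the shape they assume (illustrative only, names taken from the tests):

    # Reference(type, name, commit_id) where type is 'branch', 'book' or 'rev'
    ref = Reference('branch', 'default', source_repo.branches['default'])
    target_repo._local_pull(source_repo.path, ref)
    # pulling from the repository's own path raises ValueError (next test below)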
382 def test_local_pull_from_same_repo(self):
382 def test_local_pull_from_same_repo(self):
383 reference = Reference('branch', 'default', None)
383 reference = Reference('branch', 'default', None)
384 with pytest.raises(ValueError):
384 with pytest.raises(ValueError):
385 self.repo._local_pull(self.repo.path, reference)
385 self.repo._local_pull(self.repo.path, reference)
386
386
387 def test_validate_pull_reference_raises_on_missing_reference(
387 def test_validate_pull_reference_raises_on_missing_reference(
388 self, vcsbackend_hg):
388 self, vcsbackend_hg):
389 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
389 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
390 reference = Reference(
390 reference = Reference(
391 'book', 'invalid_reference', 'a' * 40)
391 'book', 'invalid_reference', 'a' * 40)
392
392
393 with pytest.raises(CommitDoesNotExistError):
393 with pytest.raises(CommitDoesNotExistError):
394 target_repo._validate_pull_reference(reference)
394 target_repo._validate_pull_reference(reference)
395
395
396 def test_heads(self):
396 def test_heads(self):
397 assert set(self.repo._heads()) == set(self.repo.branches.values())
397 assert set(self.repo._heads()) == set(self.repo.branches.values())
398
398
399 def test_ancestor(self):
399 def test_ancestor(self):
400 commits = [
400 commits = [
401 c.raw_id for c in self.repo.get_commits(branch_name='default')]
401 c.raw_id for c in self.repo.get_commits(branch_name='default')]
402 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
402 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
403 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
403 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
404
404
405 def test_local_push(self):
405 def test_local_push(self):
406 target_repo = self.get_empty_repo()
406 target_repo = self.get_empty_repo()
407
407
408 revisions = list(self.repo.get_commits(branch_name='default'))
408 revisions = list(self.repo.get_commits(branch_name='default'))
409 revision = revisions[-5].raw_id
409 revision = revisions[-5].raw_id
410 self.repo._local_push(revision, target_repo.path)
410 self.repo._local_push(revision, target_repo.path)
411
411
412 target_repo = MercurialRepository(target_repo.path)
412 target_repo = MercurialRepository(target_repo.path)
413
413
414 assert target_repo.branches['default'] == revision
414 assert target_repo.branches['default'] == revision
415
415
416 def test_hooks_can_be_enabled_for_local_push(self):
416 def test_hooks_can_be_enabled_for_local_push(self):
417 revision = 'deadbeef'
417 revision = 'deadbeef'
418 repo_path = 'test_group/test_repo'
418 repo_path = 'test_group/test_repo'
419 with mock.patch.object(self.repo, '_remote') as remote_mock:
419 with mock.patch.object(self.repo, '_remote') as remote_mock:
420 self.repo._local_push(revision, repo_path, enable_hooks=True)
420 self.repo._local_push(revision, repo_path, enable_hooks=True)
421 remote_mock.push.assert_called_once_with(
421 remote_mock.push.assert_called_once_with(
422 [revision], repo_path, hooks=True, push_branches=False)
422 [revision], repo_path, hooks=True, push_branches=False)
423
423
424 def test_local_merge(self, vcsbackend_hg):
424 def test_local_merge(self, vcsbackend_hg):
425 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
425 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
426 source_repo = vcsbackend_hg.clone_repo(target_repo)
426 source_repo = vcsbackend_hg.clone_repo(target_repo)
427 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
427 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
428 target_repo = MercurialRepository(target_repo.path)
428 target_repo = MercurialRepository(target_repo.path)
429 target_rev = target_repo.branches['default']
429 target_rev = target_repo.branches['default']
430 target_ref = Reference(
430 target_ref = Reference(
431 type='branch', name='default', commit_id=target_rev)
431 type='branch', name='default', commit_id=target_rev)
432 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
432 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
433 source_repo = MercurialRepository(source_repo.path)
433 source_repo = MercurialRepository(source_repo.path)
434 source_rev = source_repo.branches['default']
434 source_rev = source_repo.branches['default']
435 source_ref = Reference(
435 source_ref = Reference(
436 type='branch', name='default', commit_id=source_rev)
436 type='branch', name='default', commit_id=source_rev)
437
437
438 target_repo._local_pull(source_repo.path, source_ref)
438 target_repo._local_pull(source_repo.path, source_ref)
439
439
440 merge_message = 'Merge message\n\nDescription:...'
440 merge_message = 'Merge message\n\nDescription:...'
441 user_name = 'Albert Einstein'
441 user_name = 'Albert Einstein'
442 user_email = 'albert@einstein.com'
442 user_email = 'albert@einstein.com'
443 merge_commit_id, needs_push = target_repo._local_merge(
443 merge_commit_id, needs_push = target_repo._local_merge(
444 target_ref, merge_message, user_name, user_email, source_ref)
444 target_ref, merge_message, user_name, user_email, source_ref)
445 assert needs_push
445 assert needs_push
446
446
447 target_repo = MercurialRepository(target_repo.path)
447 target_repo = MercurialRepository(target_repo.path)
448 assert target_repo.commit_ids[-3] == target_rev
448 assert target_repo.commit_ids[-3] == target_rev
449 assert target_repo.commit_ids[-2] == source_rev
449 assert target_repo.commit_ids[-2] == source_rev
450 last_commit = target_repo.get_commit(merge_commit_id)
450 last_commit = target_repo.get_commit(merge_commit_id)
451 assert last_commit.message.strip() == merge_message
451 assert last_commit.message.strip() == merge_message
452 assert last_commit.author == '%s <%s>' % (user_name, user_email)
452 assert last_commit.author == '%s <%s>' % (user_name, user_email)
453
453
454 assert not os.path.exists(
454 assert not os.path.exists(
455 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
455 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
456
456
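The _local_merge calls in this group share one contract, summarised here as a hedged sketch (outcomes taken from the assertions in the surrounding tests):

    merge_commit_id, needs_push = target_repo._local_merge(
        target_ref, merge_message, user_name, user_email, source_ref)
    # fast-forward: merge_commit_id == source_ref.commit_id, needs_push is True
    # already integrated: merge_commit_id == target_ref.commit_id, needs_push is False
    # conflict: RepositoryError is raised and no .hg/merge/state file is left behind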
457 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
457 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
458 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
458 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
459 source_repo = vcsbackend_hg.clone_repo(target_repo)
459 source_repo = vcsbackend_hg.clone_repo(target_repo)
460 target_rev = target_repo.branches['default']
460 target_rev = target_repo.branches['default']
461 target_ref = Reference(
461 target_ref = Reference(
462 type='branch', name='default', commit_id=target_rev)
462 type='branch', name='default', commit_id=target_rev)
463 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
463 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
464 source_repo = MercurialRepository(source_repo.path)
464 source_repo = MercurialRepository(source_repo.path)
465 source_rev = source_repo.branches['default']
465 source_rev = source_repo.branches['default']
466 source_ref = Reference(
466 source_ref = Reference(
467 type='branch', name='default', commit_id=source_rev)
467 type='branch', name='default', commit_id=source_rev)
468
468
469 target_repo._local_pull(source_repo.path, source_ref)
469 target_repo._local_pull(source_repo.path, source_ref)
470
470
471 merge_message = 'Merge message\n\nDescription:...'
471 merge_message = 'Merge message\n\nDescription:...'
472 user_name = 'Albert Einstein'
472 user_name = 'Albert Einstein'
473 user_email = 'albert@einstein.com'
473 user_email = 'albert@einstein.com'
474 merge_commit_id, needs_push = target_repo._local_merge(
474 merge_commit_id, needs_push = target_repo._local_merge(
475 target_ref, merge_message, user_name, user_email, source_ref)
475 target_ref, merge_message, user_name, user_email, source_ref)
476 assert merge_commit_id == source_rev
476 assert merge_commit_id == source_rev
477 assert needs_push
477 assert needs_push
478
478
479 target_repo = MercurialRepository(target_repo.path)
479 target_repo = MercurialRepository(target_repo.path)
480 assert target_repo.commit_ids[-2] == target_rev
480 assert target_repo.commit_ids[-2] == target_rev
481 assert target_repo.commit_ids[-1] == source_rev
481 assert target_repo.commit_ids[-1] == source_rev
482
482
483 assert not os.path.exists(
483 assert not os.path.exists(
484 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
484 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
485
485
486 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
486 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
487 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
487 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
488 target_rev = target_repo.branches['default']
488 target_rev = target_repo.branches['default']
489 target_ref = Reference(
489 target_ref = Reference(
490 type='branch', name='default', commit_id=target_rev)
490 type='branch', name='default', commit_id=target_rev)
491
491
492 merge_message = 'Merge message\n\nDescription:...'
492 merge_message = 'Merge message\n\nDescription:...'
493 user_name = 'Albert Einstein'
493 user_name = 'Albert Einstein'
494 user_email = 'albert@einstein.com'
494 user_email = 'albert@einstein.com'
495 merge_commit_id, needs_push = target_repo._local_merge(
495 merge_commit_id, needs_push = target_repo._local_merge(
496 target_ref, merge_message, user_name, user_email, target_ref)
496 target_ref, merge_message, user_name, user_email, target_ref)
497 assert merge_commit_id == target_rev
497 assert merge_commit_id == target_rev
498 assert not needs_push
498 assert not needs_push
499
499
500 target_repo = MercurialRepository(target_repo.path)
500 target_repo = MercurialRepository(target_repo.path)
501 assert target_repo.commit_ids[-1] == target_rev
501 assert target_repo.commit_ids[-1] == target_rev
502
502
503 assert not os.path.exists(
503 assert not os.path.exists(
504 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
504 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
505
505
506 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
506 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
507 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
507 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
508 source_repo = vcsbackend_hg.clone_repo(target_repo)
508 source_repo = vcsbackend_hg.clone_repo(target_repo)
509 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
509 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
510 target_repo = MercurialRepository(target_repo.path)
510 target_repo = MercurialRepository(target_repo.path)
511 target_rev = target_repo.branches['default']
511 target_rev = target_repo.branches['default']
512 target_ref = Reference(
512 target_ref = Reference(
513 type='branch', name='default', commit_id=target_rev)
513 type='branch', name='default', commit_id=target_rev)
514 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
514 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
515 source_repo = MercurialRepository(source_repo.path)
515 source_repo = MercurialRepository(source_repo.path)
516 source_rev = source_repo.branches['default']
516 source_rev = source_repo.branches['default']
517 source_ref = Reference(
517 source_ref = Reference(
518 type='branch', name='default', commit_id=source_rev)
518 type='branch', name='default', commit_id=source_rev)
519
519
520 target_repo._local_pull(source_repo.path, source_ref)
520 target_repo._local_pull(source_repo.path, source_ref)
521 with pytest.raises(RepositoryError):
521 with pytest.raises(RepositoryError):
522 target_repo._local_merge(
522 target_repo._local_merge(
523 target_ref, 'merge_message', 'user name', 'user@name.com',
523 target_ref, 'merge_message', 'user name', 'user@name.com',
524 source_ref)
524 source_ref)
525
525
526 # Check we are not left in an intermediate merge state
526 # Check we are not left in an intermediate merge state
527 assert not os.path.exists(
527 assert not os.path.exists(
528 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
528 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
529
529
530 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
530 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
531 commits = [
531 commits = [
532 {'message': 'a'},
532 {'message': 'a'},
533 {'message': 'b', 'branch': 'b'},
533 {'message': 'b', 'branch': 'b'},
534 ]
534 ]
535 repo = backend_hg.create_repo(commits)
535 repo = backend_hg.create_repo(commits)
536 commit_ids = backend_hg.commit_ids
536 commit_ids = backend_hg.commit_ids
537 target_ref = Reference(
537 target_ref = Reference(
538 type='branch', name='default', commit_id=commit_ids['a'])
538 type='branch', name='default', commit_id=commit_ids['a'])
539 source_ref = Reference(
539 source_ref = Reference(
540 type='branch', name='b', commit_id=commit_ids['b'])
540 type='branch', name='b', commit_id=commit_ids['b'])
541 merge_message = 'Merge message\n\nDescription:...'
541 merge_message = 'Merge message\n\nDescription:...'
542 user_name = 'Albert Einstein'
542 user_name = 'Albert Einstein'
543 user_email = 'albert@einstein.com'
543 user_email = 'albert@einstein.com'
544 vcs_repo = repo.scm_instance()
544 vcs_repo = repo.scm_instance()
545 merge_commit_id, needs_push = vcs_repo._local_merge(
545 merge_commit_id, needs_push = vcs_repo._local_merge(
546 target_ref, merge_message, user_name, user_email, source_ref)
546 target_ref, merge_message, user_name, user_email, source_ref)
547 assert merge_commit_id != source_ref.commit_id
547 assert merge_commit_id != source_ref.commit_id
548 assert needs_push is True
548 assert needs_push is True
549 commit = vcs_repo.get_commit(merge_commit_id)
549 commit = vcs_repo.get_commit(merge_commit_id)
550 assert commit.merge is True
550 assert commit.merge is True
551 assert commit.message == merge_message
551 assert commit.message == merge_message
552
552
553 def test_maybe_prepare_merge_workspace(self):
553 def test_maybe_prepare_merge_workspace(self):
554 workspace = self.repo._maybe_prepare_merge_workspace(
554 workspace = self.repo._maybe_prepare_merge_workspace(
555 1, 'pr2', 'unused', 'unused2')
555 1, 'pr2', 'unused', 'unused2')
556
556
557 assert os.path.isdir(workspace)
557 assert os.path.isdir(workspace)
558 workspace_repo = MercurialRepository(workspace)
558 workspace_repo = MercurialRepository(workspace)
559 assert workspace_repo.branches == self.repo.branches
559 assert workspace_repo.branches == self.repo.branches
560
560
561 # Calling it a second time should also succeed
561 # Calling it a second time should also succeed
562 workspace = self.repo._maybe_prepare_merge_workspace(
562 workspace = self.repo._maybe_prepare_merge_workspace(
563 1, 'pr2', 'unused', 'unused2')
563 1, 'pr2', 'unused', 'unused2')
564 assert os.path.isdir(workspace)
564 assert os.path.isdir(workspace)
565
565
566 def test_cleanup_merge_workspace(self):
566 def test_cleanup_merge_workspace(self):
567 workspace = self.repo._maybe_prepare_merge_workspace(
567 workspace = self.repo._maybe_prepare_merge_workspace(
568 1, 'pr3', 'unused', 'unused2')
568 1, 'pr3', 'unused', 'unused2')
569
569
570 assert os.path.isdir(workspace)
570 assert os.path.isdir(workspace)
571 self.repo.cleanup_merge_workspace(1, 'pr3')
571 self.repo.cleanup_merge_workspace(1, 'pr3')
572
572
573 assert not os.path.exists(workspace)
573 assert not os.path.exists(workspace)
574
574
575 def test_cleanup_merge_workspace_invalid_workspace_id(self):
575 def test_cleanup_merge_workspace_invalid_workspace_id(self):
576 # No assert: even for a nonexistent workspace this function
576 # No assert: even for a nonexistent workspace this function
577 # should still succeed.
577 # should still succeed.
578 self.repo.cleanup_merge_workspace(1, 'pr4')
578 self.repo.cleanup_merge_workspace(1, 'pr4')
579
579
580 def test_merge_target_is_bookmark(self, vcsbackend_hg):
580 def test_merge_target_is_bookmark(self, vcsbackend_hg):
581 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
581 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
582 source_repo = vcsbackend_hg.clone_repo(target_repo)
582 source_repo = vcsbackend_hg.clone_repo(target_repo)
583 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
583 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
584 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
584 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
585 imc = source_repo.in_memory_commit
585 imc = source_repo.in_memory_commit
586 imc.add(FileNode('file_x', content=source_repo.name))
586 imc.add(FileNode('file_x', content=source_repo.name))
587 imc.commit(
587 imc.commit(
588 message=u'Automatic commit from repo merge test',
588 message=u'Automatic commit from repo merge test',
589 author=u'Automatic')
589 author=u'Automatic <automatic@rhodecode.com>')
590 target_commit = target_repo.get_commit()
590 target_commit = target_repo.get_commit()
591 source_commit = source_repo.get_commit()
591 source_commit = source_repo.get_commit()
592 default_branch = target_repo.DEFAULT_BRANCH_NAME
592 default_branch = target_repo.DEFAULT_BRANCH_NAME
593 bookmark_name = 'bookmark'
593 bookmark_name = 'bookmark'
594 target_repo._update(default_branch)
594 target_repo._update(default_branch)
595 target_repo.bookmark(bookmark_name)
595 target_repo.bookmark(bookmark_name)
596 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
596 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
597 source_ref = Reference('branch', default_branch, source_commit.raw_id)
597 source_ref = Reference('branch', default_branch, source_commit.raw_id)
598 workspace_id = 'test-merge'
598 workspace_id = 'test-merge'
599 repo_id = repo_id_generator(target_repo.path)
599 repo_id = repo_id_generator(target_repo.path)
600 merge_response = target_repo.merge(
600 merge_response = target_repo.merge(
601 repo_id, workspace_id, target_ref, source_repo, source_ref,
601 repo_id, workspace_id, target_ref, source_repo, source_ref,
602 'test user', 'test@rhodecode.com', 'merge message 1',
602 'test user', 'test@rhodecode.com', 'merge message 1',
603 dry_run=False)
603 dry_run=False)
604 expected_merge_response = MergeResponse(
604 expected_merge_response = MergeResponse(
605 True, True, merge_response.merge_ref,
605 True, True, merge_response.merge_ref,
606 MergeFailureReason.NONE)
606 MergeFailureReason.NONE)
607 assert merge_response == expected_merge_response
607 assert merge_response == expected_merge_response
608
608
609 target_repo = backends.get_backend(vcsbackend_hg.alias)(
609 target_repo = backends.get_backend(vcsbackend_hg.alias)(
610 target_repo.path)
610 target_repo.path)
611 target_commits = list(target_repo.get_commits())
611 target_commits = list(target_repo.get_commits())
612 commit_ids = [c.raw_id for c in target_commits[:-1]]
612 commit_ids = [c.raw_id for c in target_commits[:-1]]
613 assert source_ref.commit_id in commit_ids
613 assert source_ref.commit_id in commit_ids
614 assert target_ref.commit_id in commit_ids
614 assert target_ref.commit_id in commit_ids
615
615
616 merge_commit = target_commits[-1]
616 merge_commit = target_commits[-1]
617 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
617 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
618 assert merge_commit.message.strip() == 'merge message 1'
618 assert merge_commit.message.strip() == 'merge message 1'
619 assert merge_commit.author == 'test user <test@rhodecode.com>'
619 assert merge_commit.author == 'test user <test@rhodecode.com>'
620
620
621 # Check the bookmark was updated in the target repo
621 # Check the bookmark was updated in the target repo
622 assert (
622 assert (
623 target_repo.bookmarks[bookmark_name] ==
623 target_repo.bookmarks[bookmark_name] ==
624 merge_response.merge_ref.commit_id)
624 merge_response.merge_ref.commit_id)
625
625
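For reference, the public merge() entry point used by the bookmark tests follows this pattern (sketch repeating the call from the test above; arguments are positional: repo_id, workspace_id, target_ref, source_repo, source_ref, then user, email, message):

    merge_response = target_repo.merge(
        repo_id, workspace_id, target_ref, source_repo, source_ref,
        'test user', 'test@rhodecode.com', 'merge message 1', dry_run=False)
    assert merge_response == MergeResponse(
        True, True, merge_response.merge_ref, MergeFailureReason.NONE)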
626 def test_merge_source_is_bookmark(self, vcsbackend_hg):
626 def test_merge_source_is_bookmark(self, vcsbackend_hg):
627 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
627 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
628 source_repo = vcsbackend_hg.clone_repo(target_repo)
628 source_repo = vcsbackend_hg.clone_repo(target_repo)
629 imc = source_repo.in_memory_commit
629 imc = source_repo.in_memory_commit
630 imc.add(FileNode('file_x', content=source_repo.name))
630 imc.add(FileNode('file_x', content=source_repo.name))
631 imc.commit(
631 imc.commit(
632 message=u'Automatic commit from repo merge test',
632 message=u'Automatic commit from repo merge test',
633 author=u'Automatic')
633 author=u'Automatic <automatic@rhodecode.com>')
634 target_commit = target_repo.get_commit()
634 target_commit = target_repo.get_commit()
635 source_commit = source_repo.get_commit()
635 source_commit = source_repo.get_commit()
636 default_branch = target_repo.DEFAULT_BRANCH_NAME
636 default_branch = target_repo.DEFAULT_BRANCH_NAME
637 bookmark_name = 'bookmark'
637 bookmark_name = 'bookmark'
638 target_ref = Reference('branch', default_branch, target_commit.raw_id)
638 target_ref = Reference('branch', default_branch, target_commit.raw_id)
639 source_repo._update(default_branch)
639 source_repo._update(default_branch)
640 source_repo.bookmark(bookmark_name)
640 source_repo.bookmark(bookmark_name)
641 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
641 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
642 workspace_id = 'test-merge'
642 workspace_id = 'test-merge'
643 repo_id = repo_id_generator(target_repo.path)
643 repo_id = repo_id_generator(target_repo.path)
644 merge_response = target_repo.merge(
644 merge_response = target_repo.merge(
645 repo_id, workspace_id, target_ref, source_repo, source_ref,
645 repo_id, workspace_id, target_ref, source_repo, source_ref,
646 'test user', 'test@rhodecode.com', 'merge message 1',
646 'test user', 'test@rhodecode.com', 'merge message 1',
647 dry_run=False)
647 dry_run=False)
648 expected_merge_response = MergeResponse(
648 expected_merge_response = MergeResponse(
649 True, True, merge_response.merge_ref,
649 True, True, merge_response.merge_ref,
650 MergeFailureReason.NONE)
650 MergeFailureReason.NONE)
651 assert merge_response == expected_merge_response
651 assert merge_response == expected_merge_response
652
652
653 target_repo = backends.get_backend(vcsbackend_hg.alias)(
653 target_repo = backends.get_backend(vcsbackend_hg.alias)(
654 target_repo.path)
654 target_repo.path)
655 target_commits = list(target_repo.get_commits())
655 target_commits = list(target_repo.get_commits())
656 commit_ids = [c.raw_id for c in target_commits]
656 commit_ids = [c.raw_id for c in target_commits]
657 assert source_ref.commit_id == commit_ids[-1]
657 assert source_ref.commit_id == commit_ids[-1]
658 assert target_ref.commit_id == commit_ids[-2]
658 assert target_ref.commit_id == commit_ids[-2]
659
659
660 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
660 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
661 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
661 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
662 source_repo = vcsbackend_hg.clone_repo(target_repo)
662 source_repo = vcsbackend_hg.clone_repo(target_repo)
663 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
663 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
664 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
664 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
665
665
666 # add an extra head to the target repo
666 # add an extra head to the target repo
667 imc = target_repo.in_memory_commit
667 imc = target_repo.in_memory_commit
668 imc.add(FileNode('file_x', content='foo'))
668 imc.add(FileNode('file_x', content='foo'))
669 commits = list(target_repo.get_commits())
669 commits = list(target_repo.get_commits())
670 imc.commit(
670 imc.commit(
671 message=u'Automatic commit from repo merge test',
671 message=u'Automatic commit from repo merge test',
672 author=u'Automatic', parents=commits[0:1])
672 author=u'Automatic <automatic@rhodecode.com>', parents=commits[0:1])
673
673
674 target_commit = target_repo.get_commit()
674 target_commit = target_repo.get_commit()
675 source_commit = source_repo.get_commit()
675 source_commit = source_repo.get_commit()
676 default_branch = target_repo.DEFAULT_BRANCH_NAME
676 default_branch = target_repo.DEFAULT_BRANCH_NAME
677 target_repo._update(default_branch)
677 target_repo._update(default_branch)
678
678
679 target_ref = Reference('branch', default_branch, target_commit.raw_id)
679 target_ref = Reference('branch', default_branch, target_commit.raw_id)
680 source_ref = Reference('branch', default_branch, source_commit.raw_id)
680 source_ref = Reference('branch', default_branch, source_commit.raw_id)
681 workspace_id = 'test-merge'
681 workspace_id = 'test-merge'
682
682
683 assert len(target_repo._heads(branch='default')) == 2
683 assert len(target_repo._heads(branch='default')) == 2
684 heads = target_repo._heads(branch='default')
684 heads = target_repo._heads(branch='default')
685 expected_merge_response = MergeResponse(
685 expected_merge_response = MergeResponse(
686 False, False, None,
686 False, False, None,
687 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
687 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
688 metadata={'heads': heads})
688 metadata={'heads': heads})
689 repo_id = repo_id_generator(target_repo.path)
689 repo_id = repo_id_generator(target_repo.path)
690 merge_response = target_repo.merge(
690 merge_response = target_repo.merge(
691 repo_id, workspace_id, target_ref, source_repo, source_ref,
691 repo_id, workspace_id, target_ref, source_repo, source_ref,
692 'test user', 'test@rhodecode.com', 'merge message 1',
692 'test user', 'test@rhodecode.com', 'merge message 1',
693 dry_run=False)
693 dry_run=False)
694 assert merge_response == expected_merge_response
694 assert merge_response == expected_merge_response
695
695
696 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
696 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
697 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
697 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
698 source_repo = vcsbackend_hg.clone_repo(target_repo)
698 source_repo = vcsbackend_hg.clone_repo(target_repo)
699 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
699 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
700 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
700 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
701 imc = source_repo.in_memory_commit
701 imc = source_repo.in_memory_commit
702 imc.add(FileNode('file_x', content=source_repo.name))
702 imc.add(FileNode('file_x', content=source_repo.name))
703 imc.commit(
703 imc.commit(
704 message=u'Automatic commit from repo merge test',
704 message=u'Automatic commit from repo merge test',
705 author=u'Automatic')
705 author=u'Automatic <automatic@rhodecode.com>')
706 target_commit = target_repo.get_commit()
706 target_commit = target_repo.get_commit()
707 source_commit = source_repo.get_commit()
707 source_commit = source_repo.get_commit()
708
708
709 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
709 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
710
710
711 default_branch = target_repo.DEFAULT_BRANCH_NAME
711 default_branch = target_repo.DEFAULT_BRANCH_NAME
712 bookmark_name = 'bookmark'
712 bookmark_name = 'bookmark'
713 source_repo._update(default_branch)
713 source_repo._update(default_branch)
714 source_repo.bookmark(bookmark_name)
714 source_repo.bookmark(bookmark_name)
715
715
716 target_ref = Reference('branch', default_branch, target_commit.raw_id)
716 target_ref = Reference('branch', default_branch, target_commit.raw_id)
717 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
717 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
718 repo_id = repo_id_generator(target_repo.path)
718 repo_id = repo_id_generator(target_repo.path)
719 workspace_id = 'test-merge'
719 workspace_id = 'test-merge'
720
720
721 merge_response = target_repo.merge(
721 merge_response = target_repo.merge(
722 repo_id, workspace_id, target_ref, source_repo, source_ref,
722 repo_id, workspace_id, target_ref, source_repo, source_ref,
723 'test user', 'test@rhodecode.com', 'merge message 1',
723 'test user', 'test@rhodecode.com', 'merge message 1',
724 dry_run=False, use_rebase=True)
724 dry_run=False, use_rebase=True)
725
725
726 expected_merge_response = MergeResponse(
726 expected_merge_response = MergeResponse(
727 True, True, merge_response.merge_ref,
727 True, True, merge_response.merge_ref,
728 MergeFailureReason.NONE)
728 MergeFailureReason.NONE)
729 assert merge_response == expected_merge_response
729 assert merge_response == expected_merge_response
730
730
731 target_repo = backends.get_backend(vcsbackend_hg.alias)(
731 target_repo = backends.get_backend(vcsbackend_hg.alias)(
732 target_repo.path)
732 target_repo.path)
733 last_commit = target_repo.get_commit()
733 last_commit = target_repo.get_commit()
734 assert last_commit.message == source_commit.message
734 assert last_commit.message == source_commit.message
735 assert last_commit.author == source_commit.author
735 assert last_commit.author == source_commit.author
736 # This checks that we effectively did a rebase
736 # This checks that we effectively did a rebase
737 assert last_commit.raw_id != source_commit.raw_id
737 assert last_commit.raw_id != source_commit.raw_id
738
738
739 # Check the target has only 4 commits: 2 were already in target and
739 # Check the target has only 4 commits: 2 were already in target and
740 # only two should have been added
740 # only two should have been added
741 assert len(target_repo.commit_ids) == 2 + 2
741 assert len(target_repo.commit_ids) == 2 + 2
742
742
743
743
744 class TestGetShadowInstance(object):
744 class TestGetShadowInstance(object):
745
745
746 @pytest.fixture
746 @pytest.fixture
747 def repo(self, vcsbackend_hg, monkeypatch):
747 def repo(self, vcsbackend_hg, monkeypatch):
748 repo = vcsbackend_hg.repo
748 repo = vcsbackend_hg.repo
749 monkeypatch.setattr(repo, 'config', mock.Mock())
749 monkeypatch.setattr(repo, 'config', mock.Mock())
750 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
750 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
751 return repo
751 return repo
752
752
753 def test_passes_config(self, repo):
753 def test_passes_config(self, repo):
754 shadow = repo._get_shadow_instance(repo.path)
754 shadow = repo._get_shadow_instance(repo.path)
755 assert shadow.config == repo.config.copy()
755 assert shadow.config == repo.config.copy()
756
756
757 def test_disables_hooks(self, repo):
757 def test_disables_hooks(self, repo):
758 shadow = repo._get_shadow_instance(repo.path)
758 shadow = repo._get_shadow_instance(repo.path)
759 shadow.config.clear_section.assert_called_once_with('hooks')
759 shadow.config.clear_section.assert_called_once_with('hooks')
760
760
761 def test_allows_to_keep_hooks(self, repo):
761 def test_allows_to_keep_hooks(self, repo):
762 shadow = repo._get_shadow_instance(repo.path, enable_hooks=True)
762 shadow = repo._get_shadow_instance(repo.path, enable_hooks=True)
763 assert not shadow.config.clear_section.called
763 assert not shadow.config.clear_section.called
764
764
765
765
766 class TestMercurialCommit(object):
766 class TestMercurialCommit(object):
767
767
768 def _test_equality(self, commit):
768 def _test_equality(self, commit):
769 idx = commit.idx
769 idx = commit.idx
770 assert commit == self.repo.get_commit(commit_idx=idx)
770 assert commit == self.repo.get_commit(commit_idx=idx)
771
771
772 def test_equality(self):
772 def test_equality(self):
773 indexes = [0, 10, 20]
773 indexes = [0, 10, 20]
774 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
774 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
775 for commit in commits:
775 for commit in commits:
776 self._test_equality(commit)
776 self._test_equality(commit)
777
777
778 def test_default_commit(self):
778 def test_default_commit(self):
779 tip = self.repo.get_commit('tip')
779 tip = self.repo.get_commit('tip')
780 assert tip == self.repo.get_commit()
780 assert tip == self.repo.get_commit()
781 assert tip == self.repo.get_commit(commit_id=None)
781 assert tip == self.repo.get_commit(commit_id=None)
782 assert tip == self.repo.get_commit(commit_idx=None)
782 assert tip == self.repo.get_commit(commit_idx=None)
783 assert tip == list(self.repo[-1:])[0]
783 assert tip == list(self.repo[-1:])[0]
784
784
785 def test_root_node(self):
785 def test_root_node(self):
786 tip = self.repo.get_commit('tip')
786 tip = self.repo.get_commit('tip')
787 assert tip.root is tip.get_node('')
787 assert tip.root is tip.get_node('')
788
788
789 def test_lazy_fetch(self):
789 def test_lazy_fetch(self):
790 """
790 """
791 Test whether a commit's nodes expand and are cached as we walk through
791 Test whether a commit's nodes expand and are cached as we walk through
792 the commit. This test is somewhat hard to write, as the order of checks
792 the commit. This test is somewhat hard to write, as the order of checks
793 is key here. Written by running command after command in a shell.
793 is key here. Written by running command after command in a shell.
794 """
794 """
795 commit = self.repo.get_commit(commit_idx=45)
795 commit = self.repo.get_commit(commit_idx=45)
796 assert len(commit.nodes) == 0
796 assert len(commit.nodes) == 0
797 root = commit.root
797 root = commit.root
798 assert len(commit.nodes) == 1
798 assert len(commit.nodes) == 1
799 assert len(root.nodes) == 8
799 assert len(root.nodes) == 8
800 # accessing root.nodes updates commit.nodes
800 # accessing root.nodes updates commit.nodes
801 assert len(commit.nodes) == 9
801 assert len(commit.nodes) == 9
802
802
803 docs = root.get_node('docs')
803 docs = root.get_node('docs')
804 # we haven't yet accessed anything new as docs dir was already cached
804 # we haven't yet accessed anything new as docs dir was already cached
805 assert len(commit.nodes) == 9
805 assert len(commit.nodes) == 9
806 assert len(docs.nodes) == 8
806 assert len(docs.nodes) == 8
807 # accessing docs.nodes updates commit.nodes
807 # accessing docs.nodes updates commit.nodes
808 assert len(commit.nodes) == 17
808 assert len(commit.nodes) == 17
809
809
810 assert docs is commit.get_node('docs')
810 assert docs is commit.get_node('docs')
811 assert docs is root.nodes[0]
811 assert docs is root.nodes[0]
812 assert docs is root.dirs[0]
812 assert docs is root.dirs[0]
813 assert docs is commit.get_node('docs')
813 assert docs is commit.get_node('docs')
814
814
815 def test_nodes_with_commit(self):
815 def test_nodes_with_commit(self):
816 commit = self.repo.get_commit(commit_idx=45)
816 commit = self.repo.get_commit(commit_idx=45)
817 root = commit.root
817 root = commit.root
818 docs = root.get_node('docs')
818 docs = root.get_node('docs')
819 assert docs is commit.get_node('docs')
819 assert docs is commit.get_node('docs')
820 api = docs.get_node('api')
820 api = docs.get_node('api')
821 assert api is commit.get_node('docs/api')
821 assert api is commit.get_node('docs/api')
822 index = api.get_node('index.rst')
822 index = api.get_node('index.rst')
823 assert index is commit.get_node('docs/api/index.rst')
823 assert index is commit.get_node('docs/api/index.rst')
824 assert index is commit.get_node(
824 assert index is commit.get_node(
825 'docs').get_node('api').get_node('index.rst')
825 'docs').get_node('api').get_node('index.rst')
826
826
827 def test_branch_and_tags(self):
827 def test_branch_and_tags(self):
828 commit0 = self.repo.get_commit(commit_idx=0)
828 commit0 = self.repo.get_commit(commit_idx=0)
829 assert commit0.branch == 'default'
829 assert commit0.branch == 'default'
830 assert commit0.tags == []
830 assert commit0.tags == []
831
831
832 commit10 = self.repo.get_commit(commit_idx=10)
832 commit10 = self.repo.get_commit(commit_idx=10)
833 assert commit10.branch == 'default'
833 assert commit10.branch == 'default'
834 assert commit10.tags == []
834 assert commit10.tags == []
835
835
836 commit44 = self.repo.get_commit(commit_idx=44)
836 commit44 = self.repo.get_commit(commit_idx=44)
837 assert commit44.branch == 'web'
837 assert commit44.branch == 'web'
838
838
839 tip = self.repo.get_commit('tip')
839 tip = self.repo.get_commit('tip')
840 assert 'tip' in tip.tags
840 assert 'tip' in tip.tags
841
841
842 def test_bookmarks(self):
842 def test_bookmarks(self):
843 commit0 = self.repo.get_commit(commit_idx=0)
843 commit0 = self.repo.get_commit(commit_idx=0)
844 assert commit0.bookmarks == []
844 assert commit0.bookmarks == []
845
845
846 def _test_file_size(self, idx, path, size):
846 def _test_file_size(self, idx, path, size):
847 node = self.repo.get_commit(commit_idx=idx).get_node(path)
847 node = self.repo.get_commit(commit_idx=idx).get_node(path)
848 assert node.is_file()
848 assert node.is_file()
849 assert node.size == size
849 assert node.size == size
850
850
851 def test_file_size(self):
851 def test_file_size(self):
852 to_check = (
852 to_check = (
853 (10, 'setup.py', 1068),
853 (10, 'setup.py', 1068),
854 (20, 'setup.py', 1106),
854 (20, 'setup.py', 1106),
855 (60, 'setup.py', 1074),
855 (60, 'setup.py', 1074),
856
856
857 (10, 'vcs/backends/base.py', 2921),
857 (10, 'vcs/backends/base.py', 2921),
858 (20, 'vcs/backends/base.py', 3936),
858 (20, 'vcs/backends/base.py', 3936),
859 (60, 'vcs/backends/base.py', 6189),
859 (60, 'vcs/backends/base.py', 6189),
860 )
860 )
861 for idx, path, size in to_check:
861 for idx, path, size in to_check:
862 self._test_file_size(idx, path, size)
862 self._test_file_size(idx, path, size)
863
863
864 def test_file_history_from_commits(self):
864 def test_file_history_from_commits(self):
865 node = self.repo[10].get_node('setup.py')
865 node = self.repo[10].get_node('setup.py')
866 commit_ids = [commit.raw_id for commit in node.history]
866 commit_ids = [commit.raw_id for commit in node.history]
867 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
867 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
868
868
869 node = self.repo[20].get_node('setup.py')
869 node = self.repo[20].get_node('setup.py')
870 node_ids = [commit.raw_id for commit in node.history]
870 node_ids = [commit.raw_id for commit in node.history]
871 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
871 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
872 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
872 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
873
873
874 # special case: we check history from a commit that has this particular
874 # special case: we check history from a commit that has this particular
875 # file changed; this means we check whether that commit is included as well
875 # file changed; this means we check whether that commit is included as well
876 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
876 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
877 .get_node('setup.py')
877 .get_node('setup.py')
878 node_ids = [commit.raw_id for commit in node.history]
878 node_ids = [commit.raw_id for commit in node.history]
879 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
879 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
880 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
880 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
881
881
882 def test_file_history(self):
882 def test_file_history(self):
883 # we can only check if those commits are present in the history
883 # we can only check if those commits are present in the history
884 # as we cannot update this test every time the file is changed
884 # as we cannot update this test every time the file is changed
885 files = {
885 files = {
886 'setup.py': [7, 18, 45, 46, 47, 69, 77],
886 'setup.py': [7, 18, 45, 46, 47, 69, 77],
887 'vcs/nodes.py': [
887 'vcs/nodes.py': [
888 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
888 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
889 'vcs/backends/hg.py': [
889 'vcs/backends/hg.py': [
890 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
890 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
891 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
891 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
892 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
892 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
893 }
893 }
894 for path, indexes in files.items():
894 for path, indexes in files.items():
895 tip = self.repo.get_commit(commit_idx=indexes[-1])
895 tip = self.repo.get_commit(commit_idx=indexes[-1])
896 node = tip.get_node(path)
896 node = tip.get_node(path)
897 node_indexes = [commit.idx for commit in node.history]
897 node_indexes = [commit.idx for commit in node.history]
898 assert set(indexes).issubset(set(node_indexes)), (
898 assert set(indexes).issubset(set(node_indexes)), (
899 "We assumed that %s is subset of commits for which file %s "
899 "We assumed that %s is subset of commits for which file %s "
900 "has been changed, and history of that node returned: %s"
900 "has been changed, and history of that node returned: %s"
901 % (indexes, path, node_indexes))
901 % (indexes, path, node_indexes))
902
902
903 def test_file_annotate(self):
903 def test_file_annotate(self):
904 files = {
904 files = {
905 'vcs/backends/__init__.py': {
905 'vcs/backends/__init__.py': {
906 89: {
906 89: {
907 'lines_no': 31,
907 'lines_no': 31,
908 'commits': [
908 'commits': [
909 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
909 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
910 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
910 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
911 32, 32, 32, 32, 37, 32, 37, 37, 32,
911 32, 32, 32, 32, 37, 32, 37, 37, 32,
912 32, 32
912 32, 32
913 ]
913 ]
914 },
914 },
915 20: {
915 20: {
916 'lines_no': 1,
916 'lines_no': 1,
917 'commits': [4]
917 'commits': [4]
918 },
918 },
919 55: {
919 55: {
920 'lines_no': 31,
920 'lines_no': 31,
921 'commits': [
921 'commits': [
922 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
922 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
923 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
923 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
924 32, 32, 32, 32, 37, 32, 37, 37, 32,
924 32, 32, 32, 32, 37, 32, 37, 37, 32,
925 32, 32
925 32, 32
926 ]
926 ]
927 }
927 }
928 },
928 },
929 'vcs/exceptions.py': {
929 'vcs/exceptions.py': {
930 89: {
930 89: {
931 'lines_no': 18,
931 'lines_no': 18,
932 'commits': [
932 'commits': [
933 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
933 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
934 16, 16, 17, 16, 16, 18, 18, 18
934 16, 16, 17, 16, 16, 18, 18, 18
935 ]
935 ]
936 },
936 },
937 20: {
937 20: {
938 'lines_no': 18,
938 'lines_no': 18,
939 'commits': [
939 'commits': [
940 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
940 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
941 16, 16, 17, 16, 16, 18, 18, 18
941 16, 16, 17, 16, 16, 18, 18, 18
942 ]
942 ]
943 },
943 },
944 55: {
944 55: {
945 'lines_no': 18,
945 'lines_no': 18,
946 'commits': [
946 'commits': [
947 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
947 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
948 17, 16, 16, 18, 18, 18
948 17, 16, 16, 18, 18, 18
949 ]
949 ]
950 }
950 }
951 },
951 },
952 'MANIFEST.in': {
952 'MANIFEST.in': {
953 89: {
953 89: {
954 'lines_no': 5,
954 'lines_no': 5,
955 'commits': [7, 7, 7, 71, 71]
955 'commits': [7, 7, 7, 71, 71]
956 },
956 },
957 20: {
957 20: {
958 'lines_no': 3,
958 'lines_no': 3,
959 'commits': [7, 7, 7]
959 'commits': [7, 7, 7]
960 },
960 },
961 55: {
961 55: {
962 'lines_no': 3,
962 'lines_no': 3,
963 'commits': [7, 7, 7]
963 'commits': [7, 7, 7]
964 }
964 }
965 }
965 }
966 }
966 }
967
967
968 for fname, commit_dict in files.items():
968 for fname, commit_dict in files.items():
969 for idx, __ in commit_dict.items():
969 for idx, __ in commit_dict.items():
970 commit = self.repo.get_commit(commit_idx=idx)
970 commit = self.repo.get_commit(commit_idx=idx)
971 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
971 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
972 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
972 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
973 assert l1_1 == l1_2
973 assert l1_1 == l1_2
974 l1 = l1_2 = [
974 l1 = l1_2 = [
975 x[2]().idx for x in commit.get_file_annotate(fname)]
975 x[2]().idx for x in commit.get_file_annotate(fname)]
976 l2 = files[fname][idx]['commits']
976 l2 = files[fname][idx]['commits']
977 assert l1 == l2, (
977 assert l1 == l2, (
978 "The lists of commit for %s@commit_id%s"
978 "The lists of commit for %s@commit_id%s"
979 "from annotation list should match each other,"
979 "from annotation list should match each other,"
980 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
980 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
981
981
982 def test_commit_state(self):
982 def test_commit_state(self):
983 """
983 """
984 Tests which files have been added/changed/removed at particular commit
984 Tests which files have been added/changed/removed at particular commit
985 """
985 """
986
986
987 # commit_id 46ad32a4f974:
987 # commit_id 46ad32a4f974:
988 # hg st --rev 46ad32a4f974
988 # hg st --rev 46ad32a4f974
989 # changed: 13
989 # changed: 13
990 # added: 20
990 # added: 20
991 # removed: 1
991 # removed: 1
992 changed = set([
992 changed = set([
993 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
993 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
994 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
994 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
995 'vcs/__init__.py', 'vcs/backends/__init__.py',
995 'vcs/__init__.py', 'vcs/backends/__init__.py',
996 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
996 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
997 'vcs/utils/__init__.py'])
997 'vcs/utils/__init__.py'])
998
998
999 added = set([
999 added = set([
1000 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
1000 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
1001 'docs/api/index.rst', 'docs/api/nodes.rst',
1001 'docs/api/index.rst', 'docs/api/nodes.rst',
1002 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
1002 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
1003 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
1003 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
1004 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
1004 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
1005 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
1005 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
1006 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
1006 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
1007 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1007 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1008 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1008 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1009 'vcs/web/simplevcs/views.py'])
1009 'vcs/web/simplevcs/views.py'])
1010
1010
1011 removed = set(['docs/api.rst'])
1011 removed = set(['docs/api.rst'])
1012
1012
1013 commit64 = self.repo.get_commit('46ad32a4f974')
1013 commit64 = self.repo.get_commit('46ad32a4f974')
1014 assert set((node.path for node in commit64.added)) == added
1014 assert set((node.path for node in commit64.added)) == added
1015 assert set((node.path for node in commit64.changed)) == changed
1015 assert set((node.path for node in commit64.changed)) == changed
1016 assert set((node.path for node in commit64.removed)) == removed
1016 assert set((node.path for node in commit64.removed)) == removed
1017
1017
1018 # commit_id b090f22d27d6:
1018 # commit_id b090f22d27d6:
1019 # hg st --rev b090f22d27d6
1019 # hg st --rev b090f22d27d6
1020 # changed: 13
1020 # changed: 13
1021 # added: 20
1021 # added: 20
1022 # removed: 1
1022 # removed: 1
1023 commit88 = self.repo.get_commit('b090f22d27d6')
1023 commit88 = self.repo.get_commit('b090f22d27d6')
1024 assert set((node.path for node in commit88.added)) == set()
1024 assert set((node.path for node in commit88.added)) == set()
1025 assert set((node.path for node in commit88.changed)) == \
1025 assert set((node.path for node in commit88.changed)) == \
1026 set(['.hgignore'])
1026 set(['.hgignore'])
1027 assert set((node.path for node in commit88.removed)) == set()
1027 assert set((node.path for node in commit88.removed)) == set()
1028
1028
1029 #
1029 #
1030 # 85:
1030 # 85:
1031 # added: 2 [
1031 # added: 2 [
1032 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1032 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1033 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1033 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1034 # removed: 1 ['vcs/utils/web.py']
1034 # removed: 1 ['vcs/utils/web.py']
1035 commit85 = self.repo.get_commit(commit_idx=85)
1035 commit85 = self.repo.get_commit(commit_idx=85)
1036 assert set((node.path for node in commit85.added)) == set([
1036 assert set((node.path for node in commit85.added)) == set([
1037 'vcs/utils/diffs.py',
1037 'vcs/utils/diffs.py',
1038 'vcs/web/simplevcs/views/diffs.py'])
1038 'vcs/web/simplevcs/views/diffs.py'])
1039 assert set((node.path for node in commit85.changed)) == set([
1039 assert set((node.path for node in commit85.changed)) == set([
1040 'vcs/web/simplevcs/models.py',
1040 'vcs/web/simplevcs/models.py',
1041 'vcs/web/simplevcs/utils.py',
1041 'vcs/web/simplevcs/utils.py',
1042 'vcs/web/simplevcs/views/__init__.py',
1042 'vcs/web/simplevcs/views/__init__.py',
1043 'vcs/web/simplevcs/views/repository.py',
1043 'vcs/web/simplevcs/views/repository.py',
1044 ])
1044 ])
1045 assert set((node.path for node in commit85.removed)) == \
1045 assert set((node.path for node in commit85.removed)) == \
1046 set(['vcs/utils/web.py'])
1046 set(['vcs/utils/web.py'])
1047
1047
1048 def test_files_state(self):
1048 def test_files_state(self):
1049 """
1049 """
1050 Tests state of FileNodes.
1050 Tests state of FileNodes.
1051 """
1051 """
1052 commit = self.repo.get_commit(commit_idx=85)
1052 commit = self.repo.get_commit(commit_idx=85)
1053 node = commit.get_node('vcs/utils/diffs.py')
1053 node = commit.get_node('vcs/utils/diffs.py')
1054 assert node.state == NodeState.ADDED
1054 assert node.state == NodeState.ADDED
1055 assert node.added
1055 assert node.added
1056 assert not node.changed
1056 assert not node.changed
1057 assert not node.not_changed
1057 assert not node.not_changed
1058 assert not node.removed
1058 assert not node.removed
1059
1059
1060 commit = self.repo.get_commit(commit_idx=88)
1060 commit = self.repo.get_commit(commit_idx=88)
1061 node = commit.get_node('.hgignore')
1061 node = commit.get_node('.hgignore')
1062 assert node.state == NodeState.CHANGED
1062 assert node.state == NodeState.CHANGED
1063 assert not node.added
1063 assert not node.added
1064 assert node.changed
1064 assert node.changed
1065 assert not node.not_changed
1065 assert not node.not_changed
1066 assert not node.removed
1066 assert not node.removed
1067
1067
1068 commit = self.repo.get_commit(commit_idx=85)
1068 commit = self.repo.get_commit(commit_idx=85)
1069 node = commit.get_node('setup.py')
1069 node = commit.get_node('setup.py')
1070 assert node.state == NodeState.NOT_CHANGED
1070 assert node.state == NodeState.NOT_CHANGED
1071 assert not node.added
1071 assert not node.added
1072 assert not node.changed
1072 assert not node.changed
1073 assert node.not_changed
1073 assert node.not_changed
1074 assert not node.removed
1074 assert not node.removed
1075
1075
1076 # If node has REMOVED state then trying to fetch it would raise
1076 # If node has REMOVED state then trying to fetch it would raise
1077 # CommitError exception
1077 # CommitError exception
1078 commit = self.repo.get_commit(commit_idx=2)
1078 commit = self.repo.get_commit(commit_idx=2)
1079 path = 'vcs/backends/BaseRepository.py'
1079 path = 'vcs/backends/BaseRepository.py'
1080 with pytest.raises(NodeDoesNotExistError):
1080 with pytest.raises(NodeDoesNotExistError):
1081 commit.get_node(path)
1081 commit.get_node(path)
1082 # but it would be one of ``removed`` (commit's attribute)
1082 # but it would be one of ``removed`` (commit's attribute)
1083 assert path in [rf.path for rf in commit.removed]
1083 assert path in [rf.path for rf in commit.removed]
1084
1084
1085 def test_commit_message_is_unicode(self):
1085 def test_commit_message_is_unicode(self):
1086 for cm in self.repo:
1086 for cm in self.repo:
1087 assert type(cm.message) == unicode
1087 assert type(cm.message) == unicode
1088
1088
1089 def test_commit_author_is_unicode(self):
1089 def test_commit_author_is_unicode(self):
1090 for cm in self.repo:
1090 for cm in self.repo:
1091 assert type(cm.author) == unicode
1091 assert type(cm.author) == unicode
1092
1092
1093 def test_repo_files_content_is_unicode(self):
1093 def test_repo_files_content_is_unicode(self):
1094 test_commit = self.repo.get_commit(commit_idx=100)
1094 test_commit = self.repo.get_commit(commit_idx=100)
1095 for node in test_commit.get_node('/'):
1095 for node in test_commit.get_node('/'):
1096 if node.is_file():
1096 if node.is_file():
1097 assert type(node.content) == unicode
1097 assert type(node.content) == unicode
1098
1098
1099 def test_wrong_path(self):
1099 def test_wrong_path(self):
1100 # There is 'setup.py' in the root dir but not there:
1100 # There is 'setup.py' in the root dir but not there:
1101 path = 'foo/bar/setup.py'
1101 path = 'foo/bar/setup.py'
1102 with pytest.raises(VCSError):
1102 with pytest.raises(VCSError):
1103 self.repo.get_commit().get_node(path)
1103 self.repo.get_commit().get_node(path)
1104
1104
1105 def test_author_email(self):
1105 def test_author_email(self):
1106 assert 'marcin@python-blog.com' == \
1106 assert 'marcin@python-blog.com' == \
1107 self.repo.get_commit('b986218ba1c9').author_email
1107 self.repo.get_commit('b986218ba1c9').author_email
1108 assert 'lukasz.balcerzak@python-center.pl' == \
1108 assert 'lukasz.balcerzak@python-center.pl' == \
1109 self.repo.get_commit('3803844fdbd3').author_email
1109 self.repo.get_commit('3803844fdbd3').author_email
1110 assert '' == self.repo.get_commit('84478366594b').author_email
1110 assert '' == self.repo.get_commit('84478366594b').author_email
1111
1111
1112 def test_author_username(self):
1112 def test_author_username(self):
1113 assert 'Marcin Kuzminski' == \
1113 assert 'Marcin Kuzminski' == \
1114 self.repo.get_commit('b986218ba1c9').author_name
1114 self.repo.get_commit('b986218ba1c9').author_name
1115 assert 'Lukasz Balcerzak' == \
1115 assert 'Lukasz Balcerzak' == \
1116 self.repo.get_commit('3803844fdbd3').author_name
1116 self.repo.get_commit('3803844fdbd3').author_name
1117 assert 'marcink' == \
1117 assert 'marcink' == \
1118 self.repo.get_commit('84478366594b').author_name
1118 self.repo.get_commit('84478366594b').author_name
1119
1119
1120
1120
1121 class TestLargeFileRepo(object):
1121 class TestLargeFileRepo(object):
1122
1122
1123 def test_large_file(self, backend_hg):
1123 def test_large_file(self, backend_hg):
1124 repo = backend_hg.create_test_repo('largefiles', make_db_config())
1124 repo = backend_hg.create_test_repo('largefiles', make_db_config())
1125
1125
1126 tip = repo.scm_instance().get_commit()
1126 tip = repo.scm_instance().get_commit()
1127 node = tip.get_node('.hglf/thisfileislarge')
1127 node = tip.get_node('.hglf/thisfileislarge')
1128
1128
1129 lf_node = node.get_largefile_node()
1129 lf_node = node.get_largefile_node()
1130
1130
1131 assert lf_node.is_largefile() is True
1131 assert lf_node.is_largefile() is True
1132 assert lf_node.size == 1024000
1132 assert lf_node.size == 1024000
1133 assert lf_node.name == '.hglf/thisfileislarge'
1133 assert lf_node.name == '.hglf/thisfileislarge'
1134
1134
1135
1135
1136 class TestGetBranchName(object):
1136 class TestGetBranchName(object):
1137 def test_returns_ref_name_when_type_is_branch(self):
1137 def test_returns_ref_name_when_type_is_branch(self):
1138 ref = self._create_ref('branch', 'fake-name')
1138 ref = self._create_ref('branch', 'fake-name')
1139 result = self.repo._get_branch_name(ref)
1139 result = self.repo._get_branch_name(ref)
1140 assert result == ref.name
1140 assert result == ref.name
1141
1141
1142 @pytest.mark.parametrize("type_", ("book", "tag"))
1142 @pytest.mark.parametrize("type_", ("book", "tag"))
1143 def test_queries_remote_when_type_is_not_branch(self, type_):
1143 def test_queries_remote_when_type_is_not_branch(self, type_):
1144 ref = self._create_ref(type_, 'wrong-fake-name')
1144 ref = self._create_ref(type_, 'wrong-fake-name')
1145 with mock.patch.object(self.repo, "_remote") as remote_mock:
1145 with mock.patch.object(self.repo, "_remote") as remote_mock:
1146 remote_mock.ctx_branch.return_value = "fake-name"
1146 remote_mock.ctx_branch.return_value = "fake-name"
1147 result = self.repo._get_branch_name(ref)
1147 result = self.repo._get_branch_name(ref)
1148 assert result == "fake-name"
1148 assert result == "fake-name"
1149 remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)
1149 remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)
1150
1150
1151 def _create_ref(self, type_, name):
1151 def _create_ref(self, type_, name):
1152 ref = mock.Mock()
1152 ref = mock.Mock()
1153 ref.type = type_
1153 ref.type = type_
1154 ref.name = name
1154 ref.name = name
1155 ref.commit_id = "deadbeef"
1155 ref.commit_id = "deadbeef"
1156 return ref
1156 return ref
1157
1157
1158
1158
1159 class TestIsTheSameBranch(object):
1159 class TestIsTheSameBranch(object):
1160 def test_returns_true_when_branches_are_equal(self):
1160 def test_returns_true_when_branches_are_equal(self):
1161 source_ref = mock.Mock(name="source-ref")
1161 source_ref = mock.Mock(name="source-ref")
1162 target_ref = mock.Mock(name="target-ref")
1162 target_ref = mock.Mock(name="target-ref")
1163 branch_name_patcher = mock.patch.object(
1163 branch_name_patcher = mock.patch.object(
1164 self.repo, "_get_branch_name", return_value="default")
1164 self.repo, "_get_branch_name", return_value="default")
1165 with branch_name_patcher as branch_name_mock:
1165 with branch_name_patcher as branch_name_mock:
1166 result = self.repo._is_the_same_branch(source_ref, target_ref)
1166 result = self.repo._is_the_same_branch(source_ref, target_ref)
1167
1167
1168 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1168 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1169 assert branch_name_mock.call_args_list == expected_calls
1169 assert branch_name_mock.call_args_list == expected_calls
1170 assert result is True
1170 assert result is True
1171
1171
1172 def test_returns_false_when_branches_are_not_equal(self):
1172 def test_returns_false_when_branches_are_not_equal(self):
1173 source_ref = mock.Mock(name="source-ref")
1173 source_ref = mock.Mock(name="source-ref")
1174 source_ref.name = "source-branch"
1174 source_ref.name = "source-branch"
1175 target_ref = mock.Mock(name="target-ref")
1175 target_ref = mock.Mock(name="target-ref")
1176 target_ref.name = "target-branch"
1176 target_ref.name = "target-branch"
1177
1177
1178 def side_effect(ref):
1178 def side_effect(ref):
1179 return ref.name
1179 return ref.name
1180
1180
1181 branch_name_patcher = mock.patch.object(
1181 branch_name_patcher = mock.patch.object(
1182 self.repo, "_get_branch_name", side_effect=side_effect)
1182 self.repo, "_get_branch_name", side_effect=side_effect)
1183 with branch_name_patcher as branch_name_mock:
1183 with branch_name_patcher as branch_name_mock:
1184 result = self.repo._is_the_same_branch(source_ref, target_ref)
1184 result = self.repo._is_the_same_branch(source_ref, target_ref)
1185
1185
1186 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1186 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1187 assert branch_name_mock.call_args_list == expected_calls
1187 assert branch_name_mock.call_args_list == expected_calls
1188 assert result is False
1188 assert result is False
@@ -1,353 +1,351 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Tests the so-called "in memory commits" commit API of vcs.
22 Tests the so-called "in memory commits" commit API of vcs.
23 """
23 """
24 import datetime
24 import datetime
25
25
26 import pytest
26 import pytest
27
27
28 from rhodecode.lib.utils2 import safe_unicode
28 from rhodecode.lib.utils2 import safe_unicode
29 from rhodecode.lib.vcs.exceptions import (
29 from rhodecode.lib.vcs.exceptions import (
30 EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyExistsError,
30 EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyExistsError,
31 NodeAlreadyRemovedError, NodeAlreadyChangedError, NodeDoesNotExistError,
31 NodeAlreadyRemovedError, NodeAlreadyChangedError, NodeDoesNotExistError,
32 NodeNotChangedError)
32 NodeNotChangedError)
33 from rhodecode.lib.vcs.nodes import DirNode, FileNode
33 from rhodecode.lib.vcs.nodes import DirNode, FileNode
34 from rhodecode.tests.vcs.conftest import BackendTestMixin
34 from rhodecode.tests.vcs.conftest import BackendTestMixin
35
35
36
36
37 @pytest.fixture
37 @pytest.fixture
38 def nodes():
38 def nodes():
39 nodes = [
39 nodes = [
40 FileNode('foobar', content='Foo & bar'),
40 FileNode('foobar', content='Foo & bar'),
41 FileNode('foobar2', content='Foo & bar, doubled!'),
41 FileNode('foobar2', content='Foo & bar, doubled!'),
42 FileNode('foo bar with spaces', content=''),
42 FileNode('foo bar with spaces', content=''),
43 FileNode('foo/bar/baz', content='Inside'),
43 FileNode('foo/bar/baz', content='Inside'),
44 FileNode(
44 FileNode(
45 'foo/bar/file.bin',
45 'foo/bar/file.bin',
46 content=(
46 content=(
47 '\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00'
47 '\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00'
48 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe'
48 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe'
49 '\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
49 '\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
50 '\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00'
50 '\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00'
51 '\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00'
51 '\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00'
52 '\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff'
52 '\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff'
53 '\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
53 '\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
54 )
54 )
55 ),
55 ),
56 ]
56 ]
57 return nodes
57 return nodes
58
58
59
59
60 @pytest.mark.usefixtures("vcs_repository_support")
60 @pytest.mark.usefixtures("vcs_repository_support")
61 class TestInMemoryCommit(BackendTestMixin):
61 class TestInMemoryCommit(BackendTestMixin):
62 """
62 """
63 This is a backend-independent test case class which should be created
63 This is a backend-independent test case class which should be created
64 with the ``type`` method.
64 with the ``type`` method.
65
65
66 It is required to set the following attributes in a subclass:
66 It is required to set the following attributes in a subclass:
67
67
68 - ``backend_alias``: alias of used backend (see ``vcs.BACKENDS``)
68 - ``backend_alias``: alias of used backend (see ``vcs.BACKENDS``)
69 """
69 """
70
70
71 @classmethod
71 @classmethod
72 def _get_commits(cls):
72 def _get_commits(cls):
73 return []
73 return []
74
74
75 def test_add(self, nodes):
75 def test_add(self, nodes):
76 for node in nodes:
76 for node in nodes:
77 self.imc.add(node)
77 self.imc.add(node)
78
78
79 self.commit()
79 self.commit()
80 self.assert_succesful_commit(nodes)
80 self.assert_succesful_commit(nodes)
81
81
82 @pytest.mark.backends("hg")
82 @pytest.mark.backends("hg")
83 def test_add_on_branch_hg(self, nodes):
83 def test_add_on_branch_hg(self, nodes):
84 for node in nodes:
84 for node in nodes:
85 self.imc.add(node)
85 self.imc.add(node)
86 self.commit(branch=u'stable')
86 self.commit(branch=u'stable')
87 self.assert_succesful_commit(nodes)
87 self.assert_succesful_commit(nodes)
88
88
89 @pytest.mark.backends("git")
89 @pytest.mark.backends("git")
90 def test_add_on_branch_git(self, nodes):
90 def test_add_on_branch_git(self, nodes):
91 self.repo._checkout('stable', create=True)
91 self.repo._checkout('stable', create=True)
92
92
93 for node in nodes:
93 for node in nodes:
94 self.imc.add(node)
94 self.imc.add(node)
95 self.commit(branch=u'stable')
95 self.commit(branch=u'stable')
96 self.assert_succesful_commit(nodes)
96 self.assert_succesful_commit(nodes)
97
97
98 def test_add_in_bulk(self, nodes):
98 def test_add_in_bulk(self, nodes):
99 self.imc.add(*nodes)
99 self.imc.add(*nodes)
100
100
101 self.commit()
101 self.commit()
102 self.assert_succesful_commit(nodes)
102 self.assert_succesful_commit(nodes)
103
103
104 def test_add_non_ascii_files(self):
104 def test_add_non_ascii_files(self):
105 nodes = [
105 nodes = [
106 FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko_utf8_str', content='Δ‡Δ‡Δ‡Δ‡'),
106 FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko_utf8_str', content='Δ‡Δ‡Δ‡Δ‡'),
107 FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_unicode', content=u'Δ‡Δ‡Δ‡Δ‡'),
107 FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_unicode', content=u'Δ‡Δ‡Δ‡Δ‡'),
108 ]
108 ]
109
109
110 for node in nodes:
110 for node in nodes:
111 self.imc.add(node)
111 self.imc.add(node)
112
112
113 self.commit()
113 self.commit()
114 self.assert_succesful_commit(nodes)
114 self.assert_succesful_commit(nodes)
115
115
116 def commit(self, branch=None):
116 def commit(self, branch=None):
117 self.old_commit_count = len(self.repo.commit_ids)
117 self.old_commit_count = len(self.repo.commit_ids)
118 self.commit_message = u'Test commit with unicode: ΕΌΓ³Ε‚wik'
118 self.commit_message = u'Test commit with unicode: ΕΌΓ³Ε‚wik'
119 self.commit_author = unicode(self.__class__)
119 self.commit_author = u'{} <foo@email.com>'.format(self.__class__.__name__)
120 self.commit = self.imc.commit(
120 self.commit = self.imc.commit(
121 message=self.commit_message, author=self.commit_author,
121 message=self.commit_message, author=self.commit_author,
122 branch=branch)
122 branch=branch)
123
123
124 def test_add_actually_adds_all_nodes_at_second_commit_too(self):
124 def test_add_actually_adds_all_nodes_at_second_commit_too(self):
125 to_add = [
125 to_add = [
126 FileNode('foo/bar/image.png', content='\0'),
126 FileNode('foo/bar/image.png', content='\0'),
127 FileNode('foo/README.txt', content='readme!'),
127 FileNode('foo/README.txt', content='readme!'),
128 ]
128 ]
129 self.imc.add(*to_add)
129 self.imc.add(*to_add)
130 commit = self.imc.commit(u'Initial', u'joe.doe@example.com')
130 commit = self.imc.commit(u'Initial', u'joe doe <joe.doe@example.com>')
131 assert isinstance(commit.get_node('foo'), DirNode)
131 assert isinstance(commit.get_node('foo'), DirNode)
132 assert isinstance(commit.get_node('foo/bar'), DirNode)
132 assert isinstance(commit.get_node('foo/bar'), DirNode)
133 self.assert_nodes_in_commit(commit, to_add)
133 self.assert_nodes_in_commit(commit, to_add)
134
134
135 # commit some more files again
135 # commit some more files again
136 to_add = [
136 to_add = [
137 FileNode('foo/bar/foobaz/bar', content='foo'),
137 FileNode('foo/bar/foobaz/bar', content='foo'),
138 FileNode('foo/bar/another/bar', content='foo'),
138 FileNode('foo/bar/another/bar', content='foo'),
139 FileNode('foo/baz.txt', content='foo'),
139 FileNode('foo/baz.txt', content='foo'),
140 FileNode('foobar/foobaz/file', content='foo'),
140 FileNode('foobar/foobaz/file', content='foo'),
141 FileNode('foobar/barbaz', content='foo'),
141 FileNode('foobar/barbaz', content='foo'),
142 ]
142 ]
143 self.imc.add(*to_add)
143 self.imc.add(*to_add)
144 commit = self.imc.commit(u'Another', u'joe.doe@example.com')
144 commit = self.imc.commit(u'Another', u'joe doe <joe.doe@example.com>')
145 self.assert_nodes_in_commit(commit, to_add)
145 self.assert_nodes_in_commit(commit, to_add)
146
146
147 def test_add_raise_already_added(self):
147 def test_add_raise_already_added(self):
148 node = FileNode('foobar', content='baz')
148 node = FileNode('foobar', content='baz')
149 self.imc.add(node)
149 self.imc.add(node)
150 with pytest.raises(NodeAlreadyAddedError):
150 with pytest.raises(NodeAlreadyAddedError):
151 self.imc.add(node)
151 self.imc.add(node)
152
152
153 def test_check_integrity_raise_already_exist(self):
153 def test_check_integrity_raise_already_exist(self):
154 node = FileNode('foobar', content='baz')
154 node = FileNode('foobar', content='baz')
155 self.imc.add(node)
155 self.imc.add(node)
156 self.imc.commit(message=u'Added foobar', author=unicode(self))
156 self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
157 self.imc.add(node)
157 self.imc.add(node)
158 with pytest.raises(NodeAlreadyExistsError):
158 with pytest.raises(NodeAlreadyExistsError):
159 self.imc.commit(message='new message', author=str(self))
159 self.imc.commit(message='new message', author=u'{} <foo@bar.com>'.format(self))
160
160
161 def test_change(self):
161 def test_change(self):
162 self.imc.add(FileNode('foo/bar/baz', content='foo'))
162 self.imc.add(FileNode('foo/bar/baz', content='foo'))
163 self.imc.add(FileNode('foo/fbar', content='foobar'))
163 self.imc.add(FileNode('foo/fbar', content='foobar'))
164 tip = self.imc.commit(u'Initial', u'joe.doe@example.com')
164 tip = self.imc.commit(u'Initial', u'joe doe <joe.doe@example.com>')
165
165
166 # Change node's content
166 # Change node's content
167 node = FileNode('foo/bar/baz', content='My **changed** content')
167 node = FileNode('foo/bar/baz', content='My **changed** content')
168 self.imc.change(node)
168 self.imc.change(node)
169 self.imc.commit(u'Changed %s' % node.path, u'joe.doe@example.com')
169 self.imc.commit(u'Changed %s' % node.path, u'joe doe <joe.doe@example.com>')
170
170
171 newtip = self.repo.get_commit()
171 newtip = self.repo.get_commit()
172 assert tip != newtip
172 assert tip != newtip
173 assert tip.id != newtip.id
173 assert tip.id != newtip.id
174 self.assert_nodes_in_commit(newtip, (node,))
174 self.assert_nodes_in_commit(newtip, (node,))
175
175
176 def test_change_non_ascii(self):
176 def test_change_non_ascii(self):
177 to_add = [
177 to_add = [
178 FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='Δ‡Δ‡Δ‡Δ‡'),
178 FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='Δ‡Δ‡Δ‡Δ‡'),
179 FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'Δ‡Δ‡Δ‡Δ‡'),
179 FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'Δ‡Δ‡Δ‡Δ‡'),
180 ]
180 ]
181 for node in to_add:
181 for node in to_add:
182 self.imc.add(node)
182 self.imc.add(node)
183
183
184 tip = self.imc.commit(u'Initial', u'joe.doe@example.com')
184 tip = self.imc.commit(u'Initial', u'joe doe <joe.doe@example.com>')
185
185
186 # Change node's content
186 # Change node's content
187 node = FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='My **changed** content')
187 node = FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='My **changed** content')
188 self.imc.change(node)
188 self.imc.change(node)
189 self.imc.commit(u'Changed %s' % safe_unicode(node.path),
189 self.imc.commit(u'Changed %s' % safe_unicode(node.path),
190 u'joe.doe@example.com')
190 author=u'joe doe <joe.doe@example.com>')
191
191
192 node_uni = FileNode(
192 node_uni = FileNode(
193 u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'My **changed** content')
193 u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'My **changed** content')
194 self.imc.change(node_uni)
194 self.imc.change(node_uni)
195 self.imc.commit(u'Changed %s' % safe_unicode(node_uni.path),
195 self.imc.commit(u'Changed %s' % safe_unicode(node_uni.path),
196 u'joe.doe@example.com')
196 author=u'joe doe <joe.doe@example.com>')
197
197
198 newtip = self.repo.get_commit()
198 newtip = self.repo.get_commit()
199 assert tip != newtip
199 assert tip != newtip
200 assert tip.id != newtip.id
200 assert tip.id != newtip.id
201
201
202 self.assert_nodes_in_commit(newtip, (node, node_uni))
202 self.assert_nodes_in_commit(newtip, (node, node_uni))
203
203
204 def test_change_raise_empty_repository(self):
204 def test_change_raise_empty_repository(self):
205 node = FileNode('foobar')
205 node = FileNode('foobar')
206 with pytest.raises(EmptyRepositoryError):
206 with pytest.raises(EmptyRepositoryError):
207 self.imc.change(node)
207 self.imc.change(node)
208
208
209 def test_check_integrity_change_raise_node_does_not_exist(self):
209 def test_check_integrity_change_raise_node_does_not_exist(self):
210 node = FileNode('foobar', content='baz')
210 node = FileNode('foobar', content='baz')
211 self.imc.add(node)
211 self.imc.add(node)
212 self.imc.commit(message=u'Added foobar', author=unicode(self))
212 self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
213 node = FileNode('not-foobar', content='')
213 node = FileNode('not-foobar', content='')
214 self.imc.change(node)
214 self.imc.change(node)
215 with pytest.raises(NodeDoesNotExistError):
215 with pytest.raises(NodeDoesNotExistError):
216 self.imc.commit(
216 self.imc.commit(message='Changed not existing node', author=u'{} <foo@bar.com>'.format(self))
217 message='Changed not existing node',
218 author=str(self))
219
217
220 def test_change_raise_node_already_changed(self):
218 def test_change_raise_node_already_changed(self):
221 node = FileNode('foobar', content='baz')
219 node = FileNode('foobar', content='baz')
222 self.imc.add(node)
220 self.imc.add(node)
223 self.imc.commit(message=u'Added foobar', author=unicode(self))
221 self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
224 node = FileNode('foobar', content='more baz')
222 node = FileNode('foobar', content='more baz')
225 self.imc.change(node)
223 self.imc.change(node)
226 with pytest.raises(NodeAlreadyChangedError):
224 with pytest.raises(NodeAlreadyChangedError):
227 self.imc.change(node)
225 self.imc.change(node)
228
226
229 def test_check_integrity_change_raise_node_not_changed(self, nodes):
227 def test_check_integrity_change_raise_node_not_changed(self, nodes):
230 self.test_add(nodes) # Performs first commit
228 self.test_add(nodes) # Performs first commit
231
229
232 node = FileNode(nodes[0].path, content=nodes[0].content)
230 node = FileNode(nodes[0].path, content=nodes[0].content)
233 self.imc.change(node)
231 self.imc.change(node)
234 with pytest.raises(NodeNotChangedError):
232 with pytest.raises(NodeNotChangedError):
235 self.imc.commit(
233 self.imc.commit(
236 message=u'Trying to mark node as changed without touching it',
234 message=u'Trying to mark node as changed without touching it',
237 author=unicode(self))
235 author=u'{} <foo@bar.com>'.format(self))
238
236
239 def test_change_raise_node_already_removed(self):
237 def test_change_raise_node_already_removed(self):
240 node = FileNode('foobar', content='baz')
238 node = FileNode('foobar', content='baz')
241 self.imc.add(node)
239 self.imc.add(node)
242 self.imc.commit(message=u'Added foobar', author=unicode(self))
240 self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
243 self.imc.remove(FileNode('foobar'))
241 self.imc.remove(FileNode('foobar'))
244 with pytest.raises(NodeAlreadyRemovedError):
242 with pytest.raises(NodeAlreadyRemovedError):
245 self.imc.change(node)
243 self.imc.change(node)
246
244
247 def test_remove(self, nodes):
245 def test_remove(self, nodes):
248 self.test_add(nodes) # Performs first commit
246 self.test_add(nodes) # Performs first commit
249
247
250 tip = self.repo.get_commit()
248 tip = self.repo.get_commit()
251 node = nodes[0]
249 node = nodes[0]
252 assert node.content == tip.get_node(node.path).content
250 assert node.content == tip.get_node(node.path).content
253 self.imc.remove(node)
251 self.imc.remove(node)
254 self.imc.commit(
252 self.imc.commit(
255 message=u'Removed %s' % node.path, author=unicode(self))
253 message=u'Removed %s' % node.path, author=u'{} <foo@bar.com>'.format(self))
256
254
257 newtip = self.repo.get_commit()
255 newtip = self.repo.get_commit()
258 assert tip != newtip
256 assert tip != newtip
259 assert tip.id != newtip.id
257 assert tip.id != newtip.id
260 with pytest.raises(NodeDoesNotExistError):
258 with pytest.raises(NodeDoesNotExistError):
261 newtip.get_node(node.path)
259 newtip.get_node(node.path)
262
260
263 def test_remove_last_file_from_directory(self):
261 def test_remove_last_file_from_directory(self):
264 node = FileNode('omg/qwe/foo/bar', content='foobar')
262 node = FileNode('omg/qwe/foo/bar', content='foobar')
265 self.imc.add(node)
263 self.imc.add(node)
266 self.imc.commit(u'added', u'joe doe')
264 self.imc.commit(u'added', author=u'joe doe <joe@doe.com>')
267
265
268 self.imc.remove(node)
266 self.imc.remove(node)
269 tip = self.imc.commit(u'removed', u'joe doe')
267 tip = self.imc.commit(u'removed', u'joe doe <joe@doe.com>')
270 with pytest.raises(NodeDoesNotExistError):
268 with pytest.raises(NodeDoesNotExistError):
271 tip.get_node('omg/qwe/foo/bar')
269 tip.get_node('omg/qwe/foo/bar')
272
270
273 def test_remove_raise_node_does_not_exist(self, nodes):
271 def test_remove_raise_node_does_not_exist(self, nodes):
274 self.imc.remove(nodes[0])
272 self.imc.remove(nodes[0])
275 with pytest.raises(NodeDoesNotExistError):
273 with pytest.raises(NodeDoesNotExistError):
276 self.imc.commit(
274 self.imc.commit(
277 message='Trying to remove node at empty repository',
275 message='Trying to remove node at empty repository',
278 author=str(self))
276 author=u'{} <foo@bar.com>'.format(self))
279
277
280 def test_check_integrity_remove_raise_node_does_not_exist(self, nodes):
278 def test_check_integrity_remove_raise_node_does_not_exist(self, nodes):
281 self.test_add(nodes) # Performs first commit
279 self.test_add(nodes) # Performs first commit
282
280
283 node = FileNode('no-such-file')
281 node = FileNode('no-such-file')
284 self.imc.remove(node)
282 self.imc.remove(node)
285 with pytest.raises(NodeDoesNotExistError):
283 with pytest.raises(NodeDoesNotExistError):
286 self.imc.commit(
284 self.imc.commit(
287 message=u'Trying to remove not existing node',
285 message=u'Trying to remove not existing node',
288 author=unicode(self))
286 author=u'{} <foo@bar.com>'.format(self))
289
287
290 def test_remove_raise_node_already_removed(self, nodes):
288 def test_remove_raise_node_already_removed(self, nodes):
291 self.test_add(nodes) # Performs first commit
289 self.test_add(nodes) # Performs first commit
292
290
293 node = FileNode(nodes[0].path)
291 node = FileNode(nodes[0].path)
294 self.imc.remove(node)
292 self.imc.remove(node)
295 with pytest.raises(NodeAlreadyRemovedError):
293 with pytest.raises(NodeAlreadyRemovedError):
296 self.imc.remove(node)
294 self.imc.remove(node)
297
295
298 def test_remove_raise_node_already_changed(self, nodes):
296 def test_remove_raise_node_already_changed(self, nodes):
299 self.test_add(nodes) # Performs first commit
297 self.test_add(nodes) # Performs first commit
300
298
301 node = FileNode(nodes[0].path, content='Bending time')
299 node = FileNode(nodes[0].path, content='Bending time')
302 self.imc.change(node)
300 self.imc.change(node)
303 with pytest.raises(NodeAlreadyChangedError):
301 with pytest.raises(NodeAlreadyChangedError):
304 self.imc.remove(node)
302 self.imc.remove(node)
305
303
306 def test_reset(self):
304 def test_reset(self):
307 self.imc.add(FileNode('foo', content='bar'))
305 self.imc.add(FileNode('foo', content='bar'))
308 # self.imc.change(FileNode('baz', content='new'))
306 # self.imc.change(FileNode('baz', content='new'))
309 # self.imc.remove(FileNode('qwe'))
307 # self.imc.remove(FileNode('qwe'))
310 self.imc.reset()
308 self.imc.reset()
311 assert not any((self.imc.added, self.imc.changed, self.imc.removed))
309 assert not any((self.imc.added, self.imc.changed, self.imc.removed))
312
310
313 def test_multiple_commits(self):
311 def test_multiple_commits(self):
314 N = 3 # number of commits to perform
312 N = 3 # number of commits to perform
315 last = None
313 last = None
316 for x in xrange(N):
314 for x in xrange(N):
317 fname = 'file%s' % str(x).rjust(5, '0')
315 fname = 'file%s' % str(x).rjust(5, '0')
318 content = 'foobar\n' * x
316 content = 'foobar\n' * x
319 node = FileNode(fname, content=content)
317 node = FileNode(fname, content=content)
320 self.imc.add(node)
318 self.imc.add(node)
321 commit = self.imc.commit(u"Commit no. %s" % (x + 1), author=u'vcs')
319 commit = self.imc.commit(u"Commit no. %s" % (x + 1), author=u'vcs <foo@bar.com>')
322 assert last != commit
320 assert last != commit
323 last = commit
321 last = commit
324
322
325 # Check commit number for same repo
323 # Check commit number for same repo
326 assert len(self.repo.commit_ids) == N
324 assert len(self.repo.commit_ids) == N
327
325
328 # Check commit number for recreated repo
326 # Check commit number for recreated repo
329 repo = self.Backend(self.repo_path)
327 repo = self.Backend(self.repo_path)
330 assert len(repo.commit_ids) == N
328 assert len(repo.commit_ids) == N
331
329
332 def test_date_attr(self, local_dt_to_utc):
330 def test_date_attr(self, local_dt_to_utc):
333 node = FileNode('foobar.txt', content='Foobared!')
331 node = FileNode('foobar.txt', content='Foobared!')
334 self.imc.add(node)
332 self.imc.add(node)
335 date = datetime.datetime(1985, 1, 30, 1, 45)
333 date = datetime.datetime(1985, 1, 30, 1, 45)
336 commit = self.imc.commit(
334 commit = self.imc.commit(
337 u"Committed at time when I was born ;-)",
335 u"Committed at time when I was born ;-)",
338 author=u'lb', date=date)
336 author=u'{} <foo@bar.com>'.format(self), date=date)
339
337
340 assert commit.date == local_dt_to_utc(date)
338 assert commit.date == local_dt_to_utc(date)
341
339
342 def assert_succesful_commit(self, added_nodes):
340 def assert_succesful_commit(self, added_nodes):
343 newtip = self.repo.get_commit()
341 newtip = self.repo.get_commit()
344 assert self.commit == newtip
342 assert self.commit == newtip
345 assert self.old_commit_count + 1 == len(self.repo.commit_ids)
343 assert self.old_commit_count + 1 == len(self.repo.commit_ids)
346 assert newtip.message == self.commit_message
344 assert newtip.message == self.commit_message
347 assert newtip.author == self.commit_author
345 assert newtip.author == self.commit_author
348 assert not any((self.imc.added, self.imc.changed, self.imc.removed))
346 assert not any((self.imc.added, self.imc.changed, self.imc.removed))
349 self.assert_nodes_in_commit(newtip, added_nodes)
347 self.assert_nodes_in_commit(newtip, added_nodes)
350
348
351 def assert_nodes_in_commit(self, commit, nodes):
349 def assert_nodes_in_commit(self, commit, nodes):
352 for node in nodes:
350 for node in nodes:
353 assert commit.get_node(node.path).content == node.content
351 assert commit.get_node(node.path).content == node.content
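# A minimal sketch of the in-memory commit flow exercised by the tests above,
# assuming `repo` is an already created backend repository instance; note the
# author string in the 'Full Name <email>' format the updated tests now use.
from rhodecode.lib.vcs.nodes import FileNode

def make_example_commit(repo):
    imc = repo.in_memory_commit
    imc.add(FileNode('docs/example.txt', content='example content'))
    return imc.commit(
        message=u'Add example file',
        author=u'Joe Doe <joe.doe@example.com>')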
@@ -1,552 +1,552 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22 from urllib2 import URLError
22 from urllib2 import URLError
23
23
24 import mock
24 import mock
25 import pytest
25 import pytest
26
26
27 from rhodecode.lib.vcs import backends
27 from rhodecode.lib.vcs import backends
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 Config, BaseInMemoryCommit, Reference, MergeResponse, MergeFailureReason)
29 Config, BaseInMemoryCommit, Reference, MergeResponse, MergeFailureReason)
30 from rhodecode.lib.vcs.exceptions import VCSError, RepositoryError
30 from rhodecode.lib.vcs.exceptions import VCSError, RepositoryError
31 from rhodecode.lib.vcs.nodes import FileNode
31 from rhodecode.lib.vcs.nodes import FileNode
32 from rhodecode.tests.vcs.conftest import BackendTestMixin
32 from rhodecode.tests.vcs.conftest import BackendTestMixin
33 from rhodecode.tests import repo_id_generator
33 from rhodecode.tests import repo_id_generator
34
34
35
35
36 @pytest.mark.usefixtures("vcs_repository_support")
36 @pytest.mark.usefixtures("vcs_repository_support")
37 class TestRepositoryBase(BackendTestMixin):
37 class TestRepositoryBase(BackendTestMixin):
38 recreate_repo_per_test = False
38 recreate_repo_per_test = False
39
39
40 def test_init_accepts_unicode_path(self, tmpdir):
40 def test_init_accepts_unicode_path(self, tmpdir):
41 path = unicode(tmpdir.join(u'unicode Γ€'))
41 path = unicode(tmpdir.join(u'unicode Γ€'))
42 self.Backend(path, create=True)
42 self.Backend(path, create=True)
43
43
44 def test_init_accepts_str_path(self, tmpdir):
44 def test_init_accepts_str_path(self, tmpdir):
45 path = str(tmpdir.join('str Γ€'))
45 path = str(tmpdir.join('str Γ€'))
46 self.Backend(path, create=True)
46 self.Backend(path, create=True)
47
47
48 def test_init_fails_if_path_does_not_exist(self, tmpdir):
48 def test_init_fails_if_path_does_not_exist(self, tmpdir):
49 path = unicode(tmpdir.join('i-do-not-exist'))
49 path = unicode(tmpdir.join('i-do-not-exist'))
50 with pytest.raises(VCSError):
50 with pytest.raises(VCSError):
51 self.Backend(path)
51 self.Backend(path)
52
52
53 def test_init_fails_if_path_is_not_a_valid_repository(self, tmpdir):
53 def test_init_fails_if_path_is_not_a_valid_repository(self, tmpdir):
54 path = unicode(tmpdir.mkdir(u'unicode Γ€'))
54 path = unicode(tmpdir.mkdir(u'unicode Γ€'))
55 with pytest.raises(VCSError):
55 with pytest.raises(VCSError):
56 self.Backend(path)
56 self.Backend(path)
57
57
58 def test_has_commits_attribute(self):
58 def test_has_commits_attribute(self):
59 self.repo.commit_ids
59 self.repo.commit_ids
60
60
61 def test_name(self):
61 def test_name(self):
62 assert self.repo.name.startswith('vcs-test')
62 assert self.repo.name.startswith('vcs-test')
63
63
64 @pytest.mark.backends("hg", "git")
64 @pytest.mark.backends("hg", "git")
65 def test_has_default_branch_name(self):
65 def test_has_default_branch_name(self):
66 assert self.repo.DEFAULT_BRANCH_NAME is not None
66 assert self.repo.DEFAULT_BRANCH_NAME is not None
67
67
68 @pytest.mark.backends("svn")
68 @pytest.mark.backends("svn")
69 def test_has_no_default_branch_name(self):
69 def test_has_no_default_branch_name(self):
70 assert self.repo.DEFAULT_BRANCH_NAME is None
70 assert self.repo.DEFAULT_BRANCH_NAME is None
71
71
72 def test_has_empty_commit(self):
72 def test_has_empty_commit(self):
73 assert self.repo.EMPTY_COMMIT_ID is not None
73 assert self.repo.EMPTY_COMMIT_ID is not None
74 assert self.repo.EMPTY_COMMIT is not None
74 assert self.repo.EMPTY_COMMIT is not None
75
75
76 def test_empty_changeset_is_deprecated(self):
76 def test_empty_changeset_is_deprecated(self):
77 def get_empty_changeset(repo):
77 def get_empty_changeset(repo):
78 return repo.EMPTY_CHANGESET
78 return repo.EMPTY_CHANGESET
79 pytest.deprecated_call(get_empty_changeset, self.repo)
79 pytest.deprecated_call(get_empty_changeset, self.repo)
80
80
81 def test_bookmarks(self):
81 def test_bookmarks(self):
82 assert len(self.repo.bookmarks) == 0
82 assert len(self.repo.bookmarks) == 0
83
83
84 # TODO: Cover two cases: Local repo path, remote URL
84 # TODO: Cover two cases: Local repo path, remote URL
85 def test_check_url(self):
85 def test_check_url(self):
86 config = Config()
86 config = Config()
87 assert self.Backend.check_url(self.repo.path, config)
87 assert self.Backend.check_url(self.repo.path, config)
88
88
89 def test_check_url_invalid(self):
89 def test_check_url_invalid(self):
90 config = Config()
90 config = Config()
91 with pytest.raises(URLError):
91 with pytest.raises(URLError):
92 self.Backend.check_url(self.repo.path + "invalid", config)
92 self.Backend.check_url(self.repo.path + "invalid", config)
93
93
94 def test_get_contact(self):
94 def test_get_contact(self):
95 assert self.repo.contact
95 assert self.repo.contact
96
96
97 def test_get_description(self):
97 def test_get_description(self):
98 assert self.repo.description
98 assert self.repo.description
99
99
100 def test_get_hook_location(self):
100 def test_get_hook_location(self):
101 assert len(self.repo.get_hook_location()) != 0
101 assert len(self.repo.get_hook_location()) != 0
102
102
103 def test_last_change(self, local_dt_to_utc):
103 def test_last_change(self, local_dt_to_utc):
104 assert self.repo.last_change >= local_dt_to_utc(
104 assert self.repo.last_change >= local_dt_to_utc(
105 datetime.datetime(2010, 1, 1, 21, 0))
105 datetime.datetime(2010, 1, 1, 21, 0))
106
106
107 def test_last_change_in_empty_repository(self, vcsbackend, local_dt_to_utc):
107 def test_last_change_in_empty_repository(self, vcsbackend, local_dt_to_utc):
108 delta = datetime.timedelta(seconds=1)
108 delta = datetime.timedelta(seconds=1)
109
109
110 start = local_dt_to_utc(datetime.datetime.now())
110 start = local_dt_to_utc(datetime.datetime.now())
111 empty_repo = vcsbackend.create_repo()
111 empty_repo = vcsbackend.create_repo()
112 now = local_dt_to_utc(datetime.datetime.now())
112 now = local_dt_to_utc(datetime.datetime.now())
113 assert empty_repo.last_change >= start - delta
113 assert empty_repo.last_change >= start - delta
114 assert empty_repo.last_change <= now + delta
114 assert empty_repo.last_change <= now + delta
115
115
116 def test_repo_equality(self):
116 def test_repo_equality(self):
117 assert self.repo == self.repo
117 assert self.repo == self.repo
118
118
119 def test_repo_equality_broken_object(self):
119 def test_repo_equality_broken_object(self):
120 import copy
120 import copy
121 _repo = copy.copy(self.repo)
121 _repo = copy.copy(self.repo)
122 delattr(_repo, 'path')
122 delattr(_repo, 'path')
123 assert self.repo != _repo
123 assert self.repo != _repo
124
124
125 def test_repo_equality_other_object(self):
125 def test_repo_equality_other_object(self):
126 class dummy(object):
126 class dummy(object):
127 path = self.repo.path
127 path = self.repo.path
128 assert self.repo != dummy()
128 assert self.repo != dummy()
129
129
130 def test_get_commit_is_implemented(self):
130 def test_get_commit_is_implemented(self):
131 self.repo.get_commit()
131 self.repo.get_commit()
132
132
133 def test_get_commits_is_implemented(self):
133 def test_get_commits_is_implemented(self):
134 commit_iter = iter(self.repo.get_commits())
134 commit_iter = iter(self.repo.get_commits())
135 commit = next(commit_iter)
135 commit = next(commit_iter)
136 assert commit.idx == 0
136 assert commit.idx == 0
137
137
138 def test_supports_iteration(self):
138 def test_supports_iteration(self):
139 repo_iter = iter(self.repo)
139 repo_iter = iter(self.repo)
140 commit = next(repo_iter)
140 commit = next(repo_iter)
141 assert commit.idx == 0
141 assert commit.idx == 0
142
142
143 def test_in_memory_commit(self):
143 def test_in_memory_commit(self):
144 imc = self.repo.in_memory_commit
144 imc = self.repo.in_memory_commit
145 assert isinstance(imc, BaseInMemoryCommit)
145 assert isinstance(imc, BaseInMemoryCommit)
146
146
147 @pytest.mark.backends("hg")
147 @pytest.mark.backends("hg")
148 def test__get_url_unicode(self):
148 def test__get_url_unicode(self):
149 url = u'/home/repos/malmΓΆ'
149 url = u'/home/repos/malmΓΆ'
150 assert self.repo._get_url(url)
150 assert self.repo._get_url(url)
151
151
152
152
153 @pytest.mark.usefixtures("vcs_repository_support")
153 @pytest.mark.usefixtures("vcs_repository_support")
154 class TestDeprecatedRepositoryAPI(BackendTestMixin):
154 class TestDeprecatedRepositoryAPI(BackendTestMixin):
155 recreate_repo_per_test = False
155 recreate_repo_per_test = False
156
156
157 def test_revisions_is_deprecated(self):
157 def test_revisions_is_deprecated(self):
158 def get_revisions(repo):
158 def get_revisions(repo):
159 return repo.revisions
159 return repo.revisions
160 pytest.deprecated_call(get_revisions, self.repo)
160 pytest.deprecated_call(get_revisions, self.repo)
161
161
162 def test_get_changeset_is_deprecated(self):
162 def test_get_changeset_is_deprecated(self):
163 pytest.deprecated_call(self.repo.get_changeset)
163 pytest.deprecated_call(self.repo.get_changeset)
164
164
165 def test_get_changesets_is_deprecated(self):
165 def test_get_changesets_is_deprecated(self):
166 pytest.deprecated_call(self.repo.get_changesets)
166 pytest.deprecated_call(self.repo.get_changesets)
167
167
168 def test_in_memory_changeset_is_deprecated(self):
168 def test_in_memory_changeset_is_deprecated(self):
169 def get_imc(repo):
169 def get_imc(repo):
170 return repo.in_memory_changeset
170 return repo.in_memory_changeset
171 pytest.deprecated_call(get_imc, self.repo)
171 pytest.deprecated_call(get_imc, self.repo)
172
172
173
173
174 # TODO: these tests are incomplete, must check the resulting compare result for
174 # TODO: these tests are incomplete, must check the resulting compare result for
175 # correctness
175 # correctness
176 class TestRepositoryCompare:
176 class TestRepositoryCompare:
177
177
178 @pytest.mark.parametrize('merge', [True, False])
178 @pytest.mark.parametrize('merge', [True, False])
179 def test_compare_commits_of_same_repository(self, vcsbackend, merge):
179 def test_compare_commits_of_same_repository(self, vcsbackend, merge):
180 target_repo = vcsbackend.create_repo(number_of_commits=5)
180 target_repo = vcsbackend.create_repo(number_of_commits=5)
181 target_repo.compare(
181 target_repo.compare(
182 target_repo[1].raw_id, target_repo[3].raw_id, target_repo,
182 target_repo[1].raw_id, target_repo[3].raw_id, target_repo,
183 merge=merge)
183 merge=merge)
184
184
185 @pytest.mark.xfail_backends('svn')
185 @pytest.mark.xfail_backends('svn')
186 @pytest.mark.parametrize('merge', [True, False])
186 @pytest.mark.parametrize('merge', [True, False])
187 def test_compare_cloned_repositories(self, vcsbackend, merge):
187 def test_compare_cloned_repositories(self, vcsbackend, merge):
188 target_repo = vcsbackend.create_repo(number_of_commits=5)
188 target_repo = vcsbackend.create_repo(number_of_commits=5)
189 source_repo = vcsbackend.clone_repo(target_repo)
189 source_repo = vcsbackend.clone_repo(target_repo)
190 assert target_repo != source_repo
190 assert target_repo != source_repo
191
191
192 vcsbackend.add_file(source_repo, 'newfile', 'somecontent')
192 vcsbackend.add_file(source_repo, 'newfile', 'somecontent')
193 source_commit = source_repo.get_commit()
193 source_commit = source_repo.get_commit()
194
194
195 target_repo.compare(
195 target_repo.compare(
196 target_repo[1].raw_id, source_repo[3].raw_id, source_repo,
196 target_repo[1].raw_id, source_repo[3].raw_id, source_repo,
197 merge=merge)
197 merge=merge)
198
198
199 @pytest.mark.xfail_backends('svn')
199 @pytest.mark.xfail_backends('svn')
200 @pytest.mark.parametrize('merge', [True, False])
200 @pytest.mark.parametrize('merge', [True, False])
201 def test_compare_unrelated_repositories(self, vcsbackend, merge):
201 def test_compare_unrelated_repositories(self, vcsbackend, merge):
202 orig = vcsbackend.create_repo(number_of_commits=5)
202 orig = vcsbackend.create_repo(number_of_commits=5)
203 unrelated = vcsbackend.create_repo(number_of_commits=5)
203 unrelated = vcsbackend.create_repo(number_of_commits=5)
204 assert orig != unrelated
204 assert orig != unrelated
205
205
206 orig.compare(
206 orig.compare(
207 orig[1].raw_id, unrelated[3].raw_id, unrelated, merge=merge)
207 orig[1].raw_id, unrelated[3].raw_id, unrelated, merge=merge)
208
208
209
209
210 class TestRepositoryGetCommonAncestor:
210 class TestRepositoryGetCommonAncestor:
211
211
212 def test_get_common_ancestor_from_same_repo_existing(self, vcsbackend):
212 def test_get_common_ancestor_from_same_repo_existing(self, vcsbackend):
213 target_repo = vcsbackend.create_repo(number_of_commits=5)
213 target_repo = vcsbackend.create_repo(number_of_commits=5)
214
214
215 expected_ancestor = target_repo[2].raw_id
215 expected_ancestor = target_repo[2].raw_id
216
216
217 assert target_repo.get_common_ancestor(
217 assert target_repo.get_common_ancestor(
218 commit_id1=target_repo[2].raw_id,
218 commit_id1=target_repo[2].raw_id,
219 commit_id2=target_repo[4].raw_id,
219 commit_id2=target_repo[4].raw_id,
220 repo2=target_repo
220 repo2=target_repo
221 ) == expected_ancestor
221 ) == expected_ancestor
222
222
223 assert target_repo.get_common_ancestor(
223 assert target_repo.get_common_ancestor(
224 commit_id1=target_repo[4].raw_id,
224 commit_id1=target_repo[4].raw_id,
225 commit_id2=target_repo[2].raw_id,
225 commit_id2=target_repo[2].raw_id,
226 repo2=target_repo
226 repo2=target_repo
227 ) == expected_ancestor
227 ) == expected_ancestor
228
228
229 @pytest.mark.xfail_backends("svn")
229 @pytest.mark.xfail_backends("svn")
230 def test_get_common_ancestor_from_cloned_repo_existing(self, vcsbackend):
230 def test_get_common_ancestor_from_cloned_repo_existing(self, vcsbackend):
231 target_repo = vcsbackend.create_repo(number_of_commits=5)
231 target_repo = vcsbackend.create_repo(number_of_commits=5)
232 source_repo = vcsbackend.clone_repo(target_repo)
232 source_repo = vcsbackend.clone_repo(target_repo)
233 assert target_repo != source_repo
233 assert target_repo != source_repo
234
234
235 vcsbackend.add_file(source_repo, 'newfile', 'somecontent')
235 vcsbackend.add_file(source_repo, 'newfile', 'somecontent')
236 source_commit = source_repo.get_commit()
236 source_commit = source_repo.get_commit()
237
237
238 expected_ancestor = target_repo[4].raw_id
238 expected_ancestor = target_repo[4].raw_id
239
239
240 assert target_repo.get_common_ancestor(
240 assert target_repo.get_common_ancestor(
241 commit_id1=target_repo[4].raw_id,
241 commit_id1=target_repo[4].raw_id,
242 commit_id2=source_commit.raw_id,
242 commit_id2=source_commit.raw_id,
243 repo2=source_repo
243 repo2=source_repo
244 ) == expected_ancestor
244 ) == expected_ancestor
245
245
246 assert target_repo.get_common_ancestor(
246 assert target_repo.get_common_ancestor(
247 commit_id1=source_commit.raw_id,
247 commit_id1=source_commit.raw_id,
248 commit_id2=target_repo[4].raw_id,
248 commit_id2=target_repo[4].raw_id,
249 repo2=target_repo
249 repo2=target_repo
250 ) == expected_ancestor
250 ) == expected_ancestor
251
251
252 @pytest.mark.xfail_backends("svn")
252 @pytest.mark.xfail_backends("svn")
253 def test_get_common_ancestor_from_unrelated_repo_missing(self, vcsbackend):
253 def test_get_common_ancestor_from_unrelated_repo_missing(self, vcsbackend):
254 original = vcsbackend.create_repo(number_of_commits=5)
254 original = vcsbackend.create_repo(number_of_commits=5)
255 unrelated = vcsbackend.create_repo(number_of_commits=5)
255 unrelated = vcsbackend.create_repo(number_of_commits=5)
256 assert original != unrelated
256 assert original != unrelated
257
257
258 assert original.get_common_ancestor(
258 assert original.get_common_ancestor(
259 commit_id1=original[0].raw_id,
259 commit_id1=original[0].raw_id,
260 commit_id2=unrelated[0].raw_id,
260 commit_id2=unrelated[0].raw_id,
261 repo2=unrelated
261 repo2=unrelated
262 ) is None
262 ) is None
263
263
264 assert original.get_common_ancestor(
264 assert original.get_common_ancestor(
265 commit_id1=original[-1].raw_id,
265 commit_id1=original[-1].raw_id,
266 commit_id2=unrelated[-1].raw_id,
266 commit_id2=unrelated[-1].raw_id,
267 repo2=unrelated
267 repo2=unrelated
268 ) is None
268 ) is None
269
269
270
270
271 @pytest.mark.backends("git", "hg")
271 @pytest.mark.backends("git", "hg")
272 class TestRepositoryMerge(object):
272 class TestRepositoryMerge(object):
273 def prepare_for_success(self, vcsbackend):
273 def prepare_for_success(self, vcsbackend):
274 self.target_repo = vcsbackend.create_repo(number_of_commits=1)
274 self.target_repo = vcsbackend.create_repo(number_of_commits=1)
275 self.source_repo = vcsbackend.clone_repo(self.target_repo)
275 self.source_repo = vcsbackend.clone_repo(self.target_repo)
276 vcsbackend.add_file(self.target_repo, 'README_MERGE1', 'Version 1')
276 vcsbackend.add_file(self.target_repo, 'README_MERGE1', 'Version 1')
277 vcsbackend.add_file(self.source_repo, 'README_MERGE2', 'Version 2')
277 vcsbackend.add_file(self.source_repo, 'README_MERGE2', 'Version 2')
278 imc = self.source_repo.in_memory_commit
278 imc = self.source_repo.in_memory_commit
279 imc.add(FileNode('file_x', content=self.source_repo.name))
279 imc.add(FileNode('file_x', content=self.source_repo.name))
280 imc.commit(
280 imc.commit(
281 message=u'Automatic commit from repo merge test',
281 message=u'Automatic commit from repo merge test',
282 author=u'Automatic')
282 author=u'Automatic <automatic@rhodecode.com>')
283 self.target_commit = self.target_repo.get_commit()
283 self.target_commit = self.target_repo.get_commit()
284 self.source_commit = self.source_repo.get_commit()
284 self.source_commit = self.source_repo.get_commit()
285 # This only works for Git and Mercurial
285 # This only works for Git and Mercurial
286 default_branch = self.target_repo.DEFAULT_BRANCH_NAME
286 default_branch = self.target_repo.DEFAULT_BRANCH_NAME
287 self.target_ref = Reference('branch', default_branch, self.target_commit.raw_id)
287 self.target_ref = Reference('branch', default_branch, self.target_commit.raw_id)
288 self.source_ref = Reference('branch', default_branch, self.source_commit.raw_id)
288 self.source_ref = Reference('branch', default_branch, self.source_commit.raw_id)
289 self.workspace_id = 'test-merge-{}'.format(vcsbackend.alias)
289 self.workspace_id = 'test-merge-{}'.format(vcsbackend.alias)
290 self.repo_id = repo_id_generator(self.target_repo.path)
290 self.repo_id = repo_id_generator(self.target_repo.path)
291
291
292 def prepare_for_conflict(self, vcsbackend):
292 def prepare_for_conflict(self, vcsbackend):
293 self.target_repo = vcsbackend.create_repo(number_of_commits=1)
293 self.target_repo = vcsbackend.create_repo(number_of_commits=1)
294 self.source_repo = vcsbackend.clone_repo(self.target_repo)
294 self.source_repo = vcsbackend.clone_repo(self.target_repo)
295 vcsbackend.add_file(self.target_repo, 'README_MERGE', 'Version 1')
295 vcsbackend.add_file(self.target_repo, 'README_MERGE', 'Version 1')
296 vcsbackend.add_file(self.source_repo, 'README_MERGE', 'Version 2')
296 vcsbackend.add_file(self.source_repo, 'README_MERGE', 'Version 2')
297 self.target_commit = self.target_repo.get_commit()
297 self.target_commit = self.target_repo.get_commit()
298 self.source_commit = self.source_repo.get_commit()
298 self.source_commit = self.source_repo.get_commit()
299 # This only works for Git and Mercurial
299 # This only works for Git and Mercurial
300 default_branch = self.target_repo.DEFAULT_BRANCH_NAME
300 default_branch = self.target_repo.DEFAULT_BRANCH_NAME
301 self.target_ref = Reference('branch', default_branch, self.target_commit.raw_id)
301 self.target_ref = Reference('branch', default_branch, self.target_commit.raw_id)
302 self.source_ref = Reference('branch', default_branch, self.source_commit.raw_id)
302 self.source_ref = Reference('branch', default_branch, self.source_commit.raw_id)
303 self.workspace_id = 'test-merge-{}'.format(vcsbackend.alias)
303 self.workspace_id = 'test-merge-{}'.format(vcsbackend.alias)
304 self.repo_id = repo_id_generator(self.target_repo.path)
304 self.repo_id = repo_id_generator(self.target_repo.path)
305
305
306 def test_merge_success(self, vcsbackend):
306 def test_merge_success(self, vcsbackend):
307 self.prepare_for_success(vcsbackend)
307 self.prepare_for_success(vcsbackend)
308
308
309 merge_response = self.target_repo.merge(
309 merge_response = self.target_repo.merge(
310 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
310 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
311 self.source_ref,
311 self.source_ref,
312 'test user', 'test@rhodecode.com', 'merge message 1',
312 'test user', 'test@rhodecode.com', 'merge message 1',
313 dry_run=False)
313 dry_run=False)
314 expected_merge_response = MergeResponse(
314 expected_merge_response = MergeResponse(
315 True, True, merge_response.merge_ref,
315 True, True, merge_response.merge_ref,
316 MergeFailureReason.NONE)
316 MergeFailureReason.NONE)
317 assert merge_response == expected_merge_response
317 assert merge_response == expected_merge_response
318
318
319 target_repo = backends.get_backend(vcsbackend.alias)(
319 target_repo = backends.get_backend(vcsbackend.alias)(
320 self.target_repo.path)
320 self.target_repo.path)
321 target_commits = list(target_repo.get_commits())
321 target_commits = list(target_repo.get_commits())
322 commit_ids = [c.raw_id for c in target_commits[:-1]]
322 commit_ids = [c.raw_id for c in target_commits[:-1]]
323 assert self.source_ref.commit_id in commit_ids
323 assert self.source_ref.commit_id in commit_ids
324 assert self.target_ref.commit_id in commit_ids
324 assert self.target_ref.commit_id in commit_ids
325
325
326 merge_commit = target_commits[-1]
326 merge_commit = target_commits[-1]
327 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
327 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
328 assert merge_commit.message.strip() == 'merge message 1'
328 assert merge_commit.message.strip() == 'merge message 1'
329 assert merge_commit.author == 'test user <test@rhodecode.com>'
329 assert merge_commit.author == 'test user <test@rhodecode.com>'
330
330
331 # We call it twice to make sure we can handle updates
331 # We call it twice to make sure we can handle updates
332 target_ref = Reference(
332 target_ref = Reference(
333 self.target_ref.type, self.target_ref.name,
333 self.target_ref.type, self.target_ref.name,
334 merge_response.merge_ref.commit_id)
334 merge_response.merge_ref.commit_id)
335
335
336 merge_response = target_repo.merge(
336 merge_response = target_repo.merge(
337 self.repo_id, self.workspace_id, target_ref, self.source_repo, self.source_ref,
337 self.repo_id, self.workspace_id, target_ref, self.source_repo, self.source_ref,
338 'test user', 'test@rhodecode.com', 'merge message 2',
338 'test user', 'test@rhodecode.com', 'merge message 2',
339 dry_run=False)
339 dry_run=False)
340 expected_merge_response = MergeResponse(
340 expected_merge_response = MergeResponse(
341 True, True, merge_response.merge_ref,
341 True, True, merge_response.merge_ref,
342 MergeFailureReason.NONE)
342 MergeFailureReason.NONE)
343 assert merge_response == expected_merge_response
343 assert merge_response == expected_merge_response
344
344
345 target_repo = backends.get_backend(
345 target_repo = backends.get_backend(
346 vcsbackend.alias)(self.target_repo.path)
346 vcsbackend.alias)(self.target_repo.path)
347 merge_commit = target_repo.get_commit(
347 merge_commit = target_repo.get_commit(
348 merge_response.merge_ref.commit_id)
348 merge_response.merge_ref.commit_id)
349 assert merge_commit.message.strip() == 'merge message 1'
349 assert merge_commit.message.strip() == 'merge message 1'
350 assert merge_commit.author == 'test user <test@rhodecode.com>'
350 assert merge_commit.author == 'test user <test@rhodecode.com>'
351
351
352 def test_merge_success_dry_run(self, vcsbackend):
352 def test_merge_success_dry_run(self, vcsbackend):
353 self.prepare_for_success(vcsbackend)
353 self.prepare_for_success(vcsbackend)
354
354
355 merge_response = self.target_repo.merge(
355 merge_response = self.target_repo.merge(
356 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
356 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
357 self.source_ref, dry_run=True)
357 self.source_ref, dry_run=True)
358
358
359 # We call it twice to make sure we can handle updates
359 # We call it twice to make sure we can handle updates
360 merge_response_update = self.target_repo.merge(
360 merge_response_update = self.target_repo.merge(
361 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
361 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
362 self.source_ref, dry_run=True)
362 self.source_ref, dry_run=True)
363
363
364 # Multiple merges may differ in their commit id. Therefore we set the
364 # Multiple merges may differ in their commit id. Therefore we set the
365 # commit id to `None` before comparing the merge responses.
365 # commit id to `None` before comparing the merge responses.
366 new_merge_ref = merge_response.merge_ref._replace(commit_id=None)
366 new_merge_ref = merge_response.merge_ref._replace(commit_id=None)
367 merge_response.merge_ref = new_merge_ref
367 merge_response.merge_ref = new_merge_ref
368
368
369 new_update_merge_ref = merge_response_update.merge_ref._replace(commit_id=None)
369 new_update_merge_ref = merge_response_update.merge_ref._replace(commit_id=None)
370 merge_response_update.merge_ref = new_update_merge_ref
370 merge_response_update.merge_ref = new_update_merge_ref
371
371
372 assert merge_response == merge_response_update
372 assert merge_response == merge_response_update
373 assert merge_response.possible is True
373 assert merge_response.possible is True
374 assert merge_response.executed is False
374 assert merge_response.executed is False
375 assert merge_response.merge_ref
375 assert merge_response.merge_ref
376 assert merge_response.failure_reason is MergeFailureReason.NONE
376 assert merge_response.failure_reason is MergeFailureReason.NONE
377
377
378 @pytest.mark.parametrize('dry_run', [True, False])
378 @pytest.mark.parametrize('dry_run', [True, False])
379 def test_merge_conflict(self, vcsbackend, dry_run):
379 def test_merge_conflict(self, vcsbackend, dry_run):
380 self.prepare_for_conflict(vcsbackend)
380 self.prepare_for_conflict(vcsbackend)
381
381
382 expected_merge_response = MergeResponse(
382 expected_merge_response = MergeResponse(
383 False, False, None, MergeFailureReason.MERGE_FAILED)
383 False, False, None, MergeFailureReason.MERGE_FAILED)
384
384
385 merge_response = self.target_repo.merge(
385 merge_response = self.target_repo.merge(
386 self.repo_id, self.workspace_id, self.target_ref,
386 self.repo_id, self.workspace_id, self.target_ref,
387 self.source_repo, self.source_ref,
387 self.source_repo, self.source_ref,
388 'test_user', 'test@rhodecode.com', 'test message', dry_run=dry_run)
388 'test_user', 'test@rhodecode.com', 'test message', dry_run=dry_run)
389 assert merge_response == expected_merge_response
389 assert merge_response == expected_merge_response
390
390
391 # We call it twice to make sure we can handle updates
391 # We call it twice to make sure we can handle updates
392 merge_response = self.target_repo.merge(
392 merge_response = self.target_repo.merge(
393 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
393 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
394 self.source_ref,
394 self.source_ref,
395 'test_user', 'test@rhodecode.com', 'test message', dry_run=dry_run)
395 'test_user', 'test@rhodecode.com', 'test message', dry_run=dry_run)
396 assert merge_response == expected_merge_response
396 assert merge_response == expected_merge_response
397
397
398 def test_merge_target_is_not_head(self, vcsbackend):
398 def test_merge_target_is_not_head(self, vcsbackend):
399 self.prepare_for_success(vcsbackend)
399 self.prepare_for_success(vcsbackend)
400 target_ref = Reference(
400 target_ref = Reference(
401 self.target_ref.type, self.target_ref.name, '0' * 40)
401 self.target_ref.type, self.target_ref.name, '0' * 40)
402 expected_merge_response = MergeResponse(
402 expected_merge_response = MergeResponse(
403 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
403 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
404 metadata={'target_ref': target_ref})
404 metadata={'target_ref': target_ref})
405 merge_response = self.target_repo.merge(
405 merge_response = self.target_repo.merge(
406 self.repo_id, self.workspace_id, target_ref, self.source_repo,
406 self.repo_id, self.workspace_id, target_ref, self.source_repo,
407 self.source_ref, dry_run=True)
407 self.source_ref, dry_run=True)
408
408
409 assert merge_response == expected_merge_response
409 assert merge_response == expected_merge_response
410
410
411 def test_merge_missing_source_reference(self, vcsbackend):
411 def test_merge_missing_source_reference(self, vcsbackend):
412 self.prepare_for_success(vcsbackend)
412 self.prepare_for_success(vcsbackend)
413
413
414 source_ref = Reference(
414 source_ref = Reference(
415 self.source_ref.type, 'not_existing', self.source_ref.commit_id)
415 self.source_ref.type, 'not_existing', self.source_ref.commit_id)
416 expected_merge_response = MergeResponse(
416 expected_merge_response = MergeResponse(
417 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
417 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
418 metadata={'source_ref': source_ref})
418 metadata={'source_ref': source_ref})
419
419
420 merge_response = self.target_repo.merge(
420 merge_response = self.target_repo.merge(
421 self.repo_id, self.workspace_id, self.target_ref,
421 self.repo_id, self.workspace_id, self.target_ref,
422 self.source_repo, source_ref,
422 self.source_repo, source_ref,
423 dry_run=True)
423 dry_run=True)
424
424
425 assert merge_response == expected_merge_response
425 assert merge_response == expected_merge_response
426
426
427 def test_merge_raises_exception(self, vcsbackend):
427 def test_merge_raises_exception(self, vcsbackend):
428 self.prepare_for_success(vcsbackend)
428 self.prepare_for_success(vcsbackend)
429 expected_merge_response = MergeResponse(
429 expected_merge_response = MergeResponse(
430 False, False, None, MergeFailureReason.UNKNOWN,
430 False, False, None, MergeFailureReason.UNKNOWN,
431 metadata={'exception': 'ErrorForTest'})
431 metadata={'exception': 'ErrorForTest'})
432
432
433 with mock.patch.object(self.target_repo, '_merge_repo',
433 with mock.patch.object(self.target_repo, '_merge_repo',
434 side_effect=RepositoryError()):
434 side_effect=RepositoryError()):
435 merge_response = self.target_repo.merge(
435 merge_response = self.target_repo.merge(
436 self.repo_id, self.workspace_id, self.target_ref,
436 self.repo_id, self.workspace_id, self.target_ref,
437 self.source_repo, self.source_ref,
437 self.source_repo, self.source_ref,
438 dry_run=True)
438 dry_run=True)
439
439
440 assert merge_response == expected_merge_response
440 assert merge_response == expected_merge_response
441
441
442 def test_merge_invalid_user_name(self, vcsbackend):
442 def test_merge_invalid_user_name(self, vcsbackend):
443 repo = vcsbackend.create_repo(number_of_commits=1)
443 repo = vcsbackend.create_repo(number_of_commits=1)
444 ref = Reference('branch', 'master', 'not_used')
444 ref = Reference('branch', 'master', 'not_used')
445 workspace_id = 'test-errors-in-merge'
445 workspace_id = 'test-errors-in-merge'
446 repo_id = repo_id_generator(workspace_id)
446 repo_id = repo_id_generator(workspace_id)
447 with pytest.raises(ValueError):
447 with pytest.raises(ValueError):
448 repo.merge(repo_id, workspace_id, ref, self, ref)
448 repo.merge(repo_id, workspace_id, ref, self, ref)
449
449
450 def test_merge_invalid_user_email(self, vcsbackend):
450 def test_merge_invalid_user_email(self, vcsbackend):
451 repo = vcsbackend.create_repo(number_of_commits=1)
451 repo = vcsbackend.create_repo(number_of_commits=1)
452 ref = Reference('branch', 'master', 'not_used')
452 ref = Reference('branch', 'master', 'not_used')
453 workspace_id = 'test-errors-in-merge'
453 workspace_id = 'test-errors-in-merge'
454 repo_id = repo_id_generator(workspace_id)
454 repo_id = repo_id_generator(workspace_id)
455 with pytest.raises(ValueError):
455 with pytest.raises(ValueError):
456 repo.merge(
456 repo.merge(
457 repo_id, workspace_id, ref, self, ref, 'user name')
457 repo_id, workspace_id, ref, self, ref, 'user name')
458
458
459 def test_merge_invalid_message(self, vcsbackend):
459 def test_merge_invalid_message(self, vcsbackend):
460 repo = vcsbackend.create_repo(number_of_commits=1)
460 repo = vcsbackend.create_repo(number_of_commits=1)
461 ref = Reference('branch', 'master', 'not_used')
461 ref = Reference('branch', 'master', 'not_used')
462 workspace_id = 'test-errors-in-merge'
462 workspace_id = 'test-errors-in-merge'
463 repo_id = repo_id_generator(workspace_id)
463 repo_id = repo_id_generator(workspace_id)
464 with pytest.raises(ValueError):
464 with pytest.raises(ValueError):
465 repo.merge(
465 repo.merge(
466 repo_id, workspace_id, ref, self, ref,
466 repo_id, workspace_id, ref, self, ref,
467 'user name', 'user@email.com')
467 'user name', 'user@email.com')
468
468
469
469
470 @pytest.mark.usefixtures("vcs_repository_support")
470 @pytest.mark.usefixtures("vcs_repository_support")
471 class TestRepositoryStrip(BackendTestMixin):
471 class TestRepositoryStrip(BackendTestMixin):
472 recreate_repo_per_test = True
472 recreate_repo_per_test = True
473
473
474 @classmethod
474 @classmethod
475 def _get_commits(cls):
475 def _get_commits(cls):
476 commits = [
476 commits = [
477 {
477 {
478 'message': 'Initial commit',
478 'message': 'Initial commit',
479 'author': 'Joe Doe <joe.doe@example.com>',
479 'author': 'Joe Doe <joe.doe@example.com>',
480 'date': datetime.datetime(2010, 1, 1, 20),
480 'date': datetime.datetime(2010, 1, 1, 20),
481 'branch': 'master',
481 'branch': 'master',
482 'added': [
482 'added': [
483 FileNode('foobar', content='foobar'),
483 FileNode('foobar', content='foobar'),
484 FileNode('foobar2', content='foobar2'),
484 FileNode('foobar2', content='foobar2'),
485 ],
485 ],
486 },
486 },
487 ]
487 ]
488 for x in xrange(10):
488 for x in xrange(10):
489 commit_data = {
489 commit_data = {
490 'message': 'Changed foobar - commit%s' % x,
490 'message': 'Changed foobar - commit%s' % x,
491 'author': 'Jane Doe <jane.doe@example.com>',
491 'author': 'Jane Doe <jane.doe@example.com>',
492 'date': datetime.datetime(2010, 1, 1, 21, x),
492 'date': datetime.datetime(2010, 1, 1, 21, x),
493 'branch': 'master',
493 'branch': 'master',
494 'changed': [
494 'changed': [
495 FileNode('foobar', 'FOOBAR - %s' % x),
495 FileNode('foobar', 'FOOBAR - %s' % x),
496 ],
496 ],
497 }
497 }
498 commits.append(commit_data)
498 commits.append(commit_data)
499 return commits
499 return commits
500
500
501 @pytest.mark.backends("git", "hg")
501 @pytest.mark.backends("git", "hg")
502 def test_strip_commit(self):
502 def test_strip_commit(self):
503 tip = self.repo.get_commit()
503 tip = self.repo.get_commit()
504 assert tip.idx == 10
504 assert tip.idx == 10
505 self.repo.strip(tip.raw_id, self.repo.DEFAULT_BRANCH_NAME)
505 self.repo.strip(tip.raw_id, self.repo.DEFAULT_BRANCH_NAME)
506
506
507 tip = self.repo.get_commit()
507 tip = self.repo.get_commit()
508 assert tip.idx == 9
508 assert tip.idx == 9
509
509
510 @pytest.mark.backends("git", "hg")
510 @pytest.mark.backends("git", "hg")
511 def test_strip_multiple_commits(self):
511 def test_strip_multiple_commits(self):
512 tip = self.repo.get_commit()
512 tip = self.repo.get_commit()
513 assert tip.idx == 10
513 assert tip.idx == 10
514
514
515 old = self.repo.get_commit(commit_idx=5)
515 old = self.repo.get_commit(commit_idx=5)
516 self.repo.strip(old.raw_id, self.repo.DEFAULT_BRANCH_NAME)
516 self.repo.strip(old.raw_id, self.repo.DEFAULT_BRANCH_NAME)
517
517
518 tip = self.repo.get_commit()
518 tip = self.repo.get_commit()
519 assert tip.idx == 4
519 assert tip.idx == 4
520
520
521
521
522 @pytest.mark.backends('hg', 'git')
522 @pytest.mark.backends('hg', 'git')
523 class TestRepositoryPull(object):
523 class TestRepositoryPull(object):
524
524
525 def test_pull(self, vcsbackend):
525 def test_pull(self, vcsbackend):
526 source_repo = vcsbackend.repo
526 source_repo = vcsbackend.repo
527 target_repo = vcsbackend.create_repo()
527 target_repo = vcsbackend.create_repo()
528 assert len(source_repo.commit_ids) > len(target_repo.commit_ids)
528 assert len(source_repo.commit_ids) > len(target_repo.commit_ids)
529
529
530 target_repo.pull(source_repo.path)
530 target_repo.pull(source_repo.path)
531 # Note: Get a fresh instance, avoids caching trouble
531 # Note: Get a fresh instance, avoids caching trouble
532 target_repo = vcsbackend.backend(target_repo.path)
532 target_repo = vcsbackend.backend(target_repo.path)
533 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
533 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
534
534
535 def test_pull_wrong_path(self, vcsbackend):
535 def test_pull_wrong_path(self, vcsbackend):
536 target_repo = vcsbackend.create_repo()
536 target_repo = vcsbackend.create_repo()
537 with pytest.raises(RepositoryError):
537 with pytest.raises(RepositoryError):
538 target_repo.pull(target_repo.path + "wrong")
538 target_repo.pull(target_repo.path + "wrong")
539
539
540 def test_pull_specific_commits(self, vcsbackend):
540 def test_pull_specific_commits(self, vcsbackend):
541 source_repo = vcsbackend.repo
541 source_repo = vcsbackend.repo
542 target_repo = vcsbackend.create_repo()
542 target_repo = vcsbackend.create_repo()
543
543
544 second_commit = source_repo[1].raw_id
544 second_commit = source_repo[1].raw_id
545 if vcsbackend.alias == 'git':
545 if vcsbackend.alias == 'git':
546 second_commit_ref = 'refs/test-refs/a'
546 second_commit_ref = 'refs/test-refs/a'
547 source_repo.set_refs(second_commit_ref, second_commit)
547 source_repo.set_refs(second_commit_ref, second_commit)
548
548
549 target_repo.pull(source_repo.path, commit_ids=[second_commit])
549 target_repo.pull(source_repo.path, commit_ids=[second_commit])
550 target_repo = vcsbackend.backend(target_repo.path)
550 target_repo = vcsbackend.backend(target_repo.path)
551 assert 2 == len(target_repo.commit_ids)
551 assert 2 == len(target_repo.commit_ids)
552 assert second_commit == target_repo.get_commit().raw_id
552 assert second_commit == target_repo.get_commit().raw_id
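# A short sketch of the selective pull pattern covered by the tests above,
# assuming `source` and `target` are backend repository instances of the same
# alias; the ref name used for git is illustrative only.
def pull_single_commit(source, target, commit_id, alias):
    if alias == 'git':
        # git needs the commit to be reachable from a ref before it can be pulled
        source.set_refs('refs/test-refs/example', commit_id)
    target.pull(source.path, commit_ids=[commit_id])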
@@ -1,67 +1,67 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Test suite for push/pull operations on specially modified INI files
22 Test suite for push/pull operations on specially modified INI files
23
23
24 .. important::
24 .. important::
25
25
26 You must have git >= 1.8.5 for the tests to work correctly. Since commit 68b939b,
26 You must have git >= 1.8.5 for the tests to work correctly. Since commit 68b939b,
27 git redirects some of its output to stderr instead of stdout.
27 git redirects some of its output to stderr instead of stdout.
28 """
28 """
29
29
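# An illustrative sketch of how the git >= 1.8.5 requirement noted above could
# be checked before running these tests; the helper below is an assumption for
# demonstration and is not part of the test suite.
import subprocess

def git_version_tuple():
    # `git --version` prints e.g. "git version 2.20.1"
    output = subprocess.check_output(['git', '--version']).decode('utf-8')
    version = output.split()[2]
    return tuple(int(part) for part in version.split('.')[:3] if part.isdigit())

# Usage sketch: skip the whole module when git is too old, e.g.
# if git_version_tuple() < (1, 8, 5):
#     pytest.skip('git >= 1.8.5 is required', allow_module_level=True)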
30 import os
30 import os
31 import pytest
31 import pytest
32
32
33 from rhodecode.lib.vcs.backends.git.repository import GitRepository
33 from rhodecode.lib.vcs.backends.git.repository import GitRepository
34 from rhodecode.lib.vcs.nodes import FileNode
34 from rhodecode.lib.vcs.nodes import FileNode
35 from rhodecode.tests import GIT_REPO
35 from rhodecode.tests import GIT_REPO
36 from rhodecode.tests.vcs_operations import Command
36 from rhodecode.tests.vcs_operations import Command
37 from .test_vcs_operations import _check_proper_clone, _check_proper_git_push
37 from .test_vcs_operations import _check_proper_clone, _check_proper_git_push
38
38
39
39
40 def test_git_clone_with_small_push_buffer(backend_git, rc_web_server, tmpdir):
40 def test_git_clone_with_small_push_buffer(backend_git, rc_web_server, tmpdir):
41 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
41 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
42 cmd = Command('/tmp')
42 cmd = Command('/tmp')
43 stdout, stderr = cmd.execute(
43 stdout, stderr = cmd.execute(
44 'git -c http.postBuffer=1024 clone', clone_url, tmpdir.strpath)
44 'git -c http.postBuffer=1024 clone', clone_url, tmpdir.strpath)
45 _check_proper_clone(stdout, stderr, 'git')
45 _check_proper_clone(stdout, stderr, 'git')
46 cmd.assert_returncode_success()
46 cmd.assert_returncode_success()
47
47
48
48
49 def test_git_push_with_small_push_buffer(backend_git, rc_web_server, tmpdir):
49 def test_git_push_with_small_push_buffer(backend_git, rc_web_server, tmpdir):
50 empty_repo = backend_git.create_repo()
50 empty_repo = backend_git.create_repo()
51
51
52 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
52 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
53
53
54 cmd = Command(tmpdir.strpath)
54 cmd = Command(tmpdir.strpath)
55 cmd.execute('git clone', clone_url)
55 cmd.execute('git clone', clone_url)
56
56
57 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
57 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
58 repo.in_memory_commit.add(FileNode('readme.md', content='## Hello'))
58 repo.in_memory_commit.add(FileNode('readme.md', content='## Hello'))
59 repo.in_memory_commit.commit(
59 repo.in_memory_commit.commit(
60 message='Commit on branch Master',
60 message='Commit on branch Master',
61 author='Automatic test',
61 author='Automatic test <automatic@rhodecode.com>',
62 branch='master')
62 branch='master')
63
63
64 repo_cmd = Command(repo.path)
64 repo_cmd = Command(repo.path)
65 stdout, stderr = repo_cmd.execute(
65 stdout, stderr = repo_cmd.execute(
66 'git -c http.postBuffer=1024 push --verbose origin master')
66 'git -c http.postBuffer=1024 push --verbose origin master')
67 _check_proper_git_push(stdout, stderr, branch='master')
67 _check_proper_git_push(stdout, stderr, branch='master')
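# A hedged sketch of validating the 'Full Name <email>' author format used by
# the commits in these tests; the regex below is illustrative and is not the
# backends' own validation.
import re

AUTHOR_RE = re.compile(r'^.+ <[^<>@\s]+@[^<>@\s]+>$')

def is_proper_author(author):
    # e.g. is_proper_author('Automatic test <automatic@rhodecode.com>') -> True
    return bool(AUTHOR_RE.match(author))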
@@ -1,282 +1,282 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22
23 import pytest
23 import pytest
24
24
25 from rhodecode.lib.vcs.backends.git.repository import GitRepository
25 from rhodecode.lib.vcs.backends.git.repository import GitRepository
26 from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository
26 from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository
27 from rhodecode.lib.vcs.nodes import FileNode
27 from rhodecode.lib.vcs.nodes import FileNode
28 from rhodecode.model.db import Repository
28 from rhodecode.model.db import Repository
29 from rhodecode.model.meta import Session
29 from rhodecode.model.meta import Session
30 from rhodecode.tests import GIT_REPO, HG_REPO
30 from rhodecode.tests import GIT_REPO, HG_REPO
31
31
32 from rhodecode.tests.vcs_operations import (
32 from rhodecode.tests.vcs_operations import (
33 Command, _check_proper_clone, _check_proper_git_push, _check_proper_hg_push,
33 Command, _check_proper_clone, _check_proper_git_push, _check_proper_hg_push,
34 _add_files_and_push)
34 _add_files_and_push)
35
35
36
36
37 @pytest.mark.usefixtures("disable_locking")
37 @pytest.mark.usefixtures("disable_locking")
38 class TestVCSOperationsSpecial(object):
38 class TestVCSOperationsSpecial(object):
39
39
40 def test_git_sets_default_branch_if_not_master(
40 def test_git_sets_default_branch_if_not_master(
41 self, backend_git, tmpdir, rc_web_server):
41 self, backend_git, tmpdir, rc_web_server):
42 empty_repo = backend_git.create_repo()
42 empty_repo = backend_git.create_repo()
43 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
43 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
44
44
45 cmd = Command(tmpdir.strpath)
45 cmd = Command(tmpdir.strpath)
46 cmd.execute('git clone', clone_url)
46 cmd.execute('git clone', clone_url)
47
47
48 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
48 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
49 repo._checkout('test', create=True)
49 repo._checkout('test', create=True)
50 repo.in_memory_commit.add(FileNode('file', content=''))
50 repo.in_memory_commit.add(FileNode('file', content=''))
51 repo.in_memory_commit.commit(
51 repo.in_memory_commit.commit(
52 message='Commit on branch test',
52 message='Commit on branch test',
53 author='Automatic test',
53 author='Automatic test <automatic@rhodecode.com>',
54 branch='test')
54 branch='test')
55
55
56 repo_cmd = Command(repo.path)
56 repo_cmd = Command(repo.path)
57 stdout, stderr = repo_cmd.execute('git push --verbose origin test')
57 stdout, stderr = repo_cmd.execute('git push --verbose origin test')
58 _check_proper_git_push(
58 _check_proper_git_push(
59 stdout, stderr, branch='test', should_set_default_branch=True)
59 stdout, stderr, branch='test', should_set_default_branch=True)
60
60
61 stdout, stderr = cmd.execute(
61 stdout, stderr = cmd.execute(
62 'git clone', clone_url, empty_repo.repo_name + '-clone')
62 'git clone', clone_url, empty_repo.repo_name + '-clone')
63 _check_proper_clone(stdout, stderr, 'git')
63 _check_proper_clone(stdout, stderr, 'git')
64
64
65 # Doing an explicit commit in order to get latest user logs on MySQL
65 # Doing an explicit commit in order to get latest user logs on MySQL
66 Session().commit()
66 Session().commit()
67
67
68 def test_git_fetches_from_remote_repository_with_annotated_tags(
68 def test_git_fetches_from_remote_repository_with_annotated_tags(
69 self, backend_git, rc_web_server):
69 self, backend_git, rc_web_server):
70 # Note: This is a test specific to the git backend. It checks the
70 # Note: This is a test specific to the git backend. It checks the
71 # integration of fetching from a remote repository which contains
71 # integration of fetching from a remote repository which contains
72 # annotated tags.
72 # annotated tags.
73
73
74 # Dulwich shows this specific behavior only when
74 # Dulwich shows this specific behavior only when
75 # operating against a remote repository.
75 # operating against a remote repository.
76 source_repo = backend_git['annotated-tag']
76 source_repo = backend_git['annotated-tag']
77 target_vcs_repo = backend_git.create_repo().scm_instance()
77 target_vcs_repo = backend_git.create_repo().scm_instance()
78 target_vcs_repo.fetch(rc_web_server.repo_clone_url(source_repo.repo_name))
78 target_vcs_repo.fetch(rc_web_server.repo_clone_url(source_repo.repo_name))
79
79
80 def test_git_push_shows_pull_request_refs(self, backend_git, rc_web_server, tmpdir):
80 def test_git_push_shows_pull_request_refs(self, backend_git, rc_web_server, tmpdir):
81 """
81 """
82 test if remote info about refs is visible
82 test if remote info about refs is visible
83 """
83 """
84 empty_repo = backend_git.create_repo()
84 empty_repo = backend_git.create_repo()
85
85
86 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
86 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
87
87
88 cmd = Command(tmpdir.strpath)
88 cmd = Command(tmpdir.strpath)
89 cmd.execute('git clone', clone_url)
89 cmd.execute('git clone', clone_url)
90
90
91 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
91 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
92 repo.in_memory_commit.add(FileNode('readme.md', content='## Hello'))
92 repo.in_memory_commit.add(FileNode('readme.md', content='## Hello'))
93 repo.in_memory_commit.commit(
93 repo.in_memory_commit.commit(
94 message='Commit on branch Master',
94 message='Commit on branch Master',
95 author='Automatic test',
95 author='Automatic test <automatic@rhodecode.com>',
96 branch='master')
96 branch='master')
97
97
98 repo_cmd = Command(repo.path)
98 repo_cmd = Command(repo.path)
99 stdout, stderr = repo_cmd.execute('git push --verbose origin master')
99 stdout, stderr = repo_cmd.execute('git push --verbose origin master')
100 _check_proper_git_push(stdout, stderr, branch='master')
100 _check_proper_git_push(stdout, stderr, branch='master')
101
101
102 ref = '{}/{}/pull-request/new?branch=master'.format(
102 ref = '{}/{}/pull-request/new?branch=master'.format(
103 rc_web_server.host_url(), empty_repo.repo_name)
103 rc_web_server.host_url(), empty_repo.repo_name)
104 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr
104 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr
105 assert 'remote: RhodeCode: push completed' in stderr
105 assert 'remote: RhodeCode: push completed' in stderr
106
106
107 # push on the same branch
107 # push on the same branch
108 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
108 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
109 repo.in_memory_commit.add(FileNode('setup.py', content='print\n'))
109 repo.in_memory_commit.add(FileNode('setup.py', content='print\n'))
110 repo.in_memory_commit.commit(
110 repo.in_memory_commit.commit(
111 message='Commit2 on branch Master',
111 message='Commit2 on branch Master',
112 author='Automatic test2',
112 author='Automatic test2 <automatic@rhodecode.com>',
113 branch='master')
113 branch='master')
114
114
115 repo_cmd = Command(repo.path)
115 repo_cmd = Command(repo.path)
116 stdout, stderr = repo_cmd.execute('git push --verbose origin master')
116 stdout, stderr = repo_cmd.execute('git push --verbose origin master')
117 _check_proper_git_push(stdout, stderr, branch='master')
117 _check_proper_git_push(stdout, stderr, branch='master')
118
118
119 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr
119 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr
120 assert 'remote: RhodeCode: push completed' in stderr
120 assert 'remote: RhodeCode: push completed' in stderr
121
121
122 # new Branch
122 # new Branch
123 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
123 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
124 repo.in_memory_commit.add(FileNode('feature1.py', content='## Hello world'))
124 repo.in_memory_commit.add(FileNode('feature1.py', content='## Hello world'))
125 repo.in_memory_commit.commit(
125 repo.in_memory_commit.commit(
126 message='Commit on branch feature',
126 message='Commit on branch feature',
127 author='Automatic test',
127 author='Automatic test <automatic@rhodecode.com>',
128 branch='feature')
128 branch='feature')
129
129
130 repo_cmd = Command(repo.path)
130 repo_cmd = Command(repo.path)
131 stdout, stderr = repo_cmd.execute('git push --verbose origin feature')
131 stdout, stderr = repo_cmd.execute('git push --verbose origin feature')
132 _check_proper_git_push(stdout, stderr, branch='feature')
132 _check_proper_git_push(stdout, stderr, branch='feature')
133
133
134 ref = '{}/{}/pull-request/new?branch=feature'.format(
134 ref = '{}/{}/pull-request/new?branch=feature'.format(
135 rc_web_server.host_url(), empty_repo.repo_name)
135 rc_web_server.host_url(), empty_repo.repo_name)
136 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr
136 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr
137 assert 'remote: RhodeCode: push completed' in stderr
137 assert 'remote: RhodeCode: push completed' in stderr
138
138
139 def test_hg_push_shows_pull_request_refs(self, backend_hg, rc_web_server, tmpdir):
139 def test_hg_push_shows_pull_request_refs(self, backend_hg, rc_web_server, tmpdir):
140 empty_repo = backend_hg.create_repo()
140 empty_repo = backend_hg.create_repo()
141
141
142 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
142 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
143
143
144 cmd = Command(tmpdir.strpath)
144 cmd = Command(tmpdir.strpath)
145 cmd.execute('hg clone', clone_url)
145 cmd.execute('hg clone', clone_url)
146
146
147 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
147 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
148 repo.in_memory_commit.add(FileNode(u'readme.md', content=u'## Hello'))
148 repo.in_memory_commit.add(FileNode(u'readme.md', content=u'## Hello'))
149 repo.in_memory_commit.commit(
149 repo.in_memory_commit.commit(
150 message=u'Commit on branch default',
150 message=u'Commit on branch default',
151 author=u'Automatic test',
151 author=u'Automatic test',
152 branch='default')
152 branch='default')
153
153
154 repo_cmd = Command(repo.path)
154 repo_cmd = Command(repo.path)
155 repo_cmd.execute('hg checkout default')
155 repo_cmd.execute('hg checkout default')
156
156
157 stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url)
157 stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url)
158 _check_proper_hg_push(stdout, stderr, branch='default')
158 _check_proper_hg_push(stdout, stderr, branch='default')
159
159
160 ref = '{}/{}/pull-request/new?branch=default'.format(
160 ref = '{}/{}/pull-request/new?branch=default'.format(
161 rc_web_server.host_url(), empty_repo.repo_name)
161 rc_web_server.host_url(), empty_repo.repo_name)
162 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
162 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
163 assert 'remote: RhodeCode: push completed' in stdout
163 assert 'remote: RhodeCode: push completed' in stdout
164
164
165 # push on the same branch
165 # push on the same branch
166 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
166 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
167 repo.in_memory_commit.add(FileNode(u'setup.py', content=u'print\n'))
167 repo.in_memory_commit.add(FileNode(u'setup.py', content=u'print\n'))
168 repo.in_memory_commit.commit(
168 repo.in_memory_commit.commit(
169 message=u'Commit2 on branch default',
169 message=u'Commit2 on branch default',
170 author=u'Automatic test2',
170 author=u'Automatic test2',
171 branch=u'default')
171 branch=u'default')
172
172
173 repo_cmd = Command(repo.path)
173 repo_cmd = Command(repo.path)
174 repo_cmd.execute('hg checkout default')
174 repo_cmd.execute('hg checkout default')
175
175
176 stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url)
176 stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url)
177 _check_proper_hg_push(stdout, stderr, branch='default')
177 _check_proper_hg_push(stdout, stderr, branch='default')
178
178
179 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
179 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
180 assert 'remote: RhodeCode: push completed' in stdout
180 assert 'remote: RhodeCode: push completed' in stdout
181
181
182 # new Branch
182 # new Branch
183 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
183 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
184 repo.in_memory_commit.add(FileNode(u'feature1.py', content=u'## Hello world'))
184 repo.in_memory_commit.add(FileNode(u'feature1.py', content=u'## Hello world'))
185 repo.in_memory_commit.commit(
185 repo.in_memory_commit.commit(
186 message=u'Commit on branch feature',
186 message=u'Commit on branch feature',
187 author=u'Automatic test',
187 author=u'Automatic test',
188 branch=u'feature')
188 branch=u'feature')
189
189
190 repo_cmd = Command(repo.path)
190 repo_cmd = Command(repo.path)
191 repo_cmd.execute('hg checkout feature')
191 repo_cmd.execute('hg checkout feature')
192
192
193 stdout, stderr = repo_cmd.execute('hg push --new-branch --verbose', clone_url)
193 stdout, stderr = repo_cmd.execute('hg push --new-branch --verbose', clone_url)
194 _check_proper_hg_push(stdout, stderr, branch='feature')
194 _check_proper_hg_push(stdout, stderr, branch='feature')
195
195
196 ref = '{}/{}/pull-request/new?branch=feature'.format(
196 ref = '{}/{}/pull-request/new?branch=feature'.format(
197 rc_web_server.host_url(), empty_repo.repo_name)
197 rc_web_server.host_url(), empty_repo.repo_name)
198 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
198 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
199 assert 'remote: RhodeCode: push completed' in stdout
199 assert 'remote: RhodeCode: push completed' in stdout
200
200
201 def test_hg_push_shows_pull_request_refs_book(self, backend_hg, rc_web_server, tmpdir):
201 def test_hg_push_shows_pull_request_refs_book(self, backend_hg, rc_web_server, tmpdir):
202 empty_repo = backend_hg.create_repo()
202 empty_repo = backend_hg.create_repo()
203
203
204 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
204 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
205
205
206 cmd = Command(tmpdir.strpath)
206 cmd = Command(tmpdir.strpath)
207 cmd.execute('hg clone', clone_url)
207 cmd.execute('hg clone', clone_url)
208
208
209 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
209 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
210 repo.in_memory_commit.add(FileNode(u'readme.md', content=u'## Hello'))
210 repo.in_memory_commit.add(FileNode(u'readme.md', content=u'## Hello'))
211 repo.in_memory_commit.commit(
211 repo.in_memory_commit.commit(
212 message=u'Commit on branch default',
212 message=u'Commit on branch default',
213 author=u'Automatic test',
213 author=u'Automatic test',
214 branch='default')
214 branch='default')
215
215
216 repo_cmd = Command(repo.path)
216 repo_cmd = Command(repo.path)
217 repo_cmd.execute('hg checkout default')
217 repo_cmd.execute('hg checkout default')
218
218
219 stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url)
219 stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url)
220 _check_proper_hg_push(stdout, stderr, branch='default')
220 _check_proper_hg_push(stdout, stderr, branch='default')
221
221
222 ref = '{}/{}/pull-request/new?branch=default'.format(
222 ref = '{}/{}/pull-request/new?branch=default'.format(
223 rc_web_server.host_url(), empty_repo.repo_name)
223 rc_web_server.host_url(), empty_repo.repo_name)
224 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
224 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
225 assert 'remote: RhodeCode: push completed' in stdout
225 assert 'remote: RhodeCode: push completed' in stdout
226
226
227 # add bookmark
227 # add bookmark
228 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
228 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
229 repo.in_memory_commit.add(FileNode(u'setup.py', content=u'print\n'))
229 repo.in_memory_commit.add(FileNode(u'setup.py', content=u'print\n'))
230 repo.in_memory_commit.commit(
230 repo.in_memory_commit.commit(
231 message=u'Commit2 on branch default',
231 message=u'Commit2 on branch default',
232 author=u'Automatic test2',
232 author=u'Automatic test2',
233 branch=u'default')
233 branch=u'default')
234
234
235 repo_cmd = Command(repo.path)
235 repo_cmd = Command(repo.path)
236 repo_cmd.execute('hg checkout default')
236 repo_cmd.execute('hg checkout default')
237 repo_cmd.execute('hg bookmark feature2')
237 repo_cmd.execute('hg bookmark feature2')
238 stdout, stderr = repo_cmd.execute('hg push -B feature2 --verbose', clone_url)
238 stdout, stderr = repo_cmd.execute('hg push -B feature2 --verbose', clone_url)
239 _check_proper_hg_push(stdout, stderr, branch='default')
239 _check_proper_hg_push(stdout, stderr, branch='default')
240
240
241 ref = '{}/{}/pull-request/new?branch=default'.format(
241 ref = '{}/{}/pull-request/new?branch=default'.format(
242 rc_web_server.host_url(), empty_repo.repo_name)
242 rc_web_server.host_url(), empty_repo.repo_name)
243 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
243 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
244 ref = '{}/{}/pull-request/new?bookmark=feature2'.format(
244 ref = '{}/{}/pull-request/new?bookmark=feature2'.format(
245 rc_web_server.host_url(), empty_repo.repo_name)
245 rc_web_server.host_url(), empty_repo.repo_name)
246 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
246 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
247 assert 'remote: RhodeCode: push completed' in stdout
247 assert 'remote: RhodeCode: push completed' in stdout
248 assert 'exporting bookmark feature2' in stdout
248 assert 'exporting bookmark feature2' in stdout
249
249
250 def test_push_is_forbidden_on_archived_repo_hg(self, backend_hg, rc_web_server, tmpdir):
250 def test_push_is_forbidden_on_archived_repo_hg(self, backend_hg, rc_web_server, tmpdir):
251 empty_repo = backend_hg.create_repo()
251 empty_repo = backend_hg.create_repo()
252 repo_name = empty_repo.repo_name
252 repo_name = empty_repo.repo_name
253
253
254 repo = Repository.get_by_repo_name(repo_name)
254 repo = Repository.get_by_repo_name(repo_name)
255 repo.archived = True
255 repo.archived = True
256 Session().commit()
256 Session().commit()
257
257
258 clone_url = rc_web_server.repo_clone_url(repo_name)
258 clone_url = rc_web_server.repo_clone_url(repo_name)
259 stdout, stderr = Command('/tmp').execute(
259 stdout, stderr = Command('/tmp').execute(
260 'hg clone', clone_url, tmpdir.strpath)
260 'hg clone', clone_url, tmpdir.strpath)
261
261
262 stdout, stderr = _add_files_and_push(
262 stdout, stderr = _add_files_and_push(
263 'hg', tmpdir.strpath, clone_url=clone_url)
263 'hg', tmpdir.strpath, clone_url=clone_url)
264
264
265 assert 'abort: HTTP Error 403: Forbidden' in stderr
265 assert 'abort: HTTP Error 403: Forbidden' in stderr
266
266
267 def test_push_is_forbidden_on_archived_repo_git(self, backend_git, rc_web_server, tmpdir):
267 def test_push_is_forbidden_on_archived_repo_git(self, backend_git, rc_web_server, tmpdir):
268 empty_repo = backend_git.create_repo()
268 empty_repo = backend_git.create_repo()
269 repo_name = empty_repo.repo_name
269 repo_name = empty_repo.repo_name
270
270
271 repo = Repository.get_by_repo_name(repo_name)
271 repo = Repository.get_by_repo_name(repo_name)
272 repo.archived = True
272 repo.archived = True
273 Session().commit()
273 Session().commit()
274
274
275 clone_url = rc_web_server.repo_clone_url(repo_name)
275 clone_url = rc_web_server.repo_clone_url(repo_name)
276 stdout, stderr = Command('/tmp').execute(
276 stdout, stderr = Command('/tmp').execute(
277 'git clone', clone_url, tmpdir.strpath)
277 'git clone', clone_url, tmpdir.strpath)
278
278
279 stdout, stderr = _add_files_and_push(
279 stdout, stderr = _add_files_and_push(
280 'git', tmpdir.strpath, clone_url=clone_url)
280 'git', tmpdir.strpath, clone_url=clone_url)
281
281
282 assert "The requested URL returned error: 403" in stderr
282 assert "The requested URL returned error: 403" in stderr