##// END OF EJS Templates
fix(tests): fixed the creation of non-linear commits in tests...
super-admin -
r5198:919dd05c default
parent child Browse files
Show More
@@ -1,367 +1,367 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import pytest
21 21
22 22 from rhodecode.model.db import User
23 23 from rhodecode.model.pull_request import PullRequestModel
24 24 from rhodecode.model.repo import RepoModel
25 25 from rhodecode.model.user import UserModel
26 26 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
27 27 from rhodecode.api.tests.utils import build_data, api_call, assert_error
28 28
29 29
@pytest.mark.usefixtures("testuser_api", "app")
class TestCreatePullRequestApi(object):
    """API tests for the `create_pull_request` JSON-RPC endpoint.

    Each test builds a small master repository with a non-linear history
    (see :meth:`_prepare_data`) and clones source/target repos from it.
    """

    # Cleanup callbacks registered by individual tests; executed and
    # cleared after every test method.
    finalizers = []

    def teardown_method(self, method):
        # Run any registered cleanup callbacks, then reset the list so
        # state does not leak between tests.
        if self.finalizers:
            for finalizer in self.finalizers:
                finalizer()
            self.finalizers = []

    def test_create_with_wrong_data(self):
        # Dropping any single required key must produce a "missing arg" error.
        required_data = {
            'source_repo': 'tests/source_repo',
            'target_repo': 'tests/target_repo',
            'source_ref': 'branch:default:initial',
            'target_ref': 'branch:default:new-feature',
        }
        for key in required_data:
            data = required_data.copy()
            data.pop(key)
            id_, params = build_data(
                self.apikey, 'create_pull_request', **data)
            response = api_call(self.app, params)

            expected = 'Missing non optional `{}` arg in JSON DATA'.format(key)
            assert_error(id_, expected, given=response.body)

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize('source_ref', [
        'bookmarg:default:initial'
    ])
    def test_create_with_wrong_refs_data(self, backend, source_ref):
        # An unknown ref type (note the intentional typo "bookmarg") is rejected.
        data = self._prepare_data(backend)
        data['source_ref'] = source_ref

        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)

        response = api_call(self.app, params)

        expected = "Ref `{}` type is not allowed. " \
                   "Only:['bookmark', 'book', 'tag', 'branch'] " \
                   "are possible.".format(source_ref)
        assert_error(id_, expected, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_create_with_correct_data(self, backend):
        data = self._prepare_data(backend)
        RepoModel().revoke_user_permission(
            self.source.repo_name, User.DEFAULT_USER)
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['error'] is None
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)
        assert pull_request.title == data['title']
        assert pull_request.description == data['description']
        assert pull_request.source_ref == data['source_ref']
        assert pull_request.target_ref == data['target_ref']
        assert pull_request.source_repo.repo_name == data['source_repo']
        assert pull_request.target_repo.repo_name == data['target_repo']
        assert pull_request.revisions == [self.commit_ids['change']]
        assert len(pull_request.reviewers) == 1

    @pytest.mark.backends("git", "hg")
    def test_create_with_empty_description(self, backend):
        # `description` is optional and defaults to an empty string.
        data = self._prepare_data(backend)
        data.pop('description')
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['error'] is None
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)
        assert pull_request.description == ''

    @pytest.mark.backends("git", "hg")
    def test_create_with_empty_title(self, backend):
        # Without a title, one is generated from source/target repos + ref.
        data = self._prepare_data(backend)
        data.pop('title')
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        result = response.json
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)
        data['ref'] = backend.default_branch_name
        title = '{source_repo}#{ref} to {target_repo}'.format(**data)
        assert pull_request.title == title

    @pytest.mark.backends("git", "hg")
    def test_create_with_reviewers_specified_by_names(
            self, backend, no_notifications):
        data = self._prepare_data(backend)
        reviewers = [
            {'username': TEST_USER_REGULAR_LOGIN,
             'reasons': ['{} added manually'.format(TEST_USER_REGULAR_LOGIN)]},
            {'username': TEST_USER_ADMIN_LOGIN,
             'reasons': ['{} added manually'.format(TEST_USER_ADMIN_LOGIN)],
             'mandatory': True},
        ]
        data['reviewers'] = reviewers

        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)

        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['error'] is None
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)

        actual_reviewers = []
        for rev in pull_request.reviewers:
            entry = {
                'username': rev.user.username,
                'reasons': rev.reasons,
            }
            if rev.mandatory:
                entry['mandatory'] = rev.mandatory
            actual_reviewers.append(entry)

        owner_username = pull_request.target_repo.user.username
        for spec_reviewer in reviewers[::]:
            # default reviewer will be added who is an owner of the repo
            # this get's overridden by a add owner to reviewers rule
            if spec_reviewer['username'] == owner_username:
                spec_reviewer['reasons'] = [u'Default reviewer', u'Repository owner']
                # since owner is more important, we don't inherit mandatory flag
                del spec_reviewer['mandatory']

        assert sorted(actual_reviewers, key=lambda e: e['username']) \
            == sorted(reviewers, key=lambda e: e['username'])

    @pytest.mark.backends("git", "hg")
    def test_create_with_reviewers_specified_by_ids(
            self, backend, no_notifications):
        data = self._prepare_data(backend)
        reviewers = [
            {'username': UserModel().get_by_username(
                TEST_USER_REGULAR_LOGIN).user_id,
             'reasons': ['added manually']},
            {'username': UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN).user_id,
             'reasons': ['added manually']},
        ]

        data['reviewers'] = reviewers
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)

        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['error'] is None
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)

        actual_reviewers = []
        for rev in pull_request.reviewers:
            entry = {
                'username': rev.user.user_id,
                'reasons': rev.reasons,
            }
            if rev.mandatory:
                entry['mandatory'] = rev.mandatory
            actual_reviewers.append(entry)

        owner_user_id = pull_request.target_repo.user.user_id
        for spec_reviewer in reviewers[::]:
            # default reviewer will be added who is an owner of the repo
            # this get's overridden by a add owner to reviewers rule
            if spec_reviewer['username'] == owner_user_id:
                spec_reviewer['reasons'] = [u'Default reviewer', u'Repository owner']

        assert sorted(actual_reviewers, key=lambda e: e['username']) \
            == sorted(reviewers, key=lambda e: e['username'])

    @pytest.mark.backends("git", "hg")
    def test_create_fails_when_the_reviewer_is_not_found(self, backend):
        data = self._prepare_data(backend)
        data['reviewers'] = [{'username': 'somebody'}]
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'user `somebody` does not exist'
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_cannot_create_with_reviewers_in_wrong_format(self, backend):
        # reviewers must be a list of dicts, not a comma separated string
        data = self._prepare_data(backend)
        reviewers = ','.join([TEST_USER_REGULAR_LOGIN, TEST_USER_ADMIN_LOGIN])
        data['reviewers'] = reviewers
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = {u'': '"test_regular,test_admin" is not iterable'}
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_create_with_no_commit_hashes(self, backend):
        # branch-only refs (no commit hash) get resolved to full refs.
        data = self._prepare_data(backend)
        expected_source_ref = data['source_ref']
        expected_target_ref = data['target_ref']
        data['source_ref'] = 'branch:{}'.format(backend.default_branch_name)
        data['target_ref'] = 'branch:{}'.format(backend.default_branch_name)
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)
        assert pull_request.source_ref == expected_source_ref
        assert pull_request.target_ref == expected_target_ref

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize("data_key", ["source_repo", "target_repo"])
    def test_create_fails_with_wrong_repo(self, backend, data_key):
        repo_name = 'fake-repo'
        data = self._prepare_data(backend)
        data[data_key] = repo_name
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'repository `{}` does not exist'.format(repo_name)
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"])
    def test_create_fails_with_non_existing_branch(self, backend, data_key):
        branch_name = 'test-branch'
        data = self._prepare_data(backend)
        data[data_key] = "branch:{}".format(branch_name)
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'The specified value:{type}:`{name}` ' \
                           'does not exist, or is not allowed.'.format(type='branch',
                                                                       name=branch_name)
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"])
    def test_create_fails_with_ref_in_a_wrong_format(self, backend, data_key):
        data = self._prepare_data(backend)
        ref = 'stange-ref'
        data[data_key] = ref
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = (
            'Ref `{ref}` given in a wrong format. Please check the API'
            ' documentation for more details'.format(ref=ref))
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"])
    def test_create_fails_with_non_existing_ref(self, backend, data_key):
        commit_id = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa10'
        ref = self._get_full_ref(backend, commit_id)
        data = self._prepare_data(backend)
        data[data_key] = ref
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'Ref `{}` does not exist'.format(ref)
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_create_fails_when_no_revisions(self, backend):
        # source == target head means there is nothing to merge.
        data = self._prepare_data(backend, source_head='initial')
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'no commits found for merge between specified references'
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_create_fails_when_no_permissions(self, backend):
        # Without read access the repo must appear as non-existing.
        data = self._prepare_data(backend)
        RepoModel().revoke_user_permission(
            self.source.repo_name, self.test_user)
        RepoModel().revoke_user_permission(
            self.source.repo_name, User.DEFAULT_USER)

        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'repository `{}` does not exist'.format(
            self.source.repo_name)
        assert_error(id_, expected_message, given=response.body)

    def _prepare_data(
            self, backend, source_head='change', target_head='initial'):
        """Create master/source/target repos and return request data.

        The master repo history is non-linear: `new-feature` branches off
        `initial`, so it must carry an explicit branch name to be created
        correctly on all backends.
        """
        commits = [
            {'message': 'initial'},
            {'message': 'change'},
            {'message': 'new-feature', 'parents': ['initial'], 'branch': 'feature'},
        ]
        self.commit_ids = backend.create_master_repo(commits)
        self.source = backend.create_repo(heads=[source_head])
        self.target = backend.create_repo(heads=[target_head])

        data = {
            'source_repo': self.source.repo_name,
            'target_repo': self.target.repo_name,
            'source_ref': self._get_full_ref(
                backend, self.commit_ids[source_head]),
            'target_ref': self._get_full_ref(
                backend, self.commit_ids[target_head]),
            'title': 'Test PR 1',
            'description': 'Test'
        }
        RepoModel().grant_user_permission(
            self.source.repo_name, self.TEST_USER_LOGIN, 'repository.read')
        return data

    def _get_full_ref(self, backend, commit_id):
        # Full ref format used by the API: `branch:<name>:<commit_id>`.
        return 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_id)
@@ -1,204 +1,204 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import re
20 20
21 21 import pytest
22 22
23 23 from rhodecode.apps.repository.views.repo_changelog import DEFAULT_CHANGELOG_SIZE
24 24 from rhodecode.tests import TestController
25 25 from rhodecode.tests.routes import route_path
26 26
27 27
# Matches the rendered commit hash span, capturing the numeric commit index,
# e.g. `<span class="commit_hash">r12:deadbeef</span>` -> "12".
MATCH_HASH = re.compile(r'<span class="commit_hash">r(\d+):[\da-f]+</span>')


def assert_commits_on_page(response, indexes):
    """Assert the page shows exactly `indexes` commit indexes, in order.

    :param response: HTTP response object exposing the page body as `.text`.
    :param indexes: expected list of integer commit indexes.
    """
    found_indexes = [int(idx) for idx in MATCH_HASH.findall(response.text)]
    assert found_indexes == indexes
34 34
35 35
class TestChangelogController(TestController):
    """Functional tests for the changelog / commits views."""

    def test_commits_page(self, backend):
        self.log_user()
        response = self.app.get(
            route_path('repo_commits', repo_name=backend.repo_name))

        first_idx = -1
        last_idx = -DEFAULT_CHANGELOG_SIZE
        self.assert_commit_range_on_page(response, first_idx, last_idx, backend)

    def test_changelog(self, backend):
        self.log_user()
        response = self.app.get(
            route_path('repo_changelog', repo_name=backend.repo_name))

        first_idx = -1
        last_idx = -DEFAULT_CHANGELOG_SIZE
        self.assert_commit_range_on_page(
            response, first_idx, last_idx, backend)

    @pytest.mark.backends("hg", "git")
    def test_changelog_filtered_by_branch(self, backend):
        self.log_user()
        self.app.get(
            route_path('repo_changelog', repo_name=backend.repo_name,
                       params=dict(branch=backend.default_branch_name)),
            status=200)

    @pytest.mark.backends("hg", "git")
    def test_commits_filtered_by_branch(self, backend):
        self.log_user()
        self.app.get(
            route_path('repo_commits', repo_name=backend.repo_name,
                       params=dict(branch=backend.default_branch_name)),
            status=200)

    @pytest.mark.backends("svn")
    def test_changelog_filtered_by_branch_svn(self, autologin_user, backend):
        repo = backend['svn-simple-layout']
        response = self.app.get(
            route_path('repo_changelog', repo_name=repo.repo_name,
                       params=dict(branch='trunk')),
            status=200)

        assert_commits_on_page(response, indexes=[15, 12, 7, 3, 2, 1])

    def test_commits_filtered_by_wrong_branch(self, backend):
        # An unknown branch redirects back with a flash warning.
        self.log_user()
        branch = 'wrong-branch-name'
        response = self.app.get(
            route_path('repo_commits', repo_name=backend.repo_name,
                       params=dict(branch=branch)),
            status=302)
        expected_url = '/{repo}/commits/{branch}'.format(
            repo=backend.repo_name, branch=branch)
        assert expected_url in response.location
        response = response.follow()
        expected_warning = f'Branch {branch} is not found.'
        assert expected_warning in response.text

    @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
    def test_changelog_filtered_by_branch_with_merges(self, autologin_user, backend):

        # Note: The changelog of branch "b" does not contain the commit "a1"
        # although this is a parent of commit "b1". And branch "b" has commits
        # which have a smaller index than commit "a1".
        # Branch names are set explicitly because the history is non-linear.
        commits = [
            {'message': 'a', 'branch': 'master'},
            {'message': 'b', 'branch': 'b'},
            {'message': 'a1', 'parents': ['a'], 'branch': 'master'},
            {'message': 'b1', 'branch': 'b', 'parents': ['b', 'a1']},
        ]
        backend.create_repo(commits)

        self.app.get(
            route_path('repo_changelog', repo_name=backend.repo_name,
                       params=dict(branch='b')),
            status=200)

    @pytest.mark.backends("hg")
    def test_commits_closed_branches(self, autologin_user, backend):
        repo = backend['closed_branch']
        response = self.app.get(
            route_path('repo_commits', repo_name=repo.repo_name,
                       params=dict(branch='experimental')),
            status=200)

        assert_commits_on_page(response, indexes=[3, 1])

    def test_changelog_pagination(self, backend):
        self.log_user()
        # pagination, walk up to page 6
        changelog_url = route_path(
            'repo_commits', repo_name=backend.repo_name)

        for page in range(1, 7):
            response = self.app.get(changelog_url, {'page': page})

            first_idx = -DEFAULT_CHANGELOG_SIZE * (page - 1) - 1
            last_idx = -DEFAULT_CHANGELOG_SIZE * page
            self.assert_commit_range_on_page(response, first_idx, last_idx, backend)

    def assert_commit_range_on_page(
            self, response, first_idx, last_idx, backend):
        """Assert the page contains exactly the commits [first_idx..last_idx]
        and not the first commit of the next page."""
        input_template = (
            """<input class="commit-range" """
            """data-commit-id="%(raw_id)s" data-commit-idx="%(idx)s" """
            """data-short-id="%(short_id)s" id="%(raw_id)s" """
            """name="%(raw_id)s" type="checkbox" value="1" />"""
        )

        commit_span_template = """<span class="commit_hash">r%s:%s</span>"""
        repo = backend.repo

        first_commit_on_page = repo.get_commit(commit_idx=first_idx)
        response.mustcontain(
            input_template % {'raw_id': first_commit_on_page.raw_id,
                              'idx': first_commit_on_page.idx,
                              'short_id': first_commit_on_page.short_id})

        response.mustcontain(commit_span_template % (
            first_commit_on_page.idx, first_commit_on_page.short_id)
        )

        last_commit_on_page = repo.get_commit(commit_idx=last_idx)
        response.mustcontain(
            input_template % {'raw_id': last_commit_on_page.raw_id,
                              'idx': last_commit_on_page.idx,
                              'short_id': last_commit_on_page.short_id})
        response.mustcontain(commit_span_template % (
            last_commit_on_page.idx, last_commit_on_page.short_id)
        )

        first_commit_of_next_page = repo.get_commit(commit_idx=last_idx - 1)
        first_span_of_next_page = commit_span_template % (
            first_commit_of_next_page.idx, first_commit_of_next_page.short_id)
        assert first_span_of_next_page not in response

    @pytest.mark.parametrize('test_path', [
        'vcs/exceptions.py',
        '/vcs/exceptions.py',
        '//vcs/exceptions.py'
    ])
    def test_commits_with_filenode(self, backend, test_path):
        self.log_user()
        response = self.app.get(
            route_path('repo_commits_file', repo_name=backend.repo_name,
                       commit_id='tip', f_path=test_path),
        )

        # history commits messages
        response.mustcontain('Added exceptions module, this time for real')
        response.mustcontain('Added not implemented hg backend test case')
        response.mustcontain('Added BaseChangeset class')

    def test_commits_with_filenode_that_is_dirnode(self, backend):
        self.log_user()
        self.app.get(
            route_path('repo_commits_file', repo_name=backend.repo_name,
                       commit_id='tip', f_path='/tests'),
            status=302)

    def test_commits_with_filenode_not_existing(self, backend):
        self.log_user()
        self.app.get(
            route_path('repo_commits_file', repo_name=backend.repo_name,
                       commit_id='tip', f_path='wrong_path'),
            status=302)
@@ -1,656 +1,656 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import mock
21 21 import pytest
22 22 import lxml.html
23 23
24 24 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
25 25 from rhodecode.tests import assert_session_flash
26 26 from rhodecode.tests.utils import AssertResponse, commit_change
27 27 from rhodecode.tests.routes import route_path
28 28
29 29
30 30 @pytest.mark.usefixtures("autologin_user", "app")
31 31 class TestCompareView(object):
32 32
33 33 def test_compare_index_is_reached_at_least_once(self, backend):
34 34 repo = backend.repo
35 35 self.app.get(
36 36 route_path('repo_compare_select', repo_name=repo.repo_name))
37 37
38 38 @pytest.mark.xfail_backends("svn", reason="Requires pull")
39 39 def test_compare_remote_with_different_commit_indexes(self, backend):
40 40 # Preparing the following repository structure:
41 41 #
42 42 # Origin repository has two commits:
43 43 #
44 44 # 0 1
45 45 # A -- D
46 46 #
47 47 # The fork of it has a few more commits and "D" has a commit index
48 48 # which does not exist in origin.
49 49 #
50 50 # 0 1 2 3 4
51 51 # A -- -- -- D -- E
52 52 # \- B -- C
53 53 #
54 54
55 55 fork = backend.create_repo()
56 56 origin = backend.create_repo()
57 57
58 58 # prepare fork
59 59 commit0 = commit_change(
60 60 fork.repo_name, filename=b'file1', content=b'A',
61 61 message='A - Initial Commit', vcs_type=backend.alias, parent=None, newfile=True)
62 62
63 63 commit1 = commit_change(
64 64 fork.repo_name, filename=b'file1', content=b'B',
65 65 message='B, child of A', vcs_type=backend.alias, parent=commit0)
66 66
67 67 commit_change( # commit 2
68 68 fork.repo_name, filename=b'file1', content=b'C',
69 69 message='C, child of B', vcs_type=backend.alias, parent=commit1)
70 70
71 71 commit3 = commit_change(
72 72 fork.repo_name, filename=b'file1', content=b'D',
73 message='D, child of A', vcs_type=backend.alias, parent=commit0)
73 message='D, child of A', vcs_type=backend.alias, parent=commit0, branch='feature')
74 74
75 75 commit4 = commit_change(
76 76 fork.repo_name, filename=b'file1', content=b'E',
77 message='E, child of D', vcs_type=backend.alias, parent=commit3)
77 message='E, child of D', vcs_type=backend.alias, parent=commit3, branch='feature')
78 78
79 79 # prepare origin repository, taking just the history up to D
80 80
81 81 origin_repo = origin.scm_instance(cache=False)
82 82 origin_repo.config.clear_section('hooks')
83 83 origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id])
84 84 origin_repo = origin.scm_instance(cache=False) # cache rebuild
85 85
86 86 # Verify test fixture setup
87 87 # This does not work for git
88 88 if backend.alias != 'git':
89 89 assert 5 == len(fork.scm_instance(cache=False).commit_ids)
90 90 assert 2 == len(origin_repo.commit_ids)
91 91
92 92 # Comparing the revisions
93 93 response = self.app.get(
94 94 route_path('repo_compare',
95 95 repo_name=origin.repo_name,
96 96 source_ref_type="rev", source_ref=commit3.raw_id,
97 97 target_ref_type="rev", target_ref=commit4.raw_id,
98 98 params=dict(merge='1', target_repo=fork.repo_name)
99 99 ),
100 100 status=200)
101 101
102 102 compare_page = ComparePage(response)
103 103 compare_page.contains_commits([commit4])
104 104
105 105 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
106 106 def test_compare_forks_on_branch_extra_commits(self, backend):
107 107 repo1 = backend.create_repo()
108 108
109 109 # commit something !
110 110 commit0 = commit_change(
111 111 repo1.repo_name, filename=b'file1', content=b'line1\n',
112 112 message='commit1', vcs_type=backend.alias, parent=None,
113 113 newfile=True)
114 114
115 115 # fork this repo
116 116 repo2 = backend.create_fork()
117 117
118 118 # add two extra commit into fork
119 119 commit1 = commit_change(
120 120 repo2.repo_name, filename=b'file1', content=b'line1\nline2\n',
121 121 message='commit2', vcs_type=backend.alias, parent=commit0)
122 122
123 123 commit2 = commit_change(
124 124 repo2.repo_name, filename=b'file1', content=b'line1\nline2\nline3\n',
125 125 message='commit3', vcs_type=backend.alias, parent=commit1)
126 126
127 127 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
128 128 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
129 129
130 130 response = self.app.get(
131 131 route_path('repo_compare',
132 132 repo_name=repo1.repo_name,
133 133 source_ref_type="branch", source_ref=commit_id2,
134 134 target_ref_type="branch", target_ref=commit_id1,
135 135 params=dict(merge='1', target_repo=repo2.repo_name)
136 136 ))
137 137
138 138 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
139 139 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
140 140
141 141 compare_page = ComparePage(response)
142 142 compare_page.contains_change_summary(1, 2, 0)
143 143 compare_page.contains_commits([commit1, commit2])
144 144
145 145 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
146 146 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
147 147
148 148 # Swap is removed when comparing branches since it's a PR feature and
149 149 # it is then a preview mode
150 150 compare_page.swap_is_hidden()
151 151 compare_page.target_source_are_disabled()
152 152
153 153 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
154 154 def test_compare_forks_on_branch_extra_commits_origin_has_incoming(self, backend):
155 155 repo1 = backend.create_repo()
156 156
157 157 # commit something !
158 158 commit0 = commit_change(
159 159 repo1.repo_name, filename=b'file1', content=b'line1\n',
160 160 message='commit1', vcs_type=backend.alias, parent=None,
161 161 newfile=True)
162 162
163 163 # fork this repo
164 164 repo2 = backend.create_fork()
165 165
166 166 # now commit something to origin repo
167 167 commit_change(
168 168 repo1.repo_name, filename=b'file2', content=b'line1file2\n',
169 169 message='commit2', vcs_type=backend.alias, parent=commit0,
170 170 newfile=True)
171 171
172 172 # add two extra commit into fork
173 173 commit1 = commit_change(
174 174 repo2.repo_name, filename=b'file1', content=b'line1\nline2\n',
175 175 message='commit2', vcs_type=backend.alias, parent=commit0)
176 176
177 177 commit2 = commit_change(
178 178 repo2.repo_name, filename=b'file1', content=b'line1\nline2\nline3\n',
179 179 message='commit3', vcs_type=backend.alias, parent=commit1)
180 180
181 181 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
182 182 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
183 183
184 184 response = self.app.get(
185 185 route_path('repo_compare',
186 186 repo_name=repo1.repo_name,
187 187 source_ref_type="branch", source_ref=commit_id2,
188 188 target_ref_type="branch", target_ref=commit_id1,
189 189 params=dict(merge='1', target_repo=repo2.repo_name),
190 190 ))
191 191
192 192 response.mustcontain(f'{repo1.repo_name}@{commit_id2}')
193 193 response.mustcontain(f'{repo2.repo_name}@{commit_id1}')
194 194
195 195 compare_page = ComparePage(response)
196 196 compare_page.contains_change_summary(1, 2, 0)
197 197 compare_page.contains_commits([commit1, commit2])
198 198 anchor = f'a_c-{commit0.short_id}-826e8142e6ba'
199 199 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
200 200
201 201 # Swap is removed when comparing branches since it's a PR feature and
202 202 # it is then a preview mode
203 203 compare_page.swap_is_hidden()
204 204 compare_page.target_source_are_disabled()
205 205
206 206 @pytest.mark.xfail_backends("svn")
207 207 # TODO(marcink): no svn support for compare two seperate repos
208 208 def test_compare_of_unrelated_forks(self, backend):
209 209 orig = backend.create_repo(number_of_commits=1)
210 210 fork = backend.create_repo(number_of_commits=1)
211 211
212 212 response = self.app.get(
213 213 route_path('repo_compare',
214 214 repo_name=orig.repo_name,
215 215 source_ref_type="rev", source_ref="tip",
216 216 target_ref_type="rev", target_ref="tip",
217 217 params=dict(merge='1', target_repo=fork.repo_name),
218 218 ),
219 219 status=302)
220 220 response = response.follow()
221 221 response.mustcontain("Repositories unrelated.")
222 222
    @pytest.mark.xfail_backends("svn")
    def test_compare_cherry_pick_commits_from_bottom(self, backend):
        """
        Compare a sub-range of origin's history against a fork that stopped
        at commit1: selecting commit1 (parent) .. commit4 must render exactly
        commit2, commit3 and commit4 with one changed file.
        """

        # repo1:
        #     commit0:
        #     commit1:
        # repo1-fork - in which we will cherry pick bottom commits
        #     commit0:
        #     commit1:
        #     commit2: x
        #     commit3: x
        #     commit4: x
        #     commit5:
        # make repo1, and commit1+commit2

        repo1 = backend.create_repo()

        # commit something !
        commit0 = commit_change(
            repo1.repo_name, filename=b'file1', content=b'line1\n',
            message='commit1', vcs_type=backend.alias, parent=None,
            newfile=True)
        commit1 = commit_change(
            repo1.repo_name, filename=b'file1', content=b'line1\nline2\n',
            message='commit2', vcs_type=backend.alias, parent=commit0)

        # fork this repo (the fork keeps only commit0+commit1)
        repo2 = backend.create_fork()

        # now make commit3-6 in the origin only
        commit2 = commit_change(
            repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\n',
            message='commit3', vcs_type=backend.alias, parent=commit1)
        commit3 = commit_change(
            repo1.repo_name, filename=b'file1',content=b'line1\nline2\nline3\nline4\n',
            message='commit4', vcs_type=backend.alias, parent=commit2)
        commit4 = commit_change(
            repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\nline4\nline5\n',
            message='commit5', vcs_type=backend.alias, parent=commit3)
        commit_change(  # commit 5, intentionally outside the compared range
            repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\nline4\nline5\nline6\n',
            message='commit6', vcs_type=backend.alias, parent=commit4)

        response = self.app.get(
            route_path('repo_compare',
                       repo_name=repo2.repo_name,
                       # parent of commit2, in target repo2
                       source_ref_type="rev", source_ref=commit1.raw_id,
                       target_ref_type="rev", target_ref=commit4.raw_id,
                       params=dict(merge='1', target_repo=repo1.repo_name),
                       ))
        response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id))
        response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id))

        # files: one file changed, 3 lines inserted, nothing deleted
        compare_page = ComparePage(response)
        compare_page.contains_change_summary(1, 3, 0)
        compare_page.contains_commits([commit2, commit3, commit4])
        anchor = 'a_c-{}-826e8142e6ba'.format(commit1.short_id)
        compare_page.contains_file_links_and_anchors([('file1', anchor),])
283 283
    @pytest.mark.xfail_backends("svn")
    def test_compare_cherry_pick_commits_from_top(self, backend):
        """
        Compare the top of the history within a single repo: selecting
        commit2 (parent) .. commit5 must render exactly commit3, commit4
        and commit5 with one changed file.
        """
        # repo1:
        #     commit0:
        #     commit1:
        # repo1-fork - in which we will cherry pick top commits
        #     commit0:
        #     commit1:
        #     commit2:
        #     commit3: x
        #     commit4: x
        #     commit5: x

        # make repo1, and commit1+commit2
        repo1 = backend.create_repo()

        # commit something !
        commit0 = commit_change(
            repo1.repo_name, filename=b'file1', content=b'line1\n',
            message='commit1', vcs_type=backend.alias, parent=None,
            newfile=True)
        commit1 = commit_change(
            repo1.repo_name, filename=b'file1', content=b'line1\nline2\n',
            message='commit2', vcs_type=backend.alias, parent=commit0)

        # fork this repo (fork itself is not used further in this test)
        backend.create_fork()

        # now make commit3-6
        commit2 = commit_change(
            repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\n',
            message='commit3', vcs_type=backend.alias, parent=commit1)
        commit3 = commit_change(
            repo1.repo_name, filename=b'file1',
            content=b'line1\nline2\nline3\nline4\n', message='commit4',
            vcs_type=backend.alias, parent=commit2)
        commit4 = commit_change(
            repo1.repo_name, filename=b'file1',
            content=b'line1\nline2\nline3\nline4\nline5\n', message='commit5',
            vcs_type=backend.alias, parent=commit3)
        commit5 = commit_change(
            repo1.repo_name, filename=b'file1',
            content=b'line1\nline2\nline3\nline4\nline5\nline6\n',
            message='commit6', vcs_type=backend.alias, parent=commit4)

        response = self.app.get(
            route_path('repo_compare',
                       repo_name=repo1.repo_name,
                       # parent of commit3, not in source repo2
                       source_ref_type="rev", source_ref=commit2.raw_id,
                       target_ref_type="rev", target_ref=commit5.raw_id,
                       params=dict(merge='1'),))

        response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id))
        response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id))

        compare_page = ComparePage(response)
        compare_page.contains_change_summary(1, 3, 0)
        compare_page.contains_commits([commit3, commit4, commit5])

        # files: the single touched file must link to its diff anchor
        anchor = 'a_c-{}-826e8142e6ba'.format(commit2.short_id)
        compare_page.contains_file_links_and_anchors([('file1', anchor),])
347 347
348 348 @pytest.mark.xfail_backends("svn")
349 349 def test_compare_remote_branches(self, backend):
350 350 repo1 = backend.repo
351 351 repo2 = backend.create_fork()
352 352
353 353 commit_id1 = repo1.get_commit(commit_idx=3).raw_id
354 354 commit_id1_short = repo1.get_commit(commit_idx=3).short_id
355 355 commit_id2 = repo1.get_commit(commit_idx=6).raw_id
356 356 commit_id2_short = repo1.get_commit(commit_idx=6).short_id
357 357
358 358 response = self.app.get(
359 359 route_path('repo_compare',
360 360 repo_name=repo1.repo_name,
361 361 source_ref_type="rev", source_ref=commit_id1,
362 362 target_ref_type="rev", target_ref=commit_id2,
363 363 params=dict(merge='1', target_repo=repo2.repo_name),
364 364 ))
365 365
366 366 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1))
367 367 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2))
368 368
369 369 compare_page = ComparePage(response)
370 370
371 371 # outgoing commits between those commits
372 372 compare_page.contains_commits(
373 373 [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]])
374 374
375 375 # files
376 376 compare_page.contains_file_links_and_anchors([
377 377 ('vcs/backends/hg.py', 'a_c-{}-9c390eb52cd6'.format(commit_id2_short)),
378 378 ('vcs/backends/__init__.py', 'a_c-{}-41b41c1f2796'.format(commit_id1_short)),
379 379 ('vcs/backends/base.py', 'a_c-{}-2f574d260608'.format(commit_id1_short)),
380 380 ])
381 381
    @pytest.mark.xfail_backends("svn")
    def test_source_repo_new_commits_after_forking_simple_diff(self, backend):
        """
        Fork a one-commit repo, add commits only to the fork, then verify:
        1) comparing the (unchanged) origin branch into the fork shows
           nothing, and
        2) after origin gains a new root commit, the compare picks it up.
        """
        repo1 = backend.create_repo()
        r1_name = repo1.repo_name

        commit0 = commit_change(
            repo=r1_name, filename=b'file1',
            content=b'line1', message='commit1', vcs_type=backend.alias,
            newfile=True)
        assert repo1.scm_instance().commit_ids == [commit0.raw_id]

        # fork the repo1
        repo2 = backend.create_fork()
        assert repo2.scm_instance().commit_ids == [commit0.raw_id]

        # kept on the instance; presumably consumed by a teardown/cleanup
        # fixture - TODO confirm
        self.r2_id = repo2.repo_id
        r2_name = repo2.repo_name

        commit1 = commit_change(
            repo=r2_name, filename=b'file1-fork',
            content=b'file1-line1-from-fork', message='commit1-fork',
            vcs_type=backend.alias, parent=repo2.scm_instance()[-1],
            newfile=True)

        commit2 = commit_change(
            repo=r2_name, filename=b'file2-fork',
            content=b'file2-line1-from-fork', message='commit2-fork',
            vcs_type=backend.alias, parent=commit1,
            newfile=True)

        commit_change(  # commit 3
            repo=r2_name, filename=b'file3-fork',
            content=b'file3-line1-from-fork', message='commit3-fork',
            vcs_type=backend.alias, parent=commit2, newfile=True)

        # compare ! origin branch into fork: origin has no new commits yet
        commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
        commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME

        response = self.app.get(
            route_path('repo_compare',
                       repo_name=r2_name,
                       source_ref_type="branch", source_ref=commit_id1,
                       target_ref_type="branch", target_ref=commit_id2,
                       params=dict(merge='1', target_repo=r1_name),
                       ))

        response.mustcontain('%s@%s' % (r2_name, commit_id1))
        response.mustcontain('%s@%s' % (r1_name, commit_id2))
        response.mustcontain('No files')
        response.mustcontain('No commits in this compare')

        # now give origin a new (parentless) commit after the fork
        commit0 = commit_change(
            repo=r1_name, filename=b'file2',
            content=b'line1-added-after-fork', message='commit2-parent',
            vcs_type=backend.alias, parent=None, newfile=True)

        # compare ! same refs again; the new origin commit must now show up
        response = self.app.get(
            route_path('repo_compare',
                       repo_name=r2_name,
                       source_ref_type="branch", source_ref=commit_id1,
                       target_ref_type="branch", target_ref=commit_id2,
                       params=dict(merge='1', target_repo=r1_name),
                       ))

        response.mustcontain('%s@%s' % (r2_name, commit_id1))
        response.mustcontain('%s@%s' % (r1_name, commit_id2))

        response.mustcontain("""commit2-parent""")
        response.mustcontain("""line1-added-after-fork""")
        compare_page = ComparePage(response)
        compare_page.contains_change_summary(1, 1, 0)
455 455
456 456 @pytest.mark.xfail_backends("svn")
457 457 def test_compare_commits(self, backend, xhr_header):
458 458 commit0 = backend.repo.get_commit(commit_idx=0)
459 459 commit1 = backend.repo.get_commit(commit_idx=1)
460 460
461 461 response = self.app.get(
462 462 route_path('repo_compare',
463 463 repo_name=backend.repo_name,
464 464 source_ref_type="rev", source_ref=commit0.raw_id,
465 465 target_ref_type="rev", target_ref=commit1.raw_id,
466 466 params=dict(merge='1')
467 467 ),
468 468 extra_environ=xhr_header, )
469 469
470 470 # outgoing commits between those commits
471 471 compare_page = ComparePage(response)
472 472 compare_page.contains_commits(commits=[commit1])
473 473
474 474 def test_errors_when_comparing_unknown_source_repo(self, backend):
475 475 repo = backend.repo
476 476
477 477 self.app.get(
478 478 route_path('repo_compare',
479 479 repo_name='badrepo',
480 480 source_ref_type="rev", source_ref='tip',
481 481 target_ref_type="rev", target_ref='tip',
482 482 params=dict(merge='1', target_repo=repo.repo_name)
483 483 ),
484 484 status=404)
485 485
486 486 def test_errors_when_comparing_unknown_target_repo(self, backend):
487 487 repo = backend.repo
488 488 badrepo = 'badrepo'
489 489
490 490 response = self.app.get(
491 491 route_path('repo_compare',
492 492 repo_name=repo.repo_name,
493 493 source_ref_type="rev", source_ref='tip',
494 494 target_ref_type="rev", target_ref='tip',
495 495 params=dict(merge='1', target_repo=badrepo),
496 496 ),
497 497 status=302)
498 498 redirected = response.follow()
499 499 redirected.mustcontain(
500 500 'Could not find the target repo: `{}`'.format(badrepo))
501 501
502 502 def test_compare_not_in_preview_mode(self, backend_stub):
503 503 commit0 = backend_stub.repo.get_commit(commit_idx=0)
504 504 commit1 = backend_stub.repo.get_commit(commit_idx=1)
505 505
506 506 response = self.app.get(
507 507 route_path('repo_compare',
508 508 repo_name=backend_stub.repo_name,
509 509 source_ref_type="rev", source_ref=commit0.raw_id,
510 510 target_ref_type="rev", target_ref=commit1.raw_id,
511 511 ))
512 512
513 513 # outgoing commits between those commits
514 514 compare_page = ComparePage(response)
515 515 compare_page.swap_is_visible()
516 516 compare_page.target_source_are_enabled()
517 517
    def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util):
        """
        Comparing a repo against a fork whose largefiles extension setting
        differs must redirect with a flash error rather than crash.
        """
        orig = backend_hg.create_repo(number_of_commits=1)
        fork = backend_hg.create_fork()

        # largefiles disabled on the origin, enabled on the fork
        settings_util.create_repo_rhodecode_ui(
            orig, 'extensions', value='', key='largefiles', active=False)
        settings_util.create_repo_rhodecode_ui(
            fork, 'extensions', value='', key='largefiles', active=True)

        # force the backend compare call to fail the way a real
        # repository-requirement mismatch would
        compare_module = ('rhodecode.lib.vcs.backends.hg.repository.'
                          'MercurialRepository.compare')
        with mock.patch(compare_module) as compare_mock:
            compare_mock.side_effect = RepositoryRequirementError()

            response = self.app.get(
                route_path('repo_compare',
                           repo_name=orig.repo_name,
                           source_ref_type="rev", source_ref="tip",
                           target_ref_type="rev", target_ref="tip",
                           params=dict(merge='1', target_repo=fork.repo_name),
                           ),
                status=302)

        assert_session_flash(
            response,
            'Could not compare repos with different large file settings')
544 544
545 545
@pytest.mark.usefixtures("autologin_user")
class TestCompareControllerSvn(object):
    """Compare-view tests for SVN's path-based references (``tag@rev``)."""

    def test_supports_references_with_path(self, app, backend_svn):
        repo = backend_svn['svn-simple-layout']
        head_id = repo.get_commit(commit_idx=-1).raw_id

        response = app.get(
            route_path('repo_compare',
                       repo_name=repo.repo_name,
                       source_ref_type="tag",
                       source_ref="%s@%s" % ('tags/v0.1', head_id),
                       target_ref_type="tag",
                       target_ref="%s@%s" % ('tags/v0.2', head_id),
                       params=dict(merge='1'),
                       ),
            status=200)

        # Both tag paths sit at the same revision, so no commits are listed...
        response.mustcontain('No commits in this compare')

        # ...yet the two tag paths still differ by exactly one file.
        response.mustcontain('example.py')
        compare_page = ComparePage(response)
        compare_page.contains_change_summary(1, 5, 1)

    def test_shows_commits_if_different_ids(self, app, backend_svn):
        repo = backend_svn['svn-simple-layout']
        source_id = repo.get_commit(commit_idx=-6).raw_id
        target_id = repo.get_commit(commit_idx=-1).raw_id

        response = app.get(
            route_path('repo_compare',
                       repo_name=repo.repo_name,
                       source_ref_type="tag",
                       source_ref="%s@%s" % ('tags/v0.1', source_id),
                       target_ref_type="tag",
                       target_ref="%s@%s" % ('tags/v0.2', target_id),
                       params=dict(merge='1')
                       ),
            status=200)

        # Different revisions -> the commit list must not be empty.
        assert 'No commits in this compare' not in response.text

        # Still only one file changed when comparing those two tags.
        response.mustcontain('example.py')
        compare_page = ComparePage(response)
        compare_page.contains_change_summary(1, 5, 1)
593 593
594 594
595 595 class ComparePage(AssertResponse):
596 596 """
597 597 Abstracts the page template from the tests
598 598 """
599 599
600 600 def contains_file_links_and_anchors(self, files):
601 601 doc = lxml.html.fromstring(self.response.body)
602 602 for filename, file_id in files:
603 603 self.contains_one_anchor(file_id)
604 604 diffblock = doc.cssselect('[data-f-path="%s"]' % filename)
605 605 assert len(diffblock) == 2
606 606 for lnk in diffblock[0].cssselect('a'):
607 607 if 'permalink' in lnk.text:
608 608 assert '#{}'.format(file_id) in lnk.attrib['href']
609 609 break
610 610 else:
611 611 pytest.fail('Unable to find permalink')
612 612
613 613 def contains_change_summary(self, files_changed, inserted, deleted):
614 614 template = (
615 615 '{files_changed} file{plural} changed: '
616 616 '<span class="op-added">{inserted} inserted</span>, <span class="op-deleted">{deleted} deleted</span>')
617 617 self.response.mustcontain(template.format(
618 618 files_changed=files_changed,
619 619 plural="s" if files_changed > 1 else "",
620 620 inserted=inserted,
621 621 deleted=deleted))
622 622
623 623 def contains_commits(self, commits, ancestors=None):
624 624 response = self.response
625 625
626 626 for commit in commits:
627 627 # Expecting to see the commit message in an element which
628 628 # has the ID "c-{commit.raw_id}"
629 629 self.element_contains('#c-' + commit.raw_id, commit.message)
630 630 self.contains_one_link(
631 631 'r%s:%s' % (commit.idx, commit.short_id),
632 632 self._commit_url(commit))
633 633
634 634 if ancestors:
635 635 response.mustcontain('Ancestor')
636 636 for ancestor in ancestors:
637 637 self.contains_one_link(
638 638 ancestor.short_id, self._commit_url(ancestor))
639 639
640 640 def _commit_url(self, commit):
641 641 return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id)
642 642
643 643 def swap_is_hidden(self):
644 644 assert '<a id="btn-swap"' not in self.response.text
645 645
646 646 def swap_is_visible(self):
647 647 assert '<a id="btn-swap"' in self.response.text
648 648
649 649 def target_source_are_disabled(self):
650 650 response = self.response
651 651 response.mustcontain("var enable_fields = false;")
652 652 response.mustcontain('.select2("enable", enable_fields)')
653 653
654 654 def target_source_are_enabled(self):
655 655 response = self.response
656 656 response.mustcontain("var enable_fields = true;")
This diff has been collapsed as it changes many lines, (1860 lines changed) Show them Hide them
@@ -1,1651 +1,1935 b''
1
2 1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 2 #
4 3 # This program is free software: you can redistribute it and/or modify
5 4 # it under the terms of the GNU Affero General Public License, version 3
6 5 # (only), as published by the Free Software Foundation.
7 6 #
8 7 # This program is distributed in the hope that it will be useful,
9 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 10 # GNU General Public License for more details.
12 11 #
13 12 # You should have received a copy of the GNU Affero General Public License
14 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 14 #
16 15 # This program is dual-licensed. If you wish to learn more about the
17 16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 18 import mock
20 19 import pytest
21 20
22 21 import rhodecode
23 22 from rhodecode.lib import helpers as h
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
23 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason, Reference
25 24 from rhodecode.lib.vcs.nodes import FileNode
26 25 from rhodecode.lib.ext_json import json
27 26 from rhodecode.model.changeset_status import ChangesetStatusModel
28 27 from rhodecode.model.db import (
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
28 PullRequest,
29 ChangesetStatus,
30 UserLog,
31 Notification,
32 ChangesetComment,
33 Repository,
34 )
30 35 from rhodecode.model.meta import Session
31 36 from rhodecode.model.pull_request import PullRequestModel
32 37 from rhodecode.model.user import UserModel
33 38 from rhodecode.model.comment import CommentsModel
34 39 from rhodecode.tests import (
35 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
40 assert_session_flash,
41 TEST_USER_ADMIN_LOGIN,
42 TEST_USER_REGULAR_LOGIN,
43 )
44 from rhodecode.tests.fixture_mods.fixture_utils import PRTestUtility
36 45 from rhodecode.tests.routes import route_path
37 46
38 47
39 @pytest.mark.usefixtures('app', 'autologin_user')
48 @pytest.mark.usefixtures("app", "autologin_user")
40 49 @pytest.mark.backends("git", "hg")
41 50 class TestPullrequestsView(object):
42
43 51 def test_index(self, backend):
44 self.app.get(route_path(
45 'pullrequest_new',
46 repo_name=backend.repo_name))
52 self.app.get(route_path("pullrequest_new", repo_name=backend.repo_name))
47 53
48 54 def test_option_menu_create_pull_request_exists(self, backend):
49 55 repo_name = backend.repo_name
50 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
56 response = self.app.get(h.route_path("repo_summary", repo_name=repo_name))
51 57
52 58 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
53 'pullrequest_new', repo_name=repo_name)
59 "pullrequest_new", repo_name=repo_name
60 )
54 61 response.mustcontain(create_pr_link)
55 62
56 63 def test_create_pr_form_with_raw_commit_id(self, backend):
57 64 repo = backend.repo
58 65
59 66 self.app.get(
60 route_path('pullrequest_new', repo_name=repo.repo_name,
61 commit=repo.get_commit().raw_id),
62 status=200)
67 route_path(
68 "pullrequest_new",
69 repo_name=repo.repo_name,
70 commit=repo.get_commit().raw_id,
71 ),
72 status=200,
73 )
63 74
64 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
65 @pytest.mark.parametrize('range_diff', ["0", "1"])
75 @pytest.mark.parametrize("pr_merge_enabled", [True, False])
76 @pytest.mark.parametrize("range_diff", ["0", "1"])
66 77 def test_show(self, pr_util, pr_merge_enabled, range_diff):
67 78 pull_request = pr_util.create_pull_request(
68 mergeable=pr_merge_enabled, enable_notifications=False)
79 mergeable=pr_merge_enabled, enable_notifications=False
80 )
69 81
70 response = self.app.get(route_path(
71 'pullrequest_show',
72 repo_name=pull_request.target_repo.scm_instance().name,
73 pull_request_id=pull_request.pull_request_id,
74 params={'range-diff': range_diff}))
82 response = self.app.get(
83 route_path(
84 "pullrequest_show",
85 repo_name=pull_request.target_repo.scm_instance().name,
86 pull_request_id=pull_request.pull_request_id,
87 params={"range-diff": range_diff},
88 )
89 )
75 90
76 91 for commit_id in pull_request.revisions:
77 92 response.mustcontain(commit_id)
78 93
79 94 response.mustcontain(pull_request.target_ref_parts.type)
80 95 response.mustcontain(pull_request.target_ref_parts.name)
81 96
82 97 response.mustcontain('class="pull-request-merge"')
83 98
84 99 if pr_merge_enabled:
85 response.mustcontain('Pull request reviewer approval is pending')
100 response.mustcontain("Pull request reviewer approval is pending")
86 101 else:
87 response.mustcontain('Server-side pull request merging is disabled.')
102 response.mustcontain("Server-side pull request merging is disabled.")
88 103
89 104 if range_diff == "1":
90 response.mustcontain('Turn off: Show the diff as commit range')
105 response.mustcontain("Turn off: Show the diff as commit range")
91 106
92 107 def test_show_versions_of_pr(self, backend, csrf_token):
93 108 commits = [
94 {'message': 'initial-commit',
95 'added': [FileNode(b'test-file.txt', b'LINE1\n')]},
96
97 {'message': 'commit-1',
98 'changed': [FileNode(b'test-file.txt', b'LINE1\nLINE2\n')]},
109 {
110 "message": "initial-commit",
111 "added": [FileNode(b"test-file.txt", b"LINE1\n")],
112 },
113 {
114 "message": "commit-1",
115 "changed": [FileNode(b"test-file.txt", b"LINE1\nLINE2\n")],
116 },
99 117 # Above is the initial version of PR that changes a single line
100
101 118 # from now on we'll add 3x commit adding a nother line on each step
102 {'message': 'commit-2',
103 'changed': [FileNode(b'test-file.txt', b'LINE1\nLINE2\nLINE3\n')]},
104
105 {'message': 'commit-3',
106 'changed': [FileNode(b'test-file.txt', b'LINE1\nLINE2\nLINE3\nLINE4\n')]},
107
108 {'message': 'commit-4',
109 'changed': [FileNode(b'test-file.txt', b'LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n')]},
119 {
120 "message": "commit-2",
121 "changed": [FileNode(b"test-file.txt", b"LINE1\nLINE2\nLINE3\n")],
122 },
123 {
124 "message": "commit-3",
125 "changed": [
126 FileNode(b"test-file.txt", b"LINE1\nLINE2\nLINE3\nLINE4\n")
127 ],
128 },
129 {
130 "message": "commit-4",
131 "changed": [
132 FileNode(b"test-file.txt", b"LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n")
133 ],
134 },
110 135 ]
111 136
112 137 commit_ids = backend.create_master_repo(commits)
113 target = backend.create_repo(heads=['initial-commit'])
114 source = backend.create_repo(heads=['commit-1'])
138 target = backend.create_repo(heads=["initial-commit"])
139 source = backend.create_repo(heads=["commit-1"])
115 140 source_repo_name = source.repo_name
116 141 target_repo_name = target.repo_name
117 142
118 target_ref = 'branch:{branch}:{commit_id}'.format(
119 branch=backend.default_branch_name, commit_id=commit_ids['initial-commit'])
120 source_ref = 'branch:{branch}:{commit_id}'.format(
121 branch=backend.default_branch_name, commit_id=commit_ids['commit-1'])
143 target_ref = "branch:{branch}:{commit_id}".format(
144 branch=backend.default_branch_name, commit_id=commit_ids["initial-commit"]
145 )
146 source_ref = "branch:{branch}:{commit_id}".format(
147 branch=backend.default_branch_name, commit_id=commit_ids["commit-1"]
148 )
122 149
123 150 response = self.app.post(
124 route_path('pullrequest_create', repo_name=source.repo_name),
151 route_path("pullrequest_create", repo_name=source.repo_name),
125 152 [
126 ('source_repo', source_repo_name),
127 ('source_ref', source_ref),
128 ('target_repo', target_repo_name),
129 ('target_ref', target_ref),
130 ('common_ancestor', commit_ids['initial-commit']),
131 ('pullrequest_title', 'Title'),
132 ('pullrequest_desc', 'Description'),
133 ('description_renderer', 'markdown'),
134 ('__start__', 'review_members:sequence'),
135 ('__start__', 'reviewer:mapping'),
136 ('user_id', '1'),
137 ('__start__', 'reasons:sequence'),
138 ('reason', 'Some reason'),
139 ('__end__', 'reasons:sequence'),
140 ('__start__', 'rules:sequence'),
141 ('__end__', 'rules:sequence'),
142 ('mandatory', 'False'),
143 ('__end__', 'reviewer:mapping'),
144 ('__end__', 'review_members:sequence'),
145 ('__start__', 'revisions:sequence'),
146 ('revisions', commit_ids['commit-1']),
147 ('__end__', 'revisions:sequence'),
148 ('user', ''),
149 ('csrf_token', csrf_token),
153 ("source_repo", source_repo_name),
154 ("source_ref", source_ref),
155 ("target_repo", target_repo_name),
156 ("target_ref", target_ref),
157 ("common_ancestor", commit_ids["initial-commit"]),
158 ("pullrequest_title", "Title"),
159 ("pullrequest_desc", "Description"),
160 ("description_renderer", "markdown"),
161 ("__start__", "review_members:sequence"),
162 ("__start__", "reviewer:mapping"),
163 ("user_id", "1"),
164 ("__start__", "reasons:sequence"),
165 ("reason", "Some reason"),
166 ("__end__", "reasons:sequence"),
167 ("__start__", "rules:sequence"),
168 ("__end__", "rules:sequence"),
169 ("mandatory", "False"),
170 ("__end__", "reviewer:mapping"),
171 ("__end__", "review_members:sequence"),
172 ("__start__", "revisions:sequence"),
173 ("revisions", commit_ids["commit-1"]),
174 ("__end__", "revisions:sequence"),
175 ("user", ""),
176 ("csrf_token", csrf_token),
150 177 ],
151 status=302)
178 status=302,
179 )
152 180
153 location = response.headers['Location']
181 location = response.headers["Location"]
154 182
155 pull_request_id = location.rsplit('/', 1)[1]
156 assert pull_request_id != 'new'
183 pull_request_id = location.rsplit("/", 1)[1]
184 assert pull_request_id != "new"
157 185 pull_request = PullRequest.get(int(pull_request_id))
158 186
159 187 pull_request_id = pull_request.pull_request_id
160 188
161 189 # Show initial version of PR
162 190 response = self.app.get(
163 route_path('pullrequest_show',
164 repo_name=target_repo_name,
165 pull_request_id=pull_request_id))
191 route_path(
192 "pullrequest_show",
193 repo_name=target_repo_name,
194 pull_request_id=pull_request_id,
195 )
196 )
166 197
167 response.mustcontain('commit-1')
168 response.mustcontain(no=['commit-2'])
169 response.mustcontain(no=['commit-3'])
170 response.mustcontain(no=['commit-4'])
198 response.mustcontain("commit-1")
199 response.mustcontain(no=["commit-2"])
200 response.mustcontain(no=["commit-3"])
201 response.mustcontain(no=["commit-4"])
171 202
172 203 response.mustcontain('cb-addition"></span><span>LINE2</span>')
173 response.mustcontain(no=['LINE3'])
174 response.mustcontain(no=['LINE4'])
175 response.mustcontain(no=['LINE5'])
204 response.mustcontain(no=["LINE3"])
205 response.mustcontain(no=["LINE4"])
206 response.mustcontain(no=["LINE5"])
176 207
177 208 # update PR #1
178 209 source_repo = Repository.get_by_repo_name(source_repo_name)
179 backend.pull_heads(source_repo, heads=['commit-2'])
210 backend.pull_heads(source_repo, heads=["commit-2"])
180 211 response = self.app.post(
181 route_path('pullrequest_update',
182 repo_name=target_repo_name, pull_request_id=pull_request_id),
183 params={'update_commits': 'true', 'csrf_token': csrf_token})
212 route_path(
213 "pullrequest_update",
214 repo_name=target_repo_name,
215 pull_request_id=pull_request_id,
216 ),
217 params={"update_commits": "true", "csrf_token": csrf_token},
218 )
184 219
185 220 # update PR #2
186 221 source_repo = Repository.get_by_repo_name(source_repo_name)
187 backend.pull_heads(source_repo, heads=['commit-3'])
222 backend.pull_heads(source_repo, heads=["commit-3"])
188 223 response = self.app.post(
189 route_path('pullrequest_update',
190 repo_name=target_repo_name, pull_request_id=pull_request_id),
191 params={'update_commits': 'true', 'csrf_token': csrf_token})
224 route_path(
225 "pullrequest_update",
226 repo_name=target_repo_name,
227 pull_request_id=pull_request_id,
228 ),
229 params={"update_commits": "true", "csrf_token": csrf_token},
230 )
192 231
193 232 # update PR #3
194 233 source_repo = Repository.get_by_repo_name(source_repo_name)
195 backend.pull_heads(source_repo, heads=['commit-4'])
234 backend.pull_heads(source_repo, heads=["commit-4"])
196 235 response = self.app.post(
197 route_path('pullrequest_update',
198 repo_name=target_repo_name, pull_request_id=pull_request_id),
199 params={'update_commits': 'true', 'csrf_token': csrf_token})
236 route_path(
237 "pullrequest_update",
238 repo_name=target_repo_name,
239 pull_request_id=pull_request_id,
240 ),
241 params={"update_commits": "true", "csrf_token": csrf_token},
242 )
200 243
201 244 # Show final version !
202 245 response = self.app.get(
203 route_path('pullrequest_show',
204 repo_name=target_repo_name,
205 pull_request_id=pull_request_id))
246 route_path(
247 "pullrequest_show",
248 repo_name=target_repo_name,
249 pull_request_id=pull_request_id,
250 )
251 )
206 252
207 253 # 3 updates, and the latest == 4
208 response.mustcontain('4 versions available for this pull request')
209 response.mustcontain(no=['rhodecode diff rendering error'])
254 response.mustcontain("4 versions available for this pull request")
255 response.mustcontain(no=["rhodecode diff rendering error"])
210 256
211 257 # initial show must have 3 commits, and 3 adds
212 response.mustcontain('commit-1')
213 response.mustcontain('commit-2')
214 response.mustcontain('commit-3')
215 response.mustcontain('commit-4')
258 response.mustcontain("commit-1")
259 response.mustcontain("commit-2")
260 response.mustcontain("commit-3")
261 response.mustcontain("commit-4")
216 262
217 263 response.mustcontain('cb-addition"></span><span>LINE2</span>')
218 264 response.mustcontain('cb-addition"></span><span>LINE3</span>')
219 265 response.mustcontain('cb-addition"></span><span>LINE4</span>')
220 266 response.mustcontain('cb-addition"></span><span>LINE5</span>')
221 267
222 268 # fetch versions
223 269 pr = PullRequest.get(pull_request_id)
224 270 versions = [x.pull_request_version_id for x in pr.versions.all()]
225 271 assert len(versions) == 3
226 272
227 273 # show v1,v2,v3,v4
228 274 def cb_line(text):
229 275 return 'cb-addition"></span><span>{}</span>'.format(text)
230 276
231 277 def cb_context(text):
232 return '<span class="cb-code"><span class="cb-action cb-context">' \
233 '</span><span>{}</span></span>'.format(text)
278 return (
279 '<span class="cb-code"><span class="cb-action cb-context">'
280 "</span><span>{}</span></span>".format(text)
281 )
234 282
235 283 commit_tests = {
236 284 # in response, not in response
237 1: (['commit-1'], ['commit-2', 'commit-3', 'commit-4']),
238 2: (['commit-1', 'commit-2'], ['commit-3', 'commit-4']),
239 3: (['commit-1', 'commit-2', 'commit-3'], ['commit-4']),
240 4: (['commit-1', 'commit-2', 'commit-3', 'commit-4'], []),
285 1: (["commit-1"], ["commit-2", "commit-3", "commit-4"]),
286 2: (["commit-1", "commit-2"], ["commit-3", "commit-4"]),
287 3: (["commit-1", "commit-2", "commit-3"], ["commit-4"]),
288 4: (["commit-1", "commit-2", "commit-3", "commit-4"], []),
241 289 }
242 290 diff_tests = {
243 1: (['LINE2'], ['LINE3', 'LINE4', 'LINE5']),
244 2: (['LINE2', 'LINE3'], ['LINE4', 'LINE5']),
245 3: (['LINE2', 'LINE3', 'LINE4'], ['LINE5']),
246 4: (['LINE2', 'LINE3', 'LINE4', 'LINE5'], []),
291 1: (["LINE2"], ["LINE3", "LINE4", "LINE5"]),
292 2: (["LINE2", "LINE3"], ["LINE4", "LINE5"]),
293 3: (["LINE2", "LINE3", "LINE4"], ["LINE5"]),
294 4: (["LINE2", "LINE3", "LINE4", "LINE5"], []),
247 295 }
248 296 for idx, ver in enumerate(versions, 1):
297 response = self.app.get(
298 route_path(
299 "pullrequest_show",
300 repo_name=target_repo_name,
301 pull_request_id=pull_request_id,
302 params={"version": ver},
303 )
304 )
249 305
250 response = self.app.get(
251 route_path('pullrequest_show',
252 repo_name=target_repo_name,
253 pull_request_id=pull_request_id,
254 params={'version': ver}))
255
256 response.mustcontain(no=['rhodecode diff rendering error'])
257 response.mustcontain('Showing changes at v{}'.format(idx))
306 response.mustcontain(no=["rhodecode diff rendering error"])
307 response.mustcontain("Showing changes at v{}".format(idx))
258 308
259 309 yes, no = commit_tests[idx]
260 310 for y in yes:
261 311 response.mustcontain(y)
262 312 for n in no:
263 313 response.mustcontain(no=n)
264 314
265 315 yes, no = diff_tests[idx]
266 316 for y in yes:
267 317 response.mustcontain(cb_line(y))
268 318 for n in no:
269 319 response.mustcontain(no=n)
270 320
271 321 # show diff between versions
272 322 diff_compare_tests = {
273 1: (['LINE3'], ['LINE1', 'LINE2']),
274 2: (['LINE3', 'LINE4'], ['LINE1', 'LINE2']),
275 3: (['LINE3', 'LINE4', 'LINE5'], ['LINE1', 'LINE2']),
323 1: (["LINE3"], ["LINE1", "LINE2"]),
324 2: (["LINE3", "LINE4"], ["LINE1", "LINE2"]),
325 3: (["LINE3", "LINE4", "LINE5"], ["LINE1", "LINE2"]),
276 326 }
277 327 for idx, ver in enumerate(versions, 1):
278 328 adds, context = diff_compare_tests[idx]
279 329
280 to_ver = ver+1
330 to_ver = ver + 1
281 331 if idx == 3:
282 to_ver = 'latest'
332 to_ver = "latest"
283 333
284 334 response = self.app.get(
285 route_path('pullrequest_show',
286 repo_name=target_repo_name,
287 pull_request_id=pull_request_id,
288 params={'from_version': versions[0], 'version': to_ver}))
335 route_path(
336 "pullrequest_show",
337 repo_name=target_repo_name,
338 pull_request_id=pull_request_id,
339 params={"from_version": versions[0], "version": to_ver},
340 )
341 )
289 342
290 response.mustcontain(no=['rhodecode diff rendering error'])
343 response.mustcontain(no=["rhodecode diff rendering error"])
291 344
292 345 for a in adds:
293 346 response.mustcontain(cb_line(a))
294 347 for c in context:
295 348 response.mustcontain(cb_context(c))
296 349
297 350 # test version v2 -> v3
298 351 response = self.app.get(
299 route_path('pullrequest_show',
300 repo_name=target_repo_name,
301 pull_request_id=pull_request_id,
302 params={'from_version': versions[1], 'version': versions[2]}))
352 route_path(
353 "pullrequest_show",
354 repo_name=target_repo_name,
355 pull_request_id=pull_request_id,
356 params={"from_version": versions[1], "version": versions[2]},
357 )
358 )
303 359
304 response.mustcontain(cb_context('LINE1'))
305 response.mustcontain(cb_context('LINE2'))
306 response.mustcontain(cb_context('LINE3'))
307 response.mustcontain(cb_line('LINE4'))
360 response.mustcontain(cb_context("LINE1"))
361 response.mustcontain(cb_context("LINE2"))
362 response.mustcontain(cb_context("LINE3"))
363 response.mustcontain(cb_line("LINE4"))
308 364
309 365 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
310 366 # Logout
311 367 response = self.app.post(
312 h.route_path('logout'),
313 params={'csrf_token': csrf_token})
368 h.route_path("logout"), params={"csrf_token": csrf_token}
369 )
314 370 # Login as regular user
315 response = self.app.post(h.route_path('login'),
316 {'username': TEST_USER_REGULAR_LOGIN,
317 'password': 'test12'})
371 response = self.app.post(
372 h.route_path("login"),
373 {"username": TEST_USER_REGULAR_LOGIN, "password": "test12"},
374 )
375
376 pull_request = pr_util.create_pull_request(author=TEST_USER_REGULAR_LOGIN)
318 377
319 pull_request = pr_util.create_pull_request(
320 author=TEST_USER_REGULAR_LOGIN)
378 response = self.app.get(
379 route_path(
380 "pullrequest_show",
381 repo_name=pull_request.target_repo.scm_instance().name,
382 pull_request_id=pull_request.pull_request_id,
383 )
384 )
321 385
322 response = self.app.get(route_path(
323 'pullrequest_show',
324 repo_name=pull_request.target_repo.scm_instance().name,
325 pull_request_id=pull_request.pull_request_id))
326
327 response.mustcontain('Server-side pull request merging is disabled.')
386 response.mustcontain("Server-side pull request merging is disabled.")
328 387
329 388 assert_response = response.assert_response()
330 389 # for regular user without a merge permissions, we don't see it
331 assert_response.no_element_exists('#close-pull-request-action')
390 assert_response.no_element_exists("#close-pull-request-action")
332 391
333 392 user_util.grant_user_permission_to_repo(
334 393 pull_request.target_repo,
335 394 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
336 'repository.write')
337 response = self.app.get(route_path(
338 'pullrequest_show',
339 repo_name=pull_request.target_repo.scm_instance().name,
340 pull_request_id=pull_request.pull_request_id))
395 "repository.write",
396 )
397 response = self.app.get(
398 route_path(
399 "pullrequest_show",
400 repo_name=pull_request.target_repo.scm_instance().name,
401 pull_request_id=pull_request.pull_request_id,
402 )
403 )
341 404
342 response.mustcontain('Server-side pull request merging is disabled.')
405 response.mustcontain("Server-side pull request merging is disabled.")
343 406
344 407 assert_response = response.assert_response()
345 408 # now regular user has a merge permissions, we have CLOSE button
346 assert_response.one_element_exists('#close-pull-request-action')
409 assert_response.one_element_exists("#close-pull-request-action")
347 410
348 411 def test_show_invalid_commit_id(self, pr_util):
349 412 # Simulating invalid revisions which will cause a lookup error
350 413 pull_request = pr_util.create_pull_request()
351 pull_request.revisions = ['invalid']
414 pull_request.revisions = ["invalid"]
352 415 Session().add(pull_request)
353 416 Session().commit()
354 417
355 response = self.app.get(route_path(
356 'pullrequest_show',
357 repo_name=pull_request.target_repo.scm_instance().name,
358 pull_request_id=pull_request.pull_request_id))
418 response = self.app.get(
419 route_path(
420 "pullrequest_show",
421 repo_name=pull_request.target_repo.scm_instance().name,
422 pull_request_id=pull_request.pull_request_id,
423 )
424 )
359 425
360 426 for commit_id in pull_request.revisions:
361 427 response.mustcontain(commit_id)
362 428
363 429 def test_show_invalid_source_reference(self, pr_util):
364 430 pull_request = pr_util.create_pull_request()
365 pull_request.source_ref = 'branch:b:invalid'
431 pull_request.source_ref = "branch:b:invalid"
366 432 Session().add(pull_request)
367 433 Session().commit()
368 434
369 self.app.get(route_path(
370 'pullrequest_show',
371 repo_name=pull_request.target_repo.scm_instance().name,
372 pull_request_id=pull_request.pull_request_id))
435 self.app.get(
436 route_path(
437 "pullrequest_show",
438 repo_name=pull_request.target_repo.scm_instance().name,
439 pull_request_id=pull_request.pull_request_id,
440 )
441 )
373 442
374 443 def test_edit_title_description(self, pr_util, csrf_token):
375 444 pull_request = pr_util.create_pull_request()
376 445 pull_request_id = pull_request.pull_request_id
377 446
378 447 response = self.app.post(
379 route_path('pullrequest_update',
380 repo_name=pull_request.target_repo.repo_name,
381 pull_request_id=pull_request_id),
448 route_path(
449 "pullrequest_update",
450 repo_name=pull_request.target_repo.repo_name,
451 pull_request_id=pull_request_id,
452 ),
382 453 params={
383 'edit_pull_request': 'true',
384 'title': 'New title',
385 'description': 'New description',
386 'csrf_token': csrf_token})
454 "edit_pull_request": "true",
455 "title": "New title",
456 "description": "New description",
457 "csrf_token": csrf_token,
458 },
459 )
387 460
388 461 assert_session_flash(
389 response, 'Pull request title & description updated.',
390 category='success')
462 response, "Pull request title & description updated.", category="success"
463 )
391 464
392 465 pull_request = PullRequest.get(pull_request_id)
393 assert pull_request.title == 'New title'
394 assert pull_request.description == 'New description'
466 assert pull_request.title == "New title"
467 assert pull_request.description == "New description"
395 468
396 469 def test_edit_title_description_special(self, pr_util, csrf_token):
397 470 pull_request = pr_util.create_pull_request()
398 471 pull_request_id = pull_request.pull_request_id
399 472
400 473 response = self.app.post(
401 route_path('pullrequest_update',
402 repo_name=pull_request.target_repo.repo_name,
403 pull_request_id=pull_request_id),
474 route_path(
475 "pullrequest_update",
476 repo_name=pull_request.target_repo.repo_name,
477 pull_request_id=pull_request_id,
478 ),
404 479 params={
405 'edit_pull_request': 'true',
406 'title': 'New title {} {2} {foo}',
407 'description': 'New description',
408 'csrf_token': csrf_token})
480 "edit_pull_request": "true",
481 "title": "New title {} {2} {foo}",
482 "description": "New description",
483 "csrf_token": csrf_token,
484 },
485 )
409 486
410 487 assert_session_flash(
411 response, 'Pull request title & description updated.',
412 category='success')
488 response, "Pull request title & description updated.", category="success"
489 )
413 490
414 491 pull_request = PullRequest.get(pull_request_id)
415 assert pull_request.title_safe == 'New title {{}} {{2}} {{foo}}'
492 assert pull_request.title_safe == "New title {{}} {{2}} {{foo}}"
416 493
417 494 def test_edit_title_description_closed(self, pr_util, csrf_token):
418 495 pull_request = pr_util.create_pull_request()
419 496 pull_request_id = pull_request.pull_request_id
420 497 repo_name = pull_request.target_repo.repo_name
421 498 pr_util.close()
422 499
423 500 response = self.app.post(
424 route_path('pullrequest_update',
425 repo_name=repo_name, pull_request_id=pull_request_id),
501 route_path(
502 "pullrequest_update",
503 repo_name=repo_name,
504 pull_request_id=pull_request_id,
505 ),
426 506 params={
427 'edit_pull_request': 'true',
428 'title': 'New title',
429 'description': 'New description',
430 'csrf_token': csrf_token}, status=200)
507 "edit_pull_request": "true",
508 "title": "New title",
509 "description": "New description",
510 "csrf_token": csrf_token,
511 },
512 status=200,
513 )
431 514 assert_session_flash(
432 response, 'Cannot update closed pull requests.',
433 category='error')
515 response, "Cannot update closed pull requests.", category="error"
516 )
434 517
435 518 def test_update_invalid_source_reference(self, pr_util, csrf_token):
436 519 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
437 520
438 521 pull_request = pr_util.create_pull_request()
439 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
522 pull_request.source_ref = "branch:invalid-branch:invalid-commit-id"
440 523 Session().add(pull_request)
441 524 Session().commit()
442 525
443 526 pull_request_id = pull_request.pull_request_id
444 527
445 528 response = self.app.post(
446 route_path('pullrequest_update',
529 route_path(
530 "pullrequest_update",
447 531 repo_name=pull_request.target_repo.repo_name,
448 pull_request_id=pull_request_id),
449 params={'update_commits': 'true', 'csrf_token': csrf_token})
532 pull_request_id=pull_request_id,
533 ),
534 params={"update_commits": "true", "csrf_token": csrf_token},
535 )
450 536
451 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
452 UpdateFailureReason.MISSING_SOURCE_REF])
453 assert_session_flash(response, expected_msg, category='error')
537 expected_msg = str(
538 PullRequestModel.UPDATE_STATUS_MESSAGES[
539 UpdateFailureReason.MISSING_SOURCE_REF
540 ]
541 )
542 assert_session_flash(response, expected_msg, category="error")
454 543
455 544 def test_missing_target_reference(self, pr_util, csrf_token):
456 545 from rhodecode.lib.vcs.backends.base import MergeFailureReason
457 pull_request = pr_util.create_pull_request(
458 approved=True, mergeable=True)
459 unicode_reference = 'branch:invalid-branch:invalid-commit-id'
546
547 pull_request = pr_util.create_pull_request(approved=True, mergeable=True)
548 unicode_reference = "branch:invalid-branch:invalid-commit-id"
460 549 pull_request.target_ref = unicode_reference
461 550 Session().add(pull_request)
462 551 Session().commit()
463 552
464 553 pull_request_id = pull_request.pull_request_id
465 554 pull_request_url = route_path(
466 'pullrequest_show',
555 "pullrequest_show",
467 556 repo_name=pull_request.target_repo.repo_name,
468 pull_request_id=pull_request_id)
557 pull_request_id=pull_request_id,
558 )
469 559
470 560 response = self.app.get(pull_request_url)
471 target_ref_id = 'invalid-branch'
561 # target_ref_id = "invalid-branch"
562
472 563 merge_resp = MergeResponse(
473 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
474 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
564 True,
565 True,
566 Reference("commit", "STUB_COMMIT_ID", "STUB_COMMIT_ID"),
567 MergeFailureReason.MISSING_TARGET_REF,
568 metadata={
569 "target_ref": PullRequest.unicode_to_reference(unicode_reference)
570 },
571 )
475 572 response.assert_response().element_contains(
476 'div[data-role="merge-message"]', merge_resp.merge_status_message)
573 'div[data-role="merge-message"]', merge_resp.merge_status_message
574 )
477 575
478 576 def test_comment_and_close_pull_request_custom_message_approved(
479 self, pr_util, csrf_token, xhr_header):
480
577 self, pr_util, csrf_token, xhr_header
578 ):
481 579 pull_request = pr_util.create_pull_request(approved=True)
482 580 pull_request_id = pull_request.pull_request_id
483 581 author = pull_request.user_id
484 582 repo = pull_request.target_repo.repo_id
485 583
486 584 self.app.post(
487 route_path('pullrequest_comment_create',
488 repo_name=pull_request.target_repo.scm_instance().name,
489 pull_request_id=pull_request_id),
585 route_path(
586 "pullrequest_comment_create",
587 repo_name=pull_request.target_repo.scm_instance().name,
588 pull_request_id=pull_request_id,
589 ),
490 590 params={
491 'close_pull_request': '1',
492 'text': 'Closing a PR',
493 'csrf_token': csrf_token},
494 extra_environ=xhr_header,)
591 "close_pull_request": "1",
592 "text": "Closing a PR",
593 "csrf_token": csrf_token,
594 },
595 extra_environ=xhr_header,
596 )
495 597
496 journal = UserLog.query()\
497 .filter(UserLog.user_id == author)\
498 .filter(UserLog.repository_id == repo) \
499 .order_by(UserLog.user_log_id.asc()) \
598 journal = (
599 UserLog.query()
600 .filter(UserLog.user_id == author)
601 .filter(UserLog.repository_id == repo)
602 .order_by(UserLog.user_log_id.asc())
500 603 .all()
501 assert journal[-1].action == 'repo.pull_request.close'
604 )
605 assert journal[-1].action == "repo.pull_request.close"
502 606
503 607 pull_request = PullRequest.get(pull_request_id)
504 608 assert pull_request.is_closed()
505 609
506 610 status = ChangesetStatusModel().get_status(
507 pull_request.source_repo, pull_request=pull_request)
611 pull_request.source_repo, pull_request=pull_request
612 )
508 613 assert status == ChangesetStatus.STATUS_APPROVED
509 comments = ChangesetComment().query() \
510 .filter(ChangesetComment.pull_request == pull_request) \
511 .order_by(ChangesetComment.comment_id.asc())\
614 comments = (
615 ChangesetComment()
616 .query()
617 .filter(ChangesetComment.pull_request == pull_request)
618 .order_by(ChangesetComment.comment_id.asc())
512 619 .all()
513 assert comments[-1].text == 'Closing a PR'
620 )
621 assert comments[-1].text == "Closing a PR"
514 622
515 623 def test_comment_force_close_pull_request_rejected(
516 self, pr_util, csrf_token, xhr_header):
624 self, pr_util, csrf_token, xhr_header
625 ):
517 626 pull_request = pr_util.create_pull_request()
518 627 pull_request_id = pull_request.pull_request_id
519 628 PullRequestModel().update_reviewers(
520 pull_request_id, [
521 (1, ['reason'], False, 'reviewer', []),
522 (2, ['reason2'], False, 'reviewer', [])],
523 pull_request.author)
629 pull_request_id,
630 [
631 (1, ["reason"], False, "reviewer", []),
632 (2, ["reason2"], False, "reviewer", []),
633 ],
634 pull_request.author,
635 )
524 636 author = pull_request.user_id
525 637 repo = pull_request.target_repo.repo_id
526 638
527 639 self.app.post(
528 route_path('pullrequest_comment_create',
640 route_path(
641 "pullrequest_comment_create",
529 642 repo_name=pull_request.target_repo.scm_instance().name,
530 pull_request_id=pull_request_id),
531 params={
532 'close_pull_request': '1',
533 'csrf_token': csrf_token},
534 extra_environ=xhr_header)
643 pull_request_id=pull_request_id,
644 ),
645 params={"close_pull_request": "1", "csrf_token": csrf_token},
646 extra_environ=xhr_header,
647 )
535 648
536 649 pull_request = PullRequest.get(pull_request_id)
537 650
538 journal = UserLog.query()\
539 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
540 .order_by(UserLog.user_log_id.asc()) \
651 journal = (
652 UserLog.query()
653 .filter(UserLog.user_id == author, UserLog.repository_id == repo)
654 .order_by(UserLog.user_log_id.asc())
541 655 .all()
542 assert journal[-1].action == 'repo.pull_request.close'
656 )
657 assert journal[-1].action == "repo.pull_request.close"
543 658
544 659 # check only the latest status, not the review status
545 660 status = ChangesetStatusModel().get_status(
546 pull_request.source_repo, pull_request=pull_request)
661 pull_request.source_repo, pull_request=pull_request
662 )
547 663 assert status == ChangesetStatus.STATUS_REJECTED
548 664
549 def test_comment_and_close_pull_request(
550 self, pr_util, csrf_token, xhr_header):
665 def test_comment_and_close_pull_request(self, pr_util, csrf_token, xhr_header):
551 666 pull_request = pr_util.create_pull_request()
552 667 pull_request_id = pull_request.pull_request_id
553 668
554 669 response = self.app.post(
555 route_path('pullrequest_comment_create',
556 repo_name=pull_request.target_repo.scm_instance().name,
557 pull_request_id=pull_request.pull_request_id),
558 params={
559 'close_pull_request': 'true',
560 'csrf_token': csrf_token},
561 extra_environ=xhr_header)
670 route_path(
671 "pullrequest_comment_create",
672 repo_name=pull_request.target_repo.scm_instance().name,
673 pull_request_id=pull_request.pull_request_id,
674 ),
675 params={"close_pull_request": "true", "csrf_token": csrf_token},
676 extra_environ=xhr_header,
677 )
562 678
563 679 assert response.json
564 680
565 681 pull_request = PullRequest.get(pull_request_id)
566 682 assert pull_request.is_closed()
567 683
568 684 # check only the latest status, not the review status
569 685 status = ChangesetStatusModel().get_status(
570 pull_request.source_repo, pull_request=pull_request)
686 pull_request.source_repo, pull_request=pull_request
687 )
571 688 assert status == ChangesetStatus.STATUS_REJECTED
572 689
573 690 def test_comment_and_close_pull_request_try_edit_comment(
574 self, pr_util, csrf_token, xhr_header
691 self, pr_util, csrf_token, xhr_header
575 692 ):
576 693 pull_request = pr_util.create_pull_request()
577 694 pull_request_id = pull_request.pull_request_id
578 695 target_scm = pull_request.target_repo.scm_instance()
579 696 target_scm_name = target_scm.name
580 697
581 698 response = self.app.post(
582 699 route_path(
583 'pullrequest_comment_create',
700 "pullrequest_comment_create",
584 701 repo_name=target_scm_name,
585 702 pull_request_id=pull_request_id,
586 703 ),
587 704 params={
588 'close_pull_request': 'true',
589 'csrf_token': csrf_token,
705 "close_pull_request": "true",
706 "csrf_token": csrf_token,
590 707 },
591 extra_environ=xhr_header)
708 extra_environ=xhr_header,
709 )
592 710
593 711 assert response.json
594 712
595 713 pull_request = PullRequest.get(pull_request_id)
596 714 target_scm = pull_request.target_repo.scm_instance()
597 715 target_scm_name = target_scm.name
598 716 assert pull_request.is_closed()
599 717
600 718 # check only the latest status, not the review status
601 719 status = ChangesetStatusModel().get_status(
602 pull_request.source_repo, pull_request=pull_request)
720 pull_request.source_repo, pull_request=pull_request
721 )
603 722 assert status == ChangesetStatus.STATUS_REJECTED
604 723
605 724 for comment_id in response.json.keys():
606 test_text = 'test'
725 test_text = "test"
607 726 response = self.app.post(
608 727 route_path(
609 'pullrequest_comment_edit',
728 "pullrequest_comment_edit",
610 729 repo_name=target_scm_name,
611 730 pull_request_id=pull_request_id,
612 731 comment_id=comment_id,
613 732 ),
614 733 extra_environ=xhr_header,
615 734 params={
616 'csrf_token': csrf_token,
617 'text': test_text,
735 "csrf_token": csrf_token,
736 "text": test_text,
618 737 },
619 738 status=403,
620 739 )
621 740 assert response.status_int == 403
622 741
623 742 def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
624 743 pull_request = pr_util.create_pull_request()
625 744 target_scm = pull_request.target_repo.scm_instance()
626 745 target_scm_name = target_scm.name
627 746
628 747 response = self.app.post(
629 748 route_path(
630 'pullrequest_comment_create',
749 "pullrequest_comment_create",
631 750 repo_name=target_scm_name,
632 pull_request_id=pull_request.pull_request_id),
751 pull_request_id=pull_request.pull_request_id,
752 ),
633 753 params={
634 'csrf_token': csrf_token,
635 'text': 'init',
754 "csrf_token": csrf_token,
755 "text": "init",
636 756 },
637 757 extra_environ=xhr_header,
638 758 )
639 759 assert response.json
640 760
641 761 for comment_id in response.json.keys():
642 762 assert comment_id
643 test_text = 'test'
763 test_text = "test"
644 764 self.app.post(
645 765 route_path(
646 'pullrequest_comment_edit',
766 "pullrequest_comment_edit",
647 767 repo_name=target_scm_name,
648 768 pull_request_id=pull_request.pull_request_id,
649 769 comment_id=comment_id,
650 770 ),
651 771 extra_environ=xhr_header,
652 772 params={
653 'csrf_token': csrf_token,
654 'text': test_text,
655 'version': '0',
773 "csrf_token": csrf_token,
774 "text": test_text,
775 "version": "0",
656 776 },
657
658 777 )
659 text_form_db = ChangesetComment.query().filter(
660 ChangesetComment.comment_id == comment_id).first().text
778 text_form_db = (
779 ChangesetComment.query()
780 .filter(ChangesetComment.comment_id == comment_id)
781 .first()
782 .text
783 )
661 784 assert test_text == text_form_db
662 785
663 786 def test_comment_and_comment_edit_special(self, pr_util, csrf_token, xhr_header):
664 787 pull_request = pr_util.create_pull_request()
665 788 target_scm = pull_request.target_repo.scm_instance()
666 789 target_scm_name = target_scm.name
667 790
668 791 response = self.app.post(
669 792 route_path(
670 'pullrequest_comment_create',
793 "pullrequest_comment_create",
671 794 repo_name=target_scm_name,
672 pull_request_id=pull_request.pull_request_id),
795 pull_request_id=pull_request.pull_request_id,
796 ),
673 797 params={
674 'csrf_token': csrf_token,
675 'text': 'init',
798 "csrf_token": csrf_token,
799 "text": "init",
676 800 },
677 801 extra_environ=xhr_header,
678 802 )
679 803 assert response.json
680 804
681 805 for comment_id in response.json.keys():
682 test_text = 'init'
806 test_text = "init"
683 807 response = self.app.post(
684 808 route_path(
685 'pullrequest_comment_edit',
809 "pullrequest_comment_edit",
686 810 repo_name=target_scm_name,
687 811 pull_request_id=pull_request.pull_request_id,
688 812 comment_id=comment_id,
689 813 ),
690 814 extra_environ=xhr_header,
691 815 params={
692 'csrf_token': csrf_token,
693 'text': test_text,
694 'version': '0',
816 "csrf_token": csrf_token,
817 "text": test_text,
818 "version": "0",
695 819 },
696 820 status=404,
697
698 821 )
699 822 assert response.status_int == 404
700 823
701 824 def test_comment_and_try_edit_already_edited(self, pr_util, csrf_token, xhr_header):
702 825 pull_request = pr_util.create_pull_request()
703 826 target_scm = pull_request.target_repo.scm_instance()
704 827 target_scm_name = target_scm.name
705 828
706 829 response = self.app.post(
707 830 route_path(
708 'pullrequest_comment_create',
831 "pullrequest_comment_create",
709 832 repo_name=target_scm_name,
710 pull_request_id=pull_request.pull_request_id),
833 pull_request_id=pull_request.pull_request_id,
834 ),
711 835 params={
712 'csrf_token': csrf_token,
713 'text': 'init',
836 "csrf_token": csrf_token,
837 "text": "init",
714 838 },
715 839 extra_environ=xhr_header,
716 840 )
717 841 assert response.json
718 842 for comment_id in response.json.keys():
719 test_text = 'test'
843 test_text = "test"
720 844 self.app.post(
721 845 route_path(
722 'pullrequest_comment_edit',
846 "pullrequest_comment_edit",
723 847 repo_name=target_scm_name,
724 848 pull_request_id=pull_request.pull_request_id,
725 849 comment_id=comment_id,
726 850 ),
727 851 extra_environ=xhr_header,
728 852 params={
729 'csrf_token': csrf_token,
730 'text': test_text,
731 'version': '0',
853 "csrf_token": csrf_token,
854 "text": test_text,
855 "version": "0",
732 856 },
733
734 857 )
735 test_text_v2 = 'test_v2'
858 test_text_v2 = "test_v2"
736 859 response = self.app.post(
737 860 route_path(
738 'pullrequest_comment_edit',
861 "pullrequest_comment_edit",
739 862 repo_name=target_scm_name,
740 863 pull_request_id=pull_request.pull_request_id,
741 864 comment_id=comment_id,
742 865 ),
743 866 extra_environ=xhr_header,
744 867 params={
745 'csrf_token': csrf_token,
746 'text': test_text_v2,
747 'version': '0',
868 "csrf_token": csrf_token,
869 "text": test_text_v2,
870 "version": "0",
748 871 },
749 872 status=409,
750 873 )
751 874 assert response.status_int == 409
752 875
753 text_form_db = ChangesetComment.query().filter(
754 ChangesetComment.comment_id == comment_id).first().text
876 text_form_db = (
877 ChangesetComment.query()
878 .filter(ChangesetComment.comment_id == comment_id)
879 .first()
880 .text
881 )
755 882
756 883 assert test_text == text_form_db
757 884 assert test_text_v2 != text_form_db
758 885
759 886 def test_comment_and_comment_edit_permissions_forbidden(
760 self, autologin_regular_user, user_regular, user_admin, pr_util,
761 csrf_token, xhr_header):
887 self,
888 autologin_regular_user,
889 user_regular,
890 user_admin,
891 pr_util,
892 csrf_token,
893 xhr_header,
894 ):
762 895 pull_request = pr_util.create_pull_request(
763 author=user_admin.username, enable_notifications=False)
896 author=user_admin.username, enable_notifications=False
897 )
764 898 comment = CommentsModel().create(
765 text='test',
899 text="test",
766 900 repo=pull_request.target_repo.scm_instance().name,
767 901 user=user_admin,
768 902 pull_request=pull_request,
769 903 )
770 904 response = self.app.post(
771 905 route_path(
772 'pullrequest_comment_edit',
906 "pullrequest_comment_edit",
773 907 repo_name=pull_request.target_repo.scm_instance().name,
774 908 pull_request_id=pull_request.pull_request_id,
775 909 comment_id=comment.comment_id,
776 910 ),
777 911 extra_environ=xhr_header,
778 912 params={
779 'csrf_token': csrf_token,
780 'text': 'test_text',
913 "csrf_token": csrf_token,
914 "text": "test_text",
781 915 },
782 916 status=403,
783 917 )
784 918 assert response.status_int == 403
785 919
786 920 def test_create_pull_request(self, backend, csrf_token):
787 921 commits = [
788 {'message': 'ancestor'},
789 {'message': 'change'},
790 {'message': 'change2'},
922 {"message": "ancestor"},
923 {"message": "change"},
924 {"message": "change2"},
791 925 ]
792 926 commit_ids = backend.create_master_repo(commits)
793 target = backend.create_repo(heads=['ancestor'])
794 source = backend.create_repo(heads=['change2'])
927 target = backend.create_repo(heads=["ancestor"])
928 source = backend.create_repo(heads=["change2"])
795 929
796 930 response = self.app.post(
797 route_path('pullrequest_create', repo_name=source.repo_name),
931 route_path("pullrequest_create", repo_name=source.repo_name),
798 932 [
799 ('source_repo', source.repo_name),
800 ('source_ref', 'branch:default:' + commit_ids['change2']),
801 ('target_repo', target.repo_name),
802 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
803 ('common_ancestor', commit_ids['ancestor']),
804 ('pullrequest_title', 'Title'),
805 ('pullrequest_desc', 'Description'),
806 ('description_renderer', 'markdown'),
807 ('__start__', 'review_members:sequence'),
808 ('__start__', 'reviewer:mapping'),
809 ('user_id', '1'),
810 ('__start__', 'reasons:sequence'),
811 ('reason', 'Some reason'),
812 ('__end__', 'reasons:sequence'),
813 ('__start__', 'rules:sequence'),
814 ('__end__', 'rules:sequence'),
815 ('mandatory', 'False'),
816 ('__end__', 'reviewer:mapping'),
817 ('__end__', 'review_members:sequence'),
818 ('__start__', 'revisions:sequence'),
819 ('revisions', commit_ids['change']),
820 ('revisions', commit_ids['change2']),
821 ('__end__', 'revisions:sequence'),
822 ('user', ''),
823 ('csrf_token', csrf_token),
933 ("source_repo", source.repo_name),
934 ("source_ref", "branch:default:" + commit_ids["change2"]),
935 ("target_repo", target.repo_name),
936 ("target_ref", "branch:default:" + commit_ids["ancestor"]),
937 ("common_ancestor", commit_ids["ancestor"]),
938 ("pullrequest_title", "Title"),
939 ("pullrequest_desc", "Description"),
940 ("description_renderer", "markdown"),
941 ("__start__", "review_members:sequence"),
942 ("__start__", "reviewer:mapping"),
943 ("user_id", "1"),
944 ("__start__", "reasons:sequence"),
945 ("reason", "Some reason"),
946 ("__end__", "reasons:sequence"),
947 ("__start__", "rules:sequence"),
948 ("__end__", "rules:sequence"),
949 ("mandatory", "False"),
950 ("__end__", "reviewer:mapping"),
951 ("__end__", "review_members:sequence"),
952 ("__start__", "revisions:sequence"),
953 ("revisions", commit_ids["change"]),
954 ("revisions", commit_ids["change2"]),
955 ("__end__", "revisions:sequence"),
956 ("user", ""),
957 ("csrf_token", csrf_token),
824 958 ],
825 status=302)
959 status=302,
960 )
826 961
827 location = response.headers['Location']
828 pull_request_id = location.rsplit('/', 1)[1]
829 assert pull_request_id != 'new'
962 location = response.headers["Location"]
963 pull_request_id = location.rsplit("/", 1)[1]
964 assert pull_request_id != "new"
830 965 pull_request = PullRequest.get(int(pull_request_id))
831 966
832 967 # check that we have now both revisions
833 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
834 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
835 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
968 assert pull_request.revisions == [commit_ids["change2"], commit_ids["change"]]
969 assert pull_request.source_ref == "branch:default:" + commit_ids["change2"]
970 expected_target_ref = "branch:default:" + commit_ids["ancestor"]
836 971 assert pull_request.target_ref == expected_target_ref
837 972
838 973 def test_reviewer_notifications(self, backend, csrf_token):
839 # We have to use the app.post for this test so it will create the
974 # We have to use the app.post for this test, so it will create the
840 975 # notifications properly with the new PR
841 976 commits = [
842 {'message': 'ancestor',
843 'added': [FileNode(b'file_A', content=b'content_of_ancestor')]},
844 {'message': 'change',
845 'added': [FileNode(b'file_a', content=b'content_of_change')]},
846 {'message': 'change-child'},
847 {'message': 'ancestor-child', 'parents': ['ancestor'],
848 'added': [ FileNode(b'file_B', content=b'content_of_ancestor_child')]},
849 {'message': 'ancestor-child-2'},
977 {
978 "message": "ancestor",
979 "added": [FileNode(b"file_A", content=b"content_of_ancestor")],
980 },
981 {
982 "message": "change",
983 "added": [FileNode(b"file_a", content=b"content_of_change")],
984 },
985 {"message": "change-child"},
986 {
987 "message": "ancestor-child",
988 "parents": ["ancestor"],
989 "branch": "feature",
990 "added": [FileNode(b"file_c", content=b"content_of_ancestor_child")],
991 },
992 {"message": "ancestor-child-2", "branch": "feature"},
850 993 ]
851 994 commit_ids = backend.create_master_repo(commits)
852 target = backend.create_repo(heads=['ancestor-child'])
853 source = backend.create_repo(heads=['change'])
995 target = backend.create_repo(heads=["ancestor-child"])
996 source = backend.create_repo(heads=["change"])
854 997
855 998 response = self.app.post(
856 route_path('pullrequest_create', repo_name=source.repo_name),
999 route_path("pullrequest_create", repo_name=source.repo_name),
857 1000 [
858 ('source_repo', source.repo_name),
859 ('source_ref', 'branch:default:' + commit_ids['change']),
860 ('target_repo', target.repo_name),
861 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
862 ('common_ancestor', commit_ids['ancestor']),
863 ('pullrequest_title', 'Title'),
864 ('pullrequest_desc', 'Description'),
865 ('description_renderer', 'markdown'),
866 ('__start__', 'review_members:sequence'),
867 ('__start__', 'reviewer:mapping'),
868 ('user_id', '2'),
869 ('__start__', 'reasons:sequence'),
870 ('reason', 'Some reason'),
871 ('__end__', 'reasons:sequence'),
872 ('__start__', 'rules:sequence'),
873 ('__end__', 'rules:sequence'),
874 ('mandatory', 'False'),
875 ('__end__', 'reviewer:mapping'),
876 ('__end__', 'review_members:sequence'),
877 ('__start__', 'revisions:sequence'),
878 ('revisions', commit_ids['change']),
879 ('__end__', 'revisions:sequence'),
880 ('user', ''),
881 ('csrf_token', csrf_token),
1001 ("source_repo", source.repo_name),
1002 ("source_ref", "branch:default:" + commit_ids["change"]),
1003 ("target_repo", target.repo_name),
1004 ("target_ref", "branch:default:" + commit_ids["ancestor-child"]),
1005 ("common_ancestor", commit_ids["ancestor"]),
1006 ("pullrequest_title", "Title"),
1007 ("pullrequest_desc", "Description"),
1008 ("description_renderer", "markdown"),
1009 ("__start__", "review_members:sequence"),
1010 ("__start__", "reviewer:mapping"),
1011 ("user_id", "2"),
1012 ("__start__", "reasons:sequence"),
1013 ("reason", "Some reason"),
1014 ("__end__", "reasons:sequence"),
1015 ("__start__", "rules:sequence"),
1016 ("__end__", "rules:sequence"),
1017 ("mandatory", "False"),
1018 ("__end__", "reviewer:mapping"),
1019 ("__end__", "review_members:sequence"),
1020 ("__start__", "revisions:sequence"),
1021 ("revisions", commit_ids["change"]),
1022 ("__end__", "revisions:sequence"),
1023 ("user", ""),
1024 ("csrf_token", csrf_token),
882 1025 ],
883 status=302)
1026 status=302,
1027 )
884 1028
885 location = response.headers['Location']
1029 location = response.headers["Location"]
886 1030
887 pull_request_id = location.rsplit('/', 1)[1]
888 assert pull_request_id != 'new'
1031 pull_request_id = location.rsplit("/", 1)[1]
1032 assert pull_request_id != "new"
889 1033 pull_request = PullRequest.get(int(pull_request_id))
890 1034
891 1035 # Check that a notification was made
892 notifications = Notification.query()\
893 .filter(Notification.created_by == pull_request.author.user_id,
894 Notification.type_ == Notification.TYPE_PULL_REQUEST,
895 Notification.subject.contains(
896 "requested a pull request review. !%s" % pull_request_id))
1036 notifications = Notification.query().filter(
1037 Notification.created_by == pull_request.author.user_id,
1038 Notification.type_ == Notification.TYPE_PULL_REQUEST,
1039 Notification.subject.contains(
1040 "requested a pull request review. !%s" % pull_request_id
1041 ),
1042 )
897 1043 assert len(notifications.all()) == 1
898 1044
899 1045 # Change reviewers and check that a notification was made
900 1046 PullRequestModel().update_reviewers(
901 pull_request.pull_request_id, [
902 (1, [], False, 'reviewer', [])
903 ],
904 pull_request.author)
1047 pull_request.pull_request_id,
1048 [(1, [], False, "reviewer", [])],
1049 pull_request.author,
1050 )
905 1051 assert len(notifications.all()) == 2
906 1052
907 1053 def test_create_pull_request_stores_ancestor_commit_id(self, backend, csrf_token):
908 1054 commits = [
909 {'message': 'ancestor',
910 'added': [FileNode(b'file_A', content=b'content_of_ancestor')]},
911 {'message': 'change',
912 'added': [FileNode(b'file_a', content=b'content_of_change')]},
913 {'message': 'change-child'},
914 {'message': 'ancestor-child', 'parents': ['ancestor'],
915 'added': [
916 FileNode(b'file_B', content=b'content_of_ancestor_child')]},
917 {'message': 'ancestor-child-2'},
1055 {
1056 "message": "ancestor",
1057 "added": [FileNode(b"file_A", content=b"content_of_ancestor")],
1058 },
1059 {
1060 "message": "change",
1061 "added": [FileNode(b"file_a", content=b"content_of_change")],
1062 },
1063 {
1064 "message": "change-child",
1065 "added": [FileNode(b"file_c", content=b"content_of_change_2")],
1066 },
1067 {
1068 "message": "ancestor-child",
1069 "parents": ["ancestor"],
1070 "branch": "feature",
1071 "added": [FileNode(b"file_B", content=b"content_of_ancestor_child")],
1072 },
1073 {"message": "ancestor-child-2", "branch": "feature"},
918 1074 ]
919 1075 commit_ids = backend.create_master_repo(commits)
920 target = backend.create_repo(heads=['ancestor-child'])
921 source = backend.create_repo(heads=['change'])
1076 target = backend.create_repo(heads=["ancestor-child"])
1077 source = backend.create_repo(heads=["change"])
922 1078
923 1079 response = self.app.post(
924 route_path('pullrequest_create', repo_name=source.repo_name),
1080 route_path("pullrequest_create", repo_name=source.repo_name),
925 1081 [
926 ('source_repo', source.repo_name),
927 ('source_ref', 'branch:default:' + commit_ids['change']),
928 ('target_repo', target.repo_name),
929 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
930 ('common_ancestor', commit_ids['ancestor']),
931 ('pullrequest_title', 'Title'),
932 ('pullrequest_desc', 'Description'),
933 ('description_renderer', 'markdown'),
934 ('__start__', 'review_members:sequence'),
935 ('__start__', 'reviewer:mapping'),
936 ('user_id', '1'),
937 ('__start__', 'reasons:sequence'),
938 ('reason', 'Some reason'),
939 ('__end__', 'reasons:sequence'),
940 ('__start__', 'rules:sequence'),
941 ('__end__', 'rules:sequence'),
942 ('mandatory', 'False'),
943 ('__end__', 'reviewer:mapping'),
944 ('__end__', 'review_members:sequence'),
945 ('__start__', 'revisions:sequence'),
946 ('revisions', commit_ids['change']),
947 ('__end__', 'revisions:sequence'),
948 ('user', ''),
949 ('csrf_token', csrf_token),
1082 ("source_repo", source.repo_name),
1083 ("source_ref", "branch:default:" + commit_ids["change"]),
1084 ("target_repo", target.repo_name),
1085 ("target_ref", "branch:default:" + commit_ids["ancestor-child"]),
1086 ("common_ancestor", commit_ids["ancestor"]),
1087 ("pullrequest_title", "Title"),
1088 ("pullrequest_desc", "Description"),
1089 ("description_renderer", "markdown"),
1090 ("__start__", "review_members:sequence"),
1091 ("__start__", "reviewer:mapping"),
1092 ("user_id", "1"),
1093 ("__start__", "reasons:sequence"),
1094 ("reason", "Some reason"),
1095 ("__end__", "reasons:sequence"),
1096 ("__start__", "rules:sequence"),
1097 ("__end__", "rules:sequence"),
1098 ("mandatory", "False"),
1099 ("__end__", "reviewer:mapping"),
1100 ("__end__", "review_members:sequence"),
1101 ("__start__", "revisions:sequence"),
1102 ("revisions", commit_ids["change"]),
1103 ("__end__", "revisions:sequence"),
1104 ("user", ""),
1105 ("csrf_token", csrf_token),
950 1106 ],
951 status=302)
1107 status=302,
1108 )
952 1109
953 location = response.headers['Location']
1110 location = response.headers["Location"]
954 1111
955 pull_request_id = location.rsplit('/', 1)[1]
956 assert pull_request_id != 'new'
1112 pull_request_id = location.rsplit("/", 1)[1]
1113 assert pull_request_id != "new"
957 1114 pull_request = PullRequest.get(int(pull_request_id))
958 1115
959 1116 # target_ref has to point to the ancestor's commit_id in order to
960 1117 # show the correct diff
961 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
1118 expected_target_ref = "branch:default:" + commit_ids["ancestor"]
962 1119 assert pull_request.target_ref == expected_target_ref
963 1120
964 1121 # Check generated diff contents
965 1122 response = response.follow()
966 response.mustcontain(no=['content_of_ancestor'])
967 response.mustcontain(no=['content_of_ancestor-child'])
968 response.mustcontain('content_of_change')
1123 response.mustcontain(no=["content_of_ancestor"])
1124 response.mustcontain(no=["content_of_ancestor-child"])
1125 response.mustcontain("content_of_change")
969 1126
970 1127 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
971 1128 # Clear any previous calls to rcextensions
972 1129 rhodecode.EXTENSIONS.calls.clear()
973 1130
974 pull_request = pr_util.create_pull_request(
975 approved=True, mergeable=True)
1131 pull_request = pr_util.create_pull_request(approved=True, mergeable=True)
976 1132 pull_request_id = pull_request.pull_request_id
977 repo_name = pull_request.target_repo.scm_instance().name,
1133 repo_name = (pull_request.target_repo.scm_instance().name,)
978 1134
979 url = route_path('pullrequest_merge',
980 repo_name=str(repo_name[0]),
981 pull_request_id=pull_request_id)
982 response = self.app.post(url, params={'csrf_token': csrf_token}).follow()
1135 url = route_path(
1136 "pullrequest_merge",
1137 repo_name=str(repo_name[0]),
1138 pull_request_id=pull_request_id,
1139 )
1140 response = self.app.post(url, params={"csrf_token": csrf_token}).follow()
983 1141
984 1142 pull_request = PullRequest.get(pull_request_id)
985 1143
986 1144 assert response.status_int == 200
987 1145 assert pull_request.is_closed()
988 assert_pull_request_status(
989 pull_request, ChangesetStatus.STATUS_APPROVED)
1146 assert_pull_request_status(pull_request, ChangesetStatus.STATUS_APPROVED)
990 1147
991 1148 # Check the relevant log entries were added
992 1149 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
993 1150 actions = [log.action for log in user_logs]
994 1151 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
995 1152 expected_actions = [
996 'repo.pull_request.close',
997 'repo.pull_request.merge',
998 'repo.pull_request.comment.create'
1153 "repo.pull_request.close",
1154 "repo.pull_request.merge",
1155 "repo.pull_request.comment.create",
999 1156 ]
1000 1157 assert actions == expected_actions
1001 1158
1002 1159 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
1003 1160 actions = [log for log in user_logs]
1004 assert actions[-1].action == 'user.push'
1005 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
1161 assert actions[-1].action == "user.push"
1162 assert actions[-1].action_data["commit_ids"] == pr_commit_ids
1006 1163
1007 1164 # Check post_push rcextension was really executed
1008 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
1165 push_calls = rhodecode.EXTENSIONS.calls["_push_hook"]
1009 1166 assert len(push_calls) == 1
1010 1167 unused_last_call_args, last_call_kwargs = push_calls[0]
1011 assert last_call_kwargs['action'] == 'push'
1012 assert last_call_kwargs['commit_ids'] == pr_commit_ids
1168 assert last_call_kwargs["action"] == "push"
1169 assert last_call_kwargs["commit_ids"] == pr_commit_ids
1013 1170
1014 1171 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
1015 1172 pull_request = pr_util.create_pull_request(mergeable=False)
1016 1173 pull_request_id = pull_request.pull_request_id
1017 1174 pull_request = PullRequest.get(pull_request_id)
1018 1175
1019 1176 response = self.app.post(
1020 route_path('pullrequest_merge',
1021 repo_name=pull_request.target_repo.scm_instance().name,
1022 pull_request_id=pull_request.pull_request_id),
1023 params={'csrf_token': csrf_token}).follow()
1177 route_path(
1178 "pullrequest_merge",
1179 repo_name=pull_request.target_repo.scm_instance().name,
1180 pull_request_id=pull_request.pull_request_id,
1181 ),
1182 params={"csrf_token": csrf_token},
1183 ).follow()
1024 1184
1025 1185 assert response.status_int == 200
1026 1186 response.mustcontain(
1027 'Merge is not currently possible because of below failed checks.')
1028 response.mustcontain('Server-side pull request merging is disabled.')
1187 "Merge is not currently possible because of below failed checks."
1188 )
1189 response.mustcontain("Server-side pull request merging is disabled.")
1029 1190
1030 @pytest.mark.skip_backends('svn')
1191 @pytest.mark.skip_backends("svn")
1031 1192 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
1032 1193 pull_request = pr_util.create_pull_request(mergeable=True)
1033 1194 pull_request_id = pull_request.pull_request_id
1034 1195 repo_name = pull_request.target_repo.scm_instance().name
1035 1196
1036 1197 response = self.app.post(
1037 route_path('pullrequest_merge',
1038 repo_name=repo_name, pull_request_id=pull_request_id),
1039 params={'csrf_token': csrf_token}).follow()
1198 route_path(
1199 "pullrequest_merge",
1200 repo_name=repo_name,
1201 pull_request_id=pull_request_id,
1202 ),
1203 params={"csrf_token": csrf_token},
1204 ).follow()
1040 1205
1041 1206 assert response.status_int == 200
1042 1207
1043 1208 response.mustcontain(
1044 'Merge is not currently possible because of below failed checks.')
1045 response.mustcontain('Pull request reviewer approval is pending.')
1209 "Merge is not currently possible because of below failed checks."
1210 )
1211 response.mustcontain("Pull request reviewer approval is pending.")
1046 1212
1047 1213 def test_merge_pull_request_renders_failure_reason(
1048 self, user_regular, csrf_token, pr_util):
1214 self, user_regular, csrf_token, pr_util
1215 ):
1049 1216 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
1050 1217 pull_request_id = pull_request.pull_request_id
1051 1218 repo_name = pull_request.target_repo.scm_instance().name
1052 1219
1053 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
1054 MergeFailureReason.PUSH_FAILED,
1055 metadata={'target': 'shadow repo',
1056 'merge_commit': 'xxx'})
1220 merge_resp = MergeResponse(
1221 True,
1222 False,
1223 Reference("commit", "STUB_COMMIT_ID", "STUB_COMMIT_ID"),
1224 MergeFailureReason.PUSH_FAILED,
1225 metadata={"target": "shadow repo", "merge_commit": "xxx"},
1226 )
1057 1227 model_patcher = mock.patch.multiple(
1058 1228 PullRequestModel,
1059 1229 merge_repo=mock.Mock(return_value=merge_resp),
1060 merge_status=mock.Mock(return_value=(None, True, 'WRONG_MESSAGE')))
1230 merge_status=mock.Mock(return_value=(None, True, "WRONG_MESSAGE")),
1231 )
1061 1232
1062 1233 with model_patcher:
1063 1234 response = self.app.post(
1064 route_path('pullrequest_merge',
1065 repo_name=repo_name,
1066 pull_request_id=pull_request_id),
1067 params={'csrf_token': csrf_token}, status=302)
1235 route_path(
1236 "pullrequest_merge",
1237 repo_name=repo_name,
1238 pull_request_id=pull_request_id,
1239 ),
1240 params={"csrf_token": csrf_token},
1241 status=302,
1242 )
1068 1243
1069 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
1070 metadata={'target': 'shadow repo',
1071 'merge_commit': 'xxx'})
1244 merge_resp = MergeResponse(
1245 True,
1246 True,
1247 Reference("commit", "STUB_COMMIT_ID", "STUB_COMMIT_ID"),
1248 MergeFailureReason.PUSH_FAILED,
1249 metadata={"target": "shadow repo", "merge_commit": "xxx"},
1250 )
1072 1251 assert_session_flash(response, merge_resp.merge_status_message)
1073 1252
1074 1253 def test_update_source_revision(self, backend, csrf_token):
1075 1254 commits = [
1076 {'message': 'ancestor'},
1077 {'message': 'change'},
1078 {'message': 'change-2'},
1255 {"message": "ancestor"},
1256 {"message": "change"},
1257 {"message": "change-2"},
1079 1258 ]
1080 1259 commit_ids = backend.create_master_repo(commits)
1081 target = backend.create_repo(heads=['ancestor'])
1082 source = backend.create_repo(heads=['change'])
1260 target = backend.create_repo(heads=["ancestor"])
1261 source = backend.create_repo(heads=["change"])
1083 1262
1084 1263 # create pr from a in source to A in target
1085 1264 pull_request = PullRequest()
1086 1265
1087 1266 pull_request.source_repo = source
1088 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1089 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1267 pull_request.source_ref = "branch:{branch}:{commit_id}".format(
1268 branch=backend.default_branch_name, commit_id=commit_ids["change"]
1269 )
1090 1270
1091 1271 pull_request.target_repo = target
1092 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1093 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1272 pull_request.target_ref = "branch:{branch}:{commit_id}".format(
1273 branch=backend.default_branch_name, commit_id=commit_ids["ancestor"]
1274 )
1094 1275
1095 pull_request.revisions = [commit_ids['change']]
1276 pull_request.revisions = [commit_ids["change"]]
1096 1277 pull_request.title = "Test"
1097 1278 pull_request.description = "Description"
1098 1279 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1099 1280 pull_request.pull_request_state = PullRequest.STATE_CREATED
1100 1281 Session().add(pull_request)
1101 1282 Session().commit()
1102 1283 pull_request_id = pull_request.pull_request_id
1103 1284
1104 1285 # source has ancestor - change - change-2
1105 backend.pull_heads(source, heads=['change-2'])
1286 backend.pull_heads(source, heads=["change-2"])
1106 1287 target_repo_name = target.repo_name
1107 1288
1108 1289 # update PR
1109 1290 self.app.post(
1110 route_path('pullrequest_update',
1111 repo_name=target_repo_name, pull_request_id=pull_request_id),
1112 params={'update_commits': 'true', 'csrf_token': csrf_token})
1291 route_path(
1292 "pullrequest_update",
1293 repo_name=target_repo_name,
1294 pull_request_id=pull_request_id,
1295 ),
1296 params={"update_commits": "true", "csrf_token": csrf_token},
1297 )
1113 1298
1114 1299 response = self.app.get(
1115 route_path('pullrequest_show',
1116 repo_name=target_repo_name,
1117 pull_request_id=pull_request.pull_request_id))
1300 route_path(
1301 "pullrequest_show",
1302 repo_name=target_repo_name,
1303 pull_request_id=pull_request.pull_request_id,
1304 )
1305 )
1118 1306
1119 1307 assert response.status_int == 200
1120 response.mustcontain('Pull request updated to')
1121 response.mustcontain('with 1 added, 0 removed commits.')
1308 response.mustcontain("Pull request updated to")
1309 response.mustcontain("with 1 added, 0 removed commits.")
1122 1310
1123 1311 # check that we have now both revisions
1124 1312 pull_request = PullRequest.get(pull_request_id)
1125 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
1313 assert pull_request.revisions == [commit_ids["change-2"], commit_ids["change"]]
1126 1314
1127 1315 def test_update_target_revision(self, backend, csrf_token):
1316 """
1317 Checks when we add more commits into a target branch, and update PR
1318 """
1319
1128 1320 commits = [
1129 {'message': 'ancestor'},
1130 {'message': 'change'},
1131 {'message': 'ancestor-new', 'parents': ['ancestor']},
1132 {'message': 'change-rebased'},
1321 {"message": "commit-a"}, # main branch (our PR target)
1322 {"message": "commit-b"}, # Initial source
1323 {"message": "commit-c"},
1324
1325 {"message": "commit-a-prime", "branch": "feature", "parents": ["commit-a"]}, # main branch (source)
1133 1326 ]
1327
1134 1328 commit_ids = backend.create_master_repo(commits)
1135 target = backend.create_repo(heads=['ancestor'])
1136 source = backend.create_repo(heads=['change'])
1329 target = backend.create_repo(heads=["commit-a"])
1330 source = backend.create_repo(heads=["commit-b"])
1331 target_repo_name = target.repo_name
1137 1332
1138 # create pr from a in source to A in target
1333 # create pr from commit-b to commit-a
1139 1334 pull_request = PullRequest()
1140 1335
1141 pull_request.source_repo = source
1142 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1143 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1336 pull_request.target_repo = target
1337 pull_request.target_ref = "branch:{branch}:{commit_id}".format(
1338 branch=backend.default_branch_name, commit_id=commit_ids["commit-a"]
1339 )
1144 1340
1145 pull_request.target_repo = target
1146 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1147 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1341 pull_request.source_repo = source
1342 pull_request.source_ref = "branch:{branch}:{commit_id}".format(
1343 branch=backend.default_branch_name, commit_id=commit_ids["commit-b"]
1344 )
1148 1345
1149 pull_request.revisions = [commit_ids['change']]
1346 pull_request.revisions = [commit_ids["commit-b"]]
1150 1347 pull_request.title = "Test"
1151 1348 pull_request.description = "Description"
1152 1349 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1153 1350 pull_request.pull_request_state = PullRequest.STATE_CREATED
1154 1351
1155 1352 Session().add(pull_request)
1156 1353 Session().commit()
1157 1354 pull_request_id = pull_request.pull_request_id
1158 1355
1159 # target has ancestor - ancestor-new
1356 # target - add one commit on top commit-a -> commit-b
1357 backend.pull_heads(target, heads=["commit-b"])
1358
1160 1359 # source has ancestor - ancestor-new - change-rebased
1161 backend.pull_heads(target, heads=['ancestor-new'])
1162 backend.pull_heads(source, heads=['change-rebased'])
1163 target_repo_name = target.repo_name
1360 backend.pull_heads(source, heads=["commit-c"])
1164 1361
1165 1362 # update PR
1166 url = route_path('pullrequest_update',
1167 repo_name=target_repo_name,
1168 pull_request_id=pull_request_id)
1169 self.app.post(url,
1170 params={'update_commits': 'true', 'csrf_token': csrf_token},
1171 status=200)
1363 url = route_path(
1364 "pullrequest_update",
1365 repo_name=target_repo_name,
1366 pull_request_id=pull_request_id,
1367 )
1368 self.app.post(
1369 url, params={"update_commits": "true", "csrf_token": csrf_token}, status=200
1370 )
1172 1371
1173 1372 # check that we have now both revisions
1174 1373 pull_request = PullRequest.get(pull_request_id)
1175 assert pull_request.revisions == [commit_ids['change-rebased']]
1176 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
1177 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
1374 assert pull_request.revisions == [commit_ids["commit-c"]]
1375 assert pull_request.target_ref == "branch:{branch}:{commit_id}".format(
1376 branch=backend.default_branch_name, commit_id=commit_ids["commit-b"]
1377 )
1178 1378
1179 1379 response = self.app.get(
1180 route_path('pullrequest_show',
1181 repo_name=target_repo_name,
1182 pull_request_id=pull_request.pull_request_id))
1380 route_path(
1381 "pullrequest_show",
1382 repo_name=target_repo_name,
1383 pull_request_id=pull_request.pull_request_id,
1384 )
1385 )
1183 1386 assert response.status_int == 200
1184 response.mustcontain('Pull request updated to')
1185 response.mustcontain('with 1 added, 1 removed commits.')
1387 response.mustcontain("Pull request updated to")
1388 response.mustcontain("with 1 added, 1 removed commits.")
1186 1389
1187 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
1390 def test_update_target_revision_with_removal_of_1_commit_git(
1391 self, backend_git, csrf_token
1392 ):
1188 1393 backend = backend_git
1189 1394 commits = [
1190 {'message': 'master-commit-1'},
1191 {'message': 'master-commit-2-change-1'},
1192 {'message': 'master-commit-3-change-2'},
1193
1194 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
1195 {'message': 'feat-commit-2'},
1395 {"message": "master-commit-1"},
1396 {"message": "master-commit-2-change-1"},
1397 {"message": "master-commit-3-change-2"},
1398 {
1399 "message": "feat-commit-1",
1400 "parents": ["master-commit-1"],
1401 "branch": "feature",
1402 },
1403 {"message": "feat-commit-2", "branch": "feature"},
1196 1404 ]
1197 1405 commit_ids = backend.create_master_repo(commits)
1198 target = backend.create_repo(heads=['master-commit-3-change-2'])
1199 source = backend.create_repo(heads=['feat-commit-2'])
1406 target = backend.create_repo(heads=["master-commit-3-change-2"])
1407 source = backend.create_repo(heads=["feat-commit-2"])
1200 1408
1201 1409 # create pr from a in source to A in target
1202 1410 pull_request = PullRequest()
1203 1411 pull_request.source_repo = source
1204 1412
1205 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1413 pull_request.source_ref = "branch:{branch}:{commit_id}".format(
1206 1414 branch=backend.default_branch_name,
1207 commit_id=commit_ids['master-commit-3-change-2'])
1415 commit_id=commit_ids["master-commit-3-change-2"],
1416 )
1208 1417
1209 1418 pull_request.target_repo = target
1210 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1211 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
1419 pull_request.target_ref = "branch:{branch}:{commit_id}".format(
1420 branch=backend.default_branch_name, commit_id=commit_ids["feat-commit-2"]
1421 )
1212 1422
1213 1423 pull_request.revisions = [
1214 commit_ids['feat-commit-1'],
1215 commit_ids['feat-commit-2']
1424 commit_ids["feat-commit-1"],
1425 commit_ids["feat-commit-2"],
1216 1426 ]
1217 1427 pull_request.title = "Test"
1218 1428 pull_request.description = "Description"
1219 1429 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1220 1430 pull_request.pull_request_state = PullRequest.STATE_CREATED
1221 1431 Session().add(pull_request)
1222 1432 Session().commit()
1223 1433 pull_request_id = pull_request.pull_request_id
1224 1434
1225 1435 # PR is created, now we simulate a force-push into target,
1226 1436 # that drops a 2 last commits
1227 1437 vcsrepo = target.scm_instance()
1228 vcsrepo.config.clear_section('hooks')
1229 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
1438 vcsrepo.config.clear_section("hooks")
1439 vcsrepo.run_git_command(["reset", "--soft", "HEAD~2"])
1230 1440 target_repo_name = target.repo_name
1231 1441
1232 1442 # update PR
1233 url = route_path('pullrequest_update',
1234 repo_name=target_repo_name,
1235 pull_request_id=pull_request_id)
1236 self.app.post(url,
1237 params={'update_commits': 'true', 'csrf_token': csrf_token},
1238 status=200)
1443 url = route_path(
1444 "pullrequest_update",
1445 repo_name=target_repo_name,
1446 pull_request_id=pull_request_id,
1447 )
1448 self.app.post(
1449 url, params={"update_commits": "true", "csrf_token": csrf_token}, status=200
1450 )
1239 1451
1240 response = self.app.get(route_path('pullrequest_new', repo_name=target_repo_name))
1452 response = self.app.get(
1453 route_path("pullrequest_new", repo_name=target_repo_name)
1454 )
1241 1455 assert response.status_int == 200
1242 response.mustcontain('Pull request updated to')
1243 response.mustcontain('with 0 added, 0 removed commits.')
1456 response.mustcontain("Pull request updated to")
1457 response.mustcontain("with 0 added, 0 removed commits.")
1244 1458
1245 def test_update_of_ancestor_reference(self, backend, csrf_token):
1459 def test_update_pr_ancestor_reference(self, csrf_token, pr_util: PRTestUtility):
1246 1460 commits = [
1247 {'message': 'ancestor'},
1248 {'message': 'change'},
1249 {'message': 'change-2'},
1250 {'message': 'ancestor-new', 'parents': ['ancestor']},
1251 {'message': 'change-rebased'},
1461 {"message": "ancestor"},
1462 {"message": "change"},
1463 {"message": "change-2"},
1464
1465 {"message": "ancestor-new", "parents": ["ancestor"], "branch": "feature"},
1466 {"message": "change-rebased", "branch": "feature"},
1252 1467 ]
1253 commit_ids = backend.create_master_repo(commits)
1254 target = backend.create_repo(heads=['ancestor'])
1255 source = backend.create_repo(heads=['change'])
1256 1468
1257 # create pr from a in source to A in target
1258 pull_request = PullRequest()
1259 pull_request.source_repo = source
1469 pull_request = pr_util.create_pull_request(
1470 commits,
1471 target_head="ancestor",
1472 source_head="change",
1473 revisions=["change"],
1474 )
1475 pull_request_id = pull_request.pull_request_id
1476 target_repo_name = pr_util.target_repository.repo_name
1477 commit_ids = pr_util.commit_ids
1478
1479 assert pull_request.revisions == [commit_ids["change"]]
1480 assert list(pull_request.target_repo.scm_instance(cache=False).branches.keys()) == [pr_util.backend.default_branch_name]
1481 assert list(pull_request.source_repo.scm_instance(cache=False).branches.keys()) == [pr_util.backend.default_branch_name]
1260 1482
1261 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1262 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1263 pull_request.target_repo = target
1264 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1265 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1266 pull_request.revisions = [commit_ids['change']]
1267 pull_request.title = "Test"
1268 pull_request.description = "Description"
1269 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1270 pull_request.pull_request_state = PullRequest.STATE_CREATED
1271 Session().add(pull_request)
1483 branch = "feature"
1484 pr_util.update_target_repository(head="ancestor-new", do_fetch=True)
1485 pr_util.set_pr_target_ref(ref_type="branch", ref_name=branch, ref_commit_id=commit_ids["ancestor-new"])
1486
1487 pr_util.update_source_repository(head="change-rebased", do_fetch=True)
1488 pr_util.set_pr_source_ref(ref_type="branch", ref_name=branch, ref_commit_id=commit_ids["change-rebased"])
1489
1490 assert list(pull_request.target_repo.scm_instance(cache=False).branches.keys()) == [pr_util.backend.default_branch_name, branch]
1491 assert list(pull_request.source_repo.scm_instance(cache=False).branches.keys()) == [pr_util.backend.default_branch_name, branch]
1492
1493 Session().add(pr_util.pull_request)
1272 1494 Session().commit()
1273 pull_request_id = pull_request.pull_request_id
1274 1495
1275 # target has ancestor - ancestor-new
1276 # source has ancestor - ancestor-new - change-rebased
1277 backend.pull_heads(target, heads=['ancestor-new'])
1278 backend.pull_heads(source, heads=['change-rebased'])
1279 target_repo_name = target.repo_name
1496 self.app.post(
1497 route_path(
1498 "pullrequest_update",
1499 repo_name=target_repo_name,
1500 pull_request_id=pull_request_id,
1501 ),
1502 params={"update_commits": "true", "csrf_token": csrf_token, "force_refresh": True},
1503 status=200,
1504 )
1505
1280 1506
1281 # update PR
1282 self.app.post(
1283 route_path('pullrequest_update',
1284 repo_name=target_repo_name, pull_request_id=pull_request_id),
1285 params={'update_commits': 'true', 'csrf_token': csrf_token},
1286 status=200)
1507 # response = self.app.get(
1508 # route_path(
1509 # "pullrequest_show", repo_name=target_repo_name, pull_request_id=pull_request_id,
1510 # params={"force_refresh": True}
1511 # ),
1512 # )
1513 #
1514 # response.mustcontain("Pull request updated to")
1515 # response.mustcontain("with 1 added, 0 removed commits.")
1287 1516
1288 # Expect the target reference to be updated correctly
1289 1517 pull_request = PullRequest.get(pull_request_id)
1290 assert pull_request.revisions == [commit_ids['change-rebased']]
1291 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
1292 branch=backend.default_branch_name,
1293 commit_id=commit_ids['ancestor-new'])
1294 assert pull_request.target_ref == expected_target_ref
1518
1519 assert pull_request.target_ref == "branch:{branch}:{commit_id}".format(
1520 branch="feature", commit_id=commit_ids["ancestor-new"])
1521
1522 assert pull_request.revisions == [commit_ids["change-rebased"]]
1523
1295 1524
1296 1525 def test_remove_pull_request_branch(self, backend_git, csrf_token):
1297 branch_name = 'development'
1526 branch_name = "development"
1298 1527 commits = [
1299 {'message': 'initial-commit'},
1300 {'message': 'old-feature'},
1301 {'message': 'new-feature', 'branch': branch_name},
1528 {"message": "initial-commit"},
1529 {"message": "old-feature"},
1530 {"message": "new-feature", "branch": branch_name},
1302 1531 ]
1303 1532 repo = backend_git.create_repo(commits)
1304 1533 repo_name = repo.repo_name
1305 1534 commit_ids = backend_git.commit_ids
1306 1535
1307 1536 pull_request = PullRequest()
1308 1537 pull_request.source_repo = repo
1309 1538 pull_request.target_repo = repo
1310 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1311 branch=branch_name, commit_id=commit_ids['new-feature'])
1312 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1313 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
1314 pull_request.revisions = [commit_ids['new-feature']]
1539 pull_request.source_ref = "branch:{branch}:{commit_id}".format(
1540 branch=branch_name, commit_id=commit_ids["new-feature"]
1541 )
1542 pull_request.target_ref = "branch:{branch}:{commit_id}".format(
1543 branch=backend_git.default_branch_name, commit_id=commit_ids["old-feature"]
1544 )
1545 pull_request.revisions = [commit_ids["new-feature"]]
1315 1546 pull_request.title = "Test"
1316 1547 pull_request.description = "Description"
1317 1548 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1318 1549 pull_request.pull_request_state = PullRequest.STATE_CREATED
1319 1550 Session().add(pull_request)
1320 1551 Session().commit()
1321 1552
1322 1553 pull_request_id = pull_request.pull_request_id
1323 1554
1324 1555 vcs = repo.scm_instance()
1325 vcs.remove_ref('refs/heads/{}'.format(branch_name))
1556 vcs.remove_ref("refs/heads/{}".format(branch_name))
1326 1557 # NOTE(marcink): run GC to ensure the commits are gone
1327 1558 vcs.run_gc()
1328 1559
1329 response = self.app.get(route_path(
1330 'pullrequest_show',
1331 repo_name=repo_name,
1332 pull_request_id=pull_request_id))
1560 response = self.app.get(
1561 route_path(
1562 "pullrequest_show", repo_name=repo_name, pull_request_id=pull_request_id
1563 )
1564 )
1333 1565
1334 1566 assert response.status_int == 200
1335 1567
1336 1568 response.assert_response().element_contains(
1337 '#changeset_compare_view_content .alert strong',
1338 'Missing commits')
1569 "#changeset_compare_view_content .alert strong", "Missing commits"
1570 )
1339 1571 response.assert_response().element_contains(
1340 '#changeset_compare_view_content .alert',
1341 'This pull request cannot be displayed, because one or more'
1342 ' commits no longer exist in the source repository.')
1572 "#changeset_compare_view_content .alert",
1573 "This pull request cannot be displayed, because one or more"
1574 " commits no longer exist in the source repository.",
1575 )
1343 1576
1344 def test_strip_commits_from_pull_request(
1345 self, backend, pr_util, csrf_token):
1577 def test_strip_commits_from_pull_request(self, backend, pr_util):
1346 1578 commits = [
1347 {'message': 'initial-commit'},
1348 {'message': 'old-feature'},
1349 {'message': 'new-feature', 'parents': ['initial-commit']},
1579 {"message": "initial-commit"},
1580 {"message": "old-feature"},
1581 {"message": "new-feature"},
1350 1582 ]
1351 1583 pull_request = pr_util.create_pull_request(
1352 commits, target_head='initial-commit', source_head='new-feature',
1353 revisions=['new-feature'])
1584 commits,
1585 target_head="initial-commit",
1586 source_head="new-feature",
1587 revisions=["new-feature"],
1588 )
1354 1589
1355 1590 vcs = pr_util.source_repository.scm_instance()
1356 if backend.alias == 'git':
1357 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
1591 if backend.alias == "git":
1592 vcs.strip(pr_util.commit_ids["new-feature"], branch_name=pr_util.backend.default_branch_name)
1358 1593 else:
1359 vcs.strip(pr_util.commit_ids['new-feature'])
1594 vcs.strip(pr_util.commit_ids["new-feature"])
1360 1595
1361 response = self.app.get(route_path(
1362 'pullrequest_show',
1363 repo_name=pr_util.target_repository.repo_name,
1364 pull_request_id=pull_request.pull_request_id))
1596 response = self.app.get(
1597 route_path(
1598 "pullrequest_show",
1599 repo_name=pr_util.target_repository.repo_name,
1600 pull_request_id=pull_request.pull_request_id,
1601 )
1602 )
1365 1603
1366 1604 assert response.status_int == 200
1367 1605
1368 1606 response.assert_response().element_contains(
1369 '#changeset_compare_view_content .alert strong',
1370 'Missing commits')
1371 response.assert_response().element_contains(
1372 '#changeset_compare_view_content .alert',
1373 'This pull request cannot be displayed, because one or more'
1374 ' commits no longer exist in the source repository.')
1607 "#changeset_compare_view_content .alert strong", "Missing commits"
1608 )
1375 1609 response.assert_response().element_contains(
1376 '#update_commits',
1377 'Update commits')
1610 "#changeset_compare_view_content .alert",
1611 "This pull request cannot be displayed, because one or more"
1612 " commits no longer exist in the source repository.",
1613 )
1614 response.assert_response().element_contains("#update_commits", "Update commits")
1378 1615
1379 def test_strip_commits_and_update(
1380 self, backend, pr_util, csrf_token):
1616 def test_strip_commits_and_update(self, backend, pr_util, csrf_token):
1381 1617 commits = [
1382 {'message': 'initial-commit'},
1383 {'message': 'old-feature'},
1384 {'message': 'new-feature', 'parents': ['old-feature']},
1618 {"message": "initial-commit"},
1619 {"message": "old-feature"},
1620 {"message": "new-feature", "parents": ["old-feature"]},
1385 1621 ]
1386 1622 pull_request = pr_util.create_pull_request(
1387 commits, target_head='old-feature', source_head='new-feature',
1388 revisions=['new-feature'], mergeable=True)
1623 commits,
1624 target_head="old-feature",
1625 source_head="new-feature",
1626 revisions=["new-feature"],
1627 mergeable=True,
1628 )
1389 1629 pr_id = pull_request.pull_request_id
1390 1630 target_repo_name = pull_request.target_repo.repo_name
1391 1631
1392 1632 vcs = pr_util.source_repository.scm_instance()
1393 if backend.alias == 'git':
1394 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
1633 if backend.alias == "git":
1634 vcs.strip(pr_util.commit_ids["new-feature"], branch_name="master")
1395 1635 else:
1396 vcs.strip(pr_util.commit_ids['new-feature'])
1636 vcs.strip(pr_util.commit_ids["new-feature"])
1397 1637
1398 url = route_path('pullrequest_update',
1399 repo_name=target_repo_name,
1400 pull_request_id=pr_id)
1401 response = self.app.post(url,
1402 params={'update_commits': 'true',
1403 'csrf_token': csrf_token})
1638 url = route_path(
1639 "pullrequest_update", repo_name=target_repo_name, pull_request_id=pr_id
1640 )
1641 response = self.app.post(
1642 url, params={"update_commits": "true", "csrf_token": csrf_token}
1643 )
1404 1644
1405 1645 assert response.status_int == 200
1406 assert json.loads(response.body) == json.loads('{"response": true, "redirect_url": null}')
1646 assert json.loads(response.body) == json.loads(
1647 '{"response": true, "redirect_url": null}'
1648 )
1407 1649
1408 1650 # Make sure that after update, it won't raise 500 errors
1409 response = self.app.get(route_path(
1410 'pullrequest_show',
1411 repo_name=target_repo_name,
1412 pull_request_id=pr_id))
1651 response = self.app.get(
1652 route_path(
1653 "pullrequest_show", repo_name=target_repo_name, pull_request_id=pr_id
1654 )
1655 )
1413 1656
1414 1657 assert response.status_int == 200
1415 1658 response.assert_response().element_contains(
1416 '#changeset_compare_view_content .alert strong',
1417 'Missing commits')
1659 "#changeset_compare_view_content .alert strong", "Missing commits"
1660 )
1418 1661
1419 1662 def test_branch_is_a_link(self, pr_util):
1420 1663 pull_request = pr_util.create_pull_request()
1421 pull_request.source_ref = 'branch:origin:1234567890abcdef'
1422 pull_request.target_ref = 'branch:target:abcdef1234567890'
1664 pull_request.source_ref = "branch:origin:1234567890abcdef"
1665 pull_request.target_ref = "branch:target:abcdef1234567890"
1423 1666 Session().add(pull_request)
1424 1667 Session().commit()
1425 1668
1426 response = self.app.get(route_path(
1427 'pullrequest_show',
1428 repo_name=pull_request.target_repo.scm_instance().name,
1429 pull_request_id=pull_request.pull_request_id))
1669 response = self.app.get(
1670 route_path(
1671 "pullrequest_show",
1672 repo_name=pull_request.target_repo.scm_instance().name,
1673 pull_request_id=pull_request.pull_request_id,
1674 )
1675 )
1430 1676 assert response.status_int == 200
1431 1677
1432 source = response.assert_response().get_element('.pr-source-info')
1678 source = response.assert_response().get_element(".pr-source-info")
1433 1679 source_parent = source.getparent()
1434 1680 assert len(source_parent) == 1
1435 1681
1436 target = response.assert_response().get_element('.pr-target-info')
1682 target = response.assert_response().get_element(".pr-target-info")
1437 1683 target_parent = target.getparent()
1438 1684 assert len(target_parent) == 1
1439 1685
1440 1686 expected_origin_link = route_path(
1441 'repo_commits',
1687 "repo_commits",
1442 1688 repo_name=pull_request.source_repo.scm_instance().name,
1443 params=dict(branch='origin'))
1689 params=dict(branch="origin"),
1690 )
1444 1691 expected_target_link = route_path(
1445 'repo_commits',
1692 "repo_commits",
1446 1693 repo_name=pull_request.target_repo.scm_instance().name,
1447 params=dict(branch='target'))
1448 assert source_parent.attrib['href'] == expected_origin_link
1449 assert target_parent.attrib['href'] == expected_target_link
1694 params=dict(branch="target"),
1695 )
1696 assert source_parent.attrib["href"] == expected_origin_link
1697 assert target_parent.attrib["href"] == expected_target_link
1450 1698
1451 1699 def test_bookmark_is_not_a_link(self, pr_util):
1452 1700 pull_request = pr_util.create_pull_request()
1453 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1454 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1701 pull_request.source_ref = "bookmark:origin:1234567890abcdef"
1702 pull_request.target_ref = "bookmark:target:abcdef1234567890"
1455 1703 Session().add(pull_request)
1456 1704 Session().commit()
1457 1705
1458 response = self.app.get(route_path(
1459 'pullrequest_show',
1460 repo_name=pull_request.target_repo.scm_instance().name,
1461 pull_request_id=pull_request.pull_request_id))
1706 response = self.app.get(
1707 route_path(
1708 "pullrequest_show",
1709 repo_name=pull_request.target_repo.scm_instance().name,
1710 pull_request_id=pull_request.pull_request_id,
1711 )
1712 )
1462 1713 assert response.status_int == 200
1463 1714
1464 source = response.assert_response().get_element('.pr-source-info')
1465 assert source.text.strip() == 'bookmark:origin'
1466 assert source.getparent().attrib.get('href') is None
1715 source = response.assert_response().get_element(".pr-source-info")
1716 assert source.text.strip() == "bookmark:origin"
1717 assert source.getparent().attrib.get("href") is None
1467 1718
1468 target = response.assert_response().get_element('.pr-target-info')
1469 assert target.text.strip() == 'bookmark:target'
1470 assert target.getparent().attrib.get('href') is None
1719 target = response.assert_response().get_element(".pr-target-info")
1720 assert target.text.strip() == "bookmark:target"
1721 assert target.getparent().attrib.get("href") is None
1471 1722
1472 1723 def test_tag_is_not_a_link(self, pr_util):
1473 1724 pull_request = pr_util.create_pull_request()
1474 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1475 pull_request.target_ref = 'tag:target:abcdef1234567890'
1725 pull_request.source_ref = "tag:origin:1234567890abcdef"
1726 pull_request.target_ref = "tag:target:abcdef1234567890"
1476 1727 Session().add(pull_request)
1477 1728 Session().commit()
1478 1729
1479 response = self.app.get(route_path(
1480 'pullrequest_show',
1481 repo_name=pull_request.target_repo.scm_instance().name,
1482 pull_request_id=pull_request.pull_request_id))
1730 response = self.app.get(
1731 route_path(
1732 "pullrequest_show",
1733 repo_name=pull_request.target_repo.scm_instance().name,
1734 pull_request_id=pull_request.pull_request_id,
1735 )
1736 )
1483 1737 assert response.status_int == 200
1484 1738
1485 source = response.assert_response().get_element('.pr-source-info')
1486 assert source.text.strip() == 'tag:origin'
1487 assert source.getparent().attrib.get('href') is None
1739 source = response.assert_response().get_element(".pr-source-info")
1740 assert source.text.strip() == "tag:origin"
1741 assert source.getparent().attrib.get("href") is None
1488 1742
1489 target = response.assert_response().get_element('.pr-target-info')
1490 assert target.text.strip() == 'tag:target'
1491 assert target.getparent().attrib.get('href') is None
1743 target = response.assert_response().get_element(".pr-target-info")
1744 assert target.text.strip() == "tag:target"
1745 assert target.getparent().attrib.get("href") is None
1492 1746
1493 @pytest.mark.parametrize('mergeable', [True, False])
1494 def test_shadow_repository_link(
1495 self, mergeable, pr_util, http_host_only_stub):
1747 @pytest.mark.parametrize("mergeable", [True, False])
1748 def test_shadow_repository_link(self, mergeable, pr_util, http_host_only_stub):
1496 1749 """
1497 1750 Check that the pull request summary page displays a link to the shadow
1498 1751 repository if the pull request is mergeable. If it is not mergeable
1499 1752 the link should not be displayed.
1500 1753 """
1501 1754 pull_request = pr_util.create_pull_request(
1502 mergeable=mergeable, enable_notifications=False)
1755 mergeable=mergeable, enable_notifications=False
1756 )
1503 1757 target_repo = pull_request.target_repo.scm_instance()
1504 1758 pr_id = pull_request.pull_request_id
1505 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1506 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1759 shadow_url = "{host}/{repo}/pull-request/{pr_id}/repository".format(
1760 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id
1761 )
1507 1762
1508 response = self.app.get(route_path(
1509 'pullrequest_show',
1510 repo_name=target_repo.name,
1511 pull_request_id=pr_id))
1763 response = self.app.get(
1764 route_path(
1765 "pullrequest_show", repo_name=target_repo.name, pull_request_id=pr_id
1766 )
1767 )
1512 1768
1513 1769 if mergeable:
1514 1770 response.assert_response().element_value_contains(
1515 'input.pr-mergeinfo', shadow_url)
1771 "input.pr-mergeinfo", shadow_url
1772 )
1516 1773 response.assert_response().element_value_contains(
1517 'input.pr-mergeinfo ', 'pr-merge')
1774 "input.pr-mergeinfo ", "pr-merge"
1775 )
1518 1776 else:
1519 response.assert_response().no_element_exists('.pr-mergeinfo')
1777 response.assert_response().no_element_exists(".pr-mergeinfo")
1520 1778
1521 1779
1522 @pytest.mark.usefixtures('app')
1780 @pytest.mark.usefixtures("app")
1523 1781 @pytest.mark.backends("git", "hg")
1524 1782 class TestPullrequestsControllerDelete(object):
1525 1783 def test_pull_request_delete_button_permissions_admin(
1526 self, autologin_user, user_admin, pr_util):
1784 self, autologin_user, user_admin, pr_util
1785 ):
1527 1786 pull_request = pr_util.create_pull_request(
1528 author=user_admin.username, enable_notifications=False)
1787 author=user_admin.username, enable_notifications=False
1788 )
1529 1789
1530 response = self.app.get(route_path(
1531 'pullrequest_show',
1532 repo_name=pull_request.target_repo.scm_instance().name,
1533 pull_request_id=pull_request.pull_request_id))
1790 response = self.app.get(
1791 route_path(
1792 "pullrequest_show",
1793 repo_name=pull_request.target_repo.scm_instance().name,
1794 pull_request_id=pull_request.pull_request_id,
1795 )
1796 )
1534 1797
1535 1798 response.mustcontain('id="delete_pullrequest"')
1536 response.mustcontain('Confirm to delete this pull request')
1799 response.mustcontain("Confirm to delete this pull request")
1537 1800
1538 1801 def test_pull_request_delete_button_permissions_owner(
1539 self, autologin_regular_user, user_regular, pr_util):
1802 self, autologin_regular_user, user_regular, pr_util
1803 ):
1540 1804 pull_request = pr_util.create_pull_request(
1541 author=user_regular.username, enable_notifications=False)
1805 author=user_regular.username, enable_notifications=False
1806 )
1542 1807
1543 response = self.app.get(route_path(
1544 'pullrequest_show',
1545 repo_name=pull_request.target_repo.scm_instance().name,
1546 pull_request_id=pull_request.pull_request_id))
1808 response = self.app.get(
1809 route_path(
1810 "pullrequest_show",
1811 repo_name=pull_request.target_repo.scm_instance().name,
1812 pull_request_id=pull_request.pull_request_id,
1813 )
1814 )
1547 1815
1548 1816 response.mustcontain('id="delete_pullrequest"')
1549 response.mustcontain('Confirm to delete this pull request')
1817 response.mustcontain("Confirm to delete this pull request")
1550 1818
1551 1819 def test_pull_request_delete_button_permissions_forbidden(
1552 self, autologin_regular_user, user_regular, user_admin, pr_util):
1820 self, autologin_regular_user, user_regular, user_admin, pr_util
1821 ):
1553 1822 pull_request = pr_util.create_pull_request(
1554 author=user_admin.username, enable_notifications=False)
1823 author=user_admin.username, enable_notifications=False
1824 )
1555 1825
1556 response = self.app.get(route_path(
1557 'pullrequest_show',
1558 repo_name=pull_request.target_repo.scm_instance().name,
1559 pull_request_id=pull_request.pull_request_id))
1826 response = self.app.get(
1827 route_path(
1828 "pullrequest_show",
1829 repo_name=pull_request.target_repo.scm_instance().name,
1830 pull_request_id=pull_request.pull_request_id,
1831 )
1832 )
1560 1833 response.mustcontain(no=['id="delete_pullrequest"'])
1561 response.mustcontain(no=['Confirm to delete this pull request'])
1834 response.mustcontain(no=["Confirm to delete this pull request"])
1562 1835
1563 1836 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1564 self, autologin_regular_user, user_regular, user_admin, pr_util,
1565 user_util):
1566
1837 self, autologin_regular_user, user_regular, user_admin, pr_util, user_util
1838 ):
1567 1839 pull_request = pr_util.create_pull_request(
1568 author=user_admin.username, enable_notifications=False)
1840 author=user_admin.username, enable_notifications=False
1841 )
1569 1842
1570 1843 user_util.grant_user_permission_to_repo(
1571 pull_request.target_repo, user_regular,
1572 'repository.write')
1844 pull_request.target_repo, user_regular, "repository.write"
1845 )
1573 1846
1574 response = self.app.get(route_path(
1575 'pullrequest_show',
1576 repo_name=pull_request.target_repo.scm_instance().name,
1577 pull_request_id=pull_request.pull_request_id))
1847 response = self.app.get(
1848 route_path(
1849 "pullrequest_show",
1850 repo_name=pull_request.target_repo.scm_instance().name,
1851 pull_request_id=pull_request.pull_request_id,
1852 )
1853 )
1578 1854
1579 1855 response.mustcontain('id="open_edit_pullrequest"')
1580 1856 response.mustcontain('id="delete_pullrequest"')
1581 response.mustcontain(no=['Confirm to delete this pull request'])
1857 response.mustcontain(no=["Confirm to delete this pull request"])
1582 1858
1583 1859 def test_delete_comment_returns_404_if_comment_does_not_exist(
1584 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1585
1860 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header
1861 ):
1586 1862 pull_request = pr_util.create_pull_request(
1587 author=user_admin.username, enable_notifications=False)
1863 author=user_admin.username, enable_notifications=False
1864 )
1588 1865
1589 1866 self.app.post(
1590 1867 route_path(
1591 'pullrequest_comment_delete',
1592 repo_name=pull_request.target_repo.scm_instance().name,
1593 pull_request_id=pull_request.pull_request_id,
1594 comment_id=1024404),
1868 "pullrequest_comment_delete",
1869 repo_name=pull_request.target_repo.scm_instance().name,
1870 pull_request_id=pull_request.pull_request_id,
1871 comment_id=1024404,
1872 ),
1595 1873 extra_environ=xhr_header,
1596 params={'csrf_token': csrf_token},
1597 status=404
1874 params={"csrf_token": csrf_token},
1875 status=404,
1598 1876 )
1599 1877
1600 1878 def test_delete_comment(
1601 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1602
1879 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header
1880 ):
1603 1881 pull_request = pr_util.create_pull_request(
1604 author=user_admin.username, enable_notifications=False)
1882 author=user_admin.username, enable_notifications=False
1883 )
1605 1884 comment = pr_util.create_comment()
1606 1885 comment_id = comment.comment_id
1607 1886
1608 1887 response = self.app.post(
1609 1888 route_path(
1610 'pullrequest_comment_delete',
1611 repo_name=pull_request.target_repo.scm_instance().name,
1612 pull_request_id=pull_request.pull_request_id,
1613 comment_id=comment_id),
1889 "pullrequest_comment_delete",
1890 repo_name=pull_request.target_repo.scm_instance().name,
1891 pull_request_id=pull_request.pull_request_id,
1892 comment_id=comment_id,
1893 ),
1614 1894 extra_environ=xhr_header,
1615 params={'csrf_token': csrf_token},
1616 status=200
1895 params={"csrf_token": csrf_token},
1896 status=200,
1617 1897 )
1618 assert response.text == 'true'
1898 assert response.text == "true"
1619 1899
1620 @pytest.mark.parametrize('url_type', [
1621 'pullrequest_new',
1622 'pullrequest_create',
1623 'pullrequest_update',
1624 'pullrequest_merge',
1625 ])
1900 @pytest.mark.parametrize(
1901 "url_type",
1902 [
1903 "pullrequest_new",
1904 "pullrequest_create",
1905 "pullrequest_update",
1906 "pullrequest_merge",
1907 ],
1908 )
1626 1909 def test_pull_request_is_forbidden_on_archived_repo(
1627 self, autologin_user, backend, xhr_header, user_util, url_type):
1628
1910 self, autologin_user, backend, xhr_header, user_util, url_type
1911 ):
1629 1912 # create a temporary repo
1630 1913 source = user_util.create_repo(repo_type=backend.alias)
1631 1914 repo_name = source.repo_name
1632 1915 repo = Repository.get_by_repo_name(repo_name)
1633 1916 repo.archived = True
1634 1917 Session().commit()
1635 1918
1636 1919 response = self.app.get(
1637 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1920 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302
1921 )
1638 1922
1639 msg = 'Action not supported for archived repository.'
1923 msg = "Action not supported for archived repository."
1640 1924 assert_session_flash(response, msg)
1641 1925
1642 1926
1643 1927 def assert_pull_request_status(pull_request, expected_status):
1644 1928 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1645 1929 assert status == expected_status
1646 1930
1647 1931
1648 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1932 @pytest.mark.parametrize("route", ["pullrequest_new", "pullrequest_create"])
1649 1933 @pytest.mark.usefixtures("autologin_user")
1650 1934 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1651 1935 app.get(route_path(route, repo_name=backend_svn.repo_name), status=404)
@@ -1,1044 +1,1044 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Scm model for RhodeCode
21 21 """
22 22
23 23 import os.path
24 24 import traceback
25 25 import logging
26 26 import io
27 27
28 28 from sqlalchemy import func
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 import rhodecode
32 32 from rhodecode.lib.str_utils import safe_bytes
33 33 from rhodecode.lib.vcs import get_backend
34 34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 35 from rhodecode.lib.vcs.nodes import FileNode
36 36 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 37 from rhodecode.lib import helpers as h, rc_cache
38 38 from rhodecode.lib.auth import (
39 39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 40 HasUserGroupPermissionAny)
41 41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 42 from rhodecode.lib import hooks_utils
43 43 from rhodecode.lib.utils import (
44 44 get_filesystem_repos, make_db_config)
45 45 from rhodecode.lib.str_utils import safe_str
46 46 from rhodecode.lib.system_info import get_system_info
47 47 from rhodecode.model import BaseModel
48 48 from rhodecode.model.db import (
49 49 or_, false, null,
50 50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 51 PullRequest, FileStore)
52 52 from rhodecode.model.settings import VcsSettingsModel
53 53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 class UserTemp(object):
59 59 def __init__(self, user_id):
60 60 self.user_id = user_id
61 61
62 62 def __repr__(self):
63 63 return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
64 64
65 65
66 66 class RepoTemp(object):
67 67 def __init__(self, repo_id):
68 68 self.repo_id = repo_id
69 69
70 70 def __repr__(self):
71 71 return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
72 72
73 73
74 74 class SimpleCachedRepoList(object):
75 75 """
76 76 Lighter version of of iteration of repos without the scm initialisation,
77 77 and with cache usage
78 78 """
79 79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 80 self.db_repo_list = db_repo_list
81 81 self.repos_path = repos_path
82 82 self.order_by = order_by
83 83 self.reversed = (order_by or '').startswith('-')
84 84 if not perm_set:
85 85 perm_set = ['repository.read', 'repository.write',
86 86 'repository.admin']
87 87 self.perm_set = perm_set
88 88
89 89 def __len__(self):
90 90 return len(self.db_repo_list)
91 91
92 92 def __repr__(self):
93 93 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
94 94
95 95 def __iter__(self):
96 96 for dbr in self.db_repo_list:
97 97 # check permission at this level
98 98 has_perm = HasRepoPermissionAny(*self.perm_set)(
99 99 dbr.repo_name, 'SimpleCachedRepoList check')
100 100 if not has_perm:
101 101 continue
102 102
103 103 tmp_d = {
104 104 'name': dbr.repo_name,
105 105 'dbrepo': dbr.get_dict(),
106 106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
107 107 }
108 108 yield tmp_d
109 109
110 110
111 111 class _PermCheckIterator(object):
112 112
113 113 def __init__(
114 114 self, obj_list, obj_attr, perm_set, perm_checker,
115 115 extra_kwargs=None):
116 116 """
117 117 Creates iterator from given list of objects, additionally
118 118 checking permission for them from perm_set var
119 119
120 120 :param obj_list: list of db objects
121 121 :param obj_attr: attribute of object to pass into perm_checker
122 122 :param perm_set: list of permissions to check
123 123 :param perm_checker: callable to check permissions against
124 124 """
125 125 self.obj_list = obj_list
126 126 self.obj_attr = obj_attr
127 127 self.perm_set = perm_set
128 128 self.perm_checker = perm_checker(*self.perm_set)
129 129 self.extra_kwargs = extra_kwargs or {}
130 130
131 131 def __len__(self):
132 132 return len(self.obj_list)
133 133
134 134 def __repr__(self):
135 135 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
136 136
137 137 def __iter__(self):
138 138 for db_obj in self.obj_list:
139 139 # check permission at this level
140 140 # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
141 141 name = db_obj.__dict__.get(self.obj_attr, None)
142 142 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
143 143 continue
144 144
145 145 yield db_obj
146 146
147 147
148 148 class RepoList(_PermCheckIterator):
149 149
150 150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
151 151 if not perm_set:
152 152 perm_set = ['repository.read', 'repository.write', 'repository.admin']
153 153
154 154 super().__init__(
155 155 obj_list=db_repo_list,
156 156 obj_attr='_repo_name', perm_set=perm_set,
157 157 perm_checker=HasRepoPermissionAny,
158 158 extra_kwargs=extra_kwargs)
159 159
160 160
161 161 class RepoGroupList(_PermCheckIterator):
162 162
163 163 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
164 164 if not perm_set:
165 165 perm_set = ['group.read', 'group.write', 'group.admin']
166 166
167 167 super().__init__(
168 168 obj_list=db_repo_group_list,
169 169 obj_attr='_group_name', perm_set=perm_set,
170 170 perm_checker=HasRepoGroupPermissionAny,
171 171 extra_kwargs=extra_kwargs)
172 172
173 173
174 174 class UserGroupList(_PermCheckIterator):
175 175
176 176 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
177 177 if not perm_set:
178 178 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
179 179
180 180 super().__init__(
181 181 obj_list=db_user_group_list,
182 182 obj_attr='users_group_name', perm_set=perm_set,
183 183 perm_checker=HasUserGroupPermissionAny,
184 184 extra_kwargs=extra_kwargs)
185 185
186 186
class ScmModel(BaseModel):
    """
    Generic Scm Model

    High-level operations on version-controlled repositories: filesystem
    scanning, cache invalidation, follow/unfollow, in-memory commits on
    file nodes, pull/push, and landing-reference helpers.
    """
191 191
    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from database

        Resolved once per instance thanks to LazyProperty.
        """

        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()
200 200
    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects
        keyed by normalized repository name.

        :param repos_path: path to directory containing repositories;
            defaults to the configured repositories root
        :returns: dict mapping repo name -> vcs backend instance
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name need to be decomposed and put back together using the /
            # since this is internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    backend = get_backend(path[0])
                    repos[name] = backend(path[1], config=config,
                                          with_wire={"cache": False})
            except OSError:
                # unreadable/vanished path: skip this entry, keep scanning
                continue
            except RepositoryError:
                log.exception('Failed to create a repo')
                continue

        log.debug('found %s paths with repositories', len(repos))
        return repos
239 239
    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from db and for each repo create it's
        backend instance and fill that backed with information from database

        :param all_repos: list of repository names as strings
            give specific repositories list, good for filtering

        :param sort_key: initial sorting of repositories
        :returns: SimpleCachedRepoList iterator over the repositories
        """
        if all_repos is None:
            # default: every top-level (ungrouped) repo, case-insensitive name sort
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == null())\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter
257 257
258 258 def get_repo_groups(self, all_groups=None):
259 259 if all_groups is None:
260 260 all_groups = RepoGroup.query()\
261 261 .filter(RepoGroup.group_parent_id == null()).all()
262 262 return [x for x in RepoGroupList(all_groups)]
263 263
    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them, and also purge caches used by the dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        # silently no-op for unknown repo names
        if repo:
            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
                repo_id=repo.repo_id)
            CacheKey.set_invalidate(invalidation_namespace, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            config.set('extensions', 'largefiles', '')
            # refresh cached commit metadata immediately so readers see
            # fresh values instead of the invalidated ones
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                cache_namespace_uid = f'cache_repo.{repo_id}'
                rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
288 288
    def toggle_following_repo(self, follow_repo_id, user_id):
        """
        Toggle the follow state of repository *follow_repo_id* for
        *user_id*: delete the link when it exists, create it otherwise.
        """
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            # already following -> unfollow (toggle off)
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        # not following yet -> follow (toggle on)
        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise
311 311
312 312 def toggle_following_user(self, follow_user_id, user_id):
313 313 f = self.sa.query(UserFollowing)\
314 314 .filter(UserFollowing.follows_user_id == follow_user_id)\
315 315 .filter(UserFollowing.user_id == user_id).scalar()
316 316
317 317 if f is not None:
318 318 try:
319 319 self.sa.delete(f)
320 320 return
321 321 except Exception:
322 322 log.error(traceback.format_exc())
323 323 raise
324 324
325 325 try:
326 326 f = UserFollowing()
327 327 f.user_id = user_id
328 328 f.follows_user_id = follow_user_id
329 329 self.sa.add(f)
330 330 except Exception:
331 331 log.error(traceback.format_exc())
332 332 raise
333 333
    def is_following_repo(self, repo_name, user_id, cache=False):
        """
        Return True if *user_id* follows repository *repo_name*.

        NOTE(review): the ``cache`` parameter is currently unused.
        """
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None
343 343
344 344 def is_following_user(self, username, user_id, cache=False):
345 345 u = User.get_by_username(username)
346 346
347 347 f = self.sa.query(UserFollowing)\
348 348 .filter(UserFollowing.follows_user == u)\
349 349 .filter(UserFollowing.user_id == user_id).scalar()
350 350
351 351 return f is not None
352 352
    def get_followers(self, repo):
        """Count users following *repo* (object or name/id)."""
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == repo).count()
358 358
359 359 def get_forks(self, repo):
360 360 repo = self._get_repo(repo)
361 361 return self.sa.query(Repository)\
362 362 .filter(Repository.fork == repo).count()
363 363
    def get_pull_requests(self, repo):
        """Count non-closed pull requests targeting *repo* (object or name/id)."""
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
            .filter(PullRequest.target_repo == repo)\
            .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
369 369
    def get_artifacts(self, repo):
        """Count visible (hidden is NULL or False) file-store artifacts of *repo*."""
        repo = self._get_repo(repo)
        return self.sa.query(FileStore)\
            .filter(FileStore.repo == repo)\
            .filter(or_(FileStore.hidden == null(), FileStore.hidden == false())).count()
375 375
376 376 def mark_as_fork(self, repo, fork, user):
377 377 repo = self._get_repo(repo)
378 378 fork = self._get_repo(fork)
379 379 if fork and repo.repo_id == fork.repo_id:
380 380 raise Exception("Cannot set repository as fork of itself")
381 381
382 382 if fork and repo.repo_type != fork.repo_type:
383 383 raise RepositoryError(
384 384 "Cannot set repository as fork of repository with other type")
385 385
386 386 repo.fork = fork
387 387 self.sa.add(repo)
388 388 return repo
389 389
    def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
        """
        Fetch changes into *repo* from *remote_uri*, defaulting to the
        repository's configured clone uri, then invalidate its caches.

        :param repo: repository (object or name/id)
        :param username: acting user name (kept for audit/hook symmetry)
        :param remote_uri: explicit source uri; falls back to dbrepo.clone_uri
        :param validate_uri: validate the uri before fetching
        :raises Exception: when no clone uri is available
        :raises InvalidCloneUrl: when uri validation fails
        """
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        # disable repo hooks for this internal operation
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is due this tasks can be executed via scheduler without
            # proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            # explicit re-raise to document that validation failures
            # propagate to the caller unchanged
            raise

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.fetch(remote_uri)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise
418 418
419 419 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
420 420 dbrepo = self._get_repo(repo)
421 421 remote_uri = remote_uri or dbrepo.push_uri
422 422 if not remote_uri:
423 423 raise Exception("This repository doesn't have a clone uri")
424 424
425 425 repo = dbrepo.scm_instance(cache=False)
426 426 repo.config.clear_section('hooks')
427 427
428 428 try:
429 429 # NOTE(marcink): add extra validation so we skip invalid urls
430 430 # this is due this tasks can be executed via scheduler without
431 431 # proper validation of remote_uri
432 432 if validate_uri:
433 433 config = make_db_config(clear_session=False)
434 434 url_validator(remote_uri, dbrepo.repo_type, config)
435 435 except InvalidCloneUrl:
436 436 raise
437 437
438 438 try:
439 439 repo.push(remote_uri)
440 440 except Exception:
441 441 log.error(traceback.format_exc())
442 442 raise
443 443
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content: bytes, f_path: bytes, branch: str = None):
        """
        Commits changes

        :param repo: SCM instance (vcs backend repository object)
        :param repo_name: repository name, used for cache invalidation
            and the post-push hook
        :param commit: parent commit the change is based on
        :param user: RhodeCode User object or user_id, the committer
        :param author: commit author string
        :param message: commit message
        :param content: new file content
        :param f_path: path of the changed file
        :param branch: branch to commit to; defaults to the parent
            commit's branch
        :raises IMCCommitError: when the in-memory commit fails
        :returns: the newly created tip commit
        """
        user = self._get_user(user)

        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_str(message)
        author = safe_str(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=branch or commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
        return tip
474 474
475 475 def _sanitize_path(self, f_path: bytes):
476 476 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
477 477 raise NonRelativePathError(b'%b is not an relative path' % f_path)
478 478 if f_path:
479 479 f_path = os.path.normpath(f_path)
480 480 return f_path
481 481
    def get_dirnode_metadata(self, request, commit, dir_node):
        """
        Collect display metadata (size, last-commit info, author gravatar)
        for every direct file child of *dir_node*.

        :returns: list of dicts; empty list when *dir_node* is not a dir
        """
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # skip anything that is not a regular file node
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data
508 508
    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        recursive walk in root dir and return a set of all path in that dir
        based on repository walk function

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list, if False returns a dict with description
        :param extended_info: show additional info such as md5, binary, size etc
        :param content: add nodes content to the return data
        :param max_file_bytes: will not return file contents over this limit

        :returns: tuple of (dirs, files)
        """
        _files = list()
        _dirs = list()

        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')

            # get RootNode, inject pre-load options before walking
            top_node = commit.get_node(root_path)
            extended_info_pre_load = []
            if extended_info:
                extended_info_pre_load += ['md5']
            top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load

            for __, dirs, files in commit.walk(top_node):

                for f in files:
                    _content = None  # NOTE(review): unused; kept for parity
                    _data = f_name = f.str_path

                    if not flat:
                        _data = {
                            "name": h.escape(f_name),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            # content is skipped for binary files and files
                            # over the configured size limit
                            over_size_limit = (max_file_bytes is not None
                                               and f.size > max_file_bytes)
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = f.str_content

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)

                for d in dirs:
                    _data = d_name = d.str_path
                    if not flat:
                        _data = {
                            "name": h.escape(d_name),
                            "type": "dir",
                        }
                        if extended_info:
                            # dirs carry placeholder metadata only
                            _data.update({
                                "md5": "",
                                "binary": False,
                                "size": 0,
                                "extension": "",
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_nodes")
            raise

        return _dirs, _files
596 596
597 597 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
598 598 """
599 599 Generate files for quick filter in files view
600 600 """
601 601
602 602 _files = list()
603 603 _dirs = list()
604 604 try:
605 605 _repo = self._get_repo(repo_name)
606 606 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
607 607 root_path = root_path.lstrip('/')
608 608
609 609 top_node = commit.get_node(root_path)
610 610 top_node.default_pre_load = []
611 611
612 612 for __, dirs, files in commit.walk(top_node):
613 613 for f in files:
614 614
615 615 _data = {
616 616 "name": h.escape(f.str_path),
617 617 "type": "file",
618 618 }
619 619
620 620 _files.append(_data)
621 621
622 622 for d in dirs:
623 623
624 624 _data = {
625 625 "name": h.escape(d.str_path),
626 626 "type": "dir",
627 627 }
628 628
629 629 _dirs.append(_data)
630 630 except RepositoryError:
631 631 log.exception("Exception in get_quick_filter_nodes")
632 632 raise
633 633
634 634 return _dirs, _files
635 635
    def get_node(self, repo_name, commit_id, file_path,
                 extended_info=False, content=False, max_file_bytes=None, cache=True):
        """
        retrieve single node from commit

        :param repo_name: name of repository
        :param commit_id: commit to read from
        :param file_path: path of the file node
        :param extended_info: include extension/mimetype
        :param content: include file content (subject to max_file_bytes)
        :param max_file_bytes: skip content for files over this size
        :param cache: read node metadata/content through the cache layer
        :raises RepositoryError: when the path points at a directory
        :returns: dict describing the file node
        """

        try:

            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)

            file_node = commit.get_node(file_path)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')

            _content = None
            f_name = file_node.str_path

            file_data = {
                "name": h.escape(f_name),
                "type": "file",
            }

            if extended_info:
                file_data.update({
                    "extension": file_node.extension,
                    "mimetype": file_node.mimetype,
                })

            if cache:
                # cached attribute access
                md5 = file_node.md5
                is_binary = file_node.is_binary
                size = file_node.size
            else:
                # single uncached read also yields the raw content,
                # reused below when content was requested
                is_binary, md5, size, _content = file_node.metadata_uncached()

            file_data.update({
                "md5": md5,
                "binary": is_binary,
                "size": size,
            })

            if content and cache:
                # get content + cache
                size = file_node.size
                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not file_node.is_binary and not over_size_limit:
                    full_content = safe_str(file_node.content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })
            elif content:
                # get content *without* cache
                if _content is None:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not is_binary and not over_size_limit:
                    full_content = safe_str(_content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })

        except RepositoryError:
            log.exception("Exception in get_node")
            raise

        return file_data
714 714
715 715 def get_fts_data(self, repo_name, commit_id, root_path='/'):
716 716 """
717 717 Fetch node tree for usage in full text search
718 718 """
719 719
720 720 tree_info = list()
721 721
722 722 try:
723 723 _repo = self._get_repo(repo_name)
724 724 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
725 725 root_path = root_path.lstrip('/')
726 726 top_node = commit.get_node(root_path)
727 727 top_node.default_pre_load = []
728 728
729 729 for __, dirs, files in commit.walk(top_node):
730 730
731 731 for f in files:
732 732 is_binary, md5, size, _content = f.metadata_uncached()
733 733 _data = {
734 734 "name": f.str_path,
735 735 "md5": md5,
736 736 "extension": f.extension,
737 737 "binary": is_binary,
738 738 "size": size
739 739 }
740 740
741 741 tree_info.append(_data)
742 742
743 743 except RepositoryError:
744 744 log.exception("Exception in get_nodes")
745 745 raise
746 746
747 747 return tree_info
748 748
    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the commiter
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty than it's
            initial commit
        :param author: author of commit, cna be different that commiter
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_str(message)
        commiter = user.full_contact
        author = safe_str(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]

        upload_file_types = (io.BytesIO, io.BufferedRandom)
        processed_nodes = []
        # first pass: validate all keys/values before touching the repo
        for filename, content_dict in nodes.items():
            if not isinstance(filename, bytes):
                raise ValueError(f'filename key in nodes needs to be bytes , or {upload_file_types}')
            content = content_dict['content']
            if not isinstance(content, upload_file_types + (bytes,)):
                raise ValueError('content key value in nodes needs to be bytes')

        # second pass: sanitize paths and normalize content to bytes
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # NOTE(review): if _sanitize_path normalizes the path, this
            # lookup with the *normalized* key can raise KeyError — verify
            # callers always pass already-normalized paths
            content = nodes[f_path]['content']

            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit

            if isinstance(content, bytes):
                pass
            elif isinstance(content, upload_file_types):
                content = content.read()
            else:
                raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
            processed_nodes.append((f_path, content))

        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                hook_type='post_push',
                commit_ids=[tip.raw_id])
        return tip
827 827
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Apply add/del/mod operations on multiple nodes in one commit.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {old_filename: {'filename', 'content',
            'op' ('add'|'del'|'mod'), optional 'mode'}}
        :param parent_commit: parent commit; empty means initial commit
        :param author: commit author, may differ from committer (git only)
        :param trigger_push_hook: trigger push hooks
        :raises NodeNotChangedError: when a 'mod' op changes nothing
        :raises IMCCommitError: on any other commit failure
        :returns: the new tip commit
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_str(message)
        commiter = user.full_contact
        author = safe_str(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one; also sanitize
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']
            file_mode = data.get('mode')
            filenode = FileNode(old_filename, content=content, mode=file_mode)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficient, needs vcs lib changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content, mode=file_mode))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            # no-op modification is a distinct, caller-handled condition
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])

        return tip
892 892
    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes into `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty than it's initial
            commit
        :param author: author of commit, cna be different that commiter only
            for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty but for compatibility it allows same dicts
            # structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((safe_bytes(f_path), content))

        message = safe_str(message)
        commiter = user.full_contact
        author = safe_str(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
953 953
    def strip(self, repo, commit_id, branch):
        """
        Strip (remove) *commit_id* and its descendants on *branch* from
        *repo*, then invalidate the repository caches.
        """
        scm_instance = repo.scm_instance(cache=False)
        # disable repo hooks for this internal operation
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)
959 959
960 960 def get_unread_journal(self):
961 961 return self.sa.query(UserLog).count()
962 962
963 963 @classmethod
964 964 def backend_landing_ref(cls, repo_type):
965 965 """
966 966 Return a default landing ref based on a repository type.
967 967 """
968 968
969 969 landing_ref = {
970 970 'hg': ('branch:default', 'default'),
971 971 'git': ('branch:master', 'master'),
972 972 'svn': ('rev:tip', 'latest tip'),
973 973 'default': ('rev:tip', 'latest tip'),
974 974 }
975 975
976 976 return landing_ref.get(repo_type) or landing_ref['default']
977 977
    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select option with tags branches and bookmarks (for hg only)
        grouped by type

        :param translator: i18n translator callable
        :param repo: repository object or name; None at repo-creation time
        :returns: tuple of (choices, ref_options)
        """
        # NOTE(review): GitRepository import appears unused here — confirm
        from rhodecode.lib.vcs.backends.git import GitRepository

        _ = translator
        repo = self._get_repo(repo)

        if repo:
            repo_type = repo.repo_type
        else:
            repo_type = 'default'

        default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)

        default_ref_options = [
            [default_landing_ref, landing_ref_lbl]
        ]
        default_choices = [
            default_landing_ref
        ]

        if not repo:
            # presented at NEW repo creation
            return default_choices, default_ref_options

        repo = repo.scm_instance()

        ref_options = [(default_landing_ref, landing_ref_lbl)]
        choices = [default_landing_ref]

        # branches
        branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
        if not branch_group:
            # new repo, or without maybe a branch?
            branch_group = default_ref_options

        branches_group = (branch_group, _("Branches"))
        ref_options.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        # bookmarks for HG
        if repo.alias == 'hg':
            bookmarks_group = (
                [(f'book:{safe_str(b)}', safe_str(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            ref_options.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        # tags
        tags_group = (
            [(f'tag:{safe_str(t)}', safe_str(t))
             for t in repo.tags],
            _("Tags"))
        ref_options.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, ref_options
1041 1041
1042 1042 def get_server_info(self, environ=None):
1043 1043 server_info = get_system_info(environ)
1044 1044 return server_info
@@ -1,1735 +1,1750 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import collections
21 21 import datetime
22 22 import os
23 23 import re
24 24 import pprint
25 25 import shutil
26 26 import socket
27 27 import subprocess
28 28 import time
29 29 import uuid
30 30 import dateutil.tz
31 31 import logging
32 32 import functools
33 33
34 34 import mock
35 35 import pyramid.testing
36 36 import pytest
37 37 import colander
38 38 import requests
39 39 import pyramid.paster
40 40
41 41 import rhodecode
42 42 import rhodecode.lib
43 43 from rhodecode.model.changeset_status import ChangesetStatusModel
44 44 from rhodecode.model.comment import CommentsModel
45 45 from rhodecode.model.db import (
46 46 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
47 47 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 48 from rhodecode.model.meta import Session
49 49 from rhodecode.model.pull_request import PullRequestModel
50 50 from rhodecode.model.repo import RepoModel
51 51 from rhodecode.model.repo_group import RepoGroupModel
52 52 from rhodecode.model.user import UserModel
53 53 from rhodecode.model.settings import VcsSettingsModel
54 54 from rhodecode.model.user_group import UserGroupModel
55 55 from rhodecode.model.integration import IntegrationModel
56 56 from rhodecode.integrations import integration_type_registry
57 57 from rhodecode.integrations.types.base import IntegrationTypeBase
58 58 from rhodecode.lib.utils import repo2db_mapper
59 59 from rhodecode.lib.str_utils import safe_bytes
60 60 from rhodecode.lib.hash_utils import sha1_safe
61 61 from rhodecode.lib.vcs.backends import get_backend
62 62 from rhodecode.lib.vcs.nodes import FileNode
63 63 from rhodecode.tests import (
64 64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 66 TEST_USER_REGULAR_PASS)
67 67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 68 from rhodecode.tests.fixture import Fixture
69 69 from rhodecode.config import utils as config_utils
70 70
71 71 log = logging.getLogger(__name__)
72 72
73 73
74 74 def cmp(a, b):
75 75 # backport cmp from python2 so we can still use it in the custom code in this module
76 76 return (a > b) - (a < b)
77 77
78 78
79 79 @pytest.fixture(scope='session', autouse=True)
80 80 def activate_example_rcextensions(request):
81 81 """
82 82 Patch in an example rcextensions module which verifies passed in kwargs.
83 83 """
84 84 from rhodecode.config import rcextensions
85 85
86 86 old_extensions = rhodecode.EXTENSIONS
87 87 rhodecode.EXTENSIONS = rcextensions
88 88 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
89 89
90 90 @request.addfinalizer
91 91 def cleanup():
92 92 rhodecode.EXTENSIONS = old_extensions
93 93
94 94
95 95 @pytest.fixture()
96 96 def capture_rcextensions():
97 97 """
98 98 Returns the recorded calls to entry points in rcextensions.
99 99 """
100 100 calls = rhodecode.EXTENSIONS.calls
101 101 calls.clear()
102 102 # Note: At this moment, it is still the empty dict, but that will
103 103 # be filled during the test run and since it is a reference this
104 104 # is enough to make it work.
105 105 return calls
106 106
107 107
108 108 @pytest.fixture(scope='session')
109 109 def http_environ_session():
110 110 """
111 111 Allow to use "http_environ" in session scope.
112 112 """
113 113 return plain_http_environ()
114 114
115 115
116 116 def plain_http_host_stub():
117 117 """
118 118 Value of HTTP_HOST in the test run.
119 119 """
120 120 return 'example.com:80'
121 121
122 122
123 123 @pytest.fixture()
124 124 def http_host_stub():
125 125 """
126 126 Value of HTTP_HOST in the test run.
127 127 """
128 128 return plain_http_host_stub()
129 129
130 130
131 131 def plain_http_host_only_stub():
132 132 """
133 133 Value of HTTP_HOST in the test run.
134 134 """
135 135 return plain_http_host_stub().split(':')[0]
136 136
137 137
138 138 @pytest.fixture()
139 139 def http_host_only_stub():
140 140 """
141 141 Value of HTTP_HOST in the test run.
142 142 """
143 143 return plain_http_host_only_stub()
144 144
145 145
146 146 def plain_http_environ():
147 147 """
148 148 HTTP extra environ keys.
149 149
150 150 User by the test application and as well for setting up the pylons
151 151 environment. In the case of the fixture "app" it should be possible
152 152 to override this for a specific test case.
153 153 """
154 154 return {
155 155 'SERVER_NAME': plain_http_host_only_stub(),
156 156 'SERVER_PORT': plain_http_host_stub().split(':')[1],
157 157 'HTTP_HOST': plain_http_host_stub(),
158 158 'HTTP_USER_AGENT': 'rc-test-agent',
159 159 'REQUEST_METHOD': 'GET'
160 160 }
161 161
162 162
163 163 @pytest.fixture()
164 164 def http_environ():
165 165 """
166 166 HTTP extra environ keys.
167 167
168 168 User by the test application and as well for setting up the pylons
169 169 environment. In the case of the fixture "app" it should be possible
170 170 to override this for a specific test case.
171 171 """
172 172 return plain_http_environ()
173 173
174 174
175 175 @pytest.fixture(scope='session')
176 176 def baseapp(ini_config, vcsserver, http_environ_session):
177 177 from rhodecode.lib.pyramid_utils import get_app_config
178 178 from rhodecode.config.middleware import make_pyramid_app
179 179
180 180 log.info("Using the RhodeCode configuration:{}".format(ini_config))
181 181 pyramid.paster.setup_logging(ini_config)
182 182
183 183 settings = get_app_config(ini_config)
184 184 app = make_pyramid_app({'__file__': ini_config}, **settings)
185 185
186 186 return app
187 187
188 188
189 189 @pytest.fixture(scope='function')
190 190 def app(request, config_stub, baseapp, http_environ):
191 191 app = CustomTestApp(
192 192 baseapp,
193 193 extra_environ=http_environ)
194 194 if request.cls:
195 195 request.cls.app = app
196 196 return app
197 197
198 198
199 199 @pytest.fixture(scope='session')
200 200 def app_settings(baseapp, ini_config):
201 201 """
202 202 Settings dictionary used to create the app.
203 203
204 204 Parses the ini file and passes the result through the sanitize and apply
205 205 defaults mechanism in `rhodecode.config.middleware`.
206 206 """
207 207 return baseapp.config.get_settings()
208 208
209 209
210 210 @pytest.fixture(scope='session')
211 211 def db_connection(ini_settings):
212 212 # Initialize the database connection.
213 213 config_utils.initialize_database(ini_settings)
214 214
215 215
216 216 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
217 217
218 218
219 219 def _autologin_user(app, *args):
220 220 session = login_user_session(app, *args)
221 221 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
222 222 return LoginData(csrf_token, session['rhodecode_user'])
223 223
224 224
225 225 @pytest.fixture()
226 226 def autologin_user(app):
227 227 """
228 228 Utility fixture which makes sure that the admin user is logged in
229 229 """
230 230 return _autologin_user(app)
231 231
232 232
233 233 @pytest.fixture()
234 234 def autologin_regular_user(app):
235 235 """
236 236 Utility fixture which makes sure that the regular user is logged in
237 237 """
238 238 return _autologin_user(
239 239 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
240 240
241 241
242 242 @pytest.fixture(scope='function')
243 243 def csrf_token(request, autologin_user):
244 244 return autologin_user.csrf_token
245 245
246 246
247 247 @pytest.fixture(scope='function')
248 248 def xhr_header(request):
249 249 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
250 250
251 251
252 252 @pytest.fixture()
253 253 def real_crypto_backend(monkeypatch):
254 254 """
255 255 Switch the production crypto backend on for this test.
256 256
257 257 During the test run the crypto backend is replaced with a faster
258 258 implementation based on the MD5 algorithm.
259 259 """
260 260 monkeypatch.setattr(rhodecode, 'is_test', False)
261 261
262 262
263 263 @pytest.fixture(scope='class')
264 264 def index_location(request, baseapp):
265 265 index_location = baseapp.config.get_settings()['search.location']
266 266 if request.cls:
267 267 request.cls.index_location = index_location
268 268 return index_location
269 269
270 270
271 271 @pytest.fixture(scope='session', autouse=True)
272 272 def tests_tmp_path(request):
273 273 """
274 274 Create temporary directory to be used during the test session.
275 275 """
276 276 if not os.path.exists(TESTS_TMP_PATH):
277 277 os.makedirs(TESTS_TMP_PATH)
278 278
279 279 if not request.config.getoption('--keep-tmp-path'):
280 280 @request.addfinalizer
281 281 def remove_tmp_path():
282 282 shutil.rmtree(TESTS_TMP_PATH)
283 283
284 284 return TESTS_TMP_PATH
285 285
286 286
287 287 @pytest.fixture()
288 288 def test_repo_group(request):
289 289 """
290 290 Create a temporary repository group, and destroy it after
291 291 usage automatically
292 292 """
293 293 fixture = Fixture()
294 294 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
295 295 repo_group = fixture.create_repo_group(repogroupid)
296 296
297 297 def _cleanup():
298 298 fixture.destroy_repo_group(repogroupid)
299 299
300 300 request.addfinalizer(_cleanup)
301 301 return repo_group
302 302
303 303
304 304 @pytest.fixture()
305 305 def test_user_group(request):
306 306 """
307 307 Create a temporary user group, and destroy it after
308 308 usage automatically
309 309 """
310 310 fixture = Fixture()
311 311 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
312 312 user_group = fixture.create_user_group(usergroupid)
313 313
314 314 def _cleanup():
315 315 fixture.destroy_user_group(user_group)
316 316
317 317 request.addfinalizer(_cleanup)
318 318 return user_group
319 319
320 320
321 321 @pytest.fixture(scope='session')
322 322 def test_repo(request):
323 323 container = TestRepoContainer()
324 324 request.addfinalizer(container._cleanup)
325 325 return container
326 326
327 327
328 328 class TestRepoContainer(object):
329 329 """
330 330 Container for test repositories which are used read only.
331 331
332 332 Repositories will be created on demand and re-used during the lifetime
333 333 of this object.
334 334
335 335 Usage to get the svn test repository "minimal"::
336 336
337 337 test_repo = TestContainer()
338 338 repo = test_repo('minimal', 'svn')
339 339
340 340 """
341 341
342 342 dump_extractors = {
343 343 'git': utils.extract_git_repo_from_dump,
344 344 'hg': utils.extract_hg_repo_from_dump,
345 345 'svn': utils.extract_svn_repo_from_dump,
346 346 }
347 347
348 348 def __init__(self):
349 349 self._cleanup_repos = []
350 350 self._fixture = Fixture()
351 351 self._repos = {}
352 352
353 353 def __call__(self, dump_name, backend_alias, config=None):
354 354 key = (dump_name, backend_alias)
355 355 if key not in self._repos:
356 356 repo = self._create_repo(dump_name, backend_alias, config)
357 357 self._repos[key] = repo.repo_id
358 358 return Repository.get(self._repos[key])
359 359
360 360 def _create_repo(self, dump_name, backend_alias, config):
361 361 repo_name = '%s-%s' % (backend_alias, dump_name)
362 362 backend = get_backend(backend_alias)
363 363 dump_extractor = self.dump_extractors[backend_alias]
364 364 repo_path = dump_extractor(dump_name, repo_name)
365 365
366 366 vcs_repo = backend(repo_path, config=config)
367 367 repo2db_mapper({repo_name: vcs_repo})
368 368
369 369 repo = RepoModel().get_by_repo_name(repo_name)
370 370 self._cleanup_repos.append(repo_name)
371 371 return repo
372 372
373 373 def _cleanup(self):
374 374 for repo_name in reversed(self._cleanup_repos):
375 375 self._fixture.destroy_repo(repo_name)
376 376
377 377
378 378 def backend_base(request, backend_alias, baseapp, test_repo):
379 379 if backend_alias not in request.config.getoption('--backends'):
380 380 pytest.skip("Backend %s not selected." % (backend_alias, ))
381 381
382 382 utils.check_xfail_backends(request.node, backend_alias)
383 383 utils.check_skip_backends(request.node, backend_alias)
384 384
385 385 repo_name = 'vcs_test_%s' % (backend_alias, )
386 386 backend = Backend(
387 387 alias=backend_alias,
388 388 repo_name=repo_name,
389 389 test_name=request.node.name,
390 390 test_repo_container=test_repo)
391 391 request.addfinalizer(backend.cleanup)
392 392 return backend
393 393
394 394
395 395 @pytest.fixture()
396 396 def backend(request, backend_alias, baseapp, test_repo):
397 397 """
398 398 Parametrized fixture which represents a single backend implementation.
399 399
400 400 It respects the option `--backends` to focus the test run on specific
401 401 backend implementations.
402 402
403 403 It also supports `pytest.mark.xfail_backends` to mark tests as failing
404 404 for specific backends. This is intended as a utility for incremental
405 405 development of a new backend implementation.
406 406 """
407 407 return backend_base(request, backend_alias, baseapp, test_repo)
408 408
409 409
410 410 @pytest.fixture()
411 411 def backend_git(request, baseapp, test_repo):
412 412 return backend_base(request, 'git', baseapp, test_repo)
413 413
414 414
415 415 @pytest.fixture()
416 416 def backend_hg(request, baseapp, test_repo):
417 417 return backend_base(request, 'hg', baseapp, test_repo)
418 418
419 419
420 420 @pytest.fixture()
421 421 def backend_svn(request, baseapp, test_repo):
422 422 return backend_base(request, 'svn', baseapp, test_repo)
423 423
424 424
425 425 @pytest.fixture()
426 426 def backend_random(backend_git):
427 427 """
428 428 Use this to express that your tests need "a backend.
429 429
430 430 A few of our tests need a backend, so that we can run the code. This
431 431 fixture is intended to be used for such cases. It will pick one of the
432 432 backends and run the tests.
433 433
434 434 The fixture `backend` would run the test multiple times for each
435 435 available backend which is a pure waste of time if the test is
436 436 independent of the backend type.
437 437 """
438 438 # TODO: johbo: Change this to pick a random backend
439 439 return backend_git
440 440
441 441
442 442 @pytest.fixture()
443 443 def backend_stub(backend_git):
444 444 """
445 445 Use this to express that your tests need a backend stub
446 446
447 447 TODO: mikhail: Implement a real stub logic instead of returning
448 448 a git backend
449 449 """
450 450 return backend_git
451 451
452 452
453 453 @pytest.fixture()
454 454 def repo_stub(backend_stub):
455 455 """
456 456 Use this to express that your tests need a repository stub
457 457 """
458 458 return backend_stub.create_repo()
459 459
460 460
461 461 class Backend(object):
462 462 """
463 463 Represents the test configuration for one supported backend
464 464
465 465 Provides easy access to different test repositories based on
466 466 `__getitem__`. Such repositories will only be created once per test
467 467 session.
468 468 """
469 469
470 470 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
471 471 _master_repo = None
472 472 _master_repo_path = ''
473 473 _commit_ids = {}
474 474
475 475 def __init__(self, alias, repo_name, test_name, test_repo_container):
476 476 self.alias = alias
477 477 self.repo_name = repo_name
478 478 self._cleanup_repos = []
479 479 self._test_name = test_name
480 480 self._test_repo_container = test_repo_container
481 481 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
482 482 # Fixture will survive in the end.
483 483 self._fixture = Fixture()
484 484
485 485 def __getitem__(self, key):
486 486 return self._test_repo_container(key, self.alias)
487 487
488 488 def create_test_repo(self, key, config=None):
489 489 return self._test_repo_container(key, self.alias, config)
490 490
491 491 @property
492 492 def repo_id(self):
493 493 # just fake some repo_id
494 494 return self.repo.repo_id
495 495
496 496 @property
497 497 def repo(self):
498 498 """
499 499 Returns the "current" repository. This is the vcs_test repo or the
500 500 last repo which has been created with `create_repo`.
501 501 """
502 502 from rhodecode.model.db import Repository
503 503 return Repository.get_by_repo_name(self.repo_name)
504 504
505 505 @property
506 506 def default_branch_name(self):
507 507 VcsRepository = get_backend(self.alias)
508 508 return VcsRepository.DEFAULT_BRANCH_NAME
509 509
510 510 @property
511 511 def default_head_id(self):
512 512 """
513 513 Returns the default head id of the underlying backend.
514 514
515 515 This will be the default branch name in case the backend does have a
516 516 default branch. In the other cases it will point to a valid head
517 517 which can serve as the base to create a new commit on top of it.
518 518 """
519 519 vcsrepo = self.repo.scm_instance()
520 520 head_id = (
521 521 vcsrepo.DEFAULT_BRANCH_NAME or
522 522 vcsrepo.commit_ids[-1])
523 523 return head_id
524 524
525 525 @property
526 526 def commit_ids(self):
527 527 """
528 528 Returns the list of commits for the last created repository
529 529 """
530 530 return self._commit_ids
531 531
532 532 def create_master_repo(self, commits):
533 533 """
534 534 Create a repository and remember it as a template.
535 535
536 536 This allows to easily create derived repositories to construct
537 537 more complex scenarios for diff, compare and pull requests.
538 538
539 539 Returns a commit map which maps from commit message to raw_id.
540 540 """
541 541 self._master_repo = self.create_repo(commits=commits)
542 542 self._master_repo_path = self._master_repo.repo_full_path
543 543
544 544 return self._commit_ids
545 545
546 546 def create_repo(
547 547 self, commits=None, number_of_commits=0, heads=None,
548 548 name_suffix='', bare=False, **kwargs):
549 549 """
550 550 Create a repository and record it for later cleanup.
551 551
552 552 :param commits: Optional. A sequence of dict instances.
553 553 Will add a commit per entry to the new repository.
554 554 :param number_of_commits: Optional. If set to a number, this number of
555 555 commits will be added to the new repository.
556 556 :param heads: Optional. Can be set to a sequence of of commit
557 557 names which shall be pulled in from the master repository.
558 558 :param name_suffix: adds special suffix to generated repo name
559 559 :param bare: set a repo as bare (no checkout)
560 560 """
561 561 self.repo_name = self._next_repo_name() + name_suffix
562 562 repo = self._fixture.create_repo(
563 563 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
564 564 self._cleanup_repos.append(repo.repo_name)
565 565
566 566 commits = commits or [
567 {'message': 'Commit %s of %s' % (x, self.repo_name)}
567 {'message': f'Commit {x} of {self.repo_name}'}
568 568 for x in range(number_of_commits)]
569 569 vcs_repo = repo.scm_instance()
570 570 vcs_repo.count()
571 571 self._add_commits_to_repo(vcs_repo, commits)
572 572 if heads:
573 573 self.pull_heads(repo, heads)
574 574
575 575 return repo
576 576
577 def pull_heads(self, repo, heads):
577 def pull_heads(self, repo, heads, do_fetch=False):
578 578 """
579 579 Make sure that repo contains all commits mentioned in `heads`
580 580 """
581 581 vcsrepo = repo.scm_instance()
582 582 vcsrepo.config.clear_section('hooks')
583 583 commit_ids = [self._commit_ids[h] for h in heads]
584 if do_fetch:
585 vcsrepo.fetch(self._master_repo_path, commit_ids=commit_ids)
584 586 vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)
585 587
586 588 def create_fork(self):
587 589 repo_to_fork = self.repo_name
588 590 self.repo_name = self._next_repo_name()
589 591 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
590 592 self._cleanup_repos.append(self.repo_name)
591 593 return repo
592 594
593 595 def new_repo_name(self, suffix=''):
594 596 self.repo_name = self._next_repo_name() + suffix
595 597 self._cleanup_repos.append(self.repo_name)
596 598 return self.repo_name
597 599
598 600 def _next_repo_name(self):
599 return u"%s_%s" % (
601 return "%s_%s" % (
600 602 self.invalid_repo_name.sub('_', self._test_name), len(self._cleanup_repos))
601 603
602 def ensure_file(self, filename, content='Test content\n'):
604 def ensure_file(self, filename, content=b'Test content\n'):
603 605 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
604 606 commits = [
605 607 {'added': [
606 608 FileNode(filename, content=content),
607 609 ]},
608 610 ]
609 611 self._add_commits_to_repo(self.repo.scm_instance(), commits)
610 612
611 613 def enable_downloads(self):
612 614 repo = self.repo
613 615 repo.enable_downloads = True
614 616 Session().add(repo)
615 617 Session().commit()
616 618
617 619 def cleanup(self):
618 620 for repo_name in reversed(self._cleanup_repos):
619 621 self._fixture.destroy_repo(repo_name)
620 622
621 623 def _add_commits_to_repo(self, repo, commits):
622 624 commit_ids = _add_commits_to_repo(repo, commits)
623 625 if not commit_ids:
624 626 return
625 627 self._commit_ids = commit_ids
626 628
627 629 # Creating refs for Git to allow fetching them from remote repository
628 630 if self.alias == 'git':
629 631 refs = {}
630 632 for message in self._commit_ids:
631 # TODO: mikhail: do more special chars replacements
632 ref_name = 'refs/test-refs/{}'.format(
633 message.replace(' ', ''))
633 cleanup_message = message.replace(' ', '')
634 ref_name = f'refs/test-refs/{cleanup_message}'
634 635 refs[ref_name] = self._commit_ids[message]
635 636 self._create_refs(repo, refs)
636 637
637 638 def _create_refs(self, repo, refs):
638 for ref_name in refs:
639 repo.set_refs(ref_name, refs[ref_name])
639 for ref_name, ref_val in refs.items():
640 repo.set_refs(ref_name, ref_val)
640 641
641 642
642 643 class VcsBackend(object):
643 644 """
644 645 Represents the test configuration for one supported vcs backend.
645 646 """
646 647
647 648 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
648 649
649 650 def __init__(self, alias, repo_path, test_name, test_repo_container):
650 651 self.alias = alias
651 652 self._repo_path = repo_path
652 653 self._cleanup_repos = []
653 654 self._test_name = test_name
654 655 self._test_repo_container = test_repo_container
655 656
656 657 def __getitem__(self, key):
657 658 return self._test_repo_container(key, self.alias).scm_instance()
658 659
659 660 def __repr__(self):
660 661 return f'{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})'
661 662
662 663 @property
663 664 def repo(self):
664 665 """
665 666 Returns the "current" repository. This is the vcs_test repo of the last
666 667 repo which has been created.
667 668 """
668 669 Repository = get_backend(self.alias)
669 670 return Repository(self._repo_path)
670 671
671 672 @property
672 673 def backend(self):
673 674 """
674 675 Returns the backend implementation class.
675 676 """
676 677 return get_backend(self.alias)
677 678
678 679 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
679 680 bare=False):
680 681 repo_name = self._next_repo_name()
681 682 self._repo_path = get_new_dir(repo_name)
682 683 repo_class = get_backend(self.alias)
683 684 src_url = None
684 685 if _clone_repo:
685 686 src_url = _clone_repo.path
686 687 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
687 688 self._cleanup_repos.append(repo)
688 689
689 690 commits = commits or [
690 691 {'message': 'Commit %s of %s' % (x, repo_name)}
691 692 for x in range(number_of_commits)]
692 693 _add_commits_to_repo(repo, commits)
693 694 return repo
694 695
695 696 def clone_repo(self, repo):
696 697 return self.create_repo(_clone_repo=repo)
697 698
698 699 def cleanup(self):
699 700 for repo in self._cleanup_repos:
700 701 shutil.rmtree(repo.path)
701 702
702 703 def new_repo_path(self):
703 704 repo_name = self._next_repo_name()
704 705 self._repo_path = get_new_dir(repo_name)
705 706 return self._repo_path
706 707
707 708 def _next_repo_name(self):
708 709
709 710 return "{}_{}".format(
710 711 self.invalid_repo_name.sub('_', self._test_name),
711 712 len(self._cleanup_repos)
712 713 )
713 714
714 715 def add_file(self, repo, filename, content='Test content\n'):
715 716 imc = repo.in_memory_commit
716 717 imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content)))
717 718 imc.commit(
718 719 message='Automatic commit from vcsbackend fixture',
719 720 author='Automatic <automatic@rhodecode.com>')
720 721
721 722 def ensure_file(self, filename, content='Test content\n'):
722 723 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
723 724 self.add_file(self.repo, filename, content)
724 725
725 726
726 727 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend:
727 728 if backend_alias not in request.config.getoption('--backends'):
728 729 pytest.skip("Backend %s not selected." % (backend_alias, ))
729 730
730 731 utils.check_xfail_backends(request.node, backend_alias)
731 732 utils.check_skip_backends(request.node, backend_alias)
732 733
733 734 repo_name = f'vcs_test_{backend_alias}'
734 735 repo_path = os.path.join(tests_tmp_path, repo_name)
735 736 backend = VcsBackend(
736 737 alias=backend_alias,
737 738 repo_path=repo_path,
738 739 test_name=request.node.name,
739 740 test_repo_container=test_repo)
740 741 request.addfinalizer(backend.cleanup)
741 742 return backend
742 743
743 744
744 745 @pytest.fixture()
745 746 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
746 747 """
747 748 Parametrized fixture which represents a single vcs backend implementation.
748 749
749 750 See the fixture `backend` for more details. This one implements the same
750 751 concept, but on vcs level. So it does not provide model instances etc.
751 752
752 753 Parameters are generated dynamically, see :func:`pytest_generate_tests`
753 754 for how this works.
754 755 """
755 756 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
756 757
757 758
758 759 @pytest.fixture()
759 760 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
760 761 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
761 762
762 763
763 764 @pytest.fixture()
764 765 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
765 766 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
766 767
767 768
768 769 @pytest.fixture()
769 770 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
770 771 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
771 772
772 773
773 774 @pytest.fixture()
774 775 def vcsbackend_stub(vcsbackend_git):
775 776 """
776 777 Use this to express that your test just needs a stub of a vcsbackend.
777 778
778 779 Plan is to eventually implement an in-memory stub to speed tests up.
779 780 """
780 781 return vcsbackend_git
781 782
782 783
783 784 def _add_commits_to_repo(vcs_repo, commits):
784 785 commit_ids = {}
785 786 if not commits:
786 787 return commit_ids
787 788
788 789 imc = vcs_repo.in_memory_commit
789 commit = None
790 790
791 791 for idx, commit in enumerate(commits):
792 message = str(commit.get('message', 'Commit %s' % idx))
792 message = str(commit.get('message', f'Commit {idx}'))
793 793
794 794 for node in commit.get('added', []):
795 795 imc.add(FileNode(safe_bytes(node.path), content=node.content))
796 796 for node in commit.get('changed', []):
797 797 imc.change(FileNode(safe_bytes(node.path), content=node.content))
798 798 for node in commit.get('removed', []):
799 799 imc.remove(FileNode(safe_bytes(node.path)))
800 800
801 801 parents = [
802 802 vcs_repo.get_commit(commit_id=commit_ids[p])
803 803 for p in commit.get('parents', [])]
804 804
805 805 operations = ('added', 'changed', 'removed')
806 806 if not any((commit.get(o) for o in operations)):
807 807 imc.add(FileNode(b'file_%b' % safe_bytes(str(idx)), content=safe_bytes(message)))
808 808
809 809 commit = imc.commit(
810 810 message=message,
811 811 author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
812 812 date=commit.get('date'),
813 813 branch=commit.get('branch'),
814 814 parents=parents)
815 815
816 816 commit_ids[commit.message] = commit.raw_id
817 817
818 818 return commit_ids
819 819
820 820
821 821 @pytest.fixture()
822 822 def reposerver(request):
823 823 """
824 824 Allows to serve a backend repository
825 825 """
826 826
827 827 repo_server = RepoServer()
828 828 request.addfinalizer(repo_server.cleanup)
829 829 return repo_server
830 830
831 831
832 832 class RepoServer(object):
833 833 """
834 834 Utility to serve a local repository for the duration of a test case.
835 835
836 836 Supports only Subversion so far.
837 837 """
838 838
839 839 url = None
840 840
841 841 def __init__(self):
842 842 self._cleanup_servers = []
843 843
844 844 def serve(self, vcsrepo):
845 845 if vcsrepo.alias != 'svn':
846 846 raise TypeError("Backend %s not supported" % vcsrepo.alias)
847 847
848 848 proc = subprocess.Popen(
849 849 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
850 850 '--root', vcsrepo.path])
851 851 self._cleanup_servers.append(proc)
852 852 self.url = 'svn://localhost'
853 853
854 854 def cleanup(self):
855 855 for proc in self._cleanup_servers:
856 856 proc.terminate()
857 857
858 858
859 859 @pytest.fixture()
860 860 def pr_util(backend, request, config_stub):
861 861 """
862 862 Utility for tests of models and for functional tests around pull requests.
863 863
864 864 It gives an instance of :class:`PRTestUtility` which provides various
865 865 utility methods around one pull request.
866 866
867 867 This fixture uses `backend` and inherits its parameterization.
868 868 """
869 869
870 870 util = PRTestUtility(backend)
871 871 request.addfinalizer(util.cleanup)
872 872
873 873 return util
874 874
875 875
class PRTestUtility(object):
    """
    Helper around a single pull request, handed out by the `pr_util` fixture.

    Creates a master repo plus source/target clones via `backend`, opens a
    pull request between them, and tears everything down in :meth:`cleanup`.
    """

    # Class-level defaults; instance state is populated by create_pull_request().
    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None
    commit_ids: dict  # maps commit message -> commit id, filled by create_master_repo()

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix='', reviewers=None, observers=None,
            title="Test", description="Description"):
        """
        Create (once) and return the pull request managed by this utility.

        Subsequent calls return the already-created pull request unchanged.
        When `commits` is not given, a default c1/c2/c3 history is used with
        c1 as target head and c2 as the single source revision.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'observers': observers or self._get_observers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # a freshly created pull request must not have versions yet
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        """Cast an APPROVED status vote for every reviewer of the PR."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        """Close the managed pull request as its author."""
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message, branch: str = None) -> str:
        """Build a 'branch:<name>:<commit_id>' ref for the commit with `commit_message`."""
        default_branch = branch or self.backend.default_branch_name
        message = self.commit_ids[commit_message]
        reference = f'branch:{default_branch}:{message}'

        return reference

    def _get_reviewers(self):
        # tuples of (username, reasons, mandatory, role, rules)
        role = PullRequestReviewers.ROLE_REVIEWER
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
        ]

    def _get_observers(self):
        # no observers by default
        return [

        ]

    def update_source_repository(self, head=None, do_fetch=False):
        """Pull `head` (default 'c3') from the master repo into the source repo."""
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads, do_fetch=do_fetch)

    def update_target_repository(self, head=None, do_fetch=False):
        """Pull `head` (default 'c3') from the master repo into the target repo."""
        heads = [head or 'c3']
        self.backend.pull_heads(self.target_repository, heads=heads, do_fetch=do_fetch)

    def set_pr_target_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
        """Overwrite the PR target ref with '<type>:<name>:<commit_id>' and return it."""
        full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
        self.pull_request.target_ref = full_ref
        return full_ref

    def set_pr_source_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
        """Overwrite the PR source ref with '<type>:<name>:<commit_id>' and return it."""
        full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
        self.pull_request.source_ref = full_ref
        return full_ref

    def add_one_commit(self, head=None):
        """Pull one more head into the source repo, update the PR, return the new commit id."""
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        """Strip the tip commit from the source repo, update the PR, return its id."""
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """Create a general PR comment, optionally linking comments to version `linked_to`."""
        comment = CommentsModel().create(
            text="Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no='n1', file_path='file_1'):
        """Create an inline PR comment on `file_path` at `line_no`."""
        comment = CommentsModel().create(
            text="Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        """Create the pull request (if needed) and snapshot it as a new version."""
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        """Set `status` on the PR's target repo for each given reviewer."""
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        """Patch VcsSettingsModel so the PR-merge setting reports `value`."""
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        """Delete the pull request (if still present) and stop all patchers."""
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1066 1081
@pytest.fixture()
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1074 1089
1075 1090
@pytest.fixture()
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1083 1098
1084 1099
@pytest.fixture()
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    helper = UserUtility(test_name=request.node.name)
    request.addfinalizer(helper.cleanup)
    return helper
1093 1108
1094 1109
# TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):
    """
    Creates users, user groups, repos, repo groups and permissions for tests,
    recording their ids so that :meth:`cleanup` can destroy everything again.
    """

    def __init__(self, test_name="test"):
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        # id/key lists below track every created object for later teardown
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        # parametrized test node names contain brackets which are not safe here
        for char in ['[', ']']:
            name = name.replace(char, '_')
        return name

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        """Create a uniquely named repo group; tracked for cleanup if `auto_cleanup`."""
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
                    auto_cleanup=True, repo_type='hg', bare=False):
        """Create a uniquely named repository; tracked for cleanup if `auto_cleanup`."""
        repo_name = "{prefix}_repository_{count}".format(
            prefix=self._test_name,
            count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        """Create a uniquely named user; tracked for cleanup if `auto_cleanup`."""
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_additional_user_email(self, user, email):
        """Attach an extra email address to `user`."""
        uem = self.fixture.create_additional_user_email(user=user, email=email)
        return uem

    def create_user_with_group(self):
        """Create a user plus a user group containing that user."""
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
                          auto_cleanup=True, **kwargs):
        """Create a uniquely named user group, optionally adding `members`."""
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(
            group_name, cur_user=owner, **kwargs)

        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        """Record a global permission grant; also disables inherited defaults."""
        self.inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        """Revoke a global permission and re-enable inherited defaults."""
        self.inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def inherit_default_user_permissions(self, user_name, value):
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        """Destroy everything this utility created, permissions first."""
        self._cleanup_permissions()
        self._cleanup_repos()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            # NOTE(review): relies on a `cmp` helper defined elsewhere in this
            # module -- Python 3 has no builtin `cmp`; confirm it is provided.
            return cmp(second_group_parts, first_group_parts)

        sorted_repo_group_ids = sorted(
            self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_repos(self):
        sorted_repos_ids = sorted(self.repos_ids)
        for repo_id in sorted_repos_ids:
            self.fixture.destroy_repo(repo_id)

    def _cleanup_user_groups(self):
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            # NOTE(review): same `cmp` dependency as in _cleanup_repo_groups.
            return cmp(second_group_parts, first_group_parts)

        sorted_user_group_ids = sorted(
            self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)
1312 1327
1313 1328
@pytest.fixture(scope='session')
def testrun():
    """Session-scoped metadata identifying this test run."""
    return {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }
1321 1336
1322 1337
class AppenlightClient(object):
    """
    Collects per-test stats/tags and posts them to an Appenlight endpoint
    using protocol version 0.5.
    """

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        # fully qualified host name, reported with each stat when add_server is set
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        """Record a tag value measured before the test."""
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        """Record a tag value measured after the test."""
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue one stats record, filling in server/date/namespace/request defaults."""
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        """Post all queued stats (plus before/after/delta tags) to Appenlight."""
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                # delta only makes sense for numeric tags present both
                # before and after; anything else is silently skipped
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if not response.status_code == 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
1392 1407
1393 1408
@pytest.fixture()
def gist_util(request, db_connection):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    helper = GistUtility()
    request.addfinalizer(helper.cleanup)
    return helper
1402 1417
1403 1418
class GistUtility(object):
    """Creates gists via the fixture layer and destroys them on cleanup."""

    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        """Create one gist and remember its id for cleanup."""
        created = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(created.gist_id)
        return created

    def cleanup(self):
        """Destroy every gist created through this utility."""
        for gist_id in self.gist_ids:
            self.fixture.destroy_gists(str(gist_id))
1417 1432
1418 1433
@pytest.fixture()
def enabled_backends(request):
    """Return a copy of the backend names enabled via the --backends option."""
    return list(request.config.option.backends)
1423 1438
1424 1439
@pytest.fixture()
def settings_util(request, db_connection):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    helper = SettingsUtility()
    request.addfinalizer(helper.cleanup)
    return helper
1433 1448
1434 1449
class SettingsUtility(object):
    """
    Creates global and per-repo RhodeCode settings/UI rows for tests and
    deletes the tracked rows again in :meth:`cleanup`.
    """

    def __init__(self):
        # id lists below record created rows for later deletion
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        """Create a per-repo UI setting row; key defaults to a hash of its content."""
        key = key or sha1_safe(f'{section}{value}{repo.repo_id}')

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        """Create a global UI setting row; key defaults to a hash of its content."""
        key = key or sha1_safe(f'{section}{value}')

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        """Create a per-repo application setting row."""
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        """Create a global application setting row."""
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        """Delete every tracked setting row and commit once at the end."""
        for id_ in self.rhodecode_ui_ids:
            setting = RhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.rhodecode_setting_ids:
            setting = RhodeCodeSetting.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_ui_ids:
            setting = RepoRhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_setting_ids:
            setting = RepoRhodeCodeSetting.get(id_)
            Session().delete(setting)

        Session().commit()
1514 1529
1515 1530
@pytest.fixture()
def no_notifications(request):
    """Disable notification creation for the duration of the test."""
    patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    patcher.start()
    request.addfinalizer(patcher.stop)
1522 1537
1523 1538
@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. It is chosen in a way so that the
    tests are not too slow in our default test suite.
    """
    repetitions = request.config.getoption('--repeat')
    return repetitions
1533 1548
1534 1549
@pytest.fixture()
def rhodecode_fixtures():
    """Fresh `Fixture` helper instance for a test."""
    return Fixture()
1538 1553
1539 1554
@pytest.fixture()
def context_stub():
    """
    Stub context object.
    """
    return pyramid.testing.DummyResource()
1547 1562
1548 1563
@pytest.fixture()
def request_stub():
    """
    Stub request object.
    """
    from rhodecode.lib.base import bootstrap_request
    return bootstrap_request(scheme='https')
1557 1572
1558 1573
@pytest.fixture()
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator.
    """
    from rhodecode.lib.base import bootstrap_config
    configurator = bootstrap_config(request=request_stub)
    # tear pyramid.testing down when the test finishes
    request.addfinalizer(pyramid.testing.tearDown)
    return configurator
1572 1587
1573 1588
@pytest.fixture()
def StubIntegrationType():
    """
    Define, register and return a stub integration type class that records
    sent events for inspection by tests.
    """
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'

        @classmethod
        def icon(cls):
            return 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # for testing

        def send_event(self, event):
            # record instead of delivering, so tests can assert on events
            self.sent_events.append(event)

        def settings_schema(self):
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()

    # register globally so IntegrationModel can resolve the 'test' key
    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
1610 1625
1611 1626
@pytest.fixture()
def stub_integration_settings():
    """Valid settings payload matching _StubIntegrationType's schema."""
    settings = {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }
    return settings
1618 1633
1619 1634
@pytest.fixture()
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    """Enabled repo-scoped stub integration, deleted after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1633 1648
1634 1649
@pytest.fixture()
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    """Enabled repo-group-scoped stub integration (child repos only)."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1648 1663
1649 1664
@pytest.fixture()
def repogroup_recursive_integration_stub(request, test_repo_group,
                                         StubIntegrationType, stub_integration_settings):
    """Enabled repo-group-scoped stub integration applying recursively."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1663 1678
1664 1679
@pytest.fixture()
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    """Enabled global stub integration, deleted after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1678 1693
1679 1694
@pytest.fixture()
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    """Enabled global stub integration restricted to root repositories."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1693 1708
1694 1709
@pytest.fixture()
def local_dt_to_utc():
    """Factory converting a naive local datetime into a naive UTC datetime."""
    def _convert(dt):
        localized = dt.replace(tzinfo=dateutil.tz.tzlocal())
        return localized.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)
    return _convert
1701 1716
1702 1717
@pytest.fixture()
def disable_anonymous_user(request, baseapp):
    """Turn off anonymous access for one test; restore it afterwards."""
    set_anonymous_access(False)
    request.addfinalizer(lambda: set_anonymous_access(True))
1710 1725
1711 1726
@pytest.fixture(scope='module')
def rc_fixture(request):
    """Module-scoped `Fixture` helper."""
    return Fixture()
1715 1730
1716 1731
@pytest.fixture()
def repo_groups(request):
    """Create a zombie group plus a parent/child pair; destroyed afterwards."""
    fixture = Fixture()

    session = Session()
    zombie_group = fixture.create_repo_group('zombie')
    parent_group = fixture.create_repo_group('parent')
    child_group = fixture.create_repo_group('parent/child')
    all_groups = session.query(RepoGroup).all()
    assert len(all_groups) == 3
    assert child_group.group_parent_id == parent_group.group_id

    @request.addfinalizer
    def cleanup():
        # destroy the child before its parent so the tree unwinds cleanly
        fixture.destroy_repo_group(zombie_group)
        fixture.destroy_repo_group(child_group)
        fixture.destroy_repo_group(parent_group)

    return zombie_group, parent_group, child_group
@@ -1,189 +1,189 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import pytest
21 21 from mock import Mock, patch, DEFAULT
22 22
23 23 import rhodecode
24 24 from rhodecode.model import db, scm
25 25
26 26
def test_scm_instance_config(backend):
    # cache routing: only vcs_full_cache=True goes through _get_instance_cached;
    # the default and the explicit False both hit the uncached getter.
    repo = backend.create_repo()
    with patch.multiple('rhodecode.model.db.Repository',
                        _get_instance=DEFAULT,
                        _get_instance_cached=DEFAULT) as mocks:

        repo.scm_instance()
        mocks['_get_instance'].assert_called_with(
            config=None, cache=False)

        repo.scm_instance(vcs_full_cache=False)
        mocks['_get_instance'].assert_called_with(
            config=None, cache=False)

        repo.scm_instance(vcs_full_cache=True)
        mocks['_get_instance_cached'].assert_called()
43 43
44 44
def test_get_instance_config(backend):
    # _get_instance must hand the repo's own config (or an explicit override)
    # to the vcs backend class it constructs.
    repo = backend.create_repo()
    vcs_class = Mock()
    with patch.multiple('rhodecode.lib.vcs.backends',
                        get_scm=DEFAULT,
                        get_backend=DEFAULT) as mocks:
        mocks['get_scm'].return_value = backend.alias
        mocks['get_backend'].return_value = vcs_class
        with patch('rhodecode.model.db.Repository._config') as config_mock:
            # default: repo's cached _config is forwarded
            repo._get_instance()
            vcs_class.assert_called_with(
                repo_path=repo.repo_full_path, config=config_mock,
                create=False, with_wire={'cache': True, 'repo_state_uid': None})

        # explicit config= overrides the repo config entirely
        new_config = {'override': 'old_config'}
        repo._get_instance(config=new_config)
        vcs_class.assert_called_with(
            repo_path=repo.repo_full_path, config=new_config, create=False,
            with_wire={'cache': True, 'repo_state_uid': None})
64 64
65 65
def test_mark_for_invalidation_config(backend):
    # mark_for_invalidation must pass the repo's own config on to
    # update_commit_cache (compared via __dict__ since instances differ).
    repo = backend.create_repo()
    with patch('rhodecode.model.db.Repository.update_commit_cache') as _mock:
        scm.ScmModel().mark_for_invalidation(repo.repo_name)
        _, kwargs = _mock.call_args
        assert kwargs['config'].__dict__ == repo._config.__dict__
72 72
73 73
def test_mark_for_invalidation_with_delete_updates_last_commit(backend):
    """Invalidation with delete=True refreshes the cached tip revision."""
    history = [{'message': 'A'}, {'message': 'B'}]
    repo = backend.create_repo(commits=history)
    scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
    # two commits -> newest has index 1
    assert repo.changeset_cache['revision'] == 1
79 79
80 80
def test_mark_for_invalidation_with_delete_updates_last_commit_empty(backend):
    # empty repo: the cache falls back to the -1 "no commits" sentinel
    repo = backend.create_repo()
    scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
    assert repo.changeset_cache['revision'] == -1
85 85
86 86
def test_strip_with_multiple_heads(backend_hg):
    # Non-linear history: 'B' forks off 'A' onto the 'feature' named branch,
    # giving the repo two real heads before the strip.
    commits = [
        {'message': 'A'},
        {'message': 'a'},
        {'message': 'b'},
        {'message': 'B', 'parents': ['A'], 'branch': 'feature'},
        {'message': 'a1', 'branch': 'feature'},
    ]
    repo = backend_hg.create_repo(commits=commits)
    commit_ids = backend_hg.commit_ids

    model = scm.ScmModel()
    # strip 'b' only; the feature-branch head must survive
    model.strip(repo, commit_ids['b'], branch=None)

    vcs_repo = repo.scm_instance()
    rest_commit_ids = [c.raw_id for c in vcs_repo.get_commits()]
    assert len(rest_commit_ids) == 4
    assert commit_ids['b'] not in rest_commit_ids
105 105
106 106
def test_strip_with_single_heads(backend_hg):
    """Stripping the tip of a linear history keeps the earlier commits."""
    history = [
        {'message': 'A'},
        {'message': 'a'},
        {'message': 'b'},
    ]
    repo = backend_hg.create_repo(commits=history)
    commit_ids = backend_hg.commit_ids

    scm.ScmModel().strip(repo, commit_ids['b'], branch=None)

    remaining = [c.raw_id for c in repo.scm_instance().get_commits()]
    assert len(remaining) == 2
    assert commit_ids['b'] not in remaining
123 123
124 124
def test_get_nodes_returns_unicode_flat(backend):
    """Flat get_nodes() must yield str (not bytes) entries."""
    repo = backend.repo
    first_commit_id = repo.get_commit(commit_idx=0).raw_id
    directories, files = scm.ScmModel().get_nodes(
        repo.repo_name, first_commit_id, flat=True)
    assert_contains_only_str_chars(directories)
    assert_contains_only_str_chars(files)
131 131
132 132
def test_get_nodes_returns_unicode_non_flat(backend):
    """Non-flat get_nodes() must expose str names for dirs and files."""
    repo = backend.repo
    first_commit_id = repo.get_commit(commit_idx=0).raw_id

    directories, files = scm.ScmModel().get_nodes(
        repo.repo_name, first_commit_id, flat=False)
    # only the names are checked here, since that is the critical part
    assert_contains_only_str_chars([entry['name'] for entry in directories])
    assert_contains_only_str_chars([entry['name'] for entry in files])
142 142
143 143
def test_get_nodes_max_file_bytes(backend_random):
    # Without a limit, at least one file's full content exceeds the cap;
    # with max_file_bytes set, oversized files must come back content=None.
    repo = backend_random.repo
    max_file_bytes = 10
    directories, files = scm.ScmModel().get_nodes(
        repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
        extended_info=True, flat=False)
    assert any(file['content'] and len(file['content']) > max_file_bytes
               for file in files)

    directories, files = scm.ScmModel().get_nodes(
        repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
        extended_info=True, flat=False, max_file_bytes=max_file_bytes)
    assert all(
        file['content'] is None if file['size'] > max_file_bytes else True
        for file in files)
159 159
160 160
def assert_contains_only_str_chars(structure):
    """Assert that *structure* is non-empty and every element is a ``str``."""
    assert structure
    for item in structure:
        assert isinstance(item, str)
165 165
166 166
@pytest.mark.backends("hg", "git")
def test_get_non_str_reference(backend):
    # refs containing non-ascii characters must survive landing-rev listing
    model = scm.ScmModel()
    special_name = "AdΔ±nΔ±"
    non_str_list = [special_name]

    def scm_instance():
        # fake vcs repo exposing the special name for every ref type
        return Mock(
            branches=non_str_list, bookmarks=non_str_list,
            tags=non_str_list, alias=backend.alias)

    repo = Mock(__class__=db.Repository, scm_instance=scm_instance)
    choices, __ = model.get_repo_landing_revs(translator=lambda s: s, repo=repo)
    if backend.alias == 'hg':
        # hg additionally exposes bookmarks
        valid_choices = [
            'rev:tip', f'branch:{special_name}',
            f'book:{special_name}', f'tag:{special_name}']
    else:
        valid_choices = [
            'rev:tip', f'branch:{special_name}',
            f'tag:{special_name}']

    assert choices == valid_choices
@@ -1,493 +1,494 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import threading
20 20 import time
21 21 import sys
22 22 import logging
23 23 import os.path
24 24 import subprocess
25 25 import tempfile
26 26 import urllib.request
27 27 import urllib.error
28 28 import urllib.parse
29 29 from lxml.html import fromstring, tostring
30 30 from lxml.cssselect import CSSSelector
31 31 from urllib.parse import unquote_plus
32 32 import webob
33 33
34 34 from webtest.app import TestResponse, TestApp
35 35
36 36
37 37 import pytest
38 38
39 39 try:
40 40 import rc_testdata
41 41 except ImportError:
42 42 raise ImportError('Failed to import rc_testdata, '
43 43 'please make sure this package is installed from requirements_test.txt')
44 44
45 45 from rhodecode.model.db import User, Repository
46 46 from rhodecode.model.meta import Session
47 47 from rhodecode.model.scm import ScmModel
48 48 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
49 49 from rhodecode.lib.vcs.backends.base import EmptyCommit
50 50 from rhodecode.tests import login_user_session
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
def print_to_func(value, print_to=sys.stderr):
    """Write *value*, newline-terminated, to *print_to* (stderr by default)."""
    target = print_to
    print(value, file=target)
57 57
58 58
class CustomTestResponse(TestResponse):
    """TestResponse that dumps the body to a temp file when an assertion fails."""

    def _save_output(self, out):
        # delete=False keeps the file after the test run so it can be
        # inspected; the generated file name is returned to the caller
        f = tempfile.NamedTemporaryFile(mode='w', delete=False, prefix='rc-test-', suffix='.html')
        f.write(out)
        return f.name

    def mustcontain(self, *strings, **kw):
        """
        Assert that the response contains all the strings passed
        in as arguments.

        Equivalent to::

            assert string in res

        Supported keyword arguments: ``no`` (string or list of strings that
        must NOT be present), ``print_body``, ``print_to``.
        """
        print_body = kw.pop('print_body', False)
        print_to = kw.pop('print_to', sys.stderr)

        if 'no' in kw:
            no = kw['no']
            del kw['no']
            if isinstance(no, str):
                no = [no]
        else:
            no = []
        if kw:
            raise TypeError(f"The only keyword argument allowed is 'no' got {kw}")

        # body is always saved so failure messages can point at the file
        f = self._save_output(str(self))

        for s in strings:
            if s not in self:
                print_to_func(f"Actual response (no {s!r}):", print_to=print_to)
                print_to_func(f"body output saved as `{f}`", print_to=print_to)
                if print_body:
                    print_to_func(str(self), print_to=print_to)
                raise IndexError(f"Body does not contain string {s!r}, body output saved as {f}")

        for no_s in no:
            if no_s in self:
                print_to_func(f"Actual response (has {no_s!r})", print_to=print_to)
                print_to_func(f"body output saved as `{f}`", print_to=print_to)
                if print_body:
                    print_to_func(str(self), print_to=print_to)
                raise IndexError(f"Body contains bad string {no_s!r}, body output saved as {f}")

    def assert_response(self):
        # wrap this response in the HTML assertion helper defined below
        return AssertResponse(self)

    def get_session_from_response(self):
        """
        This returns the session from a response object.
        """
        from rhodecode.lib.rc_beaker import session_factory_from_settings
        session = session_factory_from_settings(self.test_app._pyramid_settings)
        return session(self.request)
116 116
117 117
class TestRequest(webob.BaseRequest):
    """Request class used by CustomTestApp; quotes non-ascii paths."""

    # for py.test, so it doesn't try to run this task by name starting with test...
    disabled = True
    ResponseClass = CustomTestResponse

    def add_response_callback(self, callback):
        # response callbacks are irrelevant under test; accept and ignore
        pass

    @classmethod
    def blank(cls, path, environ=None, base_url=None,
              headers=None, POST=None, **kw):

        if not path.isascii():
            # our custom quote path if it contains non-ascii chars
            path = urllib.parse.quote(path)

        return super(TestRequest, cls).blank(
            path, environ=environ, base_url=base_url, headers=headers, POST=POST, **kw)
137 137
138 138
class CustomTestApp(TestApp):
    """
    Custom app to make mustcontain more Useful, and extract special methods
    """
    RequestClass = TestRequest
    # NOTE(review): class-level mutable attrs are shared between instances —
    # confirm a fresh app is created per test
    rc_login_data = {}
    rc_current_session = None

    def login(self, username=None, password=None):
        """Log a user in and capture the session + CSRF token for later calls."""
        from rhodecode.lib import auth

        if username and password:
            session = login_user_session(self, username, password)
        else:
            # no credentials: fall back to the default test user
            session = login_user_session(self)

        self.rc_login_data['csrf_token'] = auth.get_csrf_token(session)
        self.rc_current_session = session
        return session['rhodecode_user']

    @property
    def csrf_token(self):
        # token captured during login(); KeyError if login() was never called
        return self.rc_login_data['csrf_token']

    @property
    def _pyramid_registry(self):
        return self.app.config.registry

    @property
    def _pyramid_settings(self):
        return self._pyramid_registry.settings

    def do_request(self, req, status=None, expect_errors=None):
        # you can put custom code here
        return super().do_request(req, status, expect_errors)
174 174
175 175
def set_anonymous_access(enabled):
    """(Dis)allows anonymous access depending on parameter `enabled`"""
    user = User.get_default_user()
    user.active = enabled
    Session().add(user)
    Session().commit()
    time.sleep(1.5)  # must sleep for cache (1s to expire)
    log.info('anonymous access is now: %s', enabled)
    # re-fetch to prove the change is visible after the cache window
    assert enabled == User.get_default_user().active, (
        'Cannot set anonymous access')
186 186
187 187
def check_xfail_backends(node, backend_alias):
    """xfail the test when it carries an ``xfail_backends`` marker for this backend."""
    # "xfail_backends" marks work which is "to be done" soon
    marker = node.get_closest_marker('xfail_backends')
    if marker and backend_alias in marker.args:
        default_msg = "Support for backend %s to be developed." % (backend_alias, )
        pytest.xfail(marker.kwargs.get('reason', default_msg))
196 196
197 197
def check_skip_backends(node, backend_alias):
    """Skip the test when it carries a ``skip_backends`` marker for this backend."""
    # "skip_backends" marks work which is not supported at all
    marker = node.get_closest_marker('skip_backends')
    if marker and backend_alias in marker.args:
        default_msg = "Feature not supported for backend %s." % (backend_alias, )
        pytest.skip(marker.kwargs.get('reason', default_msg))
206 206
207 207
def extract_git_repo_from_dump(dump_name, repo_name):
    """Create git repo `repo_name` from dump `dump_name`; return its path."""
    target_path = os.path.join(ScmModel().repos_path, repo_name)
    rc_testdata.extract_git_dump(dump_name, target_path)
    return target_path
214 214
215 215
def extract_hg_repo_from_dump(dump_name, repo_name):
    """Create hg repo `repo_name` from dump `dump_name`; return its path."""
    target_path = os.path.join(ScmModel().repos_path, repo_name)
    rc_testdata.extract_hg_dump(dump_name, target_path)
    return target_path
222 222
223 223
def extract_svn_repo_from_dump(dump_name, repo_name):
    """Create a svn repo `repo_name` from dump `dump_name`; return its path."""
    target_path = os.path.join(ScmModel().repos_path, repo_name)
    # an empty repo must exist before the dump can be loaded into it
    SubversionRepository(target_path, create=True)
    _load_svn_dump_into_repo(dump_name, target_path)
    return target_path
231 231
232 232
def assert_message_in_log(log_records, message, levelno, module):
    """Assert that *message* was logged at *levelno* from *module*."""
    matching = (
        record.message
        for record in log_records
        if record.module == module and record.levelno == levelno
    )
    assert message in matching
239 239
240 240
def _load_svn_dump_into_repo(dump_name, repo_path):
    """
    Utility to populate a svn repository with a named dump

    Currently the dumps are in rc_testdata. They might later on be
    integrated with the main repository once they stabilize more.

    :raises Exception: when ``svnadmin load`` exits with a non-zero code.
    """
    dump = rc_testdata.load_svn_dump(dump_name)
    # subprocess.run handles stdin feeding, waiting and pipe cleanup in one
    # call — replaces the manual Popen/communicate dance
    load_dump = subprocess.run(
        ['svnadmin', 'load', repo_path],
        input=dump, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if load_dump.returncode != 0:
        log.error("Output of load_dump command: %s", load_dump.stdout)
        log.error("Error output of load_dump command: %s", load_dump.stderr)
        raise Exception(
            'Failed to load dump "%s" into repository at path "%s".'
            % (dump_name, repo_path))
260 260
261 261
class AssertResponse(object):
    """
    Utility that helps to assert things about a given HTML response.
    """

    def __init__(self, response):
        self.response = response

    def get_imports(self):
        # lxml entry points gathered in one place for the selector helpers
        return fromstring, tostring, CSSSelector

    def one_element_exists(self, css_selector):
        # get_element already asserts there is exactly one match
        self.get_element(css_selector)

    def no_element_exists(self, css_selector):
        assert not self._get_elements(css_selector)

    def element_equals_to(self, css_selector, expected_content):
        # compares against the serialized element, markup included
        element = self.get_element(css_selector)
        element_text = self._element_to_string(element)

        assert expected_content in element_text

    def element_contains(self, css_selector, expected_content):
        # compares against the element's text content only
        element = self.get_element(css_selector)
        assert expected_content in element.text_content()

    def element_value_contains(self, css_selector, expected_content):
        # for form controls: checks the element's value attribute
        element = self.get_element(css_selector)
        assert expected_content in element.value

    def contains_one_link(self, link_text, href):
        fromstring, tostring, CSSSelector = self.get_imports()
        doc = fromstring(self.response.body)
        sel = CSSSelector('a[href]')
        elements = [
            e for e in sel(doc) if e.text_content().strip() == link_text]
        assert len(elements) == 1, "Did not find link or found multiple links"
        self._ensure_url_equal(elements[0].attrib.get('href'), href)

    def contains_one_anchor(self, anchor_id):
        fromstring, tostring, CSSSelector = self.get_imports()
        doc = fromstring(self.response.body)
        sel = CSSSelector('#' + anchor_id)
        elements = sel(doc)
        assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)

    def _ensure_url_equal(self, found, expected):
        # _Url normalizes escaping and query ordering before comparing
        assert _Url(found) == _Url(expected)

    def get_element(self, css_selector):
        # asserts exactly one match and returns it
        elements = self._get_elements(css_selector)
        assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
        return elements[0]

    def get_elements(self, css_selector):
        return self._get_elements(css_selector)

    def _get_elements(self, css_selector):
        fromstring, tostring, CSSSelector = self.get_imports()
        doc = fromstring(self.response.body)
        sel = CSSSelector(css_selector)
        elements = sel(doc)
        return elements

    def _element_to_string(self, element):
        fromstring, tostring, CSSSelector = self.get_imports()
        return tostring(element, encoding='unicode')
330 330
331 331
332 332 class _Url(object):
333 333 """
334 334 A url object that can be compared with other url orbjects
335 335 without regard to the vagaries of encoding, escaping, and ordering
336 336 of parameters in query strings.
337 337
338 338 Inspired by
339 339 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
340 340 """
341 341
342 342 def __init__(self, url):
343 343 parts = urllib.parse.urlparse(url)
344 344 _query = frozenset(urllib.parse.parse_qsl(parts.query))
345 345 _path = unquote_plus(parts.path)
346 346 parts = parts._replace(query=_query, path=_path)
347 347 self.parts = parts
348 348
349 349 def __eq__(self, other):
350 350 return self.parts == other.parts
351 351
352 352 def __hash__(self):
353 353 return hash(self.parts)
354 354
355 355
def run_test_concurrently(times, raise_catched_exc=True):
    """
    Add this decorator to small pieces of code that you want to test
    concurrently

    ex:

    @test_concurrently(25)
    def my_test_function():
        ...
    """
    def test_concurrently_decorator(test_func):
        def wrapper(*args, **kwargs):
            caught = []

            def runner():
                try:
                    test_func(*args, **kwargs)
                except Exception as exc:
                    caught.append(exc)
                    if raise_catched_exc:
                        raise

            workers = [threading.Thread(target=runner) for _ in range(times)]
            for worker in workers:
                worker.start()
            for worker in workers:
                worker.join()
            if caught:
                raise Exception(
                    'test_concurrently intercepted %s exceptions: %s' % (
                        len(caught), caught))
        return wrapper
    return test_concurrently_decorator
391 391
392 392
def wait_for_url(url, timeout=10):
    """
    Wait until URL becomes reachable.

    It polls the URL until the timeout is reached or it became reachable.
    If will call to `py.test.fail` in case the URL is not reachable.
    """
    timeout = time.time() + timeout  # absolute deadline
    last = 0
    wait = 0.1

    while timeout > last:
        last = time.time()
        if is_url_reachable(url, log_exc=False):
            break
        elif (last + wait) > time.time():
            # Go to sleep because not enough time has passed since last check.
            time.sleep(wait)
        else:
            # NOTE(review): this branch fires when a single reachability check
            # itself took longer than `wait` (0.1s), failing before the
            # deadline elapses — confirm that early fail is intentional.
            pytest.fail(f"Timeout while waiting for URL {url}")
413 413
414 414
def is_url_reachable(url: str, log_exc: bool = False) -> bool:
    """Return True when *url* can be opened, False on ``URLError``.

    :param log_exc: when True, log the failure with a traceback.
    """
    try:
        # close the connection explicitly instead of leaking the handle
        with urllib.request.urlopen(url):
            pass
    except urllib.error.URLError:
        if log_exc:
            log.exception(f'URL `{url}` reach error')
        return False
    return True
423 423
424 424
def repo_on_filesystem(repo_name):
    """Return True if a VCS repo named *repo_name* exists under TESTS_TMP_PATH."""
    from rhodecode.lib import vcs
    from rhodecode.tests import TESTS_TMP_PATH
    full_path = os.path.join(TESTS_TMP_PATH, repo_name)
    return vcs.get_vcs_instance(full_path, create=False) is not None
431 431
432 432
def commit_change(
        repo, filename: bytes, content: bytes, message, vcs_type, parent=None, branch=None, newfile=False):
    """Create a commit in *repo* that adds or modifies *filename*.

    :param repo: repository name, resolved via ``Repository.get_by_repo_name``.
    :param parent: parent commit; an ``EmptyCommit`` is used when not given.
    :param branch: branch to commit onto; forwarded only on the modify path —
        NOTE(review): it is ignored when ``newfile`` is True, confirm
        ``create_nodes`` needs no branch.
    :param newfile: create *filename* via ``create_nodes`` instead of
        modifying it via ``commit_change``.
    :returns: the newly created commit object.
    """
    from rhodecode.tests import TEST_USER_ADMIN_LOGIN

    repo = Repository.get_by_repo_name(repo)
    _commit = parent
    if not parent:
        _commit = EmptyCommit(alias=vcs_type)

    if newfile:
        nodes = {
            filename: {
                'content': content
            }
        }
        commit = ScmModel().create_nodes(
            user=TEST_USER_ADMIN_LOGIN, repo=repo,
            message=message,
            nodes=nodes,
            parent_commit=_commit,
            author=f'{TEST_USER_ADMIN_LOGIN} <admin@rhodecode.com>',
        )
    else:
        commit = ScmModel().commit_change(
            repo=repo.scm_instance(), repo_name=repo.repo_name,
            commit=parent, user=TEST_USER_ADMIN_LOGIN,
            author=f'{TEST_USER_ADMIN_LOGIN} <admin@rhodecode.com>',
            message=message,
            content=content,
            f_path=filename,
            branch=branch
        )
    return commit
465 466
466 467
def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None):
    """Build the form-encoded payload for a permission-update POST.

    :param csrf_token: token added as the first form field.
    :param default: permission for the default user (required).
    :param grant: iterable of (obj_id, perm, obj_name, obj_type) to add.
    :param revoke: iterable of (obj_id, obj_type) to remove.
    :raises ValueError: when no default permission is supplied.
    """
    if not default:
        raise ValueError('Permission for default user must be given')

    form_data = [('csrf_token', csrf_token), ('u_perm_1', default)]

    for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant or (), 1):
        form_data += [
            ('perm_new_member_perm_new{}'.format(cnt), perm),
            ('perm_new_member_id_new{}'.format(cnt), obj_id),
            ('perm_new_member_name_new{}'.format(cnt), obj_name),
            ('perm_new_member_type_new{}'.format(cnt), obj_type),
        ]

    for obj_id, obj_type in (revoke or ()):
        form_data += [
            ('perm_del_member_id_{}'.format(obj_id), obj_id),
            ('perm_del_member_type_{}'.format(obj_id), obj_type),
        ]
    return form_data
General Comments 0
You need to be logged in to leave comments. Login now