fix(tests): fixed the creation of non-linear commits in tests...
super-admin
r5198:919dd05c default
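The fix itself is small: test fixtures that create a commit whose parent is not the previous commit (a non-linear, forking commit) now name a branch explicitly instead of leaving the choice to the backend. A minimal sketch of the pattern, assembled from the hunks below; the commit-spec dicts and backend.create_master_repo come from the first hunk, so treat this as an illustration of the fixture format rather than a complete API reference:

# Illustration only: commit specs as used by the test fixtures in the hunks below.
# A spec whose 'parents' entry is not the previous commit forks the history; the
# fix adds an explicit 'branch' so Git/Hg test backends create that forked commit
# on a named branch rather than an implicitly chosen one.
commits = [
    {'message': 'initial'},
    {'message': 'change'},  # linear: implicit child of 'initial'
    {'message': 'new-feature', 'parents': ['initial'], 'branch': 'feature'},
]
commit_ids = backend.create_master_repo(commits)  # maps message -> commit id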
@@ -1,367 +1,367 @@

# Copyright (C) 2010-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import pytest

from rhodecode.model.db import User
from rhodecode.model.pull_request import PullRequestModel
from rhodecode.model.repo import RepoModel
from rhodecode.model.user import UserModel
from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
from rhodecode.api.tests.utils import build_data, api_call, assert_error


@pytest.mark.usefixtures("testuser_api", "app")
class TestCreatePullRequestApi(object):
    finalizers = []

    def teardown_method(self, method):
        if self.finalizers:
            for finalizer in self.finalizers:
                finalizer()
            self.finalizers = []

    def test_create_with_wrong_data(self):
        required_data = {
            'source_repo': 'tests/source_repo',
            'target_repo': 'tests/target_repo',
            'source_ref': 'branch:default:initial',
            'target_ref': 'branch:default:new-feature',
        }
        for key in required_data:
            data = required_data.copy()
            data.pop(key)
            id_, params = build_data(
                self.apikey, 'create_pull_request', **data)
            response = api_call(self.app, params)

            expected = 'Missing non optional `{}` arg in JSON DATA'.format(key)
            assert_error(id_, expected, given=response.body)

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize('source_ref', [
        'bookmarg:default:initial'
    ])
    def test_create_with_wrong_refs_data(self, backend, source_ref):

        data = self._prepare_data(backend)
        data['source_ref'] = source_ref

        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)

        response = api_call(self.app, params)

        expected = "Ref `{}` type is not allowed. " \
                   "Only:['bookmark', 'book', 'tag', 'branch'] " \
                   "are possible.".format(source_ref)
        assert_error(id_, expected, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_create_with_correct_data(self, backend):
        data = self._prepare_data(backend)
        RepoModel().revoke_user_permission(
            self.source.repo_name, User.DEFAULT_USER)
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['error'] is None
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)
        assert pull_request.title == data['title']
        assert pull_request.description == data['description']
        assert pull_request.source_ref == data['source_ref']
        assert pull_request.target_ref == data['target_ref']
        assert pull_request.source_repo.repo_name == data['source_repo']
        assert pull_request.target_repo.repo_name == data['target_repo']
        assert pull_request.revisions == [self.commit_ids['change']]
        assert len(pull_request.reviewers) == 1

    @pytest.mark.backends("git", "hg")
    def test_create_with_empty_description(self, backend):
        data = self._prepare_data(backend)
        data.pop('description')
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['error'] is None
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)
        assert pull_request.description == ''

    @pytest.mark.backends("git", "hg")
    def test_create_with_empty_title(self, backend):
        data = self._prepare_data(backend)
        data.pop('title')
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        result = response.json
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)
        data['ref'] = backend.default_branch_name
        title = '{source_repo}#{ref} to {target_repo}'.format(**data)
        assert pull_request.title == title

    @pytest.mark.backends("git", "hg")
    def test_create_with_reviewers_specified_by_names(
            self, backend, no_notifications):
        data = self._prepare_data(backend)
        reviewers = [
            {'username': TEST_USER_REGULAR_LOGIN,
             'reasons': ['{} added manually'.format(TEST_USER_REGULAR_LOGIN)]},
            {'username': TEST_USER_ADMIN_LOGIN,
             'reasons': ['{} added manually'.format(TEST_USER_ADMIN_LOGIN)],
             'mandatory': True},
        ]
        data['reviewers'] = reviewers

        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)

        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['error'] is None
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)

        actual_reviewers = []
        for rev in pull_request.reviewers:
            entry = {
                'username': rev.user.username,
                'reasons': rev.reasons,
            }
            if rev.mandatory:
                entry['mandatory'] = rev.mandatory
            actual_reviewers.append(entry)

        owner_username = pull_request.target_repo.user.username
        for spec_reviewer in reviewers[::]:
            # default reviewer will be added who is an owner of the repo
            # this get's overridden by a add owner to reviewers rule
            if spec_reviewer['username'] == owner_username:
                spec_reviewer['reasons'] = [u'Default reviewer', u'Repository owner']
                # since owner is more important, we don't inherit mandatory flag
                del spec_reviewer['mandatory']

        assert sorted(actual_reviewers, key=lambda e: e['username']) \
            == sorted(reviewers, key=lambda e: e['username'])

    @pytest.mark.backends("git", "hg")
    def test_create_with_reviewers_specified_by_ids(
            self, backend, no_notifications):
        data = self._prepare_data(backend)
        reviewers = [
            {'username': UserModel().get_by_username(
                TEST_USER_REGULAR_LOGIN).user_id,
             'reasons': ['added manually']},
            {'username': UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN).user_id,
             'reasons': ['added manually']},
        ]

        data['reviewers'] = reviewers
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)

        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['error'] is None
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)

        actual_reviewers = []
        for rev in pull_request.reviewers:
            entry = {
                'username': rev.user.user_id,
                'reasons': rev.reasons,
            }
            if rev.mandatory:
                entry['mandatory'] = rev.mandatory
            actual_reviewers.append(entry)

        owner_user_id = pull_request.target_repo.user.user_id
        for spec_reviewer in reviewers[::]:
            # default reviewer will be added who is an owner of the repo
            # this get's overridden by a add owner to reviewers rule
            if spec_reviewer['username'] == owner_user_id:
                spec_reviewer['reasons'] = [u'Default reviewer', u'Repository owner']

        assert sorted(actual_reviewers, key=lambda e: e['username']) \
            == sorted(reviewers, key=lambda e: e['username'])

    @pytest.mark.backends("git", "hg")
    def test_create_fails_when_the_reviewer_is_not_found(self, backend):
        data = self._prepare_data(backend)
        data['reviewers'] = [{'username': 'somebody'}]
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'user `somebody` does not exist'
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_cannot_create_with_reviewers_in_wrong_format(self, backend):
        data = self._prepare_data(backend)
        reviewers = ','.join([TEST_USER_REGULAR_LOGIN, TEST_USER_ADMIN_LOGIN])
        data['reviewers'] = reviewers
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = {u'': '"test_regular,test_admin" is not iterable'}
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_create_with_no_commit_hashes(self, backend):
        data = self._prepare_data(backend)
        expected_source_ref = data['source_ref']
        expected_target_ref = data['target_ref']
        data['source_ref'] = 'branch:{}'.format(backend.default_branch_name)
        data['target_ref'] = 'branch:{}'.format(backend.default_branch_name)
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)
        assert pull_request.source_ref == expected_source_ref
        assert pull_request.target_ref == expected_target_ref

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize("data_key", ["source_repo", "target_repo"])
    def test_create_fails_with_wrong_repo(self, backend, data_key):
        repo_name = 'fake-repo'
        data = self._prepare_data(backend)
        data[data_key] = repo_name
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'repository `{}` does not exist'.format(repo_name)
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"])
    def test_create_fails_with_non_existing_branch(self, backend, data_key):
        branch_name = 'test-branch'
        data = self._prepare_data(backend)
        data[data_key] = "branch:{}".format(branch_name)
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'The specified value:{type}:`{name}` ' \
                           'does not exist, or is not allowed.'.format(type='branch',
                                                                       name=branch_name)
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"])
    def test_create_fails_with_ref_in_a_wrong_format(self, backend, data_key):
        data = self._prepare_data(backend)
        ref = 'stange-ref'
        data[data_key] = ref
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = (
            'Ref `{ref}` given in a wrong format. Please check the API'
            ' documentation for more details'.format(ref=ref))
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"])
    def test_create_fails_with_non_existing_ref(self, backend, data_key):
        commit_id = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa10'
        ref = self._get_full_ref(backend, commit_id)
        data = self._prepare_data(backend)
        data[data_key] = ref
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'Ref `{}` does not exist'.format(ref)
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_create_fails_when_no_revisions(self, backend):
        data = self._prepare_data(backend, source_head='initial')
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'no commits found for merge between specified references'
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_create_fails_when_no_permissions(self, backend):
        data = self._prepare_data(backend)
        RepoModel().revoke_user_permission(
            self.source.repo_name, self.test_user)
        RepoModel().revoke_user_permission(
            self.source.repo_name, User.DEFAULT_USER)

        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'repository `{}` does not exist'.format(
            self.source.repo_name)
        assert_error(id_, expected_message, given=response.body)

    def _prepare_data(
            self, backend, source_head='change', target_head='initial'):
        commits = [
            {'message': 'initial'},
            {'message': 'change'},
-            {'message': 'new-feature', 'parents': ['initial']},
+            {'message': 'new-feature', 'parents': ['initial'], 'branch': 'feature'},
        ]
        self.commit_ids = backend.create_master_repo(commits)
        self.source = backend.create_repo(heads=[source_head])
        self.target = backend.create_repo(heads=[target_head])

        data = {
            'source_repo': self.source.repo_name,
            'target_repo': self.target.repo_name,
            'source_ref': self._get_full_ref(
                backend, self.commit_ids[source_head]),
            'target_ref': self._get_full_ref(
                backend, self.commit_ids[target_head]),
            'title': 'Test PR 1',
            'description': 'Test'
        }
        RepoModel().grant_user_permission(
            self.source.repo_name, self.TEST_USER_LOGIN, 'repository.read')
        return data

    def _get_full_ref(self, backend, commit_id):
        return 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_id)
@@ -1,204 +1,204 @@
# Copyright (C) 2010-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import re

import pytest

from rhodecode.apps.repository.views.repo_changelog import DEFAULT_CHANGELOG_SIZE
from rhodecode.tests import TestController
from rhodecode.tests.routes import route_path


MATCH_HASH = re.compile(r'<span class="commit_hash">r(\d+):[\da-f]+</span>')


def assert_commits_on_page(response, indexes):
    found_indexes = [int(idx) for idx in MATCH_HASH.findall(response.text)]
    assert found_indexes == indexes


class TestChangelogController(TestController):

    def test_commits_page(self, backend):
        self.log_user()
        response = self.app.get(
            route_path('repo_commits', repo_name=backend.repo_name))

        first_idx = -1
        last_idx = -DEFAULT_CHANGELOG_SIZE
        self.assert_commit_range_on_page(response, first_idx, last_idx, backend)

    def test_changelog(self, backend):
        self.log_user()
        response = self.app.get(
            route_path('repo_changelog', repo_name=backend.repo_name))

        first_idx = -1
        last_idx = -DEFAULT_CHANGELOG_SIZE
        self.assert_commit_range_on_page(
            response, first_idx, last_idx, backend)

    @pytest.mark.backends("hg", "git")
    def test_changelog_filtered_by_branch(self, backend):
        self.log_user()
        self.app.get(
            route_path('repo_changelog', repo_name=backend.repo_name,
                       params=dict(branch=backend.default_branch_name)),
            status=200)

    @pytest.mark.backends("hg", "git")
    def test_commits_filtered_by_branch(self, backend):
        self.log_user()
        self.app.get(
            route_path('repo_commits', repo_name=backend.repo_name,
                       params=dict(branch=backend.default_branch_name)),
            status=200)

    @pytest.mark.backends("svn")
    def test_changelog_filtered_by_branch_svn(self, autologin_user, backend):
        repo = backend['svn-simple-layout']
        response = self.app.get(
            route_path('repo_changelog', repo_name=repo.repo_name,
                       params=dict(branch='trunk')),
            status=200)

        assert_commits_on_page(response, indexes=[15, 12, 7, 3, 2, 1])

    def test_commits_filtered_by_wrong_branch(self, backend):
        self.log_user()
        branch = 'wrong-branch-name'
        response = self.app.get(
            route_path('repo_commits', repo_name=backend.repo_name,
                       params=dict(branch=branch)),
            status=302)
        expected_url = '/{repo}/commits/{branch}'.format(
            repo=backend.repo_name, branch=branch)
        assert expected_url in response.location
        response = response.follow()
        expected_warning = f'Branch {branch} is not found.'
        assert expected_warning in response.text

    @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
    def test_changelog_filtered_by_branch_with_merges(self, autologin_user, backend):

        # Note: The changelog of branch "b" does not contain the commit "a1"
        # although this is a parent of commit "b1". And branch "b" has commits
        # which have a smaller index than commit "a1".
        commits = [
-            {'message': 'a'},
+            {'message': 'a', 'branch': 'master'},
            {'message': 'b', 'branch': 'b'},
-            {'message': 'a1', 'parents': ['a']},
+            {'message': 'a1', 'parents': ['a'], 'branch': 'master'},
            {'message': 'b1', 'branch': 'b', 'parents': ['b', 'a1']},
        ]
        backend.create_repo(commits)

        self.app.get(
            route_path('repo_changelog', repo_name=backend.repo_name,
                       params=dict(branch='b')),
            status=200)

    @pytest.mark.backends("hg")
    def test_commits_closed_branches(self, autologin_user, backend):
        repo = backend['closed_branch']
        response = self.app.get(
            route_path('repo_commits', repo_name=repo.repo_name,
                       params=dict(branch='experimental')),
            status=200)

        assert_commits_on_page(response, indexes=[3, 1])

    def test_changelog_pagination(self, backend):
        self.log_user()
        # pagination, walk up to page 6
        changelog_url = route_path(
            'repo_commits', repo_name=backend.repo_name)

        for page in range(1, 7):
            response = self.app.get(changelog_url, {'page': page})

            first_idx = -DEFAULT_CHANGELOG_SIZE * (page - 1) - 1
            last_idx = -DEFAULT_CHANGELOG_SIZE * page
            self.assert_commit_range_on_page(response, first_idx, last_idx, backend)

    def assert_commit_range_on_page(
            self, response, first_idx, last_idx, backend):
        input_template = (
            """<input class="commit-range" """
            """data-commit-id="%(raw_id)s" data-commit-idx="%(idx)s" """
            """data-short-id="%(short_id)s" id="%(raw_id)s" """
            """name="%(raw_id)s" type="checkbox" value="1" />"""
        )

        commit_span_template = """<span class="commit_hash">r%s:%s</span>"""
        repo = backend.repo

        first_commit_on_page = repo.get_commit(commit_idx=first_idx)
        response.mustcontain(
            input_template % {'raw_id': first_commit_on_page.raw_id,
                              'idx': first_commit_on_page.idx,
                              'short_id': first_commit_on_page.short_id})

        response.mustcontain(commit_span_template % (
            first_commit_on_page.idx, first_commit_on_page.short_id)
        )

        last_commit_on_page = repo.get_commit(commit_idx=last_idx)
        response.mustcontain(
            input_template % {'raw_id': last_commit_on_page.raw_id,
                              'idx': last_commit_on_page.idx,
                              'short_id': last_commit_on_page.short_id})
        response.mustcontain(commit_span_template % (
            last_commit_on_page.idx, last_commit_on_page.short_id)
        )

        first_commit_of_next_page = repo.get_commit(commit_idx=last_idx - 1)
        first_span_of_next_page = commit_span_template % (
            first_commit_of_next_page.idx, first_commit_of_next_page.short_id)
        assert first_span_of_next_page not in response

    @pytest.mark.parametrize('test_path', [
        'vcs/exceptions.py',
        '/vcs/exceptions.py',
        '//vcs/exceptions.py'
    ])
    def test_commits_with_filenode(self, backend, test_path):
        self.log_user()
        response = self.app.get(
            route_path('repo_commits_file', repo_name=backend.repo_name,
                       commit_id='tip', f_path=test_path),
        )

        # history commits messages
        response.mustcontain('Added exceptions module, this time for real')
        response.mustcontain('Added not implemented hg backend test case')
        response.mustcontain('Added BaseChangeset class')

    def test_commits_with_filenode_that_is_dirnode(self, backend):
        self.log_user()
        self.app.get(
            route_path('repo_commits_file', repo_name=backend.repo_name,
                       commit_id='tip', f_path='/tests'),
            status=302)

    def test_commits_with_filenode_not_existing(self, backend):
        self.log_user()
        self.app.get(
            route_path('repo_commits_file', repo_name=backend.repo_name,
                       commit_id='tip', f_path='wrong_path'),
            status=302)
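The next hunk applies the same idea through the commit_change helper used by the compare-view tests: commit "D" forks off commit "A" (its parent is not the current tip), so the call now passes an explicit branch. A short sketch of the pattern, with the arguments copied from that hunk; the helper's full signature is not shown on this page, so treat this as illustrative only:

# Sketch of the pattern from the hunk below; argument names mirror the hunk.
commit0 = commit_change(
    fork.repo_name, filename=b'file1', content=b'A',
    message='A - Initial Commit', vcs_type=backend.alias, parent=None, newfile=True)

# "D" is a second child of "A", so it is created on an explicit 'feature' branch.
commit3 = commit_change(
    fork.repo_name, filename=b'file1', content=b'D',
    message='D, child of A', vcs_type=backend.alias, parent=commit0, branch='feature')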
@@ -1,656 +1,656 @@
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import mock
20 import mock
21 import pytest
21 import pytest
22 import lxml.html
22 import lxml.html
23
23
24 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
24 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
25 from rhodecode.tests import assert_session_flash
25 from rhodecode.tests import assert_session_flash
26 from rhodecode.tests.utils import AssertResponse, commit_change
26 from rhodecode.tests.utils import AssertResponse, commit_change
27 from rhodecode.tests.routes import route_path
27 from rhodecode.tests.routes import route_path
28
28
29
29
30 @pytest.mark.usefixtures("autologin_user", "app")
30 @pytest.mark.usefixtures("autologin_user", "app")
31 class TestCompareView(object):
31 class TestCompareView(object):
32
32
33 def test_compare_index_is_reached_at_least_once(self, backend):
33 def test_compare_index_is_reached_at_least_once(self, backend):
34 repo = backend.repo
34 repo = backend.repo
35 self.app.get(
35 self.app.get(
36 route_path('repo_compare_select', repo_name=repo.repo_name))
36 route_path('repo_compare_select', repo_name=repo.repo_name))
37
37
38 @pytest.mark.xfail_backends("svn", reason="Requires pull")
38 @pytest.mark.xfail_backends("svn", reason="Requires pull")
39 def test_compare_remote_with_different_commit_indexes(self, backend):
39 def test_compare_remote_with_different_commit_indexes(self, backend):
40 # Preparing the following repository structure:
40 # Preparing the following repository structure:
41 #
41 #
42 # Origin repository has two commits:
42 # Origin repository has two commits:
43 #
43 #
44 # 0 1
44 # 0 1
45 # A -- D
45 # A -- D
46 #
46 #
47 # The fork of it has a few more commits and "D" has a commit index
47 # The fork of it has a few more commits and "D" has a commit index
48 # which does not exist in origin.
48 # which does not exist in origin.
49 #
49 #
50 # 0 1 2 3 4
50 # 0 1 2 3 4
51 # A -- -- -- D -- E
51 # A -- -- -- D -- E
52 # \- B -- C
52 # \- B -- C
53 #
53 #
54
54
55 fork = backend.create_repo()
55 fork = backend.create_repo()
56 origin = backend.create_repo()
56 origin = backend.create_repo()
57
57
58 # prepare fork
58 # prepare fork
59 commit0 = commit_change(
59 commit0 = commit_change(
60 fork.repo_name, filename=b'file1', content=b'A',
60 fork.repo_name, filename=b'file1', content=b'A',
61 message='A - Initial Commit', vcs_type=backend.alias, parent=None, newfile=True)
61 message='A - Initial Commit', vcs_type=backend.alias, parent=None, newfile=True)
62
62
63 commit1 = commit_change(
63 commit1 = commit_change(
64 fork.repo_name, filename=b'file1', content=b'B',
64 fork.repo_name, filename=b'file1', content=b'B',
65 message='B, child of A', vcs_type=backend.alias, parent=commit0)
65 message='B, child of A', vcs_type=backend.alias, parent=commit0)
66
66
67 commit_change( # commit 2
67 commit_change( # commit 2
68 fork.repo_name, filename=b'file1', content=b'C',
68 fork.repo_name, filename=b'file1', content=b'C',
69 message='C, child of B', vcs_type=backend.alias, parent=commit1)
69 message='C, child of B', vcs_type=backend.alias, parent=commit1)
70
70
71 commit3 = commit_change(
71 commit3 = commit_change(
72 fork.repo_name, filename=b'file1', content=b'D',
72 fork.repo_name, filename=b'file1', content=b'D',
73 message='D, child of A', vcs_type=backend.alias, parent=commit0)
73 message='D, child of A', vcs_type=backend.alias, parent=commit0, branch='feature')
74
74
75 commit4 = commit_change(
75 commit4 = commit_change(
76 fork.repo_name, filename=b'file1', content=b'E',
76 fork.repo_name, filename=b'file1', content=b'E',
77 message='E, child of D', vcs_type=backend.alias, parent=commit3)
77 message='E, child of D', vcs_type=backend.alias, parent=commit3, branch='feature')
78
78
79 # prepare origin repository, taking just the history up to D
79 # prepare origin repository, taking just the history up to D
80
80
81 origin_repo = origin.scm_instance(cache=False)
81 origin_repo = origin.scm_instance(cache=False)
82 origin_repo.config.clear_section('hooks')
82 origin_repo.config.clear_section('hooks')
83 origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id])
83 origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id])
84 origin_repo = origin.scm_instance(cache=False) # cache rebuild
84 origin_repo = origin.scm_instance(cache=False) # cache rebuild
85
85
86 # Verify test fixture setup
86 # Verify test fixture setup
87 # This does not work for git
87 # This does not work for git
88 if backend.alias != 'git':
88 if backend.alias != 'git':
89 assert 5 == len(fork.scm_instance(cache=False).commit_ids)
89 assert 5 == len(fork.scm_instance(cache=False).commit_ids)
90 assert 2 == len(origin_repo.commit_ids)
90 assert 2 == len(origin_repo.commit_ids)
91
91
92 # Comparing the revisions
92 # Comparing the revisions
93 response = self.app.get(
93 response = self.app.get(
94 route_path('repo_compare',
94 route_path('repo_compare',
95 repo_name=origin.repo_name,
95 repo_name=origin.repo_name,
96 source_ref_type="rev", source_ref=commit3.raw_id,
96 source_ref_type="rev", source_ref=commit3.raw_id,
97 target_ref_type="rev", target_ref=commit4.raw_id,
97 target_ref_type="rev", target_ref=commit4.raw_id,
98 params=dict(merge='1', target_repo=fork.repo_name)
98 params=dict(merge='1', target_repo=fork.repo_name)
99 ),
99 ),
100 status=200)
100 status=200)
101
101
102 compare_page = ComparePage(response)
102 compare_page = ComparePage(response)
103 compare_page.contains_commits([commit4])
103 compare_page.contains_commits([commit4])
104
104
105 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
105 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
106 def test_compare_forks_on_branch_extra_commits(self, backend):
106 def test_compare_forks_on_branch_extra_commits(self, backend):
107 repo1 = backend.create_repo()
107 repo1 = backend.create_repo()
108
108
109 # commit something !
109 # commit something !
110 commit0 = commit_change(
110 commit0 = commit_change(
111 repo1.repo_name, filename=b'file1', content=b'line1\n',
111 repo1.repo_name, filename=b'file1', content=b'line1\n',
112 message='commit1', vcs_type=backend.alias, parent=None,
112 message='commit1', vcs_type=backend.alias, parent=None,
113 newfile=True)
113 newfile=True)
114
114
115 # fork this repo
115 # fork this repo
116 repo2 = backend.create_fork()
116 repo2 = backend.create_fork()
117
117
118 # add two extra commit into fork
118 # add two extra commit into fork
119 commit1 = commit_change(
119 commit1 = commit_change(
120 repo2.repo_name, filename=b'file1', content=b'line1\nline2\n',
120 repo2.repo_name, filename=b'file1', content=b'line1\nline2\n',
121 message='commit2', vcs_type=backend.alias, parent=commit0)
121 message='commit2', vcs_type=backend.alias, parent=commit0)
122
122
123 commit2 = commit_change(
123 commit2 = commit_change(
124 repo2.repo_name, filename=b'file1', content=b'line1\nline2\nline3\n',
124 repo2.repo_name, filename=b'file1', content=b'line1\nline2\nline3\n',
125 message='commit3', vcs_type=backend.alias, parent=commit1)
125 message='commit3', vcs_type=backend.alias, parent=commit1)
126
126
127 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
127 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
128 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
128 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
129
129
130 response = self.app.get(
130 response = self.app.get(
131 route_path('repo_compare',
131 route_path('repo_compare',
132 repo_name=repo1.repo_name,
132 repo_name=repo1.repo_name,
133 source_ref_type="branch", source_ref=commit_id2,
133 source_ref_type="branch", source_ref=commit_id2,
134 target_ref_type="branch", target_ref=commit_id1,
134 target_ref_type="branch", target_ref=commit_id1,
135 params=dict(merge='1', target_repo=repo2.repo_name)
135 params=dict(merge='1', target_repo=repo2.repo_name)
136 ))
136 ))
137
137
138 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
138 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
139 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
139 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
140
140
141 compare_page = ComparePage(response)
141 compare_page = ComparePage(response)
142 compare_page.contains_change_summary(1, 2, 0)
142 compare_page.contains_change_summary(1, 2, 0)
143 compare_page.contains_commits([commit1, commit2])
143 compare_page.contains_commits([commit1, commit2])
144
144
145 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
145 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
146 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
146 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
147
147
148 # Swap is removed when comparing branches since it's a PR feature and
148 # Swap is removed when comparing branches since it's a PR feature and
149 # it is then a preview mode
149 # it is then a preview mode
150 compare_page.swap_is_hidden()
150 compare_page.swap_is_hidden()
151 compare_page.target_source_are_disabled()
151 compare_page.target_source_are_disabled()
152
152
153 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
153 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
154 def test_compare_forks_on_branch_extra_commits_origin_has_incoming(self, backend):
154 def test_compare_forks_on_branch_extra_commits_origin_has_incoming(self, backend):
155 repo1 = backend.create_repo()
155 repo1 = backend.create_repo()
156
156
157 # commit something !
157 # commit something !
158 commit0 = commit_change(
158 commit0 = commit_change(
159 repo1.repo_name, filename=b'file1', content=b'line1\n',
159 repo1.repo_name, filename=b'file1', content=b'line1\n',
160 message='commit1', vcs_type=backend.alias, parent=None,
160 message='commit1', vcs_type=backend.alias, parent=None,
161 newfile=True)
161 newfile=True)
162
162
163 # fork this repo
163 # fork this repo
164 repo2 = backend.create_fork()
164 repo2 = backend.create_fork()
165
165
166 # now commit something to origin repo
166 # now commit something to origin repo
167 commit_change(
167 commit_change(
168 repo1.repo_name, filename=b'file2', content=b'line1file2\n',
168 repo1.repo_name, filename=b'file2', content=b'line1file2\n',
169 message='commit2', vcs_type=backend.alias, parent=commit0,
169 message='commit2', vcs_type=backend.alias, parent=commit0,
170 newfile=True)
170 newfile=True)
171
171
172 # add two extra commits into the fork
172 # add two extra commits into the fork
173 commit1 = commit_change(
173 commit1 = commit_change(
174 repo2.repo_name, filename=b'file1', content=b'line1\nline2\n',
174 repo2.repo_name, filename=b'file1', content=b'line1\nline2\n',
175 message='commit2', vcs_type=backend.alias, parent=commit0)
175 message='commit2', vcs_type=backend.alias, parent=commit0)
176
176
177 commit2 = commit_change(
177 commit2 = commit_change(
178 repo2.repo_name, filename=b'file1', content=b'line1\nline2\nline3\n',
178 repo2.repo_name, filename=b'file1', content=b'line1\nline2\nline3\n',
179 message='commit3', vcs_type=backend.alias, parent=commit1)
179 message='commit3', vcs_type=backend.alias, parent=commit1)
180
180
181 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
181 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
182 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
182 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
183
183
184 response = self.app.get(
184 response = self.app.get(
185 route_path('repo_compare',
185 route_path('repo_compare',
186 repo_name=repo1.repo_name,
186 repo_name=repo1.repo_name,
187 source_ref_type="branch", source_ref=commit_id2,
187 source_ref_type="branch", source_ref=commit_id2,
188 target_ref_type="branch", target_ref=commit_id1,
188 target_ref_type="branch", target_ref=commit_id1,
189 params=dict(merge='1', target_repo=repo2.repo_name),
189 params=dict(merge='1', target_repo=repo2.repo_name),
190 ))
190 ))
191
191
192 response.mustcontain(f'{repo1.repo_name}@{commit_id2}')
192 response.mustcontain(f'{repo1.repo_name}@{commit_id2}')
193 response.mustcontain(f'{repo2.repo_name}@{commit_id1}')
193 response.mustcontain(f'{repo2.repo_name}@{commit_id1}')
194
194
195 compare_page = ComparePage(response)
195 compare_page = ComparePage(response)
196 compare_page.contains_change_summary(1, 2, 0)
196 compare_page.contains_change_summary(1, 2, 0)
197 compare_page.contains_commits([commit1, commit2])
197 compare_page.contains_commits([commit1, commit2])
198 anchor = f'a_c-{commit0.short_id}-826e8142e6ba'
198 anchor = f'a_c-{commit0.short_id}-826e8142e6ba'
199 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
199 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
200
200
201 # Swap is removed when comparing branches since it's a PR feature and
201 # Swap is removed when comparing branches since it's a PR feature and
202 # the compare is then shown in preview mode
202 # the compare is then shown in preview mode
203 compare_page.swap_is_hidden()
203 compare_page.swap_is_hidden()
204 compare_page.target_source_are_disabled()
204 compare_page.target_source_are_disabled()
205
205
206 @pytest.mark.xfail_backends("svn")
206 @pytest.mark.xfail_backends("svn")
207 # TODO(marcink): no svn support for comparing two separate repos
207 # TODO(marcink): no svn support for comparing two separate repos
208 def test_compare_of_unrelated_forks(self, backend):
208 def test_compare_of_unrelated_forks(self, backend):
209 orig = backend.create_repo(number_of_commits=1)
209 orig = backend.create_repo(number_of_commits=1)
210 fork = backend.create_repo(number_of_commits=1)
210 fork = backend.create_repo(number_of_commits=1)
211
211
212 response = self.app.get(
212 response = self.app.get(
213 route_path('repo_compare',
213 route_path('repo_compare',
214 repo_name=orig.repo_name,
214 repo_name=orig.repo_name,
215 source_ref_type="rev", source_ref="tip",
215 source_ref_type="rev", source_ref="tip",
216 target_ref_type="rev", target_ref="tip",
216 target_ref_type="rev", target_ref="tip",
217 params=dict(merge='1', target_repo=fork.repo_name),
217 params=dict(merge='1', target_repo=fork.repo_name),
218 ),
218 ),
219 status=302)
219 status=302)
220 response = response.follow()
220 response = response.follow()
221 response.mustcontain("Repositories unrelated.")
221 response.mustcontain("Repositories unrelated.")
222
222
223 @pytest.mark.xfail_backends("svn")
223 @pytest.mark.xfail_backends("svn")
224 def test_compare_cherry_pick_commits_from_bottom(self, backend):
224 def test_compare_cherry_pick_commits_from_bottom(self, backend):
225
225
226 # repo1:
226 # repo1:
227 # commit0:
227 # commit0:
228 # commit1:
228 # commit1:
229 # repo1-fork - in which we will cherry-pick the bottom commits
229 # repo1-fork - in which we will cherry-pick the bottom commits
230 # commit0:
230 # commit0:
231 # commit1:
231 # commit1:
232 # commit2: x
232 # commit2: x
233 # commit3: x
233 # commit3: x
234 # commit4: x
234 # commit4: x
235 # commit5:
235 # commit5:
236 # make repo1, and commit1+commit2
236 # make repo1, and commit1+commit2
237
237
238 repo1 = backend.create_repo()
238 repo1 = backend.create_repo()
239
239
240 # commit something !
240 # commit something !
241 commit0 = commit_change(
241 commit0 = commit_change(
242 repo1.repo_name, filename=b'file1', content=b'line1\n',
242 repo1.repo_name, filename=b'file1', content=b'line1\n',
243 message='commit1', vcs_type=backend.alias, parent=None,
243 message='commit1', vcs_type=backend.alias, parent=None,
244 newfile=True)
244 newfile=True)
245 commit1 = commit_change(
245 commit1 = commit_change(
246 repo1.repo_name, filename=b'file1', content=b'line1\nline2\n',
246 repo1.repo_name, filename=b'file1', content=b'line1\nline2\n',
247 message='commit2', vcs_type=backend.alias, parent=commit0)
247 message='commit2', vcs_type=backend.alias, parent=commit0)
248
248
249 # fork this repo
249 # fork this repo
250 repo2 = backend.create_fork()
250 repo2 = backend.create_fork()
251
251
252 # now make commit3-6
252 # now make commit3-6
253 commit2 = commit_change(
253 commit2 = commit_change(
254 repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\n',
254 repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\n',
255 message='commit3', vcs_type=backend.alias, parent=commit1)
255 message='commit3', vcs_type=backend.alias, parent=commit1)
256 commit3 = commit_change(
256 commit3 = commit_change(
257 repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\nline4\n',
257 repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\nline4\n',
258 message='commit4', vcs_type=backend.alias, parent=commit2)
258 message='commit4', vcs_type=backend.alias, parent=commit2)
259 commit4 = commit_change(
259 commit4 = commit_change(
260 repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\nline4\nline5\n',
260 repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\nline4\nline5\n',
261 message='commit5', vcs_type=backend.alias, parent=commit3)
261 message='commit5', vcs_type=backend.alias, parent=commit3)
262 commit_change( # commit 5
262 commit_change( # commit 5
263 repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\nline4\nline5\nline6\n',
263 repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\nline4\nline5\nline6\n',
264 message='commit6', vcs_type=backend.alias, parent=commit4)
264 message='commit6', vcs_type=backend.alias, parent=commit4)
265
265
266 response = self.app.get(
266 response = self.app.get(
267 route_path('repo_compare',
267 route_path('repo_compare',
268 repo_name=repo2.repo_name,
268 repo_name=repo2.repo_name,
269 # parent of commit2, in target repo2
269 # parent of commit2, in target repo2
270 source_ref_type="rev", source_ref=commit1.raw_id,
270 source_ref_type="rev", source_ref=commit1.raw_id,
271 target_ref_type="rev", target_ref=commit4.raw_id,
271 target_ref_type="rev", target_ref=commit4.raw_id,
272 params=dict(merge='1', target_repo=repo1.repo_name),
272 params=dict(merge='1', target_repo=repo1.repo_name),
273 ))
273 ))
274 response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id))
274 response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id))
275 response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id))
275 response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id))
276
276
277 # files
277 # files
278 compare_page = ComparePage(response)
278 compare_page = ComparePage(response)
279 compare_page.contains_change_summary(1, 3, 0)
279 compare_page.contains_change_summary(1, 3, 0)
280 compare_page.contains_commits([commit2, commit3, commit4])
280 compare_page.contains_commits([commit2, commit3, commit4])
281 anchor = 'a_c-{}-826e8142e6ba'.format(commit1.short_id)
281 anchor = 'a_c-{}-826e8142e6ba'.format(commit1.short_id)
282 compare_page.contains_file_links_and_anchors([('file1', anchor),])
282 compare_page.contains_file_links_and_anchors([('file1', anchor),])
283
283
284 @pytest.mark.xfail_backends("svn")
284 @pytest.mark.xfail_backends("svn")
285 def test_compare_cherry_pick_commits_from_top(self, backend):
285 def test_compare_cherry_pick_commits_from_top(self, backend):
286 # repo1:
286 # repo1:
287 # commit0:
287 # commit0:
288 # commit1:
288 # commit1:
289 # repo1-fork - in which we will cherry-pick the top commits
289 # repo1-fork - in which we will cherry-pick the top commits
290 # commit0:
290 # commit0:
291 # commit1:
291 # commit1:
292 # commit2:
292 # commit2:
293 # commit3: x
293 # commit3: x
294 # commit4: x
294 # commit4: x
295 # commit5: x
295 # commit5: x
296
296
297 # make repo1, and commit1+commit2
297 # make repo1, and commit1+commit2
298 repo1 = backend.create_repo()
298 repo1 = backend.create_repo()
299
299
300 # commit something !
300 # commit something !
301 commit0 = commit_change(
301 commit0 = commit_change(
302 repo1.repo_name, filename=b'file1', content=b'line1\n',
302 repo1.repo_name, filename=b'file1', content=b'line1\n',
303 message='commit1', vcs_type=backend.alias, parent=None,
303 message='commit1', vcs_type=backend.alias, parent=None,
304 newfile=True)
304 newfile=True)
305 commit1 = commit_change(
305 commit1 = commit_change(
306 repo1.repo_name, filename=b'file1', content=b'line1\nline2\n',
306 repo1.repo_name, filename=b'file1', content=b'line1\nline2\n',
307 message='commit2', vcs_type=backend.alias, parent=commit0)
307 message='commit2', vcs_type=backend.alias, parent=commit0)
308
308
309 # fork this repo
309 # fork this repo
310 backend.create_fork()
310 backend.create_fork()
311
311
312 # now make commit3-6
312 # now make commit3-6
313 commit2 = commit_change(
313 commit2 = commit_change(
314 repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\n',
314 repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\n',
315 message='commit3', vcs_type=backend.alias, parent=commit1)
315 message='commit3', vcs_type=backend.alias, parent=commit1)
316 commit3 = commit_change(
316 commit3 = commit_change(
317 repo1.repo_name, filename=b'file1',
317 repo1.repo_name, filename=b'file1',
318 content=b'line1\nline2\nline3\nline4\n', message='commit4',
318 content=b'line1\nline2\nline3\nline4\n', message='commit4',
319 vcs_type=backend.alias, parent=commit2)
319 vcs_type=backend.alias, parent=commit2)
320 commit4 = commit_change(
320 commit4 = commit_change(
321 repo1.repo_name, filename=b'file1',
321 repo1.repo_name, filename=b'file1',
322 content=b'line1\nline2\nline3\nline4\nline5\n', message='commit5',
322 content=b'line1\nline2\nline3\nline4\nline5\n', message='commit5',
323 vcs_type=backend.alias, parent=commit3)
323 vcs_type=backend.alias, parent=commit3)
324 commit5 = commit_change(
324 commit5 = commit_change(
325 repo1.repo_name, filename=b'file1',
325 repo1.repo_name, filename=b'file1',
326 content=b'line1\nline2\nline3\nline4\nline5\nline6\n',
326 content=b'line1\nline2\nline3\nline4\nline5\nline6\n',
327 message='commit6', vcs_type=backend.alias, parent=commit4)
327 message='commit6', vcs_type=backend.alias, parent=commit4)
328
328
329 response = self.app.get(
329 response = self.app.get(
330 route_path('repo_compare',
330 route_path('repo_compare',
331 repo_name=repo1.repo_name,
331 repo_name=repo1.repo_name,
332 # parent of commit3, not in source repo2
332 # parent of commit3, not in source repo2
333 source_ref_type="rev", source_ref=commit2.raw_id,
333 source_ref_type="rev", source_ref=commit2.raw_id,
334 target_ref_type="rev", target_ref=commit5.raw_id,
334 target_ref_type="rev", target_ref=commit5.raw_id,
335 params=dict(merge='1'),))
335 params=dict(merge='1'),))
336
336
337 response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id))
337 response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id))
338 response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id))
338 response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id))
339
339
340 compare_page = ComparePage(response)
340 compare_page = ComparePage(response)
341 compare_page.contains_change_summary(1, 3, 0)
341 compare_page.contains_change_summary(1, 3, 0)
342 compare_page.contains_commits([commit3, commit4, commit5])
342 compare_page.contains_commits([commit3, commit4, commit5])
343
343
344 # files
344 # files
345 anchor = 'a_c-{}-826e8142e6ba'.format(commit2.short_id)
345 anchor = 'a_c-{}-826e8142e6ba'.format(commit2.short_id)
346 compare_page.contains_file_links_and_anchors([('file1', anchor),])
346 compare_page.contains_file_links_and_anchors([('file1', anchor),])
347
347
348 @pytest.mark.xfail_backends("svn")
348 @pytest.mark.xfail_backends("svn")
349 def test_compare_remote_branches(self, backend):
349 def test_compare_remote_branches(self, backend):
350 repo1 = backend.repo
350 repo1 = backend.repo
351 repo2 = backend.create_fork()
351 repo2 = backend.create_fork()
352
352
353 commit_id1 = repo1.get_commit(commit_idx=3).raw_id
353 commit_id1 = repo1.get_commit(commit_idx=3).raw_id
354 commit_id1_short = repo1.get_commit(commit_idx=3).short_id
354 commit_id1_short = repo1.get_commit(commit_idx=3).short_id
355 commit_id2 = repo1.get_commit(commit_idx=6).raw_id
355 commit_id2 = repo1.get_commit(commit_idx=6).raw_id
356 commit_id2_short = repo1.get_commit(commit_idx=6).short_id
356 commit_id2_short = repo1.get_commit(commit_idx=6).short_id
357
357
358 response = self.app.get(
358 response = self.app.get(
359 route_path('repo_compare',
359 route_path('repo_compare',
360 repo_name=repo1.repo_name,
360 repo_name=repo1.repo_name,
361 source_ref_type="rev", source_ref=commit_id1,
361 source_ref_type="rev", source_ref=commit_id1,
362 target_ref_type="rev", target_ref=commit_id2,
362 target_ref_type="rev", target_ref=commit_id2,
363 params=dict(merge='1', target_repo=repo2.repo_name),
363 params=dict(merge='1', target_repo=repo2.repo_name),
364 ))
364 ))
365
365
366 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1))
366 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1))
367 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2))
367 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2))
368
368
369 compare_page = ComparePage(response)
369 compare_page = ComparePage(response)
370
370
371 # outgoing commits between those commits
371 # outgoing commits between those commits
372 compare_page.contains_commits(
372 compare_page.contains_commits(
373 [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]])
373 [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]])
374
374
375 # files
375 # files
376 compare_page.contains_file_links_and_anchors([
376 compare_page.contains_file_links_and_anchors([
377 ('vcs/backends/hg.py', 'a_c-{}-9c390eb52cd6'.format(commit_id2_short)),
377 ('vcs/backends/hg.py', 'a_c-{}-9c390eb52cd6'.format(commit_id2_short)),
378 ('vcs/backends/__init__.py', 'a_c-{}-41b41c1f2796'.format(commit_id1_short)),
378 ('vcs/backends/__init__.py', 'a_c-{}-41b41c1f2796'.format(commit_id1_short)),
379 ('vcs/backends/base.py', 'a_c-{}-2f574d260608'.format(commit_id1_short)),
379 ('vcs/backends/base.py', 'a_c-{}-2f574d260608'.format(commit_id1_short)),
380 ])
380 ])
381
381
382 @pytest.mark.xfail_backends("svn")
382 @pytest.mark.xfail_backends("svn")
383 def test_source_repo_new_commits_after_forking_simple_diff(self, backend):
383 def test_source_repo_new_commits_after_forking_simple_diff(self, backend):
384 repo1 = backend.create_repo()
384 repo1 = backend.create_repo()
385 r1_name = repo1.repo_name
385 r1_name = repo1.repo_name
386
386
387 commit0 = commit_change(
387 commit0 = commit_change(
388 repo=r1_name, filename=b'file1',
388 repo=r1_name, filename=b'file1',
389 content=b'line1', message='commit1', vcs_type=backend.alias,
389 content=b'line1', message='commit1', vcs_type=backend.alias,
390 newfile=True)
390 newfile=True)
391 assert repo1.scm_instance().commit_ids == [commit0.raw_id]
391 assert repo1.scm_instance().commit_ids == [commit0.raw_id]
392
392
393 # fork the repo1
393 # fork the repo1
394 repo2 = backend.create_fork()
394 repo2 = backend.create_fork()
395 assert repo2.scm_instance().commit_ids == [commit0.raw_id]
395 assert repo2.scm_instance().commit_ids == [commit0.raw_id]
396
396
397 self.r2_id = repo2.repo_id
397 self.r2_id = repo2.repo_id
398 r2_name = repo2.repo_name
398 r2_name = repo2.repo_name
399
399
400 commit1 = commit_change(
400 commit1 = commit_change(
401 repo=r2_name, filename=b'file1-fork',
401 repo=r2_name, filename=b'file1-fork',
402 content=b'file1-line1-from-fork', message='commit1-fork',
402 content=b'file1-line1-from-fork', message='commit1-fork',
403 vcs_type=backend.alias, parent=repo2.scm_instance()[-1],
403 vcs_type=backend.alias, parent=repo2.scm_instance()[-1],
404 newfile=True)
404 newfile=True)
405
405
406 commit2 = commit_change(
406 commit2 = commit_change(
407 repo=r2_name, filename=b'file2-fork',
407 repo=r2_name, filename=b'file2-fork',
408 content=b'file2-line1-from-fork', message='commit2-fork',
408 content=b'file2-line1-from-fork', message='commit2-fork',
409 vcs_type=backend.alias, parent=commit1,
409 vcs_type=backend.alias, parent=commit1,
410 newfile=True)
410 newfile=True)
411
411
412 commit_change( # commit 3
412 commit_change( # commit 3
413 repo=r2_name, filename=b'file3-fork',
413 repo=r2_name, filename=b'file3-fork',
414 content=b'file3-line1-from-fork', message='commit3-fork',
414 content=b'file3-line1-from-fork', message='commit3-fork',
415 vcs_type=backend.alias, parent=commit2, newfile=True)
415 vcs_type=backend.alias, parent=commit2, newfile=True)
416
416
417 # compare !
417 # compare !
418 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
418 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
419 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
419 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
420
420
421 response = self.app.get(
421 response = self.app.get(
422 route_path('repo_compare',
422 route_path('repo_compare',
423 repo_name=r2_name,
423 repo_name=r2_name,
424 source_ref_type="branch", source_ref=commit_id1,
424 source_ref_type="branch", source_ref=commit_id1,
425 target_ref_type="branch", target_ref=commit_id2,
425 target_ref_type="branch", target_ref=commit_id2,
426 params=dict(merge='1', target_repo=r1_name),
426 params=dict(merge='1', target_repo=r1_name),
427 ))
427 ))
428
428
429 response.mustcontain('%s@%s' % (r2_name, commit_id1))
429 response.mustcontain('%s@%s' % (r2_name, commit_id1))
430 response.mustcontain('%s@%s' % (r1_name, commit_id2))
430 response.mustcontain('%s@%s' % (r1_name, commit_id2))
431 response.mustcontain('No files')
431 response.mustcontain('No files')
432 response.mustcontain('No commits in this compare')
432 response.mustcontain('No commits in this compare')
433
433
434 commit0 = commit_change(
434 commit0 = commit_change(
435 repo=r1_name, filename=b'file2',
435 repo=r1_name, filename=b'file2',
436 content=b'line1-added-after-fork', message='commit2-parent',
436 content=b'line1-added-after-fork', message='commit2-parent',
437 vcs_type=backend.alias, parent=None, newfile=True)
437 vcs_type=backend.alias, parent=None, newfile=True)
438
438
439 # compare !
439 # compare !
440 response = self.app.get(
440 response = self.app.get(
441 route_path('repo_compare',
441 route_path('repo_compare',
442 repo_name=r2_name,
442 repo_name=r2_name,
443 source_ref_type="branch", source_ref=commit_id1,
443 source_ref_type="branch", source_ref=commit_id1,
444 target_ref_type="branch", target_ref=commit_id2,
444 target_ref_type="branch", target_ref=commit_id2,
445 params=dict(merge='1', target_repo=r1_name),
445 params=dict(merge='1', target_repo=r1_name),
446 ))
446 ))
447
447
448 response.mustcontain('%s@%s' % (r2_name, commit_id1))
448 response.mustcontain('%s@%s' % (r2_name, commit_id1))
449 response.mustcontain('%s@%s' % (r1_name, commit_id2))
449 response.mustcontain('%s@%s' % (r1_name, commit_id2))
450
450
451 response.mustcontain("""commit2-parent""")
451 response.mustcontain("""commit2-parent""")
452 response.mustcontain("""line1-added-after-fork""")
452 response.mustcontain("""line1-added-after-fork""")
453 compare_page = ComparePage(response)
453 compare_page = ComparePage(response)
454 compare_page.contains_change_summary(1, 1, 0)
454 compare_page.contains_change_summary(1, 1, 0)
455
455
456 @pytest.mark.xfail_backends("svn")
456 @pytest.mark.xfail_backends("svn")
457 def test_compare_commits(self, backend, xhr_header):
457 def test_compare_commits(self, backend, xhr_header):
458 commit0 = backend.repo.get_commit(commit_idx=0)
458 commit0 = backend.repo.get_commit(commit_idx=0)
459 commit1 = backend.repo.get_commit(commit_idx=1)
459 commit1 = backend.repo.get_commit(commit_idx=1)
460
460
461 response = self.app.get(
461 response = self.app.get(
462 route_path('repo_compare',
462 route_path('repo_compare',
463 repo_name=backend.repo_name,
463 repo_name=backend.repo_name,
464 source_ref_type="rev", source_ref=commit0.raw_id,
464 source_ref_type="rev", source_ref=commit0.raw_id,
465 target_ref_type="rev", target_ref=commit1.raw_id,
465 target_ref_type="rev", target_ref=commit1.raw_id,
466 params=dict(merge='1')
466 params=dict(merge='1')
467 ),
467 ),
468 extra_environ=xhr_header, )
468 extra_environ=xhr_header, )
469
469
470 # outgoing commits between those commits
470 # outgoing commits between those commits
471 compare_page = ComparePage(response)
471 compare_page = ComparePage(response)
472 compare_page.contains_commits(commits=[commit1])
472 compare_page.contains_commits(commits=[commit1])
473
473
474 def test_errors_when_comparing_unknown_source_repo(self, backend):
474 def test_errors_when_comparing_unknown_source_repo(self, backend):
475 repo = backend.repo
475 repo = backend.repo
476
476
477 self.app.get(
477 self.app.get(
478 route_path('repo_compare',
478 route_path('repo_compare',
479 repo_name='badrepo',
479 repo_name='badrepo',
480 source_ref_type="rev", source_ref='tip',
480 source_ref_type="rev", source_ref='tip',
481 target_ref_type="rev", target_ref='tip',
481 target_ref_type="rev", target_ref='tip',
482 params=dict(merge='1', target_repo=repo.repo_name)
482 params=dict(merge='1', target_repo=repo.repo_name)
483 ),
483 ),
484 status=404)
484 status=404)
485
485
486 def test_errors_when_comparing_unknown_target_repo(self, backend):
486 def test_errors_when_comparing_unknown_target_repo(self, backend):
487 repo = backend.repo
487 repo = backend.repo
488 badrepo = 'badrepo'
488 badrepo = 'badrepo'
489
489
490 response = self.app.get(
490 response = self.app.get(
491 route_path('repo_compare',
491 route_path('repo_compare',
492 repo_name=repo.repo_name,
492 repo_name=repo.repo_name,
493 source_ref_type="rev", source_ref='tip',
493 source_ref_type="rev", source_ref='tip',
494 target_ref_type="rev", target_ref='tip',
494 target_ref_type="rev", target_ref='tip',
495 params=dict(merge='1', target_repo=badrepo),
495 params=dict(merge='1', target_repo=badrepo),
496 ),
496 ),
497 status=302)
497 status=302)
498 redirected = response.follow()
498 redirected = response.follow()
499 redirected.mustcontain(
499 redirected.mustcontain(
500 'Could not find the target repo: `{}`'.format(badrepo))
500 'Could not find the target repo: `{}`'.format(badrepo))
501
501
502 def test_compare_not_in_preview_mode(self, backend_stub):
502 def test_compare_not_in_preview_mode(self, backend_stub):
503 commit0 = backend_stub.repo.get_commit(commit_idx=0)
503 commit0 = backend_stub.repo.get_commit(commit_idx=0)
504 commit1 = backend_stub.repo.get_commit(commit_idx=1)
504 commit1 = backend_stub.repo.get_commit(commit_idx=1)
505
505
506 response = self.app.get(
506 response = self.app.get(
507 route_path('repo_compare',
507 route_path('repo_compare',
508 repo_name=backend_stub.repo_name,
508 repo_name=backend_stub.repo_name,
509 source_ref_type="rev", source_ref=commit0.raw_id,
509 source_ref_type="rev", source_ref=commit0.raw_id,
510 target_ref_type="rev", target_ref=commit1.raw_id,
510 target_ref_type="rev", target_ref=commit1.raw_id,
511 ))
511 ))
512
512
513 # outgoing commits between those commits
513 # outgoing commits between those commits
514 compare_page = ComparePage(response)
514 compare_page = ComparePage(response)
515 compare_page.swap_is_visible()
515 compare_page.swap_is_visible()
516 compare_page.target_source_are_enabled()
516 compare_page.target_source_are_enabled()
517
517
518 def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util):
518 def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util):
519 orig = backend_hg.create_repo(number_of_commits=1)
519 orig = backend_hg.create_repo(number_of_commits=1)
520 fork = backend_hg.create_fork()
520 fork = backend_hg.create_fork()
521
521
522 settings_util.create_repo_rhodecode_ui(
522 settings_util.create_repo_rhodecode_ui(
523 orig, 'extensions', value='', key='largefiles', active=False)
523 orig, 'extensions', value='', key='largefiles', active=False)
524 settings_util.create_repo_rhodecode_ui(
524 settings_util.create_repo_rhodecode_ui(
525 fork, 'extensions', value='', key='largefiles', active=True)
525 fork, 'extensions', value='', key='largefiles', active=True)
526
526
527 compare_module = ('rhodecode.lib.vcs.backends.hg.repository.'
527 compare_module = ('rhodecode.lib.vcs.backends.hg.repository.'
528 'MercurialRepository.compare')
528 'MercurialRepository.compare')
529 with mock.patch(compare_module) as compare_mock:
529 with mock.patch(compare_module) as compare_mock:
530 compare_mock.side_effect = RepositoryRequirementError()
530 compare_mock.side_effect = RepositoryRequirementError()
531
531
532 response = self.app.get(
532 response = self.app.get(
533 route_path('repo_compare',
533 route_path('repo_compare',
534 repo_name=orig.repo_name,
534 repo_name=orig.repo_name,
535 source_ref_type="rev", source_ref="tip",
535 source_ref_type="rev", source_ref="tip",
536 target_ref_type="rev", target_ref="tip",
536 target_ref_type="rev", target_ref="tip",
537 params=dict(merge='1', target_repo=fork.repo_name),
537 params=dict(merge='1', target_repo=fork.repo_name),
538 ),
538 ),
539 status=302)
539 status=302)
540
540
541 assert_session_flash(
541 assert_session_flash(
542 response,
542 response,
543 'Could not compare repos with different large file settings')
543 'Could not compare repos with different large file settings')
544
544
545
545
546 @pytest.mark.usefixtures("autologin_user")
546 @pytest.mark.usefixtures("autologin_user")
547 class TestCompareControllerSvn(object):
547 class TestCompareControllerSvn(object):
548
548
549 def test_supports_references_with_path(self, app, backend_svn):
549 def test_supports_references_with_path(self, app, backend_svn):
550 repo = backend_svn['svn-simple-layout']
550 repo = backend_svn['svn-simple-layout']
551 commit_id = repo.get_commit(commit_idx=-1).raw_id
551 commit_id = repo.get_commit(commit_idx=-1).raw_id
552 response = app.get(
552 response = app.get(
553 route_path('repo_compare',
553 route_path('repo_compare',
554 repo_name=repo.repo_name,
554 repo_name=repo.repo_name,
555 source_ref_type="tag",
555 source_ref_type="tag",
556 source_ref="%s@%s" % ('tags/v0.1', commit_id),
556 source_ref="%s@%s" % ('tags/v0.1', commit_id),
557 target_ref_type="tag",
557 target_ref_type="tag",
558 target_ref="%s@%s" % ('tags/v0.2', commit_id),
558 target_ref="%s@%s" % ('tags/v0.2', commit_id),
559 params=dict(merge='1'),
559 params=dict(merge='1'),
560 ),
560 ),
561 status=200)
561 status=200)
562
562
563 # Expecting no commits, since both paths are at the same revision
563 # Expecting no commits, since both paths are at the same revision
564 response.mustcontain('No commits in this compare')
564 response.mustcontain('No commits in this compare')
565
565
566 # Should find only one file changed when comparing those two tags
566 # Should find only one file changed when comparing those two tags
567 response.mustcontain('example.py')
567 response.mustcontain('example.py')
568 compare_page = ComparePage(response)
568 compare_page = ComparePage(response)
569 compare_page.contains_change_summary(1, 5, 1)
569 compare_page.contains_change_summary(1, 5, 1)
570
570
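The SVN test above builds references that combine a path inside the repository with a commit id. A small standalone sketch of that 'path@commit' format, using a made-up revision number, looks like this:

def svn_path_ref(path, commit_id):
    # e.g. 'tags/v0.1@25' -- the tag path plus the revision to pin it at
    return f"{path}@{commit_id}"

source_ref = svn_path_ref("tags/v0.1", "25")  # '25' is a placeholder revision
target_ref = svn_path_ref("tags/v0.2", "25")
# With both refs pinned to the same revision the compare page shows
# 'No commits in this compare' yet still diffs the two tag paths.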
571 def test_shows_commits_if_different_ids(self, app, backend_svn):
571 def test_shows_commits_if_different_ids(self, app, backend_svn):
572 repo = backend_svn['svn-simple-layout']
572 repo = backend_svn['svn-simple-layout']
573 source_id = repo.get_commit(commit_idx=-6).raw_id
573 source_id = repo.get_commit(commit_idx=-6).raw_id
574 target_id = repo.get_commit(commit_idx=-1).raw_id
574 target_id = repo.get_commit(commit_idx=-1).raw_id
575 response = app.get(
575 response = app.get(
576 route_path('repo_compare',
576 route_path('repo_compare',
577 repo_name=repo.repo_name,
577 repo_name=repo.repo_name,
578 source_ref_type="tag",
578 source_ref_type="tag",
579 source_ref="%s@%s" % ('tags/v0.1', source_id),
579 source_ref="%s@%s" % ('tags/v0.1', source_id),
580 target_ref_type="tag",
580 target_ref_type="tag",
581 target_ref="%s@%s" % ('tags/v0.2', target_id),
581 target_ref="%s@%s" % ('tags/v0.2', target_id),
582 params=dict(merge='1')
582 params=dict(merge='1')
583 ),
583 ),
584 status=200)
584 status=200)
585
585
586 # It should show commits
586 # It should show commits
587 assert 'No commits in this compare' not in response.text
587 assert 'No commits in this compare' not in response.text
588
588
589 # Should find only one file changed when comparing those two tags
589 # Should find only one file changed when comparing those two tags
590 response.mustcontain('example.py')
590 response.mustcontain('example.py')
591 compare_page = ComparePage(response)
591 compare_page = ComparePage(response)
592 compare_page.contains_change_summary(1, 5, 1)
592 compare_page.contains_change_summary(1, 5, 1)
593
593
594
594
595 class ComparePage(AssertResponse):
595 class ComparePage(AssertResponse):
596 """
596 """
597 Abstracts the page template from the tests
597 Abstracts the page template from the tests
598 """
598 """
599
599
600 def contains_file_links_and_anchors(self, files):
600 def contains_file_links_and_anchors(self, files):
601 doc = lxml.html.fromstring(self.response.body)
601 doc = lxml.html.fromstring(self.response.body)
602 for filename, file_id in files:
602 for filename, file_id in files:
603 self.contains_one_anchor(file_id)
603 self.contains_one_anchor(file_id)
604 diffblock = doc.cssselect('[data-f-path="%s"]' % filename)
604 diffblock = doc.cssselect('[data-f-path="%s"]' % filename)
605 assert len(diffblock) == 2
605 assert len(diffblock) == 2
606 for lnk in diffblock[0].cssselect('a'):
606 for lnk in diffblock[0].cssselect('a'):
607 if 'permalink' in lnk.text:
607 if 'permalink' in lnk.text:
608 assert '#{}'.format(file_id) in lnk.attrib['href']
608 assert '#{}'.format(file_id) in lnk.attrib['href']
609 break
609 break
610 else:
610 else:
611 pytest.fail('Unable to find permalink')
611 pytest.fail('Unable to find permalink')
612
612
613 def contains_change_summary(self, files_changed, inserted, deleted):
613 def contains_change_summary(self, files_changed, inserted, deleted):
614 template = (
614 template = (
615 '{files_changed} file{plural} changed: '
615 '{files_changed} file{plural} changed: '
616 '<span class="op-added">{inserted} inserted</span>, <span class="op-deleted">{deleted} deleted</span>')
616 '<span class="op-added">{inserted} inserted</span>, <span class="op-deleted">{deleted} deleted</span>')
617 self.response.mustcontain(template.format(
617 self.response.mustcontain(template.format(
618 files_changed=files_changed,
618 files_changed=files_changed,
619 plural="s" if files_changed > 1 else "",
619 plural="s" if files_changed > 1 else "",
620 inserted=inserted,
620 inserted=inserted,
621 deleted=deleted))
621 deleted=deleted))
622
622
623 def contains_commits(self, commits, ancestors=None):
623 def contains_commits(self, commits, ancestors=None):
624 response = self.response
624 response = self.response
625
625
626 for commit in commits:
626 for commit in commits:
627 # Expecting to see the commit message in an element which
627 # Expecting to see the commit message in an element which
628 # has the ID "c-{commit.raw_id}"
628 # has the ID "c-{commit.raw_id}"
629 self.element_contains('#c-' + commit.raw_id, commit.message)
629 self.element_contains('#c-' + commit.raw_id, commit.message)
630 self.contains_one_link(
630 self.contains_one_link(
631 'r%s:%s' % (commit.idx, commit.short_id),
631 'r%s:%s' % (commit.idx, commit.short_id),
632 self._commit_url(commit))
632 self._commit_url(commit))
633
633
634 if ancestors:
634 if ancestors:
635 response.mustcontain('Ancestor')
635 response.mustcontain('Ancestor')
636 for ancestor in ancestors:
636 for ancestor in ancestors:
637 self.contains_one_link(
637 self.contains_one_link(
638 ancestor.short_id, self._commit_url(ancestor))
638 ancestor.short_id, self._commit_url(ancestor))
639
639
640 def _commit_url(self, commit):
640 def _commit_url(self, commit):
641 return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id)
641 return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id)
642
642
643 def swap_is_hidden(self):
643 def swap_is_hidden(self):
644 assert '<a id="btn-swap"' not in self.response.text
644 assert '<a id="btn-swap"' not in self.response.text
645
645
646 def swap_is_visible(self):
646 def swap_is_visible(self):
647 assert '<a id="btn-swap"' in self.response.text
647 assert '<a id="btn-swap"' in self.response.text
648
648
649 def target_source_are_disabled(self):
649 def target_source_are_disabled(self):
650 response = self.response
650 response = self.response
651 response.mustcontain("var enable_fields = false;")
651 response.mustcontain("var enable_fields = false;")
652 response.mustcontain('.select2("enable", enable_fields)')
652 response.mustcontain('.select2("enable", enable_fields)')
653
653
654 def target_source_are_enabled(self):
654 def target_source_are_enabled(self):
655 response = self.response
655 response = self.response
656 response.mustcontain("var enable_fields = true;")
656 response.mustcontain("var enable_fields = true;")
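ComparePage.contains_file_links_and_anchors() above matches anchors such as 'a_c-<short_id>-826e8142e6ba', which the tests hard-code for 'file1'. A standalone sketch of how such an anchor id could be derived is below; the idea that the 12-character suffix is an MD5 prefix of the file path is an assumption for illustration only, not something this diff confirms.

import hashlib

def file_anchor(commit_short_id, file_path):
    # assumed scheme: 'a_c-' + commit short id + first 12 hex chars of md5(file path)
    digest = hashlib.md5(file_path.encode("utf-8")).hexdigest()[:12]
    return f"a_c-{commit_short_id}-{digest}"

print(file_anchor("deadbeef1234", "file1"))  # a_c-deadbeef1234-<12 hex chars>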
This diff has been collapsed as it changes many lines (1860 lines changed).
@@ -1,1651 +1,1935 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 import mock
18 import mock
20 import pytest
19 import pytest
21
20
22 import rhodecode
21 import rhodecode
23 from rhodecode.lib import helpers as h
22 from rhodecode.lib import helpers as h
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
23 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason, Reference
25 from rhodecode.lib.vcs.nodes import FileNode
24 from rhodecode.lib.vcs.nodes import FileNode
26 from rhodecode.lib.ext_json import json
25 from rhodecode.lib.ext_json import json
27 from rhodecode.model.changeset_status import ChangesetStatusModel
26 from rhodecode.model.changeset_status import ChangesetStatusModel
28 from rhodecode.model.db import (
27 from rhodecode.model.db import (
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
28 PullRequest,
29 ChangesetStatus,
30 UserLog,
31 Notification,
32 ChangesetComment,
33 Repository,
34 )
30 from rhodecode.model.meta import Session
35 from rhodecode.model.meta import Session
31 from rhodecode.model.pull_request import PullRequestModel
36 from rhodecode.model.pull_request import PullRequestModel
32 from rhodecode.model.user import UserModel
37 from rhodecode.model.user import UserModel
33 from rhodecode.model.comment import CommentsModel
38 from rhodecode.model.comment import CommentsModel
34 from rhodecode.tests import (
39 from rhodecode.tests import (
35 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
40 assert_session_flash,
41 TEST_USER_ADMIN_LOGIN,
42 TEST_USER_REGULAR_LOGIN,
43 )
44 from rhodecode.tests.fixture_mods.fixture_utils import PRTestUtility
36 from rhodecode.tests.routes import route_path
45 from rhodecode.tests.routes import route_path
37
46
38
47
39 @pytest.mark.usefixtures('app', 'autologin_user')
48 @pytest.mark.usefixtures("app", "autologin_user")
40 @pytest.mark.backends("git", "hg")
49 @pytest.mark.backends("git", "hg")
41 class TestPullrequestsView(object):
50 class TestPullrequestsView(object):
42
43 def test_index(self, backend):
51 def test_index(self, backend):
44 self.app.get(route_path(
52 self.app.get(route_path("pullrequest_new", repo_name=backend.repo_name))
45 'pullrequest_new',
46 repo_name=backend.repo_name))
47
53
48 def test_option_menu_create_pull_request_exists(self, backend):
54 def test_option_menu_create_pull_request_exists(self, backend):
49 repo_name = backend.repo_name
55 repo_name = backend.repo_name
50 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
56 response = self.app.get(h.route_path("repo_summary", repo_name=repo_name))
51
57
52 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
58 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
53 'pullrequest_new', repo_name=repo_name)
59 "pullrequest_new", repo_name=repo_name
60 )
54 response.mustcontain(create_pr_link)
61 response.mustcontain(create_pr_link)
55
62
56 def test_create_pr_form_with_raw_commit_id(self, backend):
63 def test_create_pr_form_with_raw_commit_id(self, backend):
57 repo = backend.repo
64 repo = backend.repo
58
65
59 self.app.get(
66 self.app.get(
60 route_path('pullrequest_new', repo_name=repo.repo_name,
67 route_path(
61 commit=repo.get_commit().raw_id),
68 "pullrequest_new",
62 status=200)
69 repo_name=repo.repo_name,
70 commit=repo.get_commit().raw_id,
71 ),
72 status=200,
73 )
63
74
64 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
75 @pytest.mark.parametrize("pr_merge_enabled", [True, False])
65 @pytest.mark.parametrize('range_diff', ["0", "1"])
76 @pytest.mark.parametrize("range_diff", ["0", "1"])
66 def test_show(self, pr_util, pr_merge_enabled, range_diff):
77 def test_show(self, pr_util, pr_merge_enabled, range_diff):
67 pull_request = pr_util.create_pull_request(
78 pull_request = pr_util.create_pull_request(
68 mergeable=pr_merge_enabled, enable_notifications=False)
79 mergeable=pr_merge_enabled, enable_notifications=False
80 )
69
81
70 response = self.app.get(route_path(
82 response = self.app.get(
71 'pullrequest_show',
83 route_path(
72 repo_name=pull_request.target_repo.scm_instance().name,
84 "pullrequest_show",
73 pull_request_id=pull_request.pull_request_id,
85 repo_name=pull_request.target_repo.scm_instance().name,
74 params={'range-diff': range_diff}))
86 pull_request_id=pull_request.pull_request_id,
87 params={"range-diff": range_diff},
88 )
89 )
75
90
76 for commit_id in pull_request.revisions:
91 for commit_id in pull_request.revisions:
77 response.mustcontain(commit_id)
92 response.mustcontain(commit_id)
78
93
79 response.mustcontain(pull_request.target_ref_parts.type)
94 response.mustcontain(pull_request.target_ref_parts.type)
80 response.mustcontain(pull_request.target_ref_parts.name)
95 response.mustcontain(pull_request.target_ref_parts.name)
81
96
82 response.mustcontain('class="pull-request-merge"')
97 response.mustcontain('class="pull-request-merge"')
83
98
84 if pr_merge_enabled:
99 if pr_merge_enabled:
85 response.mustcontain('Pull request reviewer approval is pending')
100 response.mustcontain("Pull request reviewer approval is pending")
86 else:
101 else:
87 response.mustcontain('Server-side pull request merging is disabled.')
102 response.mustcontain("Server-side pull request merging is disabled.")
88
103
89 if range_diff == "1":
104 if range_diff == "1":
90 response.mustcontain('Turn off: Show the diff as commit range')
105 response.mustcontain("Turn off: Show the diff as commit range")
91
106
92 def test_show_versions_of_pr(self, backend, csrf_token):
107 def test_show_versions_of_pr(self, backend, csrf_token):
93 commits = [
108 commits = [
94 {'message': 'initial-commit',
109 {
95 'added': [FileNode(b'test-file.txt', b'LINE1\n')]},
110 "message": "initial-commit",
96
111 "added": [FileNode(b"test-file.txt", b"LINE1\n")],
97 {'message': 'commit-1',
112 },
98 'changed': [FileNode(b'test-file.txt', b'LINE1\nLINE2\n')]},
113 {
114 "message": "commit-1",
115 "changed": [FileNode(b"test-file.txt", b"LINE1\nLINE2\n")],
116 },
99 # Above is the initial version of PR that changes a single line
117 # Above is the initial version of PR that changes a single line
100
101 # from now on we'll add 3 more commits, each adding another line
118 # from now on we'll add 3 more commits, each adding another line
102 {'message': 'commit-2',
119 {
103 'changed': [FileNode(b'test-file.txt', b'LINE1\nLINE2\nLINE3\n')]},
120 "message": "commit-2",
104
121 "changed": [FileNode(b"test-file.txt", b"LINE1\nLINE2\nLINE3\n")],
105 {'message': 'commit-3',
122 },
106 'changed': [FileNode(b'test-file.txt', b'LINE1\nLINE2\nLINE3\nLINE4\n')]},
123 {
107
124 "message": "commit-3",
108 {'message': 'commit-4',
125 "changed": [
109 'changed': [FileNode(b'test-file.txt', b'LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n')]},
126 FileNode(b"test-file.txt", b"LINE1\nLINE2\nLINE3\nLINE4\n")
127 ],
128 },
129 {
130 "message": "commit-4",
131 "changed": [
132 FileNode(b"test-file.txt", b"LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n")
133 ],
134 },
110 ]
135 ]
111
136
112 commit_ids = backend.create_master_repo(commits)
137 commit_ids = backend.create_master_repo(commits)
113 target = backend.create_repo(heads=['initial-commit'])
138 target = backend.create_repo(heads=["initial-commit"])
114 source = backend.create_repo(heads=['commit-1'])
139 source = backend.create_repo(heads=["commit-1"])
115 source_repo_name = source.repo_name
140 source_repo_name = source.repo_name
116 target_repo_name = target.repo_name
141 target_repo_name = target.repo_name
117
142
118 target_ref = 'branch:{branch}:{commit_id}'.format(
143 target_ref = "branch:{branch}:{commit_id}".format(
119 branch=backend.default_branch_name, commit_id=commit_ids['initial-commit'])
144 branch=backend.default_branch_name, commit_id=commit_ids["initial-commit"]
120 source_ref = 'branch:{branch}:{commit_id}'.format(
145 )
121 branch=backend.default_branch_name, commit_id=commit_ids['commit-1'])
146 source_ref = "branch:{branch}:{commit_id}".format(
147 branch=backend.default_branch_name, commit_id=commit_ids["commit-1"]
148 )
122
149
123 response = self.app.post(
150 response = self.app.post(
124 route_path('pullrequest_create', repo_name=source.repo_name),
151 route_path("pullrequest_create", repo_name=source.repo_name),
125 [
152 [
126 ('source_repo', source_repo_name),
153 ("source_repo", source_repo_name),
127 ('source_ref', source_ref),
154 ("source_ref", source_ref),
128 ('target_repo', target_repo_name),
155 ("target_repo", target_repo_name),
129 ('target_ref', target_ref),
156 ("target_ref", target_ref),
130 ('common_ancestor', commit_ids['initial-commit']),
157 ("common_ancestor", commit_ids["initial-commit"]),
131 ('pullrequest_title', 'Title'),
158 ("pullrequest_title", "Title"),
132 ('pullrequest_desc', 'Description'),
159 ("pullrequest_desc", "Description"),
133 ('description_renderer', 'markdown'),
160 ("description_renderer", "markdown"),
134 ('__start__', 'review_members:sequence'),
161 ("__start__", "review_members:sequence"),
135 ('__start__', 'reviewer:mapping'),
162 ("__start__", "reviewer:mapping"),
136 ('user_id', '1'),
163 ("user_id", "1"),
137 ('__start__', 'reasons:sequence'),
164 ("__start__", "reasons:sequence"),
138 ('reason', 'Some reason'),
165 ("reason", "Some reason"),
139 ('__end__', 'reasons:sequence'),
166 ("__end__", "reasons:sequence"),
140 ('__start__', 'rules:sequence'),
167 ("__start__", "rules:sequence"),
141 ('__end__', 'rules:sequence'),
168 ("__end__", "rules:sequence"),
142 ('mandatory', 'False'),
169 ("mandatory", "False"),
143 ('__end__', 'reviewer:mapping'),
170 ("__end__", "reviewer:mapping"),
144 ('__end__', 'review_members:sequence'),
171 ("__end__", "review_members:sequence"),
145 ('__start__', 'revisions:sequence'),
172 ("__start__", "revisions:sequence"),
146 ('revisions', commit_ids['commit-1']),
173 ("revisions", commit_ids["commit-1"]),
147 ('__end__', 'revisions:sequence'),
174 ("__end__", "revisions:sequence"),
148 ('user', ''),
175 ("user", ""),
149 ('csrf_token', csrf_token),
176 ("csrf_token", csrf_token),
150 ],
177 ],
151 status=302)
178 status=302,
179 )
152
180
153 location = response.headers['Location']
181 location = response.headers["Location"]
154
182
155 pull_request_id = location.rsplit('/', 1)[1]
183 pull_request_id = location.rsplit("/", 1)[1]
156 assert pull_request_id != 'new'
184 assert pull_request_id != "new"
157 pull_request = PullRequest.get(int(pull_request_id))
185 pull_request = PullRequest.get(int(pull_request_id))
158
186
159 pull_request_id = pull_request.pull_request_id
187 pull_request_id = pull_request.pull_request_id
160
188
161 # Show initial version of PR
189 # Show initial version of PR
162 response = self.app.get(
190 response = self.app.get(
163 route_path('pullrequest_show',
191 route_path(
164 repo_name=target_repo_name,
192 "pullrequest_show",
165 pull_request_id=pull_request_id))
193 repo_name=target_repo_name,
194 pull_request_id=pull_request_id,
195 )
196 )
166
197
167 response.mustcontain('commit-1')
198 response.mustcontain("commit-1")
168 response.mustcontain(no=['commit-2'])
199 response.mustcontain(no=["commit-2"])
169 response.mustcontain(no=['commit-3'])
200 response.mustcontain(no=["commit-3"])
170 response.mustcontain(no=['commit-4'])
201 response.mustcontain(no=["commit-4"])
171
202
172 response.mustcontain('cb-addition"></span><span>LINE2</span>')
203 response.mustcontain('cb-addition"></span><span>LINE2</span>')
173 response.mustcontain(no=['LINE3'])
204 response.mustcontain(no=["LINE3"])
174 response.mustcontain(no=['LINE4'])
205 response.mustcontain(no=["LINE4"])
175 response.mustcontain(no=['LINE5'])
206 response.mustcontain(no=["LINE5"])
176
207
177 # update PR #1
208 # update PR #1
178 source_repo = Repository.get_by_repo_name(source_repo_name)
209 source_repo = Repository.get_by_repo_name(source_repo_name)
179 backend.pull_heads(source_repo, heads=['commit-2'])
210 backend.pull_heads(source_repo, heads=["commit-2"])
180 response = self.app.post(
211 response = self.app.post(
181 route_path('pullrequest_update',
212 route_path(
182 repo_name=target_repo_name, pull_request_id=pull_request_id),
213 "pullrequest_update",
183 params={'update_commits': 'true', 'csrf_token': csrf_token})
214 repo_name=target_repo_name,
215 pull_request_id=pull_request_id,
216 ),
217 params={"update_commits": "true", "csrf_token": csrf_token},
218 )
184
219
185 # update PR #2
220 # update PR #2
186 source_repo = Repository.get_by_repo_name(source_repo_name)
221 source_repo = Repository.get_by_repo_name(source_repo_name)
187 backend.pull_heads(source_repo, heads=['commit-3'])
222 backend.pull_heads(source_repo, heads=["commit-3"])
188 response = self.app.post(
223 response = self.app.post(
189 route_path('pullrequest_update',
224 route_path(
190 repo_name=target_repo_name, pull_request_id=pull_request_id),
225 "pullrequest_update",
191 params={'update_commits': 'true', 'csrf_token': csrf_token})
226 repo_name=target_repo_name,
227 pull_request_id=pull_request_id,
228 ),
229 params={"update_commits": "true", "csrf_token": csrf_token},
230 )
192
231
193 # update PR #3
232 # update PR #3
194 source_repo = Repository.get_by_repo_name(source_repo_name)
233 source_repo = Repository.get_by_repo_name(source_repo_name)
195 backend.pull_heads(source_repo, heads=['commit-4'])
234 backend.pull_heads(source_repo, heads=["commit-4"])
196 response = self.app.post(
235 response = self.app.post(
197 route_path('pullrequest_update',
236 route_path(
198 repo_name=target_repo_name, pull_request_id=pull_request_id),
237 "pullrequest_update",
199 params={'update_commits': 'true', 'csrf_token': csrf_token})
238 repo_name=target_repo_name,
239 pull_request_id=pull_request_id,
240 ),
241 params={"update_commits": "true", "csrf_token": csrf_token},
242 )
200
243
201 # Show final version !
244 # Show final version !
202 response = self.app.get(
245 response = self.app.get(
203 route_path('pullrequest_show',
246 route_path(
204 repo_name=target_repo_name,
247 "pullrequest_show",
205 pull_request_id=pull_request_id))
248 repo_name=target_repo_name,
249 pull_request_id=pull_request_id,
250 )
251 )
206
252
207 # 3 updates, and the latest == 4
253 # 3 updates, and the latest == 4
208 response.mustcontain('4 versions available for this pull request')
254 response.mustcontain("4 versions available for this pull request")
209 response.mustcontain(no=['rhodecode diff rendering error'])
255 response.mustcontain(no=["rhodecode diff rendering error"])
210
256
211 # initial show must have 3 commits, and 3 adds
211 # the final show must have 4 commits, and 4 added lines
257 # the final show must have 4 commits, and 4 added lines
258 response.mustcontain("commit-1")
213 response.mustcontain('commit-2')
259 response.mustcontain("commit-2")
214 response.mustcontain('commit-3')
260 response.mustcontain("commit-3")
215 response.mustcontain('commit-4')
261 response.mustcontain("commit-4")
216
262
217 response.mustcontain('cb-addition"></span><span>LINE2</span>')
263 response.mustcontain('cb-addition"></span><span>LINE2</span>')
218 response.mustcontain('cb-addition"></span><span>LINE3</span>')
264 response.mustcontain('cb-addition"></span><span>LINE3</span>')
219 response.mustcontain('cb-addition"></span><span>LINE4</span>')
265 response.mustcontain('cb-addition"></span><span>LINE4</span>')
220 response.mustcontain('cb-addition"></span><span>LINE5</span>')
266 response.mustcontain('cb-addition"></span><span>LINE5</span>')
221
267
222 # fetch versions
268 # fetch versions
223 pr = PullRequest.get(pull_request_id)
269 pr = PullRequest.get(pull_request_id)
224 versions = [x.pull_request_version_id for x in pr.versions.all()]
270 versions = [x.pull_request_version_id for x in pr.versions.all()]
225 assert len(versions) == 3
271 assert len(versions) == 3
226
272
227 # show v1,v2,v3,v4
273 # show v1,v2,v3,v4
228 def cb_line(text):
274 def cb_line(text):
229 return 'cb-addition"></span><span>{}</span>'.format(text)
275 return 'cb-addition"></span><span>{}</span>'.format(text)
230
276
231 def cb_context(text):
277 def cb_context(text):
232 return '<span class="cb-code"><span class="cb-action cb-context">' \
278 return (
233 '</span><span>{}</span></span>'.format(text)
279 '<span class="cb-code"><span class="cb-action cb-context">'
280 "</span><span>{}</span></span>".format(text)
281 )
234
282
235 commit_tests = {
283 commit_tests = {
236 # in response, not in response
284 # in response, not in response
237 1: (['commit-1'], ['commit-2', 'commit-3', 'commit-4']),
285 1: (["commit-1"], ["commit-2", "commit-3", "commit-4"]),
238 2: (['commit-1', 'commit-2'], ['commit-3', 'commit-4']),
286 2: (["commit-1", "commit-2"], ["commit-3", "commit-4"]),
239 3: (['commit-1', 'commit-2', 'commit-3'], ['commit-4']),
287 3: (["commit-1", "commit-2", "commit-3"], ["commit-4"]),
240 4: (['commit-1', 'commit-2', 'commit-3', 'commit-4'], []),
288 4: (["commit-1", "commit-2", "commit-3", "commit-4"], []),
241 }
289 }
242 diff_tests = {
290 diff_tests = {
243 1: (['LINE2'], ['LINE3', 'LINE4', 'LINE5']),
291 1: (["LINE2"], ["LINE3", "LINE4", "LINE5"]),
244 2: (['LINE2', 'LINE3'], ['LINE4', 'LINE5']),
292 2: (["LINE2", "LINE3"], ["LINE4", "LINE5"]),
245 3: (['LINE2', 'LINE3', 'LINE4'], ['LINE5']),
293 3: (["LINE2", "LINE3", "LINE4"], ["LINE5"]),
246 4: (['LINE2', 'LINE3', 'LINE4', 'LINE5'], []),
294 4: (["LINE2", "LINE3", "LINE4", "LINE5"], []),
247 }
295 }
248 for idx, ver in enumerate(versions, 1):
296 for idx, ver in enumerate(versions, 1):
297 response = self.app.get(
298 route_path(
299 "pullrequest_show",
300 repo_name=target_repo_name,
301 pull_request_id=pull_request_id,
302 params={"version": ver},
303 )
304 )
249
305
250 response = self.app.get(
306 response.mustcontain(no=["rhodecode diff rendering error"])
251 route_path('pullrequest_show',
307 response.mustcontain("Showing changes at v{}".format(idx))
252 repo_name=target_repo_name,
253 pull_request_id=pull_request_id,
254 params={'version': ver}))
255
256 response.mustcontain(no=['rhodecode diff rendering error'])
257 response.mustcontain('Showing changes at v{}'.format(idx))
258
308
259 yes, no = commit_tests[idx]
309 yes, no = commit_tests[idx]
260 for y in yes:
310 for y in yes:
261 response.mustcontain(y)
311 response.mustcontain(y)
262 for n in no:
312 for n in no:
263 response.mustcontain(no=n)
313 response.mustcontain(no=n)
264
314
265 yes, no = diff_tests[idx]
315 yes, no = diff_tests[idx]
266 for y in yes:
316 for y in yes:
267 response.mustcontain(cb_line(y))
317 response.mustcontain(cb_line(y))
268 for n in no:
318 for n in no:
269 response.mustcontain(no=n)
319 response.mustcontain(no=n)
270
320
271 # show diff between versions
321 # show diff between versions
272 diff_compare_tests = {
322 diff_compare_tests = {
273 1: (['LINE3'], ['LINE1', 'LINE2']),
323 1: (["LINE3"], ["LINE1", "LINE2"]),
274 2: (['LINE3', 'LINE4'], ['LINE1', 'LINE2']),
324 2: (["LINE3", "LINE4"], ["LINE1", "LINE2"]),
275 3: (['LINE3', 'LINE4', 'LINE5'], ['LINE1', 'LINE2']),
325 3: (["LINE3", "LINE4", "LINE5"], ["LINE1", "LINE2"]),
276 }
326 }
277 for idx, ver in enumerate(versions, 1):
327 for idx, ver in enumerate(versions, 1):
278 adds, context = diff_compare_tests[idx]
328 adds, context = diff_compare_tests[idx]
279
329
280 to_ver = ver+1
330 to_ver = ver + 1
281 if idx == 3:
331 if idx == 3:
282 to_ver = 'latest'
332 to_ver = "latest"
283
333
284 response = self.app.get(
334 response = self.app.get(
285 route_path('pullrequest_show',
335 route_path(
286 repo_name=target_repo_name,
336 "pullrequest_show",
287 pull_request_id=pull_request_id,
337 repo_name=target_repo_name,
288 params={'from_version': versions[0], 'version': to_ver}))
338 pull_request_id=pull_request_id,
339 params={"from_version": versions[0], "version": to_ver},
340 )
341 )
289
342
290 response.mustcontain(no=['rhodecode diff rendering error'])
343 response.mustcontain(no=["rhodecode diff rendering error"])
291
344
292 for a in adds:
345 for a in adds:
293 response.mustcontain(cb_line(a))
346 response.mustcontain(cb_line(a))
294 for c in context:
347 for c in context:
295 response.mustcontain(cb_context(c))
348 response.mustcontain(cb_context(c))
296
349
297 # test version v2 -> v3
350 # test version v2 -> v3
298 response = self.app.get(
351 response = self.app.get(
299 route_path('pullrequest_show',
352 route_path(
300 repo_name=target_repo_name,
353 "pullrequest_show",
301 pull_request_id=pull_request_id,
354 repo_name=target_repo_name,
302 params={'from_version': versions[1], 'version': versions[2]}))
355 pull_request_id=pull_request_id,
356 params={"from_version": versions[1], "version": versions[2]},
357 )
358 )
303
359
304 response.mustcontain(cb_context('LINE1'))
360 response.mustcontain(cb_context("LINE1"))
305 response.mustcontain(cb_context('LINE2'))
361 response.mustcontain(cb_context("LINE2"))
306 response.mustcontain(cb_context('LINE3'))
362 response.mustcontain(cb_context("LINE3"))
307 response.mustcontain(cb_line('LINE4'))
363 response.mustcontain(cb_line("LINE4"))
308
364
309 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
365 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
310 # Logout
366 # Logout
311 response = self.app.post(
367 response = self.app.post(
312 h.route_path('logout'),
368 h.route_path("logout"), params={"csrf_token": csrf_token}
313 params={'csrf_token': csrf_token})
369 )
314 # Login as regular user
370 # Login as regular user
315 response = self.app.post(h.route_path('login'),
371 response = self.app.post(
316 {'username': TEST_USER_REGULAR_LOGIN,
372 h.route_path("login"),
317 'password': 'test12'})
373 {"username": TEST_USER_REGULAR_LOGIN, "password": "test12"},
374 )
375
376 pull_request = pr_util.create_pull_request(author=TEST_USER_REGULAR_LOGIN)
318
377
319 pull_request = pr_util.create_pull_request(
378 response = self.app.get(
320 author=TEST_USER_REGULAR_LOGIN)
379 route_path(
380 "pullrequest_show",
381 repo_name=pull_request.target_repo.scm_instance().name,
382 pull_request_id=pull_request.pull_request_id,
383 )
384 )
321
385
322 response = self.app.get(route_path(
386 response.mustcontain("Server-side pull request merging is disabled.")
323 'pullrequest_show',
324 repo_name=pull_request.target_repo.scm_instance().name,
325 pull_request_id=pull_request.pull_request_id))
326
327 response.mustcontain('Server-side pull request merging is disabled.')
328
387
329 assert_response = response.assert_response()
388 assert_response = response.assert_response()
330 # for a regular user without merge permissions, we don't see it
389 # for a regular user without merge permissions, we don't see it
331 assert_response.no_element_exists('#close-pull-request-action')
390 assert_response.no_element_exists("#close-pull-request-action")
332
391
333 user_util.grant_user_permission_to_repo(
392 user_util.grant_user_permission_to_repo(
334 pull_request.target_repo,
393 pull_request.target_repo,
335 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
394 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
336 'repository.write')
395 "repository.write",
337 response = self.app.get(route_path(
396 )
338 'pullrequest_show',
397 response = self.app.get(
339 repo_name=pull_request.target_repo.scm_instance().name,
398 route_path(
340 pull_request_id=pull_request.pull_request_id))
399 "pullrequest_show",
400 repo_name=pull_request.target_repo.scm_instance().name,
401 pull_request_id=pull_request.pull_request_id,
402 )
403 )
341
404
342 response.mustcontain('Server-side pull request merging is disabled.')
405 response.mustcontain("Server-side pull request merging is disabled.")
343
406
344 assert_response = response.assert_response()
407 assert_response = response.assert_response()
345 # now the regular user has merge permissions, so we have the CLOSE button
408 # now the regular user has merge permissions, so we have the CLOSE button
346 assert_response.one_element_exists('#close-pull-request-action')
409 assert_response.one_element_exists("#close-pull-request-action")
347
410
348 def test_show_invalid_commit_id(self, pr_util):
411 def test_show_invalid_commit_id(self, pr_util):
349 # Simulating invalid revisions which will cause a lookup error
412 # Simulating invalid revisions which will cause a lookup error
350 pull_request = pr_util.create_pull_request()
413 pull_request = pr_util.create_pull_request()
351 pull_request.revisions = ['invalid']
414 pull_request.revisions = ["invalid"]
352 Session().add(pull_request)
415 Session().add(pull_request)
353 Session().commit()
416 Session().commit()
354
417
355 response = self.app.get(route_path(
418 response = self.app.get(
356 'pullrequest_show',
419 route_path(
357 repo_name=pull_request.target_repo.scm_instance().name,
420 "pullrequest_show",
358 pull_request_id=pull_request.pull_request_id))
421 repo_name=pull_request.target_repo.scm_instance().name,
422 pull_request_id=pull_request.pull_request_id,
423 )
424 )
359
425
360 for commit_id in pull_request.revisions:
426 for commit_id in pull_request.revisions:
361 response.mustcontain(commit_id)
427 response.mustcontain(commit_id)
362
428
363 def test_show_invalid_source_reference(self, pr_util):
429 def test_show_invalid_source_reference(self, pr_util):
364 pull_request = pr_util.create_pull_request()
430 pull_request = pr_util.create_pull_request()
365 pull_request.source_ref = 'branch:b:invalid'
431 pull_request.source_ref = "branch:b:invalid"
366 Session().add(pull_request)
432 Session().add(pull_request)
367 Session().commit()
433 Session().commit()
368
434
369 self.app.get(route_path(
435 self.app.get(
370 'pullrequest_show',
436 route_path(
371 repo_name=pull_request.target_repo.scm_instance().name,
437 "pullrequest_show",
372 pull_request_id=pull_request.pull_request_id))
438 repo_name=pull_request.target_repo.scm_instance().name,
439 pull_request_id=pull_request.pull_request_id,
440 )
441 )
373
442
374 def test_edit_title_description(self, pr_util, csrf_token):
443 def test_edit_title_description(self, pr_util, csrf_token):
375 pull_request = pr_util.create_pull_request()
444 pull_request = pr_util.create_pull_request()
376 pull_request_id = pull_request.pull_request_id
445 pull_request_id = pull_request.pull_request_id
377
446
378 response = self.app.post(
447 response = self.app.post(
379 route_path('pullrequest_update',
448 route_path(
380 repo_name=pull_request.target_repo.repo_name,
449 "pullrequest_update",
381 pull_request_id=pull_request_id),
450 repo_name=pull_request.target_repo.repo_name,
451 pull_request_id=pull_request_id,
452 ),
382 params={
453 params={
383 'edit_pull_request': 'true',
454 "edit_pull_request": "true",
384 'title': 'New title',
455 "title": "New title",
385 'description': 'New description',
456 "description": "New description",
386 'csrf_token': csrf_token})
457 "csrf_token": csrf_token,
458 },
459 )
387
460
388 assert_session_flash(
461 assert_session_flash(
389 response, 'Pull request title & description updated.',
462 response, "Pull request title & description updated.", category="success"
390 category='success')
463 )
391
464
392 pull_request = PullRequest.get(pull_request_id)
465 pull_request = PullRequest.get(pull_request_id)
393 assert pull_request.title == 'New title'
466 assert pull_request.title == "New title"
394 assert pull_request.description == 'New description'
467 assert pull_request.description == "New description"
395
468
396 def test_edit_title_description_special(self, pr_util, csrf_token):
469 def test_edit_title_description_special(self, pr_util, csrf_token):
397 pull_request = pr_util.create_pull_request()
470 pull_request = pr_util.create_pull_request()
398 pull_request_id = pull_request.pull_request_id
471 pull_request_id = pull_request.pull_request_id
399
472
400 response = self.app.post(
473 response = self.app.post(
401 route_path('pullrequest_update',
474 route_path(
402 repo_name=pull_request.target_repo.repo_name,
475 "pullrequest_update",
403 pull_request_id=pull_request_id),
476 repo_name=pull_request.target_repo.repo_name,
477 pull_request_id=pull_request_id,
478 ),
404 params={
479 params={
405 'edit_pull_request': 'true',
480 "edit_pull_request": "true",
406 'title': 'New title {} {2} {foo}',
481 "title": "New title {} {2} {foo}",
407 'description': 'New description',
482 "description": "New description",
408 'csrf_token': csrf_token})
483 "csrf_token": csrf_token,
484 },
485 )
409
486
410 assert_session_flash(
487 assert_session_flash(
411 response, 'Pull request title & description updated.',
488 response, "Pull request title & description updated.", category="success"
412 category='success')
489 )
413
490
414 pull_request = PullRequest.get(pull_request_id)
491 pull_request = PullRequest.get(pull_request_id)
415 assert pull_request.title_safe == 'New title {{}} {{2}} {{foo}}'
492 assert pull_request.title_safe == "New title {{}} {{2}} {{foo}}"
416
493
417 def test_edit_title_description_closed(self, pr_util, csrf_token):
494 def test_edit_title_description_closed(self, pr_util, csrf_token):
418 pull_request = pr_util.create_pull_request()
495 pull_request = pr_util.create_pull_request()
419 pull_request_id = pull_request.pull_request_id
496 pull_request_id = pull_request.pull_request_id
420 repo_name = pull_request.target_repo.repo_name
497 repo_name = pull_request.target_repo.repo_name
421 pr_util.close()
498 pr_util.close()
422
499
423 response = self.app.post(
500 response = self.app.post(
424 route_path('pullrequest_update',
501 route_path(
425 repo_name=repo_name, pull_request_id=pull_request_id),
502 "pullrequest_update",
503 repo_name=repo_name,
504 pull_request_id=pull_request_id,
505 ),
426 params={
506 params={
427 'edit_pull_request': 'true',
507 "edit_pull_request": "true",
428 'title': 'New title',
508 "title": "New title",
429 'description': 'New description',
509 "description": "New description",
430 'csrf_token': csrf_token}, status=200)
510 "csrf_token": csrf_token,
511 },
512 status=200,
513 )
431 assert_session_flash(
514 assert_session_flash(
432 response, 'Cannot update closed pull requests.',
515 response, "Cannot update closed pull requests.", category="error"
433 category='error')
516 )
434
517
435 def test_update_invalid_source_reference(self, pr_util, csrf_token):
518 def test_update_invalid_source_reference(self, pr_util, csrf_token):
436 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
519 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
437
520
438 pull_request = pr_util.create_pull_request()
521 pull_request = pr_util.create_pull_request()
439 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
522 pull_request.source_ref = "branch:invalid-branch:invalid-commit-id"
440 Session().add(pull_request)
523 Session().add(pull_request)
441 Session().commit()
524 Session().commit()
442
525
443 pull_request_id = pull_request.pull_request_id
526 pull_request_id = pull_request.pull_request_id
444
527
445 response = self.app.post(
528 response = self.app.post(
446 route_path('pullrequest_update',
529 route_path(
530 "pullrequest_update",
447 repo_name=pull_request.target_repo.repo_name,
531 repo_name=pull_request.target_repo.repo_name,
448 pull_request_id=pull_request_id),
532 pull_request_id=pull_request_id,
449 params={'update_commits': 'true', 'csrf_token': csrf_token})
533 ),
534 params={"update_commits": "true", "csrf_token": csrf_token},
535 )
450
536
451 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
537 expected_msg = str(
452 UpdateFailureReason.MISSING_SOURCE_REF])
538 PullRequestModel.UPDATE_STATUS_MESSAGES[
453 assert_session_flash(response, expected_msg, category='error')
539 UpdateFailureReason.MISSING_SOURCE_REF
540 ]
541 )
542 assert_session_flash(response, expected_msg, category="error")
454
543
455 def test_missing_target_reference(self, pr_util, csrf_token):
544 def test_missing_target_reference(self, pr_util, csrf_token):
456 from rhodecode.lib.vcs.backends.base import MergeFailureReason
545 from rhodecode.lib.vcs.backends.base import MergeFailureReason
457 pull_request = pr_util.create_pull_request(
546
458 approved=True, mergeable=True)
547 pull_request = pr_util.create_pull_request(approved=True, mergeable=True)
459 unicode_reference = 'branch:invalid-branch:invalid-commit-id'
548 unicode_reference = "branch:invalid-branch:invalid-commit-id"
460 pull_request.target_ref = unicode_reference
549 pull_request.target_ref = unicode_reference
461 Session().add(pull_request)
550 Session().add(pull_request)
462 Session().commit()
551 Session().commit()
463
552
464 pull_request_id = pull_request.pull_request_id
553 pull_request_id = pull_request.pull_request_id
465 pull_request_url = route_path(
554 pull_request_url = route_path(
466 'pullrequest_show',
555 "pullrequest_show",
467 repo_name=pull_request.target_repo.repo_name,
556 repo_name=pull_request.target_repo.repo_name,
468 pull_request_id=pull_request_id)
557 pull_request_id=pull_request_id,
558 )
469
559
470 response = self.app.get(pull_request_url)
560 response = self.app.get(pull_request_url)
471 target_ref_id = 'invalid-branch'
561 # target_ref_id = "invalid-branch"
562
472 merge_resp = MergeResponse(
563 merge_resp = MergeResponse(
473 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
564 True,
474 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
565 True,
566 Reference("commit", "STUB_COMMIT_ID", "STUB_COMMIT_ID"),
567 MergeFailureReason.MISSING_TARGET_REF,
568 metadata={
569 "target_ref": PullRequest.unicode_to_reference(unicode_reference)
570 },
571 )
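# Note on the change above: MergeResponse is now given a Reference stub
# (assumed to be the vcs namedtuple of type/name/commit_id) instead of an
# empty string; only merge_status_message is asserted below, so stub values
# are sufficient.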
475 response.assert_response().element_contains(
572 response.assert_response().element_contains(
476 'div[data-role="merge-message"]', merge_resp.merge_status_message)
573 'div[data-role="merge-message"]', merge_resp.merge_status_message
574 )
477
575
478 def test_comment_and_close_pull_request_custom_message_approved(
576 def test_comment_and_close_pull_request_custom_message_approved(
479 self, pr_util, csrf_token, xhr_header):
577 self, pr_util, csrf_token, xhr_header
480
578 ):
481 pull_request = pr_util.create_pull_request(approved=True)
579 pull_request = pr_util.create_pull_request(approved=True)
482 pull_request_id = pull_request.pull_request_id
580 pull_request_id = pull_request.pull_request_id
483 author = pull_request.user_id
581 author = pull_request.user_id
484 repo = pull_request.target_repo.repo_id
582 repo = pull_request.target_repo.repo_id
485
583
486 self.app.post(
584 self.app.post(
487 route_path('pullrequest_comment_create',
585 route_path(
488 repo_name=pull_request.target_repo.scm_instance().name,
586 "pullrequest_comment_create",
489 pull_request_id=pull_request_id),
587 repo_name=pull_request.target_repo.scm_instance().name,
588 pull_request_id=pull_request_id,
589 ),
490 params={
590 params={
491 'close_pull_request': '1',
591 "close_pull_request": "1",
492 'text': 'Closing a PR',
592 "text": "Closing a PR",
493 'csrf_token': csrf_token},
593 "csrf_token": csrf_token,
494 extra_environ=xhr_header,)
594 },
595 extra_environ=xhr_header,
596 )
495
597
496 journal = UserLog.query()\
598 journal = (
497 .filter(UserLog.user_id == author)\
599 UserLog.query()
498 .filter(UserLog.repository_id == repo) \
600 .filter(UserLog.user_id == author)
499 .order_by(UserLog.user_log_id.asc()) \
601 .filter(UserLog.repository_id == repo)
602 .order_by(UserLog.user_log_id.asc())
500 .all()
603 .all()
501 assert journal[-1].action == 'repo.pull_request.close'
604 )
605 assert journal[-1].action == "repo.pull_request.close"
502
606
503 pull_request = PullRequest.get(pull_request_id)
607 pull_request = PullRequest.get(pull_request_id)
504 assert pull_request.is_closed()
608 assert pull_request.is_closed()
505
609
506 status = ChangesetStatusModel().get_status(
610 status = ChangesetStatusModel().get_status(
507 pull_request.source_repo, pull_request=pull_request)
611 pull_request.source_repo, pull_request=pull_request
612 )
508 assert status == ChangesetStatus.STATUS_APPROVED
613 assert status == ChangesetStatus.STATUS_APPROVED
509 comments = ChangesetComment().query() \
614 comments = (
510 .filter(ChangesetComment.pull_request == pull_request) \
615 ChangesetComment()
511 .order_by(ChangesetComment.comment_id.asc())\
616 .query()
617 .filter(ChangesetComment.pull_request == pull_request)
618 .order_by(ChangesetComment.comment_id.asc())
512 .all()
619 .all()
513 assert comments[-1].text == 'Closing a PR'
620 )
621 assert comments[-1].text == "Closing a PR"
514
622
515 def test_comment_force_close_pull_request_rejected(
623 def test_comment_force_close_pull_request_rejected(
516 self, pr_util, csrf_token, xhr_header):
624 self, pr_util, csrf_token, xhr_header
625 ):
517 pull_request = pr_util.create_pull_request()
626 pull_request = pr_util.create_pull_request()
518 pull_request_id = pull_request.pull_request_id
627 pull_request_id = pull_request.pull_request_id
519 PullRequestModel().update_reviewers(
628 PullRequestModel().update_reviewers(
520 pull_request_id, [
629 pull_request_id,
521 (1, ['reason'], False, 'reviewer', []),
630 [
522 (2, ['reason2'], False, 'reviewer', [])],
631 (1, ["reason"], False, "reviewer", []),
523 pull_request.author)
632 (2, ["reason2"], False, "reviewer", []),
633 ],
634 pull_request.author,
635 )
524 author = pull_request.user_id
636 author = pull_request.user_id
525 repo = pull_request.target_repo.repo_id
637 repo = pull_request.target_repo.repo_id
526
638
527 self.app.post(
639 self.app.post(
528 route_path('pullrequest_comment_create',
640 route_path(
641 "pullrequest_comment_create",
529 repo_name=pull_request.target_repo.scm_instance().name,
642 repo_name=pull_request.target_repo.scm_instance().name,
530 pull_request_id=pull_request_id),
643 pull_request_id=pull_request_id,
531 params={
644 ),
532 'close_pull_request': '1',
645 params={"close_pull_request": "1", "csrf_token": csrf_token},
533 'csrf_token': csrf_token},
646 extra_environ=xhr_header,
534 extra_environ=xhr_header)
647 )
535
648
536 pull_request = PullRequest.get(pull_request_id)
649 pull_request = PullRequest.get(pull_request_id)
537
650
538 journal = UserLog.query()\
651 journal = (
539 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
652 UserLog.query()
540 .order_by(UserLog.user_log_id.asc()) \
653 .filter(UserLog.user_id == author, UserLog.repository_id == repo)
654 .order_by(UserLog.user_log_id.asc())
541 .all()
655 .all()
542 assert journal[-1].action == 'repo.pull_request.close'
656 )
657 assert journal[-1].action == "repo.pull_request.close"
543
658
544 # check only the latest status, not the review status
659 # check only the latest status, not the review status
545 status = ChangesetStatusModel().get_status(
660 status = ChangesetStatusModel().get_status(
546 pull_request.source_repo, pull_request=pull_request)
661 pull_request.source_repo, pull_request=pull_request
662 )
547 assert status == ChangesetStatus.STATUS_REJECTED
663 assert status == ChangesetStatus.STATUS_REJECTED
548
664
549 def test_comment_and_close_pull_request(
665 def test_comment_and_close_pull_request(self, pr_util, csrf_token, xhr_header):
550 self, pr_util, csrf_token, xhr_header):
551 pull_request = pr_util.create_pull_request()
666 pull_request = pr_util.create_pull_request()
552 pull_request_id = pull_request.pull_request_id
667 pull_request_id = pull_request.pull_request_id
553
668
554 response = self.app.post(
669 response = self.app.post(
555 route_path('pullrequest_comment_create',
670 route_path(
556 repo_name=pull_request.target_repo.scm_instance().name,
671 "pullrequest_comment_create",
557 pull_request_id=pull_request.pull_request_id),
672 repo_name=pull_request.target_repo.scm_instance().name,
558 params={
673 pull_request_id=pull_request.pull_request_id,
559 'close_pull_request': 'true',
674 ),
560 'csrf_token': csrf_token},
675 params={"close_pull_request": "true", "csrf_token": csrf_token},
561 extra_environ=xhr_header)
676 extra_environ=xhr_header,
677 )
562
678
563 assert response.json
679 assert response.json
564
680
565 pull_request = PullRequest.get(pull_request_id)
681 pull_request = PullRequest.get(pull_request_id)
566 assert pull_request.is_closed()
682 assert pull_request.is_closed()
567
683
568 # check only the latest status, not the review status
684 # check only the latest status, not the review status
569 status = ChangesetStatusModel().get_status(
685 status = ChangesetStatusModel().get_status(
570 pull_request.source_repo, pull_request=pull_request)
686 pull_request.source_repo, pull_request=pull_request
687 )
571 assert status == ChangesetStatus.STATUS_REJECTED
688 assert status == ChangesetStatus.STATUS_REJECTED
572
689
573 def test_comment_and_close_pull_request_try_edit_comment(
690 def test_comment_and_close_pull_request_try_edit_comment(
574 self, pr_util, csrf_token, xhr_header
691 self, pr_util, csrf_token, xhr_header
575 ):
692 ):
576 pull_request = pr_util.create_pull_request()
693 pull_request = pr_util.create_pull_request()
577 pull_request_id = pull_request.pull_request_id
694 pull_request_id = pull_request.pull_request_id
578 target_scm = pull_request.target_repo.scm_instance()
695 target_scm = pull_request.target_repo.scm_instance()
579 target_scm_name = target_scm.name
696 target_scm_name = target_scm.name
580
697
581 response = self.app.post(
698 response = self.app.post(
582 route_path(
699 route_path(
583 'pullrequest_comment_create',
700 "pullrequest_comment_create",
584 repo_name=target_scm_name,
701 repo_name=target_scm_name,
585 pull_request_id=pull_request_id,
702 pull_request_id=pull_request_id,
586 ),
703 ),
587 params={
704 params={
588 'close_pull_request': 'true',
705 "close_pull_request": "true",
589 'csrf_token': csrf_token,
706 "csrf_token": csrf_token,
590 },
707 },
591 extra_environ=xhr_header)
708 extra_environ=xhr_header,
709 )
592
710
593 assert response.json
711 assert response.json
594
712
595 pull_request = PullRequest.get(pull_request_id)
713 pull_request = PullRequest.get(pull_request_id)
596 target_scm = pull_request.target_repo.scm_instance()
714 target_scm = pull_request.target_repo.scm_instance()
597 target_scm_name = target_scm.name
715 target_scm_name = target_scm.name
598 assert pull_request.is_closed()
716 assert pull_request.is_closed()
599
717
600 # check only the latest status, not the review status
718 # check only the latest status, not the review status
601 status = ChangesetStatusModel().get_status(
719 status = ChangesetStatusModel().get_status(
602 pull_request.source_repo, pull_request=pull_request)
720 pull_request.source_repo, pull_request=pull_request
721 )
603 assert status == ChangesetStatus.STATUS_REJECTED
722 assert status == ChangesetStatus.STATUS_REJECTED
604
723
605 for comment_id in response.json.keys():
724 for comment_id in response.json.keys():
606 test_text = 'test'
725 test_text = "test"
607 response = self.app.post(
726 response = self.app.post(
608 route_path(
727 route_path(
609 'pullrequest_comment_edit',
728 "pullrequest_comment_edit",
610 repo_name=target_scm_name,
729 repo_name=target_scm_name,
611 pull_request_id=pull_request_id,
730 pull_request_id=pull_request_id,
612 comment_id=comment_id,
731 comment_id=comment_id,
613 ),
732 ),
614 extra_environ=xhr_header,
733 extra_environ=xhr_header,
615 params={
734 params={
616 'csrf_token': csrf_token,
735 "csrf_token": csrf_token,
617 'text': test_text,
736 "text": test_text,
618 },
737 },
619 status=403,
738 status=403,
620 )
739 )
621 assert response.status_int == 403
740 assert response.status_int == 403
622
741
623 def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
742 def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
624 pull_request = pr_util.create_pull_request()
743 pull_request = pr_util.create_pull_request()
625 target_scm = pull_request.target_repo.scm_instance()
744 target_scm = pull_request.target_repo.scm_instance()
626 target_scm_name = target_scm.name
745 target_scm_name = target_scm.name
627
746
628 response = self.app.post(
747 response = self.app.post(
629 route_path(
748 route_path(
630 'pullrequest_comment_create',
749 "pullrequest_comment_create",
631 repo_name=target_scm_name,
750 repo_name=target_scm_name,
632 pull_request_id=pull_request.pull_request_id),
751 pull_request_id=pull_request.pull_request_id,
752 ),
633 params={
753 params={
634 'csrf_token': csrf_token,
754 "csrf_token": csrf_token,
635 'text': 'init',
755 "text": "init",
636 },
756 },
637 extra_environ=xhr_header,
757 extra_environ=xhr_header,
638 )
758 )
639 assert response.json
759 assert response.json
640
760
641 for comment_id in response.json.keys():
761 for comment_id in response.json.keys():
642 assert comment_id
762 assert comment_id
643 test_text = 'test'
763 test_text = "test"
644 self.app.post(
764 self.app.post(
645 route_path(
765 route_path(
646 'pullrequest_comment_edit',
766 "pullrequest_comment_edit",
647 repo_name=target_scm_name,
767 repo_name=target_scm_name,
648 pull_request_id=pull_request.pull_request_id,
768 pull_request_id=pull_request.pull_request_id,
649 comment_id=comment_id,
769 comment_id=comment_id,
650 ),
770 ),
651 extra_environ=xhr_header,
771 extra_environ=xhr_header,
652 params={
772 params={
653 'csrf_token': csrf_token,
773 "csrf_token": csrf_token,
654 'text': test_text,
774 "text": test_text,
655 'version': '0',
775 "version": "0",
656 },
776 },
657
658 )
777 )
659 text_form_db = ChangesetComment.query().filter(
778 text_form_db = (
660 ChangesetComment.comment_id == comment_id).first().text
779 ChangesetComment.query()
780 .filter(ChangesetComment.comment_id == comment_id)
781 .first()
782 .text
783 )
661 assert test_text == text_form_db
784 assert test_text == text_form_db
662
785
663 def test_comment_and_comment_edit_special(self, pr_util, csrf_token, xhr_header):
786 def test_comment_and_comment_edit_special(self, pr_util, csrf_token, xhr_header):
664 pull_request = pr_util.create_pull_request()
787 pull_request = pr_util.create_pull_request()
665 target_scm = pull_request.target_repo.scm_instance()
788 target_scm = pull_request.target_repo.scm_instance()
666 target_scm_name = target_scm.name
789 target_scm_name = target_scm.name
667
790
668 response = self.app.post(
791 response = self.app.post(
669 route_path(
792 route_path(
670 'pullrequest_comment_create',
793 "pullrequest_comment_create",
671 repo_name=target_scm_name,
794 repo_name=target_scm_name,
672 pull_request_id=pull_request.pull_request_id),
795 pull_request_id=pull_request.pull_request_id,
796 ),
673 params={
797 params={
674 'csrf_token': csrf_token,
798 "csrf_token": csrf_token,
675 'text': 'init',
799 "text": "init",
676 },
800 },
677 extra_environ=xhr_header,
801 extra_environ=xhr_header,
678 )
802 )
679 assert response.json
803 assert response.json
680
804
681 for comment_id in response.json.keys():
805 for comment_id in response.json.keys():
682 test_text = 'init'
806 test_text = "init"
683 response = self.app.post(
807 response = self.app.post(
684 route_path(
808 route_path(
685 'pullrequest_comment_edit',
809 "pullrequest_comment_edit",
686 repo_name=target_scm_name,
810 repo_name=target_scm_name,
687 pull_request_id=pull_request.pull_request_id,
811 pull_request_id=pull_request.pull_request_id,
688 comment_id=comment_id,
812 comment_id=comment_id,
689 ),
813 ),
690 extra_environ=xhr_header,
814 extra_environ=xhr_header,
691 params={
815 params={
692 'csrf_token': csrf_token,
816 "csrf_token": csrf_token,
693 'text': test_text,
817 "text": test_text,
694 'version': '0',
818 "version": "0",
695 },
819 },
696 status=404,
820 status=404,
697
698 )
821 )
699 assert response.status_int == 404
822 assert response.status_int == 404
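# Hedged note: the edit resends the original "init" text unchanged, and the
# comment-edit view is assumed to reject such a no-op edit, hence the 404.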
700
823
701 def test_comment_and_try_edit_already_edited(self, pr_util, csrf_token, xhr_header):
824 def test_comment_and_try_edit_already_edited(self, pr_util, csrf_token, xhr_header):
702 pull_request = pr_util.create_pull_request()
825 pull_request = pr_util.create_pull_request()
703 target_scm = pull_request.target_repo.scm_instance()
826 target_scm = pull_request.target_repo.scm_instance()
704 target_scm_name = target_scm.name
827 target_scm_name = target_scm.name
705
828
706 response = self.app.post(
829 response = self.app.post(
707 route_path(
830 route_path(
708 'pullrequest_comment_create',
831 "pullrequest_comment_create",
709 repo_name=target_scm_name,
832 repo_name=target_scm_name,
710 pull_request_id=pull_request.pull_request_id),
833 pull_request_id=pull_request.pull_request_id,
834 ),
711 params={
835 params={
712 'csrf_token': csrf_token,
836 "csrf_token": csrf_token,
713 'text': 'init',
837 "text": "init",
714 },
838 },
715 extra_environ=xhr_header,
839 extra_environ=xhr_header,
716 )
840 )
717 assert response.json
841 assert response.json
718 for comment_id in response.json.keys():
842 for comment_id in response.json.keys():
719 test_text = 'test'
843 test_text = "test"
720 self.app.post(
844 self.app.post(
721 route_path(
845 route_path(
722 'pullrequest_comment_edit',
846 "pullrequest_comment_edit",
723 repo_name=target_scm_name,
847 repo_name=target_scm_name,
724 pull_request_id=pull_request.pull_request_id,
848 pull_request_id=pull_request.pull_request_id,
725 comment_id=comment_id,
849 comment_id=comment_id,
726 ),
850 ),
727 extra_environ=xhr_header,
851 extra_environ=xhr_header,
728 params={
852 params={
729 'csrf_token': csrf_token,
853 "csrf_token": csrf_token,
730 'text': test_text,
854 "text": test_text,
731 'version': '0',
855 "version": "0",
732 },
856 },
733
734 )
857 )
735 test_text_v2 = 'test_v2'
858 test_text_v2 = "test_v2"
736 response = self.app.post(
859 response = self.app.post(
737 route_path(
860 route_path(
738 'pullrequest_comment_edit',
861 "pullrequest_comment_edit",
739 repo_name=target_scm_name,
862 repo_name=target_scm_name,
740 pull_request_id=pull_request.pull_request_id,
863 pull_request_id=pull_request.pull_request_id,
741 comment_id=comment_id,
864 comment_id=comment_id,
742 ),
865 ),
743 extra_environ=xhr_header,
866 extra_environ=xhr_header,
744 params={
867 params={
745 'csrf_token': csrf_token,
868 "csrf_token": csrf_token,
746 'text': test_text_v2,
869 "text": test_text_v2,
747 'version': '0',
870 "version": "0",
748 },
871 },
749 status=409,
872 status=409,
750 )
873 )
751 assert response.status_int == 409
874 assert response.status_int == 409
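# The second edit still sends version "0" although the first edit already
# bumped the comment version, so a 409 conflict is expected and the stored
# text must remain the first edit's value, as asserted below.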
752
875
753 text_form_db = ChangesetComment.query().filter(
876 text_form_db = (
754 ChangesetComment.comment_id == comment_id).first().text
877 ChangesetComment.query()
878 .filter(ChangesetComment.comment_id == comment_id)
879 .first()
880 .text
881 )
755
882
756 assert test_text == text_form_db
883 assert test_text == text_form_db
757 assert test_text_v2 != text_form_db
884 assert test_text_v2 != text_form_db
758
885
759 def test_comment_and_comment_edit_permissions_forbidden(
886 def test_comment_and_comment_edit_permissions_forbidden(
760 self, autologin_regular_user, user_regular, user_admin, pr_util,
887 self,
761 csrf_token, xhr_header):
888 autologin_regular_user,
889 user_regular,
890 user_admin,
891 pr_util,
892 csrf_token,
893 xhr_header,
894 ):
762 pull_request = pr_util.create_pull_request(
895 pull_request = pr_util.create_pull_request(
763 author=user_admin.username, enable_notifications=False)
896 author=user_admin.username, enable_notifications=False
897 )
764 comment = CommentsModel().create(
898 comment = CommentsModel().create(
765 text='test',
899 text="test",
766 repo=pull_request.target_repo.scm_instance().name,
900 repo=pull_request.target_repo.scm_instance().name,
767 user=user_admin,
901 user=user_admin,
768 pull_request=pull_request,
902 pull_request=pull_request,
769 )
903 )
770 response = self.app.post(
904 response = self.app.post(
771 route_path(
905 route_path(
772 'pullrequest_comment_edit',
906 "pullrequest_comment_edit",
773 repo_name=pull_request.target_repo.scm_instance().name,
907 repo_name=pull_request.target_repo.scm_instance().name,
774 pull_request_id=pull_request.pull_request_id,
908 pull_request_id=pull_request.pull_request_id,
775 comment_id=comment.comment_id,
909 comment_id=comment.comment_id,
776 ),
910 ),
777 extra_environ=xhr_header,
911 extra_environ=xhr_header,
778 params={
912 params={
779 'csrf_token': csrf_token,
913 "csrf_token": csrf_token,
780 'text': 'test_text',
914 "text": "test_text",
781 },
915 },
782 status=403,
916 status=403,
783 )
917 )
784 assert response.status_int == 403
918 assert response.status_int == 403
785
919
786 def test_create_pull_request(self, backend, csrf_token):
920 def test_create_pull_request(self, backend, csrf_token):
787 commits = [
921 commits = [
788 {'message': 'ancestor'},
922 {"message": "ancestor"},
789 {'message': 'change'},
923 {"message": "change"},
790 {'message': 'change2'},
924 {"message": "change2"},
791 ]
925 ]
792 commit_ids = backend.create_master_repo(commits)
926 commit_ids = backend.create_master_repo(commits)
793 target = backend.create_repo(heads=['ancestor'])
927 target = backend.create_repo(heads=["ancestor"])
794 source = backend.create_repo(heads=['change2'])
928 source = backend.create_repo(heads=["change2"])
795
929
796 response = self.app.post(
930 response = self.app.post(
797 route_path('pullrequest_create', repo_name=source.repo_name),
931 route_path("pullrequest_create", repo_name=source.repo_name),
798 [
932 [
799 ('source_repo', source.repo_name),
933 ("source_repo", source.repo_name),
800 ('source_ref', 'branch:default:' + commit_ids['change2']),
934 ("source_ref", "branch:default:" + commit_ids["change2"]),
801 ('target_repo', target.repo_name),
935 ("target_repo", target.repo_name),
802 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
936 ("target_ref", "branch:default:" + commit_ids["ancestor"]),
803 ('common_ancestor', commit_ids['ancestor']),
937 ("common_ancestor", commit_ids["ancestor"]),
804 ('pullrequest_title', 'Title'),
938 ("pullrequest_title", "Title"),
805 ('pullrequest_desc', 'Description'),
939 ("pullrequest_desc", "Description"),
806 ('description_renderer', 'markdown'),
940 ("description_renderer", "markdown"),
807 ('__start__', 'review_members:sequence'),
941 ("__start__", "review_members:sequence"),
808 ('__start__', 'reviewer:mapping'),
942 ("__start__", "reviewer:mapping"),
809 ('user_id', '1'),
943 ("user_id", "1"),
810 ('__start__', 'reasons:sequence'),
944 ("__start__", "reasons:sequence"),
811 ('reason', 'Some reason'),
945 ("reason", "Some reason"),
812 ('__end__', 'reasons:sequence'),
946 ("__end__", "reasons:sequence"),
813 ('__start__', 'rules:sequence'),
947 ("__start__", "rules:sequence"),
814 ('__end__', 'rules:sequence'),
948 ("__end__", "rules:sequence"),
815 ('mandatory', 'False'),
949 ("mandatory", "False"),
816 ('__end__', 'reviewer:mapping'),
950 ("__end__", "reviewer:mapping"),
817 ('__end__', 'review_members:sequence'),
951 ("__end__", "review_members:sequence"),
818 ('__start__', 'revisions:sequence'),
952 ("__start__", "revisions:sequence"),
819 ('revisions', commit_ids['change']),
953 ("revisions", commit_ids["change"]),
820 ('revisions', commit_ids['change2']),
954 ("revisions", commit_ids["change2"]),
821 ('__end__', 'revisions:sequence'),
955 ("__end__", "revisions:sequence"),
822 ('user', ''),
956 ("user", ""),
823 ('csrf_token', csrf_token),
957 ("csrf_token", csrf_token),
824 ],
958 ],
825 status=302)
959 status=302,
960 )
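# The "__start__"/"__end__" pairs above are, as an assumption about the form
# layer, peppercorn/deform-style markers that the create view unpacks into
# the nested review_members/reasons/rules/revisions structures.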
826
961
827 location = response.headers['Location']
962 location = response.headers["Location"]
828 pull_request_id = location.rsplit('/', 1)[1]
963 pull_request_id = location.rsplit("/", 1)[1]
829 assert pull_request_id != 'new'
964 assert pull_request_id != "new"
830 pull_request = PullRequest.get(int(pull_request_id))
965 pull_request = PullRequest.get(int(pull_request_id))
831
966
832 # check that we have now both revisions
967 # check that we have now both revisions
833 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
968 assert pull_request.revisions == [commit_ids["change2"], commit_ids["change"]]
834 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
969 assert pull_request.source_ref == "branch:default:" + commit_ids["change2"]
835 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
970 expected_target_ref = "branch:default:" + commit_ids["ancestor"]
836 assert pull_request.target_ref == expected_target_ref
971 assert pull_request.target_ref == expected_target_ref
837
972
838 def test_reviewer_notifications(self, backend, csrf_token):
973 def test_reviewer_notifications(self, backend, csrf_token):
839 # We have to use the app.post for this test so it will create the
974 # We have to use app.post for this test so that the notifications
840 # notifications properly with the new PR
975 # are created properly for the new PR
841 commits = [
976 commits = [
842 {'message': 'ancestor',
977 {
843 'added': [FileNode(b'file_A', content=b'content_of_ancestor')]},
978 "message": "ancestor",
844 {'message': 'change',
979 "added": [FileNode(b"file_A", content=b"content_of_ancestor")],
845 'added': [FileNode(b'file_a', content=b'content_of_change')]},
980 },
846 {'message': 'change-child'},
981 {
847 {'message': 'ancestor-child', 'parents': ['ancestor'],
982 "message": "change",
848 'added': [ FileNode(b'file_B', content=b'content_of_ancestor_child')]},
983 "added": [FileNode(b"file_a", content=b"content_of_change")],
849 {'message': 'ancestor-child-2'},
984 },
985 {"message": "change-child"},
986 {
987 "message": "ancestor-child",
988 "parents": ["ancestor"],
989 "branch": "feature",
990 "added": [FileNode(b"file_c", content=b"content_of_ancestor_child")],
991 },
992 {"message": "ancestor-child-2", "branch": "feature"},
850 ]
993 ]
851 commit_ids = backend.create_master_repo(commits)
994 commit_ids = backend.create_master_repo(commits)
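# With the "parents"/"branch" hints above, create_master_repo is expected
# (per the non-linear-commits fix this change introduces) to build roughly:
#
#   ancestor -- change -- change-child                (default)
#          \
#           ancestor-child -- ancestor-child-2        (feature)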
852 target = backend.create_repo(heads=['ancestor-child'])
995 target = backend.create_repo(heads=["ancestor-child"])
853 source = backend.create_repo(heads=['change'])
996 source = backend.create_repo(heads=["change"])
854
997
855 response = self.app.post(
998 response = self.app.post(
856 route_path('pullrequest_create', repo_name=source.repo_name),
999 route_path("pullrequest_create", repo_name=source.repo_name),
857 [
1000 [
858 ('source_repo', source.repo_name),
1001 ("source_repo", source.repo_name),
859 ('source_ref', 'branch:default:' + commit_ids['change']),
1002 ("source_ref", "branch:default:" + commit_ids["change"]),
860 ('target_repo', target.repo_name),
1003 ("target_repo", target.repo_name),
861 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
1004 ("target_ref", "branch:default:" + commit_ids["ancestor-child"]),
862 ('common_ancestor', commit_ids['ancestor']),
1005 ("common_ancestor", commit_ids["ancestor"]),
863 ('pullrequest_title', 'Title'),
1006 ("pullrequest_title", "Title"),
864 ('pullrequest_desc', 'Description'),
1007 ("pullrequest_desc", "Description"),
865 ('description_renderer', 'markdown'),
1008 ("description_renderer", "markdown"),
866 ('__start__', 'review_members:sequence'),
1009 ("__start__", "review_members:sequence"),
867 ('__start__', 'reviewer:mapping'),
1010 ("__start__", "reviewer:mapping"),
868 ('user_id', '2'),
1011 ("user_id", "2"),
869 ('__start__', 'reasons:sequence'),
1012 ("__start__", "reasons:sequence"),
870 ('reason', 'Some reason'),
1013 ("reason", "Some reason"),
871 ('__end__', 'reasons:sequence'),
1014 ("__end__", "reasons:sequence"),
872 ('__start__', 'rules:sequence'),
1015 ("__start__", "rules:sequence"),
873 ('__end__', 'rules:sequence'),
1016 ("__end__", "rules:sequence"),
874 ('mandatory', 'False'),
1017 ("mandatory", "False"),
875 ('__end__', 'reviewer:mapping'),
1018 ("__end__", "reviewer:mapping"),
876 ('__end__', 'review_members:sequence'),
1019 ("__end__", "review_members:sequence"),
877 ('__start__', 'revisions:sequence'),
1020 ("__start__", "revisions:sequence"),
878 ('revisions', commit_ids['change']),
1021 ("revisions", commit_ids["change"]),
879 ('__end__', 'revisions:sequence'),
1022 ("__end__", "revisions:sequence"),
880 ('user', ''),
1023 ("user", ""),
881 ('csrf_token', csrf_token),
1024 ("csrf_token", csrf_token),
882 ],
1025 ],
883 status=302)
1026 status=302,
1027 )
884
1028
885 location = response.headers['Location']
1029 location = response.headers["Location"]
886
1030
887 pull_request_id = location.rsplit('/', 1)[1]
1031 pull_request_id = location.rsplit("/", 1)[1]
888 assert pull_request_id != 'new'
1032 assert pull_request_id != "new"
889 pull_request = PullRequest.get(int(pull_request_id))
1033 pull_request = PullRequest.get(int(pull_request_id))
890
1034
891 # Check that a notification was made
1035 # Check that a notification was made
892 notifications = Notification.query()\
1036 notifications = Notification.query().filter(
893 .filter(Notification.created_by == pull_request.author.user_id,
1037 Notification.created_by == pull_request.author.user_id,
894 Notification.type_ == Notification.TYPE_PULL_REQUEST,
1038 Notification.type_ == Notification.TYPE_PULL_REQUEST,
895 Notification.subject.contains(
1039 Notification.subject.contains(
896 "requested a pull request review. !%s" % pull_request_id))
1040 "requested a pull request review. !%s" % pull_request_id
1041 ),
1042 )
897 assert len(notifications.all()) == 1
1043 assert len(notifications.all()) == 1
898
1044
899 # Change reviewers and check that a notification was made
1045 # Change reviewers and check that a notification was made
900 PullRequestModel().update_reviewers(
1046 PullRequestModel().update_reviewers(
901 pull_request.pull_request_id, [
1047 pull_request.pull_request_id,
902 (1, [], False, 'reviewer', [])
1048 [(1, [], False, "reviewer", [])],
903 ],
1049 pull_request.author,
904 pull_request.author)
1050 )
905 assert len(notifications.all()) == 2
1051 assert len(notifications.all()) == 2
906
1052
907 def test_create_pull_request_stores_ancestor_commit_id(self, backend, csrf_token):
1053 def test_create_pull_request_stores_ancestor_commit_id(self, backend, csrf_token):
908 commits = [
1054 commits = [
909 {'message': 'ancestor',
1055 {
910 'added': [FileNode(b'file_A', content=b'content_of_ancestor')]},
1056 "message": "ancestor",
911 {'message': 'change',
1057 "added": [FileNode(b"file_A", content=b"content_of_ancestor")],
912 'added': [FileNode(b'file_a', content=b'content_of_change')]},
1058 },
913 {'message': 'change-child'},
1059 {
914 {'message': 'ancestor-child', 'parents': ['ancestor'],
1060 "message": "change",
915 'added': [
1061 "added": [FileNode(b"file_a", content=b"content_of_change")],
916 FileNode(b'file_B', content=b'content_of_ancestor_child')]},
1062 },
917 {'message': 'ancestor-child-2'},
1063 {
1064 "message": "change-child",
1065 "added": [FileNode(b"file_c", content=b"content_of_change_2")],
1066 },
1067 {
1068 "message": "ancestor-child",
1069 "parents": ["ancestor"],
1070 "branch": "feature",
1071 "added": [FileNode(b"file_B", content=b"content_of_ancestor_child")],
1072 },
1073 {"message": "ancestor-child-2", "branch": "feature"},
918 ]
1074 ]
919 commit_ids = backend.create_master_repo(commits)
1075 commit_ids = backend.create_master_repo(commits)
920 target = backend.create_repo(heads=['ancestor-child'])
1076 target = backend.create_repo(heads=["ancestor-child"])
921 source = backend.create_repo(heads=['change'])
1077 source = backend.create_repo(heads=["change"])
922
1078
923 response = self.app.post(
1079 response = self.app.post(
924 route_path('pullrequest_create', repo_name=source.repo_name),
1080 route_path("pullrequest_create", repo_name=source.repo_name),
925 [
1081 [
926 ('source_repo', source.repo_name),
1082 ("source_repo", source.repo_name),
927 ('source_ref', 'branch:default:' + commit_ids['change']),
1083 ("source_ref", "branch:default:" + commit_ids["change"]),
928 ('target_repo', target.repo_name),
1084 ("target_repo", target.repo_name),
929 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
1085 ("target_ref", "branch:default:" + commit_ids["ancestor-child"]),
930 ('common_ancestor', commit_ids['ancestor']),
1086 ("common_ancestor", commit_ids["ancestor"]),
931 ('pullrequest_title', 'Title'),
1087 ("pullrequest_title", "Title"),
932 ('pullrequest_desc', 'Description'),
1088 ("pullrequest_desc", "Description"),
933 ('description_renderer', 'markdown'),
1089 ("description_renderer", "markdown"),
934 ('__start__', 'review_members:sequence'),
1090 ("__start__", "review_members:sequence"),
935 ('__start__', 'reviewer:mapping'),
1091 ("__start__", "reviewer:mapping"),
936 ('user_id', '1'),
1092 ("user_id", "1"),
937 ('__start__', 'reasons:sequence'),
1093 ("__start__", "reasons:sequence"),
938 ('reason', 'Some reason'),
1094 ("reason", "Some reason"),
939 ('__end__', 'reasons:sequence'),
1095 ("__end__", "reasons:sequence"),
940 ('__start__', 'rules:sequence'),
1096 ("__start__", "rules:sequence"),
941 ('__end__', 'rules:sequence'),
1097 ("__end__", "rules:sequence"),
942 ('mandatory', 'False'),
1098 ("mandatory", "False"),
943 ('__end__', 'reviewer:mapping'),
1099 ("__end__", "reviewer:mapping"),
944 ('__end__', 'review_members:sequence'),
1100 ("__end__", "review_members:sequence"),
945 ('__start__', 'revisions:sequence'),
1101 ("__start__", "revisions:sequence"),
946 ('revisions', commit_ids['change']),
1102 ("revisions", commit_ids["change"]),
947 ('__end__', 'revisions:sequence'),
1103 ("__end__", "revisions:sequence"),
948 ('user', ''),
1104 ("user", ""),
949 ('csrf_token', csrf_token),
1105 ("csrf_token", csrf_token),
950 ],
1106 ],
951 status=302)
1107 status=302,
1108 )
952
1109
953 location = response.headers['Location']
1110 location = response.headers["Location"]
954
1111
955 pull_request_id = location.rsplit('/', 1)[1]
1112 pull_request_id = location.rsplit("/", 1)[1]
956 assert pull_request_id != 'new'
1113 assert pull_request_id != "new"
957 pull_request = PullRequest.get(int(pull_request_id))
1114 pull_request = PullRequest.get(int(pull_request_id))
958
1115
959 # target_ref has to point to the ancestor's commit_id in order to
1116 # target_ref has to point to the ancestor's commit_id in order to
960 # show the correct diff
1117 # show the correct diff
961 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
1118 expected_target_ref = "branch:default:" + commit_ids["ancestor"]
962 assert pull_request.target_ref == expected_target_ref
1119 assert pull_request.target_ref == expected_target_ref
963
1120
964 # Check generated diff contents
1121 # Check generated diff contents
965 response = response.follow()
1122 response = response.follow()
966 response.mustcontain(no=['content_of_ancestor'])
1123 response.mustcontain(no=["content_of_ancestor"])
967 response.mustcontain(no=['content_of_ancestor_child'])
1124 response.mustcontain(no=["content_of_ancestor_child"])
968 response.mustcontain('content_of_change')
1125 response.mustcontain("content_of_change")
969
1126
970 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
1127 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
971 # Clear any previous calls to rcextensions
1128 # Clear any previous calls to rcextensions
972 rhodecode.EXTENSIONS.calls.clear()
1129 rhodecode.EXTENSIONS.calls.clear()
973
1130
974 pull_request = pr_util.create_pull_request(
1131 pull_request = pr_util.create_pull_request(approved=True, mergeable=True)
975 approved=True, mergeable=True)
976 pull_request_id = pull_request.pull_request_id
1132 pull_request_id = pull_request.pull_request_id
977 repo_name = pull_request.target_repo.scm_instance().name,
1133 repo_name = (pull_request.target_repo.scm_instance().name,)
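# note: the trailing comma makes repo_name a one-element tuple, which is why
# str(repo_name[0]) is used when building the merge URL below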
978
1134
979 url = route_path('pullrequest_merge',
1135 url = route_path(
980 repo_name=str(repo_name[0]),
1136 "pullrequest_merge",
981 pull_request_id=pull_request_id)
1137 repo_name=str(repo_name[0]),
982 response = self.app.post(url, params={'csrf_token': csrf_token}).follow()
1138 pull_request_id=pull_request_id,
1139 )
1140 response = self.app.post(url, params={"csrf_token": csrf_token}).follow()
983
1141
984 pull_request = PullRequest.get(pull_request_id)
1142 pull_request = PullRequest.get(pull_request_id)
985
1143
986 assert response.status_int == 200
1144 assert response.status_int == 200
987 assert pull_request.is_closed()
1145 assert pull_request.is_closed()
988 assert_pull_request_status(
1146 assert_pull_request_status(pull_request, ChangesetStatus.STATUS_APPROVED)
989 pull_request, ChangesetStatus.STATUS_APPROVED)
990
1147
991 # Check the relevant log entries were added
1148 # Check the relevant log entries were added
992 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
1149 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
993 actions = [log.action for log in user_logs]
1150 actions = [log.action for log in user_logs]
994 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
1151 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
995 expected_actions = [
1152 expected_actions = [
996 'repo.pull_request.close',
1153 "repo.pull_request.close",
997 'repo.pull_request.merge',
1154 "repo.pull_request.merge",
998 'repo.pull_request.comment.create'
1155 "repo.pull_request.comment.create",
999 ]
1156 ]
1000 assert actions == expected_actions
1157 assert actions == expected_actions
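# user_logs above is ordered newest-first (user_log_id desc, limit 3), so
# expected_actions lists the three most recent entries in reverse
# chronological order.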
1001
1158
1002 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
1159 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
1003 actions = [log for log in user_logs]
1160 actions = [log for log in user_logs]
1004 assert actions[-1].action == 'user.push'
1161 assert actions[-1].action == "user.push"
1005 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
1162 assert actions[-1].action_data["commit_ids"] == pr_commit_ids
1006
1163
1007 # Check post_push rcextension was really executed
1164 # Check post_push rcextension was really executed
1008 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
1165 push_calls = rhodecode.EXTENSIONS.calls["_push_hook"]
1009 assert len(push_calls) == 1
1166 assert len(push_calls) == 1
1010 unused_last_call_args, last_call_kwargs = push_calls[0]
1167 unused_last_call_args, last_call_kwargs = push_calls[0]
1011 assert last_call_kwargs['action'] == 'push'
1168 assert last_call_kwargs["action"] == "push"
1012 assert last_call_kwargs['commit_ids'] == pr_commit_ids
1169 assert last_call_kwargs["commit_ids"] == pr_commit_ids
1013
1170
1014 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
1171 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
1015 pull_request = pr_util.create_pull_request(mergeable=False)
1172 pull_request = pr_util.create_pull_request(mergeable=False)
1016 pull_request_id = pull_request.pull_request_id
1173 pull_request_id = pull_request.pull_request_id
1017 pull_request = PullRequest.get(pull_request_id)
1174 pull_request = PullRequest.get(pull_request_id)
1018
1175
1019 response = self.app.post(
1176 response = self.app.post(
1020 route_path('pullrequest_merge',
1177 route_path(
1021 repo_name=pull_request.target_repo.scm_instance().name,
1178 "pullrequest_merge",
1022 pull_request_id=pull_request.pull_request_id),
1179 repo_name=pull_request.target_repo.scm_instance().name,
1023 params={'csrf_token': csrf_token}).follow()
1180 pull_request_id=pull_request.pull_request_id,
1181 ),
1182 params={"csrf_token": csrf_token},
1183 ).follow()
1024
1184
1025 assert response.status_int == 200
1185 assert response.status_int == 200
1026 response.mustcontain(
1186 response.mustcontain(
1027 'Merge is not currently possible because of below failed checks.')
1187 "Merge is not currently possible because of below failed checks."
1028 response.mustcontain('Server-side pull request merging is disabled.')
1188 )
1189 response.mustcontain("Server-side pull request merging is disabled.")
1029
1190
1030 @pytest.mark.skip_backends('svn')
1191 @pytest.mark.skip_backends("svn")
1031 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
1192 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
1032 pull_request = pr_util.create_pull_request(mergeable=True)
1193 pull_request = pr_util.create_pull_request(mergeable=True)
1033 pull_request_id = pull_request.pull_request_id
1194 pull_request_id = pull_request.pull_request_id
1034 repo_name = pull_request.target_repo.scm_instance().name
1195 repo_name = pull_request.target_repo.scm_instance().name
1035
1196
1036 response = self.app.post(
1197 response = self.app.post(
1037 route_path('pullrequest_merge',
1198 route_path(
1038 repo_name=repo_name, pull_request_id=pull_request_id),
1199 "pullrequest_merge",
1039 params={'csrf_token': csrf_token}).follow()
1200 repo_name=repo_name,
1201 pull_request_id=pull_request_id,
1202 ),
1203 params={"csrf_token": csrf_token},
1204 ).follow()
1040
1205
1041 assert response.status_int == 200
1206 assert response.status_int == 200
1042
1207
1043 response.mustcontain(
1208 response.mustcontain(
1044 'Merge is not currently possible because of below failed checks.')
1209 "Merge is not currently possible because of below failed checks."
1045 response.mustcontain('Pull request reviewer approval is pending.')
1210 )
1211 response.mustcontain("Pull request reviewer approval is pending.")
1046
1212
1047 def test_merge_pull_request_renders_failure_reason(
1213 def test_merge_pull_request_renders_failure_reason(
1048 self, user_regular, csrf_token, pr_util):
1214 self, user_regular, csrf_token, pr_util
1215 ):
1049 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
1216 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
1050 pull_request_id = pull_request.pull_request_id
1217 pull_request_id = pull_request.pull_request_id
1051 repo_name = pull_request.target_repo.scm_instance().name
1218 repo_name = pull_request.target_repo.scm_instance().name
1052
1219
1053 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
1220 merge_resp = MergeResponse(
1054 MergeFailureReason.PUSH_FAILED,
1221 True,
1055 metadata={'target': 'shadow repo',
1222 False,
1056 'merge_commit': 'xxx'})
1223 Reference("commit", "STUB_COMMIT_ID", "STUB_COMMIT_ID"),
1224 MergeFailureReason.PUSH_FAILED,
1225 metadata={"target": "shadow repo", "merge_commit": "xxx"},
1226 )
1057 model_patcher = mock.patch.multiple(
1227 model_patcher = mock.patch.multiple(
1058 PullRequestModel,
1228 PullRequestModel,
1059 merge_repo=mock.Mock(return_value=merge_resp),
1229 merge_repo=mock.Mock(return_value=merge_resp),
1060 merge_status=mock.Mock(return_value=(None, True, 'WRONG_MESSAGE')))
1230 merge_status=mock.Mock(return_value=(None, True, "WRONG_MESSAGE")),
1231 )
1061
1232
1062 with model_patcher:
1233 with model_patcher:
1063 response = self.app.post(
1234 response = self.app.post(
1064 route_path('pullrequest_merge',
1235 route_path(
1065 repo_name=repo_name,
1236 "pullrequest_merge",
1066 pull_request_id=pull_request_id),
1237 repo_name=repo_name,
1067 params={'csrf_token': csrf_token}, status=302)
1238 pull_request_id=pull_request_id,
1239 ),
1240 params={"csrf_token": csrf_token},
1241 status=302,
1242 )
1068
1243
1069 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
1244 merge_resp = MergeResponse(
1070 metadata={'target': 'shadow repo',
1245 True,
1071 'merge_commit': 'xxx'})
1246 True,
1247 Reference("commit", "STUB_COMMIT_ID", "STUB_COMMIT_ID"),
1248 MergeFailureReason.PUSH_FAILED,
1249 metadata={"target": "shadow repo", "merge_commit": "xxx"},
1250 )
1072 assert_session_flash(response, merge_resp.merge_status_message)
1251 assert_session_flash(response, merge_resp.merge_status_message)
1073
1252
1074 def test_update_source_revision(self, backend, csrf_token):
1253 def test_update_source_revision(self, backend, csrf_token):
1075 commits = [
1254 commits = [
1076 {'message': 'ancestor'},
1255 {"message": "ancestor"},
1077 {'message': 'change'},
1256 {"message": "change"},
1078 {'message': 'change-2'},
1257 {"message": "change-2"},
1079 ]
1258 ]
1080 commit_ids = backend.create_master_repo(commits)
1259 commit_ids = backend.create_master_repo(commits)
1081 target = backend.create_repo(heads=['ancestor'])
1260 target = backend.create_repo(heads=["ancestor"])
1082 source = backend.create_repo(heads=['change'])
1261 source = backend.create_repo(heads=["change"])
1083
1262
1084 # create a PR from 'change' in source to 'ancestor' in target
1263 # create a PR from 'change' in source to 'ancestor' in target
1085 pull_request = PullRequest()
1264 pull_request = PullRequest()
1086
1265
1087 pull_request.source_repo = source
1266 pull_request.source_repo = source
1088 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1267 pull_request.source_ref = "branch:{branch}:{commit_id}".format(
1089 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1268 branch=backend.default_branch_name, commit_id=commit_ids["change"]
1269 )
1090
1270
1091 pull_request.target_repo = target
1271 pull_request.target_repo = target
1092 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1272 pull_request.target_ref = "branch:{branch}:{commit_id}".format(
1093 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1273 branch=backend.default_branch_name, commit_id=commit_ids["ancestor"]
1274 )
1094
1275
1095 pull_request.revisions = [commit_ids['change']]
1276 pull_request.revisions = [commit_ids["change"]]
1096 pull_request.title = "Test"
1277 pull_request.title = "Test"
1097 pull_request.description = "Description"
1278 pull_request.description = "Description"
1098 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1279 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1099 pull_request.pull_request_state = PullRequest.STATE_CREATED
1280 pull_request.pull_request_state = PullRequest.STATE_CREATED
1100 Session().add(pull_request)
1281 Session().add(pull_request)
1101 Session().commit()
1282 Session().commit()
1102 pull_request_id = pull_request.pull_request_id
1283 pull_request_id = pull_request.pull_request_id
1103
1284
1104 # source has ancestor - change - change-2
1285 # source has ancestor - change - change-2
1105 backend.pull_heads(source, heads=['change-2'])
1286 backend.pull_heads(source, heads=["change-2"])
1106 target_repo_name = target.repo_name
1287 target_repo_name = target.repo_name
1107
1288
1108 # update PR
1289 # update PR
1109 self.app.post(
1290 self.app.post(
1110 route_path('pullrequest_update',
1291 route_path(
1111 repo_name=target_repo_name, pull_request_id=pull_request_id),
1292 "pullrequest_update",
1112 params={'update_commits': 'true', 'csrf_token': csrf_token})
1293 repo_name=target_repo_name,
1294 pull_request_id=pull_request_id,
1295 ),
1296 params={"update_commits": "true", "csrf_token": csrf_token},
1297 )
1113
1298
1114 response = self.app.get(
1299 response = self.app.get(
1115 route_path('pullrequest_show',
1300 route_path(
1116 repo_name=target_repo_name,
1301 "pullrequest_show",
1117 pull_request_id=pull_request.pull_request_id))
1302 repo_name=target_repo_name,
1303 pull_request_id=pull_request.pull_request_id,
1304 )
1305 )
1118
1306
1119 assert response.status_int == 200
1307 assert response.status_int == 200
1120 response.mustcontain('Pull request updated to')
1308 response.mustcontain("Pull request updated to")
1121 response.mustcontain('with 1 added, 0 removed commits.')
1309 response.mustcontain("with 1 added, 0 removed commits.")
1122
1310
1123 # check that we now have both revisions
1311 # check that we now have both revisions
1124 pull_request = PullRequest.get(pull_request_id)
1312 pull_request = PullRequest.get(pull_request_id)
1125 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
1313 assert pull_request.revisions == [commit_ids["change-2"], commit_ids["change"]]
1126
1314
1127 def test_update_target_revision(self, backend, csrf_token):
1315 def test_update_target_revision(self, backend, csrf_token):
1316 """
1317 Checks PR update behaviour when more commits are added to the target branch
1318 """
1319
1128 commits = [
1320 commits = [
1129 {'message': 'ancestor'},
1321 {"message": "commit-a"}, # main branch (our PR target)
1130 {'message': 'change'},
1322 {"message": "commit-b"}, # Initial source
1131 {'message': 'ancestor-new', 'parents': ['ancestor']},
1323 {"message": "commit-c"},
1132 {'message': 'change-rebased'},
1324
1325 {"message": "commit-a-prime", "branch": "feature", "parents": ["commit-a"]}, # main branch (source)
1133 ]
1326 ]
1327
1134 commit_ids = backend.create_master_repo(commits)
1328 commit_ids = backend.create_master_repo(commits)
1135 target = backend.create_repo(heads=['ancestor'])
1329 target = backend.create_repo(heads=["commit-a"])
1136 source = backend.create_repo(heads=['change'])
1330 source = backend.create_repo(heads=["commit-b"])
1331 target_repo_name = target.repo_name
1137
1332
1138 # create pr from a in source to A in target
1333 # create pr from commit-b to commit-a
1139 pull_request = PullRequest()
1334 pull_request = PullRequest()
1140
1335
1141 pull_request.source_repo = source
1336 pull_request.target_repo = target
1142 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1337 pull_request.target_ref = "branch:{branch}:{commit_id}".format(
1143 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1338 branch=backend.default_branch_name, commit_id=commit_ids["commit-a"]
1339 )
1144
1340
1145 pull_request.target_repo = target
1341 pull_request.source_repo = source
1146 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1342 pull_request.source_ref = "branch:{branch}:{commit_id}".format(
1147 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1343 branch=backend.default_branch_name, commit_id=commit_ids["commit-b"]
1344 )
1148
1345
1149 pull_request.revisions = [commit_ids['change']]
1346 pull_request.revisions = [commit_ids["commit-b"]]
1150 pull_request.title = "Test"
1347 pull_request.title = "Test"
1151 pull_request.description = "Description"
1348 pull_request.description = "Description"
1152 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1349 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1153 pull_request.pull_request_state = PullRequest.STATE_CREATED
1350 pull_request.pull_request_state = PullRequest.STATE_CREATED
1154
1351
1155 Session().add(pull_request)
1352 Session().add(pull_request)
1156 Session().commit()
1353 Session().commit()
1157 pull_request_id = pull_request.pull_request_id
1354 pull_request_id = pull_request.pull_request_id
1158
1355
1159 # target has ancestor - ancestor-new
1356 # target - add one commit on top commit-a -> commit-b
1357 backend.pull_heads(target, heads=["commit-b"])
1358
1160 # source has ancestor - ancestor-new - change-rebased
1359 # source - add one commit on top commit-b -> commit-c
1161 backend.pull_heads(target, heads=['ancestor-new'])
1360 backend.pull_heads(source, heads=["commit-c"])
1162 backend.pull_heads(source, heads=['change-rebased'])
1163 target_repo_name = target.repo_name
1164
1361
1165 # update PR
1362 # update PR
1166 url = route_path('pullrequest_update',
1363 url = route_path(
1167 repo_name=target_repo_name,
1364 "pullrequest_update",
1168 pull_request_id=pull_request_id)
1365 repo_name=target_repo_name,
1169 self.app.post(url,
1366 pull_request_id=pull_request_id,
1170 params={'update_commits': 'true', 'csrf_token': csrf_token},
1367 )
1171 status=200)
1368 self.app.post(
1369 url, params={"update_commits": "true", "csrf_token": csrf_token}, status=200
1370 )
1172
1371
1173 # check the PR revisions after the update
1372 # check the PR revisions after the update
1174 pull_request = PullRequest.get(pull_request_id)
1373 pull_request = PullRequest.get(pull_request_id)
1175 assert pull_request.revisions == [commit_ids['change-rebased']]
1374 assert pull_request.revisions == [commit_ids["commit-c"]]
1176 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
1375 assert pull_request.target_ref == "branch:{branch}:{commit_id}".format(
1177 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
1376 branch=backend.default_branch_name, commit_id=commit_ids["commit-b"]
1377 )
1178
1378
1179 response = self.app.get(
1379 response = self.app.get(
1180 route_path('pullrequest_show',
1380 route_path(
1181 repo_name=target_repo_name,
1381 "pullrequest_show",
1182 pull_request_id=pull_request.pull_request_id))
1382 repo_name=target_repo_name,
1383 pull_request_id=pull_request.pull_request_id,
1384 )
1385 )
1183 assert response.status_int == 200
1386 assert response.status_int == 200
1184 response.mustcontain('Pull request updated to')
1387 response.mustcontain("Pull request updated to")
1185 response.mustcontain('with 1 added, 1 removed commits.')
1388 response.mustcontain("with 1 added, 1 removed commits.")
1186
1389
1187 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
1390 def test_update_target_revision_with_removal_of_1_commit_git(
1391 self, backend_git, csrf_token
1392 ):
1188 backend = backend_git
1393 backend = backend_git
1189 commits = [
1394 commits = [
1190 {'message': 'master-commit-1'},
1395 {"message": "master-commit-1"},
1191 {'message': 'master-commit-2-change-1'},
1396 {"message": "master-commit-2-change-1"},
1192 {'message': 'master-commit-3-change-2'},
1397 {"message": "master-commit-3-change-2"},
1193
1398 {
1194 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
1399 "message": "feat-commit-1",
1195 {'message': 'feat-commit-2'},
1400 "parents": ["master-commit-1"],
1401 "branch": "feature",
1402 },
1403 {"message": "feat-commit-2", "branch": "feature"},
1196 ]
1404 ]
1197 commit_ids = backend.create_master_repo(commits)
1405 commit_ids = backend.create_master_repo(commits)
1198 target = backend.create_repo(heads=['master-commit-3-change-2'])
1406 target = backend.create_repo(heads=["master-commit-3-change-2"])
1199 source = backend.create_repo(heads=['feat-commit-2'])
1407 source = backend.create_repo(heads=["feat-commit-2"])
1200
1408
1201 # create pr from a in source to A in target
1409 # create pr from a in source to A in target
1202 pull_request = PullRequest()
1410 pull_request = PullRequest()
1203 pull_request.source_repo = source
1411 pull_request.source_repo = source
1204
1412
1205 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1413 pull_request.source_ref = "branch:{branch}:{commit_id}".format(
1206 branch=backend.default_branch_name,
1414 branch=backend.default_branch_name,
1207 commit_id=commit_ids['master-commit-3-change-2'])
1415 commit_id=commit_ids["master-commit-3-change-2"],
1416 )
1208
1417
1209 pull_request.target_repo = target
1418 pull_request.target_repo = target
1210 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1419 pull_request.target_ref = "branch:{branch}:{commit_id}".format(
1211 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
1420 branch=backend.default_branch_name, commit_id=commit_ids["feat-commit-2"]
1421 )
1212
1422
1213 pull_request.revisions = [
1423 pull_request.revisions = [
1214 commit_ids['feat-commit-1'],
1424 commit_ids["feat-commit-1"],
1215 commit_ids['feat-commit-2']
1425 commit_ids["feat-commit-2"],
1216 ]
1426 ]
1217 pull_request.title = "Test"
1427 pull_request.title = "Test"
1218 pull_request.description = "Description"
1428 pull_request.description = "Description"
1219 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1429 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1220 pull_request.pull_request_state = PullRequest.STATE_CREATED
1430 pull_request.pull_request_state = PullRequest.STATE_CREATED
1221 Session().add(pull_request)
1431 Session().add(pull_request)
1222 Session().commit()
1432 Session().commit()
1223 pull_request_id = pull_request.pull_request_id
1433 pull_request_id = pull_request.pull_request_id
1224
1434
1225 # The PR is created; now we simulate a force-push into the target
1435 # The PR is created; now we simulate a force-push into the target
1226 # that drops the last 2 commits
1436 # that drops the last 2 commits
1227 vcsrepo = target.scm_instance()
1437 vcsrepo = target.scm_instance()
1228 vcsrepo.config.clear_section('hooks')
1438 vcsrepo.config.clear_section("hooks")
1229 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
1439 vcsrepo.run_git_command(["reset", "--soft", "HEAD~2"])
1230 target_repo_name = target.repo_name
1440 target_repo_name = target.repo_name
1231
1441
1232 # update PR
1442 # update PR
1233 url = route_path('pullrequest_update',
1443 url = route_path(
1234 repo_name=target_repo_name,
1444 "pullrequest_update",
1235 pull_request_id=pull_request_id)
1445 repo_name=target_repo_name,
1236 self.app.post(url,
1446 pull_request_id=pull_request_id,
1237 params={'update_commits': 'true', 'csrf_token': csrf_token},
1447 )
1238 status=200)
1448 self.app.post(
1449 url, params={"update_commits": "true", "csrf_token": csrf_token}, status=200
1450 )
1239
1451
1240 response = self.app.get(route_path('pullrequest_new', repo_name=target_repo_name))
1452 response = self.app.get(
1453 route_path("pullrequest_new", repo_name=target_repo_name)
1454 )
1241 assert response.status_int == 200
1455 assert response.status_int == 200
1242 response.mustcontain('Pull request updated to')
1456 response.mustcontain("Pull request updated to")
1243 response.mustcontain('with 0 added, 0 removed commits.')
1457 response.mustcontain("with 0 added, 0 removed commits.")
1244
1458
1245 def test_update_of_ancestor_reference(self, backend, csrf_token):
1459 def test_update_pr_ancestor_reference(self, csrf_token, pr_util: PRTestUtility):
1246 commits = [
1460 commits = [
1247 {'message': 'ancestor'},
1461 {"message": "ancestor"},
1248 {'message': 'change'},
1462 {"message": "change"},
1249 {'message': 'change-2'},
1463 {"message": "change-2"},
1250 {'message': 'ancestor-new', 'parents': ['ancestor']},
1464
1251 {'message': 'change-rebased'},
1465 {"message": "ancestor-new", "parents": ["ancestor"], "branch": "feature"},
1466 {"message": "change-rebased", "branch": "feature"},
1252 ]
1467 ]
1253 commit_ids = backend.create_master_repo(commits)
1254 target = backend.create_repo(heads=['ancestor'])
1255 source = backend.create_repo(heads=['change'])
1256
1468
1257 # create pr from a in source to A in target
1469 pull_request = pr_util.create_pull_request(
1258 pull_request = PullRequest()
1470 commits,
1259 pull_request.source_repo = source
1471 target_head="ancestor",
1472 source_head="change",
1473 revisions=["change"],
1474 )
1475 pull_request_id = pull_request.pull_request_id
1476 target_repo_name = pr_util.target_repository.repo_name
1477 commit_ids = pr_util.commit_ids
1478
1479 assert pull_request.revisions == [commit_ids["change"]]
1480 assert list(pull_request.target_repo.scm_instance(cache=False).branches.keys()) == [pr_util.backend.default_branch_name]
1481 assert list(pull_request.source_repo.scm_instance(cache=False).branches.keys()) == [pr_util.backend.default_branch_name]
1260
1482
1261 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1483 branch = "feature"
1262 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1484 pr_util.update_target_repository(head="ancestor-new", do_fetch=True)
1263 pull_request.target_repo = target
1485 pr_util.set_pr_target_ref(ref_type="branch", ref_name=branch, ref_commit_id=commit_ids["ancestor-new"])
1264 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1486
1265 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1487 pr_util.update_source_repository(head="change-rebased", do_fetch=True)
1266 pull_request.revisions = [commit_ids['change']]
1488 pr_util.set_pr_source_ref(ref_type="branch", ref_name=branch, ref_commit_id=commit_ids["change-rebased"])
1267 pull_request.title = "Test"
1489
1268 pull_request.description = "Description"
1490 assert list(pull_request.target_repo.scm_instance(cache=False).branches.keys()) == [pr_util.backend.default_branch_name, branch]
1269 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1491 assert list(pull_request.source_repo.scm_instance(cache=False).branches.keys()) == [pr_util.backend.default_branch_name, branch]
1270 pull_request.pull_request_state = PullRequest.STATE_CREATED
1492
1271 Session().add(pull_request)
1493 Session().add(pr_util.pull_request)
1272 Session().commit()
1494 Session().commit()
1273 pull_request_id = pull_request.pull_request_id
1274
1495
1275 # target has ancestor - ancestor-new
1496 self.app.post(
1276 # source has ancestor - ancestor-new - change-rebased
1497 route_path(
1277 backend.pull_heads(target, heads=['ancestor-new'])
1498 "pullrequest_update",
1278 backend.pull_heads(source, heads=['change-rebased'])
1499 repo_name=target_repo_name,
1279 target_repo_name = target.repo_name
1500 pull_request_id=pull_request_id,
1501 ),
1502 params={"update_commits": "true", "csrf_token": csrf_token, "force_refresh": True},
1503 status=200,
1504 )
1505
1280
1506
1281 # update PR
1507 # response = self.app.get(
1282 self.app.post(
1508 # route_path(
1283 route_path('pullrequest_update',
1509 # "pullrequest_show", repo_name=target_repo_name, pull_request_id=pull_request_id,
1284 repo_name=target_repo_name, pull_request_id=pull_request_id),
1510 # params={"force_refresh": True}
1285 params={'update_commits': 'true', 'csrf_token': csrf_token},
1511 # ),
1286 status=200)
1512 # )
1513 #
1514 # response.mustcontain("Pull request updated to")
1515 # response.mustcontain("with 1 added, 0 removed commits.")
1287
1516
1288 # Expect the target reference to be updated correctly
1289 pull_request = PullRequest.get(pull_request_id)
1517 pull_request = PullRequest.get(pull_request_id)
1290 assert pull_request.revisions == [commit_ids['change-rebased']]
1518
1291 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
1519 assert pull_request.target_ref == "branch:{branch}:{commit_id}".format(
1292 branch=backend.default_branch_name,
1520 branch="feature", commit_id=commit_ids["ancestor-new"])
1293 commit_id=commit_ids['ancestor-new'])
1521
1294 assert pull_request.target_ref == expected_target_ref
1522 assert pull_request.revisions == [commit_ids["change-rebased"]]
1523
1295
1524
1296 def test_remove_pull_request_branch(self, backend_git, csrf_token):
1525 def test_remove_pull_request_branch(self, backend_git, csrf_token):
1297 branch_name = 'development'
1526 branch_name = "development"
1298 commits = [
1527 commits = [
1299 {'message': 'initial-commit'},
1528 {"message": "initial-commit"},
1300 {'message': 'old-feature'},
1529 {"message": "old-feature"},
1301 {'message': 'new-feature', 'branch': branch_name},
1530 {"message": "new-feature", "branch": branch_name},
1302 ]
1531 ]
1303 repo = backend_git.create_repo(commits)
1532 repo = backend_git.create_repo(commits)
1304 repo_name = repo.repo_name
1533 repo_name = repo.repo_name
1305 commit_ids = backend_git.commit_ids
1534 commit_ids = backend_git.commit_ids
1306
1535
1307 pull_request = PullRequest()
1536 pull_request = PullRequest()
1308 pull_request.source_repo = repo
1537 pull_request.source_repo = repo
1309 pull_request.target_repo = repo
1538 pull_request.target_repo = repo
1310 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1539 pull_request.source_ref = "branch:{branch}:{commit_id}".format(
1311 branch=branch_name, commit_id=commit_ids['new-feature'])
1540 branch=branch_name, commit_id=commit_ids["new-feature"]
1312 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1541 )
1313 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
1542 pull_request.target_ref = "branch:{branch}:{commit_id}".format(
1314 pull_request.revisions = [commit_ids['new-feature']]
1543 branch=backend_git.default_branch_name, commit_id=commit_ids["old-feature"]
1544 )
1545 pull_request.revisions = [commit_ids["new-feature"]]
1315 pull_request.title = "Test"
1546 pull_request.title = "Test"
1316 pull_request.description = "Description"
1547 pull_request.description = "Description"
1317 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1548 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1318 pull_request.pull_request_state = PullRequest.STATE_CREATED
1549 pull_request.pull_request_state = PullRequest.STATE_CREATED
1319 Session().add(pull_request)
1550 Session().add(pull_request)
1320 Session().commit()
1551 Session().commit()
1321
1552
1322 pull_request_id = pull_request.pull_request_id
1553 pull_request_id = pull_request.pull_request_id
1323
1554
1324 vcs = repo.scm_instance()
1555 vcs = repo.scm_instance()
1325 vcs.remove_ref('refs/heads/{}'.format(branch_name))
1556 vcs.remove_ref("refs/heads/{}".format(branch_name))
1326 # NOTE(marcink): run GC to ensure the commits are gone
1557 # NOTE(marcink): run GC to ensure the commits are gone
1327 vcs.run_gc()
1558 vcs.run_gc()
1328
1559
1329 response = self.app.get(route_path(
1560 response = self.app.get(
1330 'pullrequest_show',
1561 route_path(
1331 repo_name=repo_name,
1562 "pullrequest_show", repo_name=repo_name, pull_request_id=pull_request_id
1332 pull_request_id=pull_request_id))
1563 )
1564 )
1333
1565
1334 assert response.status_int == 200
1566 assert response.status_int == 200
1335
1567
1336 response.assert_response().element_contains(
1568 response.assert_response().element_contains(
1337 '#changeset_compare_view_content .alert strong',
1569 "#changeset_compare_view_content .alert strong", "Missing commits"
1338 'Missing commits')
1570 )
1339 response.assert_response().element_contains(
1571 response.assert_response().element_contains(
1340 '#changeset_compare_view_content .alert',
1572 "#changeset_compare_view_content .alert",
1341 'This pull request cannot be displayed, because one or more'
1573 "This pull request cannot be displayed, because one or more"
1342 ' commits no longer exist in the source repository.')
1574 " commits no longer exist in the source repository.",
1575 )
1343
1576
1344 def test_strip_commits_from_pull_request(
1577 def test_strip_commits_from_pull_request(self, backend, pr_util):
1345 self, backend, pr_util, csrf_token):
1346 commits = [
1578 commits = [
1347 {'message': 'initial-commit'},
1579 {"message": "initial-commit"},
1348 {'message': 'old-feature'},
1580 {"message": "old-feature"},
1349 {'message': 'new-feature', 'parents': ['initial-commit']},
1581 {"message": "new-feature"},
1350 ]
1582 ]
1351 pull_request = pr_util.create_pull_request(
1583 pull_request = pr_util.create_pull_request(
1352 commits, target_head='initial-commit', source_head='new-feature',
1584 commits,
1353 revisions=['new-feature'])
1585 target_head="initial-commit",
1586 source_head="new-feature",
1587 revisions=["new-feature"],
1588 )
1354
1589
1355 vcs = pr_util.source_repository.scm_instance()
1590 vcs = pr_util.source_repository.scm_instance()
1356 if backend.alias == 'git':
1591 if backend.alias == "git":
1357 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
1592 vcs.strip(pr_util.commit_ids["new-feature"], branch_name=pr_util.backend.default_branch_name)
1358 else:
1593 else:
1359 vcs.strip(pr_util.commit_ids['new-feature'])
1594 vcs.strip(pr_util.commit_ids["new-feature"])
1360
1595
1361 response = self.app.get(route_path(
1596 response = self.app.get(
1362 'pullrequest_show',
1597 route_path(
1363 repo_name=pr_util.target_repository.repo_name,
1598 "pullrequest_show",
1364 pull_request_id=pull_request.pull_request_id))
1599 repo_name=pr_util.target_repository.repo_name,
1600 pull_request_id=pull_request.pull_request_id,
1601 )
1602 )
1365
1603
1366 assert response.status_int == 200
1604 assert response.status_int == 200
1367
1605
1368 response.assert_response().element_contains(
1606 response.assert_response().element_contains(
1369 '#changeset_compare_view_content .alert strong',
1607 "#changeset_compare_view_content .alert strong", "Missing commits"
1370 'Missing commits')
1608 )
1371 response.assert_response().element_contains(
1372 '#changeset_compare_view_content .alert',
1373 'This pull request cannot be displayed, because one or more'
1374 ' commits no longer exist in the source repository.')
1375 response.assert_response().element_contains(
1609 response.assert_response().element_contains(
1376 '#update_commits',
1610 "#changeset_compare_view_content .alert",
1377 'Update commits')
1611 "This pull request cannot be displayed, because one or more"
1612 " commits no longer exist in the source repository.",
1613 )
1614 response.assert_response().element_contains("#update_commits", "Update commits")
1378
1615
1379 def test_strip_commits_and_update(
1616 def test_strip_commits_and_update(self, backend, pr_util, csrf_token):
1380 self, backend, pr_util, csrf_token):
1381 commits = [
1617 commits = [
1382 {'message': 'initial-commit'},
1618 {"message": "initial-commit"},
1383 {'message': 'old-feature'},
1619 {"message": "old-feature"},
1384 {'message': 'new-feature', 'parents': ['old-feature']},
1620 {"message": "new-feature", "parents": ["old-feature"]},
1385 ]
1621 ]
1386 pull_request = pr_util.create_pull_request(
1622 pull_request = pr_util.create_pull_request(
1387 commits, target_head='old-feature', source_head='new-feature',
1623 commits,
1388 revisions=['new-feature'], mergeable=True)
1624 target_head="old-feature",
1625 source_head="new-feature",
1626 revisions=["new-feature"],
1627 mergeable=True,
1628 )
1389 pr_id = pull_request.pull_request_id
1629 pr_id = pull_request.pull_request_id
1390 target_repo_name = pull_request.target_repo.repo_name
1630 target_repo_name = pull_request.target_repo.repo_name
1391
1631
1392 vcs = pr_util.source_repository.scm_instance()
1632 vcs = pr_util.source_repository.scm_instance()
1393 if backend.alias == 'git':
1633 if backend.alias == "git":
1394 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
1634 vcs.strip(pr_util.commit_ids["new-feature"], branch_name="master")
1395 else:
1635 else:
1396 vcs.strip(pr_util.commit_ids['new-feature'])
1636 vcs.strip(pr_util.commit_ids["new-feature"])
1397
1637
1398 url = route_path('pullrequest_update',
1638 url = route_path(
1399 repo_name=target_repo_name,
1639 "pullrequest_update", repo_name=target_repo_name, pull_request_id=pr_id
1400 pull_request_id=pr_id)
1640 )
1401 response = self.app.post(url,
1641 response = self.app.post(
1402 params={'update_commits': 'true',
1642 url, params={"update_commits": "true", "csrf_token": csrf_token}
1403 'csrf_token': csrf_token})
1643 )
1404
1644
1405 assert response.status_int == 200
1645 assert response.status_int == 200
1406 assert json.loads(response.body) == json.loads('{"response": true, "redirect_url": null}')
1646 assert json.loads(response.body) == json.loads(
1647 '{"response": true, "redirect_url": null}'
1648 )
1407
1649
1408 # Make sure that after update, it won't raise 500 errors
1650 # Make sure that after update, it won't raise 500 errors
1409 response = self.app.get(route_path(
1651 response = self.app.get(
1410 'pullrequest_show',
1652 route_path(
1411 repo_name=target_repo_name,
1653 "pullrequest_show", repo_name=target_repo_name, pull_request_id=pr_id
1412 pull_request_id=pr_id))
1654 )
1655 )
1413
1656
1414 assert response.status_int == 200
1657 assert response.status_int == 200
1415 response.assert_response().element_contains(
1658 response.assert_response().element_contains(
1416 '#changeset_compare_view_content .alert strong',
1659 "#changeset_compare_view_content .alert strong", "Missing commits"
1417 'Missing commits')
1660 )
1418
1661
1419 def test_branch_is_a_link(self, pr_util):
1662 def test_branch_is_a_link(self, pr_util):
1420 pull_request = pr_util.create_pull_request()
1663 pull_request = pr_util.create_pull_request()
1421 pull_request.source_ref = 'branch:origin:1234567890abcdef'
1664 pull_request.source_ref = "branch:origin:1234567890abcdef"
1422 pull_request.target_ref = 'branch:target:abcdef1234567890'
1665 pull_request.target_ref = "branch:target:abcdef1234567890"
1423 Session().add(pull_request)
1666 Session().add(pull_request)
1424 Session().commit()
1667 Session().commit()
1425
1668
1426 response = self.app.get(route_path(
1669 response = self.app.get(
1427 'pullrequest_show',
1670 route_path(
1428 repo_name=pull_request.target_repo.scm_instance().name,
1671 "pullrequest_show",
1429 pull_request_id=pull_request.pull_request_id))
1672 repo_name=pull_request.target_repo.scm_instance().name,
1673 pull_request_id=pull_request.pull_request_id,
1674 )
1675 )
1430 assert response.status_int == 200
1676 assert response.status_int == 200
1431
1677
1432 source = response.assert_response().get_element('.pr-source-info')
1678 source = response.assert_response().get_element(".pr-source-info")
1433 source_parent = source.getparent()
1679 source_parent = source.getparent()
1434 assert len(source_parent) == 1
1680 assert len(source_parent) == 1
1435
1681
1436 target = response.assert_response().get_element('.pr-target-info')
1682 target = response.assert_response().get_element(".pr-target-info")
1437 target_parent = target.getparent()
1683 target_parent = target.getparent()
1438 assert len(target_parent) == 1
1684 assert len(target_parent) == 1
1439
1685
1440 expected_origin_link = route_path(
1686 expected_origin_link = route_path(
1441 'repo_commits',
1687 "repo_commits",
1442 repo_name=pull_request.source_repo.scm_instance().name,
1688 repo_name=pull_request.source_repo.scm_instance().name,
1443 params=dict(branch='origin'))
1689 params=dict(branch="origin"),
1690 )
1444 expected_target_link = route_path(
1691 expected_target_link = route_path(
1445 'repo_commits',
1692 "repo_commits",
1446 repo_name=pull_request.target_repo.scm_instance().name,
1693 repo_name=pull_request.target_repo.scm_instance().name,
1447 params=dict(branch='target'))
1694 params=dict(branch="target"),
1448 assert source_parent.attrib['href'] == expected_origin_link
1695 )
1449 assert target_parent.attrib['href'] == expected_target_link
1696 assert source_parent.attrib["href"] == expected_origin_link
1697 assert target_parent.attrib["href"] == expected_target_link
1450
1698
1451 def test_bookmark_is_not_a_link(self, pr_util):
1699 def test_bookmark_is_not_a_link(self, pr_util):
1452 pull_request = pr_util.create_pull_request()
1700 pull_request = pr_util.create_pull_request()
1453 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1701 pull_request.source_ref = "bookmark:origin:1234567890abcdef"
1454 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1702 pull_request.target_ref = "bookmark:target:abcdef1234567890"
1455 Session().add(pull_request)
1703 Session().add(pull_request)
1456 Session().commit()
1704 Session().commit()
1457
1705
1458 response = self.app.get(route_path(
1706 response = self.app.get(
1459 'pullrequest_show',
1707 route_path(
1460 repo_name=pull_request.target_repo.scm_instance().name,
1708 "pullrequest_show",
1461 pull_request_id=pull_request.pull_request_id))
1709 repo_name=pull_request.target_repo.scm_instance().name,
1710 pull_request_id=pull_request.pull_request_id,
1711 )
1712 )
1462 assert response.status_int == 200
1713 assert response.status_int == 200
1463
1714
1464 source = response.assert_response().get_element('.pr-source-info')
1715 source = response.assert_response().get_element(".pr-source-info")
1465 assert source.text.strip() == 'bookmark:origin'
1716 assert source.text.strip() == "bookmark:origin"
1466 assert source.getparent().attrib.get('href') is None
1717 assert source.getparent().attrib.get("href") is None
1467
1718
1468 target = response.assert_response().get_element('.pr-target-info')
1719 target = response.assert_response().get_element(".pr-target-info")
1469 assert target.text.strip() == 'bookmark:target'
1720 assert target.text.strip() == "bookmark:target"
1470 assert target.getparent().attrib.get('href') is None
1721 assert target.getparent().attrib.get("href") is None
1471
1722
1472 def test_tag_is_not_a_link(self, pr_util):
1723 def test_tag_is_not_a_link(self, pr_util):
1473 pull_request = pr_util.create_pull_request()
1724 pull_request = pr_util.create_pull_request()
1474 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1725 pull_request.source_ref = "tag:origin:1234567890abcdef"
1475 pull_request.target_ref = 'tag:target:abcdef1234567890'
1726 pull_request.target_ref = "tag:target:abcdef1234567890"
1476 Session().add(pull_request)
1727 Session().add(pull_request)
1477 Session().commit()
1728 Session().commit()
1478
1729
1479 response = self.app.get(route_path(
1730 response = self.app.get(
1480 'pullrequest_show',
1731 route_path(
1481 repo_name=pull_request.target_repo.scm_instance().name,
1732 "pullrequest_show",
1482 pull_request_id=pull_request.pull_request_id))
1733 repo_name=pull_request.target_repo.scm_instance().name,
1734 pull_request_id=pull_request.pull_request_id,
1735 )
1736 )
1483 assert response.status_int == 200
1737 assert response.status_int == 200
1484
1738
1485 source = response.assert_response().get_element('.pr-source-info')
1739 source = response.assert_response().get_element(".pr-source-info")
1486 assert source.text.strip() == 'tag:origin'
1740 assert source.text.strip() == "tag:origin"
1487 assert source.getparent().attrib.get('href') is None
1741 assert source.getparent().attrib.get("href") is None
1488
1742
1489 target = response.assert_response().get_element('.pr-target-info')
1743 target = response.assert_response().get_element(".pr-target-info")
1490 assert target.text.strip() == 'tag:target'
1744 assert target.text.strip() == "tag:target"
1491 assert target.getparent().attrib.get('href') is None
1745 assert target.getparent().attrib.get("href") is None
1492
1746
1493 @pytest.mark.parametrize('mergeable', [True, False])
1747 @pytest.mark.parametrize("mergeable", [True, False])
1494 def test_shadow_repository_link(
1748 def test_shadow_repository_link(self, mergeable, pr_util, http_host_only_stub):
1495 self, mergeable, pr_util, http_host_only_stub):
1496 """
1749 """
1497 Check that the pull request summary page displays a link to the shadow
1750 Check that the pull request summary page displays a link to the shadow
1498 repository if the pull request is mergeable. If it is not mergeable
1751 repository if the pull request is mergeable. If it is not mergeable
1499 the link should not be displayed.
1752 the link should not be displayed.
1500 """
1753 """
1501 pull_request = pr_util.create_pull_request(
1754 pull_request = pr_util.create_pull_request(
1502 mergeable=mergeable, enable_notifications=False)
1755 mergeable=mergeable, enable_notifications=False
1756 )
1503 target_repo = pull_request.target_repo.scm_instance()
1757 target_repo = pull_request.target_repo.scm_instance()
1504 pr_id = pull_request.pull_request_id
1758 pr_id = pull_request.pull_request_id
1505 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1759 shadow_url = "{host}/{repo}/pull-request/{pr_id}/repository".format(
1506 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1760 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id
1761 )
1507
1762
1508 response = self.app.get(route_path(
1763 response = self.app.get(
1509 'pullrequest_show',
1764 route_path(
1510 repo_name=target_repo.name,
1765 "pullrequest_show", repo_name=target_repo.name, pull_request_id=pr_id
1511 pull_request_id=pr_id))
1766 )
1767 )
1512
1768
1513 if mergeable:
1769 if mergeable:
1514 response.assert_response().element_value_contains(
1770 response.assert_response().element_value_contains(
1515 'input.pr-mergeinfo', shadow_url)
1771 "input.pr-mergeinfo", shadow_url
1772 )
1516 response.assert_response().element_value_contains(
1773 response.assert_response().element_value_contains(
1517 'input.pr-mergeinfo ', 'pr-merge')
1774 "input.pr-mergeinfo ", "pr-merge"
1775 )
1518 else:
1776 else:
1519 response.assert_response().no_element_exists('.pr-mergeinfo')
1777 response.assert_response().no_element_exists(".pr-mergeinfo")
1520
1778
1521
1779
1522 @pytest.mark.usefixtures('app')
1780 @pytest.mark.usefixtures("app")
1523 @pytest.mark.backends("git", "hg")
1781 @pytest.mark.backends("git", "hg")
1524 class TestPullrequestsControllerDelete(object):
1782 class TestPullrequestsControllerDelete(object):
1525 def test_pull_request_delete_button_permissions_admin(
1783 def test_pull_request_delete_button_permissions_admin(
1526 self, autologin_user, user_admin, pr_util):
1784 self, autologin_user, user_admin, pr_util
1785 ):
1527 pull_request = pr_util.create_pull_request(
1786 pull_request = pr_util.create_pull_request(
1528 author=user_admin.username, enable_notifications=False)
1787 author=user_admin.username, enable_notifications=False
1788 )
1529
1789
1530 response = self.app.get(route_path(
1790 response = self.app.get(
1531 'pullrequest_show',
1791 route_path(
1532 repo_name=pull_request.target_repo.scm_instance().name,
1792 "pullrequest_show",
1533 pull_request_id=pull_request.pull_request_id))
1793 repo_name=pull_request.target_repo.scm_instance().name,
1794 pull_request_id=pull_request.pull_request_id,
1795 )
1796 )
1534
1797
1535 response.mustcontain('id="delete_pullrequest"')
1798 response.mustcontain('id="delete_pullrequest"')
1536 response.mustcontain('Confirm to delete this pull request')
1799 response.mustcontain("Confirm to delete this pull request")
1537
1800
1538 def test_pull_request_delete_button_permissions_owner(
1801 def test_pull_request_delete_button_permissions_owner(
1539 self, autologin_regular_user, user_regular, pr_util):
1802 self, autologin_regular_user, user_regular, pr_util
1803 ):
1540 pull_request = pr_util.create_pull_request(
1804 pull_request = pr_util.create_pull_request(
1541 author=user_regular.username, enable_notifications=False)
1805 author=user_regular.username, enable_notifications=False
1806 )
1542
1807
1543 response = self.app.get(route_path(
1808 response = self.app.get(
1544 'pullrequest_show',
1809 route_path(
1545 repo_name=pull_request.target_repo.scm_instance().name,
1810 "pullrequest_show",
1546 pull_request_id=pull_request.pull_request_id))
1811 repo_name=pull_request.target_repo.scm_instance().name,
1812 pull_request_id=pull_request.pull_request_id,
1813 )
1814 )
1547
1815
1548 response.mustcontain('id="delete_pullrequest"')
1816 response.mustcontain('id="delete_pullrequest"')
1549 response.mustcontain('Confirm to delete this pull request')
1817 response.mustcontain("Confirm to delete this pull request")
1550
1818
1551 def test_pull_request_delete_button_permissions_forbidden(
1819 def test_pull_request_delete_button_permissions_forbidden(
1552 self, autologin_regular_user, user_regular, user_admin, pr_util):
1820 self, autologin_regular_user, user_regular, user_admin, pr_util
1821 ):
1553 pull_request = pr_util.create_pull_request(
1822 pull_request = pr_util.create_pull_request(
1554 author=user_admin.username, enable_notifications=False)
1823 author=user_admin.username, enable_notifications=False
1824 )
1555
1825
1556 response = self.app.get(route_path(
1826 response = self.app.get(
1557 'pullrequest_show',
1827 route_path(
1558 repo_name=pull_request.target_repo.scm_instance().name,
1828 "pullrequest_show",
1559 pull_request_id=pull_request.pull_request_id))
1829 repo_name=pull_request.target_repo.scm_instance().name,
1830 pull_request_id=pull_request.pull_request_id,
1831 )
1832 )
1560 response.mustcontain(no=['id="delete_pullrequest"'])
1833 response.mustcontain(no=['id="delete_pullrequest"'])
1561 response.mustcontain(no=['Confirm to delete this pull request'])
1834 response.mustcontain(no=["Confirm to delete this pull request"])
1562
1835
1563 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1836 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1564 self, autologin_regular_user, user_regular, user_admin, pr_util,
1837 self, autologin_regular_user, user_regular, user_admin, pr_util, user_util
1565 user_util):
1838 ):
1566
1567 pull_request = pr_util.create_pull_request(
1839 pull_request = pr_util.create_pull_request(
1568 author=user_admin.username, enable_notifications=False)
1840 author=user_admin.username, enable_notifications=False
1841 )
1569
1842
1570 user_util.grant_user_permission_to_repo(
1843 user_util.grant_user_permission_to_repo(
1571 pull_request.target_repo, user_regular,
1844 pull_request.target_repo, user_regular, "repository.write"
1572 'repository.write')
1845 )
1573
1846
1574 response = self.app.get(route_path(
1847 response = self.app.get(
1575 'pullrequest_show',
1848 route_path(
1576 repo_name=pull_request.target_repo.scm_instance().name,
1849 "pullrequest_show",
1577 pull_request_id=pull_request.pull_request_id))
1850 repo_name=pull_request.target_repo.scm_instance().name,
1851 pull_request_id=pull_request.pull_request_id,
1852 )
1853 )
1578
1854
1579 response.mustcontain('id="open_edit_pullrequest"')
1855 response.mustcontain('id="open_edit_pullrequest"')
1580 response.mustcontain('id="delete_pullrequest"')
1856 response.mustcontain('id="delete_pullrequest"')
1581 response.mustcontain(no=['Confirm to delete this pull request'])
1857 response.mustcontain(no=["Confirm to delete this pull request"])
1582
1858
1583 def test_delete_comment_returns_404_if_comment_does_not_exist(
1859 def test_delete_comment_returns_404_if_comment_does_not_exist(
1584 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1860 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header
1585
1861 ):
1586 pull_request = pr_util.create_pull_request(
1862 pull_request = pr_util.create_pull_request(
1587 author=user_admin.username, enable_notifications=False)
1863 author=user_admin.username, enable_notifications=False
1864 )
1588
1865
1589 self.app.post(
1866 self.app.post(
1590 route_path(
1867 route_path(
1591 'pullrequest_comment_delete',
1868 "pullrequest_comment_delete",
1592 repo_name=pull_request.target_repo.scm_instance().name,
1869 repo_name=pull_request.target_repo.scm_instance().name,
1593 pull_request_id=pull_request.pull_request_id,
1870 pull_request_id=pull_request.pull_request_id,
1594 comment_id=1024404),
1871 comment_id=1024404,
1872 ),
1595 extra_environ=xhr_header,
1873 extra_environ=xhr_header,
1596 params={'csrf_token': csrf_token},
1874 params={"csrf_token": csrf_token},
1597 status=404
1875 status=404,
1598 )
1876 )
1599
1877
1600 def test_delete_comment(
1878 def test_delete_comment(
1601 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1879 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header
1602
1880 ):
1603 pull_request = pr_util.create_pull_request(
1881 pull_request = pr_util.create_pull_request(
1604 author=user_admin.username, enable_notifications=False)
1882 author=user_admin.username, enable_notifications=False
1883 )
1605 comment = pr_util.create_comment()
1884 comment = pr_util.create_comment()
1606 comment_id = comment.comment_id
1885 comment_id = comment.comment_id
1607
1886
1608 response = self.app.post(
1887 response = self.app.post(
1609 route_path(
1888 route_path(
1610 'pullrequest_comment_delete',
1889 "pullrequest_comment_delete",
1611 repo_name=pull_request.target_repo.scm_instance().name,
1890 repo_name=pull_request.target_repo.scm_instance().name,
1612 pull_request_id=pull_request.pull_request_id,
1891 pull_request_id=pull_request.pull_request_id,
1613 comment_id=comment_id),
1892 comment_id=comment_id,
1893 ),
1614 extra_environ=xhr_header,
1894 extra_environ=xhr_header,
1615 params={'csrf_token': csrf_token},
1895 params={"csrf_token": csrf_token},
1616 status=200
1896 status=200,
1617 )
1897 )
1618 assert response.text == 'true'
1898 assert response.text == "true"
1619
1899
1620 @pytest.mark.parametrize('url_type', [
1900 @pytest.mark.parametrize(
1621 'pullrequest_new',
1901 "url_type",
1622 'pullrequest_create',
1902 [
1623 'pullrequest_update',
1903 "pullrequest_new",
1624 'pullrequest_merge',
1904 "pullrequest_create",
1625 ])
1905 "pullrequest_update",
1906 "pullrequest_merge",
1907 ],
1908 )
1626 def test_pull_request_is_forbidden_on_archived_repo(
1909 def test_pull_request_is_forbidden_on_archived_repo(
1627 self, autologin_user, backend, xhr_header, user_util, url_type):
1910 self, autologin_user, backend, xhr_header, user_util, url_type
1628
1911 ):
1629 # create a temporary repo
1912 # create a temporary repo
1630 source = user_util.create_repo(repo_type=backend.alias)
1913 source = user_util.create_repo(repo_type=backend.alias)
1631 repo_name = source.repo_name
1914 repo_name = source.repo_name
1632 repo = Repository.get_by_repo_name(repo_name)
1915 repo = Repository.get_by_repo_name(repo_name)
1633 repo.archived = True
1916 repo.archived = True
1634 Session().commit()
1917 Session().commit()
1635
1918
1636 response = self.app.get(
1919 response = self.app.get(
1637 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1920 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302
1921 )
1638
1922
1639 msg = 'Action not supported for archived repository.'
1923 msg = "Action not supported for archived repository."
1640 assert_session_flash(response, msg)
1924 assert_session_flash(response, msg)
1641
1925
1642
1926
1643 def assert_pull_request_status(pull_request, expected_status):
1927 def assert_pull_request_status(pull_request, expected_status):
1644 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1928 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1645 assert status == expected_status
1929 assert status == expected_status
1646
1930
1647
1931
1648 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1932 @pytest.mark.parametrize("route", ["pullrequest_new", "pullrequest_create"])
1649 @pytest.mark.usefixtures("autologin_user")
1933 @pytest.mark.usefixtures("autologin_user")
1650 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1934 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1651 app.get(route_path(route, repo_name=backend_svn.repo_name), status=404)
1935 app.get(route_path(route, repo_name=backend_svn.repo_name), status=404)
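
The updated tests above declare non-linear history in their `commits` lists: an entry may carry an explicit "parents" list (referencing earlier commits by message) and a "branch" name, while an entry without "parents" simply follows the previous entry, and `backend.create_master_repo(commits)` materialises that shape. The snippet below is only a minimal, standalone sketch of this convention; `CommitSpec` and `resolve_dag` are hypothetical illustration helpers, not RhodeCode fixture code, and the "default to the previous commit" rule is an assumption based on how the tests read.

from __future__ import annotations

from dataclasses import dataclass, field


@dataclass
class CommitSpec:
    """One entry of a test `commits` list (hypothetical helper, for illustration only)."""
    message: str
    branch: str | None = None                          # optional named branch, recorded only
    parents: list[str] = field(default_factory=list)   # parent commits, referenced by message


def resolve_dag(specs: list[dict]) -> dict[str, list[str]]:
    """Return a {message: [parent messages]} map for a list of commit dicts.

    Assumption: a commit without an explicit "parents" key follows the commit
    that precedes it in the list, which is how the fixtures above read.
    """
    dag: dict[str, list[str]] = {}
    previous = None
    for raw in specs:
        spec = CommitSpec(
            message=raw["message"],
            branch=raw.get("branch"),
            parents=list(raw.get("parents", [])),
        )
        if not spec.parents and previous is not None:
            spec.parents = [previous]
        for parent in spec.parents:
            if parent not in dag:
                raise ValueError(f"{spec.message!r} references unknown parent {parent!r}")
        dag[spec.message] = spec.parents
        previous = spec.message
    return dag


if __name__ == "__main__":
    commits = [
        {"message": "ancestor"},
        {"message": "change"},
        {"message": "change-2"},
        {"message": "ancestor-new", "parents": ["ancestor"], "branch": "feature"},
        {"message": "change-rebased", "branch": "feature"},
    ]
    print(resolve_dag(commits))
    # {'ancestor': [], 'change': ['ancestor'], 'change-2': ['change'],
    #  'ancestor-new': ['ancestor'], 'change-rebased': ['ancestor-new']}

Run on the commit list from test_update_pr_ancestor_reference, the sketch shows "ancestor-new" branching directly off "ancestor", which is the non-linear shape the fixed tests rely on.
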
@@ -1,1044 +1,1044 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Scm model for RhodeCode
20 Scm model for RhodeCode
21 """
21 """
22
22
23 import os.path
23 import os.path
24 import traceback
24 import traceback
25 import logging
25 import logging
26 import io
26 import io
27
27
28 from sqlalchemy import func
28 from sqlalchemy import func
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 import rhodecode
31 import rhodecode
32 from rhodecode.lib.str_utils import safe_bytes
32 from rhodecode.lib.str_utils import safe_bytes
33 from rhodecode.lib.vcs import get_backend
33 from rhodecode.lib.vcs import get_backend
34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 from rhodecode.lib.vcs.nodes import FileNode
35 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.backends.base import EmptyCommit
36 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib import helpers as h, rc_cache
37 from rhodecode.lib import helpers as h, rc_cache
38 from rhodecode.lib.auth import (
38 from rhodecode.lib.auth import (
39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 HasUserGroupPermissionAny)
40 HasUserGroupPermissionAny)
41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 from rhodecode.lib import hooks_utils
42 from rhodecode.lib import hooks_utils
43 from rhodecode.lib.utils import (
43 from rhodecode.lib.utils import (
44 get_filesystem_repos, make_db_config)
44 get_filesystem_repos, make_db_config)
45 from rhodecode.lib.str_utils import safe_str
45 from rhodecode.lib.str_utils import safe_str
46 from rhodecode.lib.system_info import get_system_info
46 from rhodecode.lib.system_info import get_system_info
47 from rhodecode.model import BaseModel
47 from rhodecode.model import BaseModel
48 from rhodecode.model.db import (
48 from rhodecode.model.db import (
49 or_, false, null,
49 or_, false, null,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 PullRequest, FileStore)
51 PullRequest, FileStore)
52 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 class UserTemp(object):
58 class UserTemp(object):
59 def __init__(self, user_id):
59 def __init__(self, user_id):
60 self.user_id = user_id
60 self.user_id = user_id
61
61
62 def __repr__(self):
62 def __repr__(self):
63 return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
63 return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
64
64
65
65
66 class RepoTemp(object):
66 class RepoTemp(object):
67 def __init__(self, repo_id):
67 def __init__(self, repo_id):
68 self.repo_id = repo_id
68 self.repo_id = repo_id
69
69
70 def __repr__(self):
70 def __repr__(self):
71 return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
71 return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
72
72
73
73
74 class SimpleCachedRepoList(object):
74 class SimpleCachedRepoList(object):
75 """
75 """
76 Lighter version of iteration of repos without the scm initialisation,
76 Lighter version of iteration of repos without the scm initialisation,
77 and with cache usage
77 and with cache usage
78 """
78 """
79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 self.db_repo_list = db_repo_list
80 self.db_repo_list = db_repo_list
81 self.repos_path = repos_path
81 self.repos_path = repos_path
82 self.order_by = order_by
82 self.order_by = order_by
83 self.reversed = (order_by or '').startswith('-')
83 self.reversed = (order_by or '').startswith('-')
84 if not perm_set:
84 if not perm_set:
85 perm_set = ['repository.read', 'repository.write',
85 perm_set = ['repository.read', 'repository.write',
86 'repository.admin']
86 'repository.admin']
87 self.perm_set = perm_set
87 self.perm_set = perm_set
88
88
89 def __len__(self):
89 def __len__(self):
90 return len(self.db_repo_list)
90 return len(self.db_repo_list)
91
91
92 def __repr__(self):
92 def __repr__(self):
93 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
93 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
94
94
95 def __iter__(self):
95 def __iter__(self):
96 for dbr in self.db_repo_list:
96 for dbr in self.db_repo_list:
97 # check permission at this level
97 # check permission at this level
98 has_perm = HasRepoPermissionAny(*self.perm_set)(
98 has_perm = HasRepoPermissionAny(*self.perm_set)(
99 dbr.repo_name, 'SimpleCachedRepoList check')
99 dbr.repo_name, 'SimpleCachedRepoList check')
100 if not has_perm:
100 if not has_perm:
101 continue
101 continue
102
102
103 tmp_d = {
103 tmp_d = {
104 'name': dbr.repo_name,
104 'name': dbr.repo_name,
105 'dbrepo': dbr.get_dict(),
105 'dbrepo': dbr.get_dict(),
106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
107 }
107 }
108 yield tmp_d
108 yield tmp_d
109
109
110
110
111 class _PermCheckIterator(object):
111 class _PermCheckIterator(object):
112
112
113 def __init__(
113 def __init__(
114 self, obj_list, obj_attr, perm_set, perm_checker,
114 self, obj_list, obj_attr, perm_set, perm_checker,
115 extra_kwargs=None):
115 extra_kwargs=None):
116 """
116 """
117 Creates iterator from given list of objects, additionally
117 Creates iterator from given list of objects, additionally
118 checking permission for them from perm_set var
118 checking permission for them from perm_set var
119
119
120 :param obj_list: list of db objects
120 :param obj_list: list of db objects
121 :param obj_attr: attribute of object to pass into perm_checker
121 :param obj_attr: attribute of object to pass into perm_checker
122 :param perm_set: list of permissions to check
122 :param perm_set: list of permissions to check
123 :param perm_checker: callable to check permissions against
123 :param perm_checker: callable to check permissions against
124 """
124 """
125 self.obj_list = obj_list
125 self.obj_list = obj_list
126 self.obj_attr = obj_attr
126 self.obj_attr = obj_attr
127 self.perm_set = perm_set
127 self.perm_set = perm_set
128 self.perm_checker = perm_checker(*self.perm_set)
128 self.perm_checker = perm_checker(*self.perm_set)
129 self.extra_kwargs = extra_kwargs or {}
129 self.extra_kwargs = extra_kwargs or {}
130
130
131 def __len__(self):
131 def __len__(self):
132 return len(self.obj_list)
132 return len(self.obj_list)
133
133
134 def __repr__(self):
134 def __repr__(self):
135 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
135 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
136
136
137 def __iter__(self):
137 def __iter__(self):
138 for db_obj in self.obj_list:
138 for db_obj in self.obj_list:
139 # check permission at this level
139 # check permission at this level
140 # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
140 # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
141 name = db_obj.__dict__.get(self.obj_attr, None)
141 name = db_obj.__dict__.get(self.obj_attr, None)
142 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
142 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
143 continue
143 continue
144
144
145 yield db_obj
145 yield db_obj
146
146
147
147
148 class RepoList(_PermCheckIterator):
148 class RepoList(_PermCheckIterator):
149
149
150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
151 if not perm_set:
151 if not perm_set:
152 perm_set = ['repository.read', 'repository.write', 'repository.admin']
152 perm_set = ['repository.read', 'repository.write', 'repository.admin']
153
153
154 super().__init__(
154 super().__init__(
155 obj_list=db_repo_list,
155 obj_list=db_repo_list,
156 obj_attr='_repo_name', perm_set=perm_set,
156 obj_attr='_repo_name', perm_set=perm_set,
157 perm_checker=HasRepoPermissionAny,
157 perm_checker=HasRepoPermissionAny,
158 extra_kwargs=extra_kwargs)
158 extra_kwargs=extra_kwargs)
159
159
160
160
161 class RepoGroupList(_PermCheckIterator):
161 class RepoGroupList(_PermCheckIterator):
162
162
163 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
163 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
164 if not perm_set:
164 if not perm_set:
165 perm_set = ['group.read', 'group.write', 'group.admin']
165 perm_set = ['group.read', 'group.write', 'group.admin']
166
166
167 super().__init__(
167 super().__init__(
168 obj_list=db_repo_group_list,
168 obj_list=db_repo_group_list,
169 obj_attr='_group_name', perm_set=perm_set,
169 obj_attr='_group_name', perm_set=perm_set,
170 perm_checker=HasRepoGroupPermissionAny,
170 perm_checker=HasRepoGroupPermissionAny,
171 extra_kwargs=extra_kwargs)
171 extra_kwargs=extra_kwargs)
172
172
173
173
174 class UserGroupList(_PermCheckIterator):
174 class UserGroupList(_PermCheckIterator):
175
175
176 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
176 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
177 if not perm_set:
177 if not perm_set:
178 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
178 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
179
179
180 super().__init__(
180 super().__init__(
181 obj_list=db_user_group_list,
181 obj_list=db_user_group_list,
182 obj_attr='users_group_name', perm_set=perm_set,
182 obj_attr='users_group_name', perm_set=perm_set,
183 perm_checker=HasUserGroupPermissionAny,
183 perm_checker=HasUserGroupPermissionAny,
184 extra_kwargs=extra_kwargs)
184 extra_kwargs=extra_kwargs)
185
185
186
186
187 class ScmModel(BaseModel):
187 class ScmModel(BaseModel):
188 """
188 """
189 Generic Scm Model
189 Generic Scm Model
190 """
190 """
191
191
192 @LazyProperty
192 @LazyProperty
193 def repos_path(self):
193 def repos_path(self):
194 """
194 """
195 Gets the repositories root path from database
195 Gets the repositories root path from database
196 """
196 """
197
197
198 settings_model = VcsSettingsModel(sa=self.sa)
198 settings_model = VcsSettingsModel(sa=self.sa)
199 return settings_model.get_repos_location()
199 return settings_model.get_repos_location()
200
200
201 def repo_scan(self, repos_path=None):
201 def repo_scan(self, repos_path=None):
202 """
202 """
203 Listing of repositories in the given path. This path should not be a
203 Listing of repositories in the given path. This path should not be a
204 repository itself. Returns a dictionary of repository objects.
204 repository itself. Returns a dictionary of repository objects.
205
205
206 :param repos_path: path to directory containing repositories
206 :param repos_path: path to directory containing repositories
207 """
207 """
208
208
209 if repos_path is None:
209 if repos_path is None:
210 repos_path = self.repos_path
210 repos_path = self.repos_path
211
211
212 log.info('scanning for repositories in %s', repos_path)
212 log.info('scanning for repositories in %s', repos_path)
213
213
214 config = make_db_config()
214 config = make_db_config()
215 config.set('extensions', 'largefiles', '')
215 config.set('extensions', 'largefiles', '')
216 repos = {}
216 repos = {}
217
217
218 for name, path in get_filesystem_repos(repos_path, recursive=True):
218 for name, path in get_filesystem_repos(repos_path, recursive=True):
219 # the name needs to be decomposed and put back together using the /
219 # the name needs to be decomposed and put back together using the /
220 # since this is the internal storage separator for rhodecode
220 # since this is the internal storage separator for rhodecode
221 name = Repository.normalize_repo_name(name)
221 name = Repository.normalize_repo_name(name)
222
222
223 try:
223 try:
224 if name in repos:
224 if name in repos:
225 raise RepositoryError('Duplicate repository name %s '
225 raise RepositoryError('Duplicate repository name %s '
226 'found in %s' % (name, path))
226 'found in %s' % (name, path))
227 elif path[0] in rhodecode.BACKENDS:
227 elif path[0] in rhodecode.BACKENDS:
228 backend = get_backend(path[0])
228 backend = get_backend(path[0])
229 repos[name] = backend(path[1], config=config,
229 repos[name] = backend(path[1], config=config,
230 with_wire={"cache": False})
230 with_wire={"cache": False})
231 except OSError:
231 except OSError:
232 continue
232 continue
233 except RepositoryError:
233 except RepositoryError:
234 log.exception('Failed to create a repo')
234 log.exception('Failed to create a repo')
235 continue
235 continue
236
236
237 log.debug('found %s paths with repositories', len(repos))
237 log.debug('found %s paths with repositories', len(repos))
238 return repos
238 return repos
239
239
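For orientation, a hedged sketch of calling repo_scan (assuming ScmModel() can be constructed with the default database session, as other BaseModel subclasses allow):

    # illustrative only: with no argument the configured repos_path is scanned;
    # the result maps normalized repo names to backend instances
    found = ScmModel().repo_scan()
    for repo_name, backend in found.items():
        print(repo_name, type(backend).__name__)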
240 def get_repos(self, all_repos=None, sort_key=None):
240 def get_repos(self, all_repos=None, sort_key=None):
241 """
241 """
242 Get all repositories from the db and for each repo create its
242 Get all repositories from the db and for each repo create its
243 backend instance and fill that backend with information from the database
243 backend instance and fill that backend with information from the database
244
244
245 :param all_repos: list of repository names as strings
245 :param all_repos: list of repository names as strings
246 pass a specific list of repositories, useful for filtering
246 pass a specific list of repositories, useful for filtering
247
247
248 :param sort_key: initial sorting of repositories
248 :param sort_key: initial sorting of repositories
249 """
249 """
250 if all_repos is None:
250 if all_repos is None:
251 all_repos = self.sa.query(Repository)\
251 all_repos = self.sa.query(Repository)\
252 .filter(Repository.group_id == null())\
252 .filter(Repository.group_id == null())\
253 .order_by(func.lower(Repository.repo_name)).all()
253 .order_by(func.lower(Repository.repo_name)).all()
254 repo_iter = SimpleCachedRepoList(
254 repo_iter = SimpleCachedRepoList(
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
256 return repo_iter
256 return repo_iter
257
257
258 def get_repo_groups(self, all_groups=None):
258 def get_repo_groups(self, all_groups=None):
259 if all_groups is None:
259 if all_groups is None:
260 all_groups = RepoGroup.query()\
260 all_groups = RepoGroup.query()\
261 .filter(RepoGroup.group_parent_id == null()).all()
261 .filter(RepoGroup.group_parent_id == null()).all()
262 return [x for x in RepoGroupList(all_groups)]
262 return [x for x in RepoGroupList(all_groups)]
263
263
264 def mark_for_invalidation(self, repo_name, delete=False):
264 def mark_for_invalidation(self, repo_name, delete=False):
265 """
265 """
266 Mark caches of this repo invalid in the database. The `delete` flag
266 Mark caches of this repo invalid in the database. The `delete` flag
267 removes the cache entries.
267 removes the cache entries.
268
268
269 :param repo_name: the repo_name for which caches should be marked
269 :param repo_name: the repo_name for which caches should be marked
270 invalid, or deleted
270 invalid, or deleted
271 :param delete: delete the entry keys instead of setting a bool
271 :param delete: delete the entry keys instead of setting a bool
272 flag on them, and also purge the caches used by dogpile
272 flag on them, and also purge the caches used by dogpile
273 """
273 """
274 repo = Repository.get_by_repo_name(repo_name)
274 repo = Repository.get_by_repo_name(repo_name)
275
275
276 if repo:
276 if repo:
277 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
277 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
278 repo_id=repo.repo_id)
278 repo_id=repo.repo_id)
279 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
279 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
280
280
281 repo_id = repo.repo_id
281 repo_id = repo.repo_id
282 config = repo._config
282 config = repo._config
283 config.set('extensions', 'largefiles', '')
283 config.set('extensions', 'largefiles', '')
284 repo.update_commit_cache(config=config, cs_cache=None)
284 repo.update_commit_cache(config=config, cs_cache=None)
285 if delete:
285 if delete:
286 cache_namespace_uid = f'cache_repo.{repo_id}'
286 cache_namespace_uid = f'cache_repo.{repo_id}'
287 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
287 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
288
288
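A short sketch of the two invalidation modes (the repo name is hypothetical):

    scm = ScmModel()
    scm.mark_for_invalidation('group/my-repo')               # mark cache keys invalid
    scm.mark_for_invalidation('group/my-repo', delete=True)  # also purge the dogpile cache namespace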
289 def toggle_following_repo(self, follow_repo_id, user_id):
289 def toggle_following_repo(self, follow_repo_id, user_id):
290
290
291 f = self.sa.query(UserFollowing)\
291 f = self.sa.query(UserFollowing)\
292 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
292 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
293 .filter(UserFollowing.user_id == user_id).scalar()
293 .filter(UserFollowing.user_id == user_id).scalar()
294
294
295 if f is not None:
295 if f is not None:
296 try:
296 try:
297 self.sa.delete(f)
297 self.sa.delete(f)
298 return
298 return
299 except Exception:
299 except Exception:
300 log.error(traceback.format_exc())
300 log.error(traceback.format_exc())
301 raise
301 raise
302
302
303 try:
303 try:
304 f = UserFollowing()
304 f = UserFollowing()
305 f.user_id = user_id
305 f.user_id = user_id
306 f.follows_repo_id = follow_repo_id
306 f.follows_repo_id = follow_repo_id
307 self.sa.add(f)
307 self.sa.add(f)
308 except Exception:
308 except Exception:
309 log.error(traceback.format_exc())
309 log.error(traceback.format_exc())
310 raise
310 raise
311
311
312 def toggle_following_user(self, follow_user_id, user_id):
312 def toggle_following_user(self, follow_user_id, user_id):
313 f = self.sa.query(UserFollowing)\
313 f = self.sa.query(UserFollowing)\
314 .filter(UserFollowing.follows_user_id == follow_user_id)\
314 .filter(UserFollowing.follows_user_id == follow_user_id)\
315 .filter(UserFollowing.user_id == user_id).scalar()
315 .filter(UserFollowing.user_id == user_id).scalar()
316
316
317 if f is not None:
317 if f is not None:
318 try:
318 try:
319 self.sa.delete(f)
319 self.sa.delete(f)
320 return
320 return
321 except Exception:
321 except Exception:
322 log.error(traceback.format_exc())
322 log.error(traceback.format_exc())
323 raise
323 raise
324
324
325 try:
325 try:
326 f = UserFollowing()
326 f = UserFollowing()
327 f.user_id = user_id
327 f.user_id = user_id
328 f.follows_user_id = follow_user_id
328 f.follows_user_id = follow_user_id
329 self.sa.add(f)
329 self.sa.add(f)
330 except Exception:
330 except Exception:
331 log.error(traceback.format_exc())
331 log.error(traceback.format_exc())
332 raise
332 raise
333
333
334 def is_following_repo(self, repo_name, user_id, cache=False):
334 def is_following_repo(self, repo_name, user_id, cache=False):
335 r = self.sa.query(Repository)\
335 r = self.sa.query(Repository)\
336 .filter(Repository.repo_name == repo_name).scalar()
336 .filter(Repository.repo_name == repo_name).scalar()
337
337
338 f = self.sa.query(UserFollowing)\
338 f = self.sa.query(UserFollowing)\
339 .filter(UserFollowing.follows_repository == r)\
339 .filter(UserFollowing.follows_repository == r)\
340 .filter(UserFollowing.user_id == user_id).scalar()
340 .filter(UserFollowing.user_id == user_id).scalar()
341
341
342 return f is not None
342 return f is not None
343
343
344 def is_following_user(self, username, user_id, cache=False):
344 def is_following_user(self, username, user_id, cache=False):
345 u = User.get_by_username(username)
345 u = User.get_by_username(username)
346
346
347 f = self.sa.query(UserFollowing)\
347 f = self.sa.query(UserFollowing)\
348 .filter(UserFollowing.follows_user == u)\
348 .filter(UserFollowing.follows_user == u)\
349 .filter(UserFollowing.user_id == user_id).scalar()
349 .filter(UserFollowing.user_id == user_id).scalar()
350
350
351 return f is not None
351 return f is not None
352
352
353 def get_followers(self, repo):
353 def get_followers(self, repo):
354 repo = self._get_repo(repo)
354 repo = self._get_repo(repo)
355
355
356 return self.sa.query(UserFollowing)\
356 return self.sa.query(UserFollowing)\
357 .filter(UserFollowing.follows_repository == repo).count()
357 .filter(UserFollowing.follows_repository == repo).count()
358
358
359 def get_forks(self, repo):
359 def get_forks(self, repo):
360 repo = self._get_repo(repo)
360 repo = self._get_repo(repo)
361 return self.sa.query(Repository)\
361 return self.sa.query(Repository)\
362 .filter(Repository.fork == repo).count()
362 .filter(Repository.fork == repo).count()
363
363
364 def get_pull_requests(self, repo):
364 def get_pull_requests(self, repo):
365 repo = self._get_repo(repo)
365 repo = self._get_repo(repo)
366 return self.sa.query(PullRequest)\
366 return self.sa.query(PullRequest)\
367 .filter(PullRequest.target_repo == repo)\
367 .filter(PullRequest.target_repo == repo)\
368 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
368 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
369
369
370 def get_artifacts(self, repo):
370 def get_artifacts(self, repo):
371 repo = self._get_repo(repo)
371 repo = self._get_repo(repo)
372 return self.sa.query(FileStore)\
372 return self.sa.query(FileStore)\
373 .filter(FileStore.repo == repo)\
373 .filter(FileStore.repo == repo)\
374 .filter(or_(FileStore.hidden == null(), FileStore.hidden == false())).count()
374 .filter(or_(FileStore.hidden == null(), FileStore.hidden == false())).count()
375
375
376 def mark_as_fork(self, repo, fork, user):
376 def mark_as_fork(self, repo, fork, user):
377 repo = self._get_repo(repo)
377 repo = self._get_repo(repo)
378 fork = self._get_repo(fork)
378 fork = self._get_repo(fork)
379 if fork and repo.repo_id == fork.repo_id:
379 if fork and repo.repo_id == fork.repo_id:
380 raise Exception("Cannot set repository as fork of itself")
380 raise Exception("Cannot set repository as fork of itself")
381
381
382 if fork and repo.repo_type != fork.repo_type:
382 if fork and repo.repo_type != fork.repo_type:
383 raise RepositoryError(
383 raise RepositoryError(
384 "Cannot set repository as fork of repository with other type")
384 "Cannot set repository as fork of repository with other type")
385
385
386 repo.fork = fork
386 repo.fork = fork
387 self.sa.add(repo)
387 self.sa.add(repo)
388 return repo
388 return repo
389
389
390 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
390 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
391 dbrepo = self._get_repo(repo)
391 dbrepo = self._get_repo(repo)
392 remote_uri = remote_uri or dbrepo.clone_uri
392 remote_uri = remote_uri or dbrepo.clone_uri
393 if not remote_uri:
393 if not remote_uri:
394 raise Exception("This repository doesn't have a clone uri")
394 raise Exception("This repository doesn't have a clone uri")
395
395
396 repo = dbrepo.scm_instance(cache=False)
396 repo = dbrepo.scm_instance(cache=False)
397 repo.config.clear_section('hooks')
397 repo.config.clear_section('hooks')
398
398
399 try:
399 try:
400 # NOTE(marcink): add extra validation so we skip invalid urls
400 # NOTE(marcink): add extra validation so we skip invalid urls
401 # this is because these tasks can be executed via the scheduler without
401 # this is because these tasks can be executed via the scheduler without
402 # proper validation of remote_uri
402 # proper validation of remote_uri
403 if validate_uri:
403 if validate_uri:
404 config = make_db_config(clear_session=False)
404 config = make_db_config(clear_session=False)
405 url_validator(remote_uri, dbrepo.repo_type, config)
405 url_validator(remote_uri, dbrepo.repo_type, config)
406 except InvalidCloneUrl:
406 except InvalidCloneUrl:
407 raise
407 raise
408
408
409 repo_name = dbrepo.repo_name
409 repo_name = dbrepo.repo_name
410 try:
410 try:
411 # TODO: we need to make sure those operations call proper hooks !
411 # TODO: we need to make sure those operations call proper hooks !
412 repo.fetch(remote_uri)
412 repo.fetch(remote_uri)
413
413
414 self.mark_for_invalidation(repo_name)
414 self.mark_for_invalidation(repo_name)
415 except Exception:
415 except Exception:
416 log.error(traceback.format_exc())
416 log.error(traceback.format_exc())
417 raise
417 raise
418
418
419 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
419 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
420 dbrepo = self._get_repo(repo)
420 dbrepo = self._get_repo(repo)
421 remote_uri = remote_uri or dbrepo.push_uri
421 remote_uri = remote_uri or dbrepo.push_uri
422 if not remote_uri:
422 if not remote_uri:
423 raise Exception("This repository doesn't have a clone uri")
423 raise Exception("This repository doesn't have a clone uri")
424
424
425 repo = dbrepo.scm_instance(cache=False)
425 repo = dbrepo.scm_instance(cache=False)
426 repo.config.clear_section('hooks')
426 repo.config.clear_section('hooks')
427
427
428 try:
428 try:
429 # NOTE(marcink): add extra validation so we skip invalid urls
429 # NOTE(marcink): add extra validation so we skip invalid urls
430 # this is because these tasks can be executed via the scheduler without
430 # this is because these tasks can be executed via the scheduler without
431 # proper validation of remote_uri
431 # proper validation of remote_uri
432 if validate_uri:
432 if validate_uri:
433 config = make_db_config(clear_session=False)
433 config = make_db_config(clear_session=False)
434 url_validator(remote_uri, dbrepo.repo_type, config)
434 url_validator(remote_uri, dbrepo.repo_type, config)
435 except InvalidCloneUrl:
435 except InvalidCloneUrl:
436 raise
436 raise
437
437
438 try:
438 try:
439 repo.push(remote_uri)
439 repo.push(remote_uri)
440 except Exception:
440 except Exception:
441 log.error(traceback.format_exc())
441 log.error(traceback.format_exc())
442 raise
442 raise
443
443
444 def commit_change(self, repo, repo_name, commit, user, author, message,
444 def commit_change(self, repo, repo_name, commit, user, author, message,
445 content: bytes, f_path: bytes):
445 content: bytes, f_path: bytes, branch: str = None):
446 """
446 """
447 Commits changes
447 Commits changes
448 """
448 """
449 user = self._get_user(user)
449 user = self._get_user(user)
450
450
451 # message and author need to be unicode
451 # message and author need to be unicode
452 # the proper backend should then translate them into the required type
452 # the proper backend should then translate them into the required type
453 message = safe_str(message)
453 message = safe_str(message)
454 author = safe_str(author)
454 author = safe_str(author)
455 imc = repo.in_memory_commit
455 imc = repo.in_memory_commit
456 imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
456 imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
457 try:
457 try:
458 # TODO: handle pre-push action !
458 # TODO: handle pre-push action !
459 tip = imc.commit(
459 tip = imc.commit(
460 message=message, author=author, parents=[commit],
460 message=message, author=author, parents=[commit],
461 branch=commit.branch)
461 branch=branch or commit.branch)
462 except Exception as e:
462 except Exception as e:
463 log.error(traceback.format_exc())
463 log.error(traceback.format_exc())
464 raise IMCCommitError(str(e))
464 raise IMCCommitError(str(e))
465 finally:
465 finally:
466 # always clear caches, if commit fails we want fresh object also
466 # always clear caches, if commit fails we want fresh object also
467 self.mark_for_invalidation(repo_name)
467 self.mark_for_invalidation(repo_name)
468
468
469 # We trigger the post-push action
469 # We trigger the post-push action
470 hooks_utils.trigger_post_push_hook(
470 hooks_utils.trigger_post_push_hook(
471 username=user.username, action='push_local', hook_type='post_push',
471 username=user.username, action='push_local', hook_type='post_push',
472 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
472 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
473 return tip
473 return tip
474
474
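The optional branch argument added above is what lets tests create non-linear history: the new commit can land on a branch other than the parent commit's. A hedged sketch (vcs_repo, base_commit and admin are hypothetical objects of the types this method already expects):

    # illustrative only -- not taken from the test suite
    tip = ScmModel().commit_change(
        repo=vcs_repo,                      # vcs backend instance exposing in_memory_commit
        repo_name='group/my-repo',
        commit=base_commit,                 # parent commit object
        user=admin,                         # User object or user_id
        author='Test Admin <admin@example.com>',
        message='add feature file',
        content=b'new content\n',
        f_path=b'feature.txt',
        branch='feature')                   # overrides base_commit.branch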
475 def _sanitize_path(self, f_path: bytes):
475 def _sanitize_path(self, f_path: bytes):
476 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
476 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
477 raise NonRelativePathError(b'%b is not a relative path' % f_path)
477 raise NonRelativePathError(b'%b is not a relative path' % f_path)
478 if f_path:
478 if f_path:
479 f_path = os.path.normpath(f_path)
479 f_path = os.path.normpath(f_path)
480 return f_path
480 return f_path
481
481
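What _sanitize_path accepts and rejects, sketched with bytes paths as typed above (NonRelativePathError is the exception class this module raises):

    scm = ScmModel()
    print(scm._sanitize_path(b'docs/readme.rst'))    # ok: returns the normalized relative path
    for bad in (b'/etc/passwd', b'./hidden', b'a/../../secret'):
        try:
            scm._sanitize_path(bad)
        except NonRelativePathError:
            print('rejected:', bad)                  # absolute, ./-prefixed and ../ paths are rejected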
482 def get_dirnode_metadata(self, request, commit, dir_node):
482 def get_dirnode_metadata(self, request, commit, dir_node):
483 if not dir_node.is_dir():
483 if not dir_node.is_dir():
484 return []
484 return []
485
485
486 data = []
486 data = []
487 for node in dir_node:
487 for node in dir_node:
488 if not node.is_file():
488 if not node.is_file():
489 # skip anything that is not a file node
489 # skip anything that is not a file node
490 continue
490 continue
491
491
492 last_commit = node.last_commit
492 last_commit = node.last_commit
493 last_commit_date = last_commit.date
493 last_commit_date = last_commit.date
494 data.append({
494 data.append({
495 'name': node.name,
495 'name': node.name,
496 'size': h.format_byte_size_binary(node.size),
496 'size': h.format_byte_size_binary(node.size),
497 'modified_at': h.format_date(last_commit_date),
497 'modified_at': h.format_date(last_commit_date),
498 'modified_ts': last_commit_date.isoformat(),
498 'modified_ts': last_commit_date.isoformat(),
499 'revision': last_commit.revision,
499 'revision': last_commit.revision,
500 'short_id': last_commit.short_id,
500 'short_id': last_commit.short_id,
501 'message': h.escape(last_commit.message),
501 'message': h.escape(last_commit.message),
502 'author': h.escape(last_commit.author),
502 'author': h.escape(last_commit.author),
503 'user_profile': h.gravatar_with_user(
503 'user_profile': h.gravatar_with_user(
504 request, last_commit.author),
504 request, last_commit.author),
505 })
505 })
506
506
507 return data
507 return data
508
508
509 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
509 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
510 extended_info=False, content=False, max_file_bytes=None):
510 extended_info=False, content=False, max_file_bytes=None):
511 """
511 """
512 recursively walk the root dir and return a set of all paths in that dir,
512 recursively walk the root dir and return a set of all paths in that dir,
513 based on the repository walk function
513 based on the repository walk function
514
514
515 :param repo_name: name of repository
515 :param repo_name: name of repository
516 :param commit_id: commit id for which to list nodes
516 :param commit_id: commit id for which to list nodes
517 :param root_path: root path to list
517 :param root_path: root path to list
518 :param flat: return as a list, if False returns a dict with description
518 :param flat: return as a list, if False returns a dict with description
519 :param extended_info: show additional info such as md5, binary, size etc
519 :param extended_info: show additional info such as md5, binary, size etc
520 :param content: add nodes content to the return data
520 :param content: add nodes content to the return data
521 :param max_file_bytes: will not return file contents over this limit
521 :param max_file_bytes: will not return file contents over this limit
522
522
523 """
523 """
524 _files = list()
524 _files = list()
525 _dirs = list()
525 _dirs = list()
526
526
527 try:
527 try:
528 _repo = self._get_repo(repo_name)
528 _repo = self._get_repo(repo_name)
529 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
529 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
530 root_path = root_path.lstrip('/')
530 root_path = root_path.lstrip('/')
531
531
532 # get RootNode, inject pre-load options before walking
532 # get RootNode, inject pre-load options before walking
533 top_node = commit.get_node(root_path)
533 top_node = commit.get_node(root_path)
534 extended_info_pre_load = []
534 extended_info_pre_load = []
535 if extended_info:
535 if extended_info:
536 extended_info_pre_load += ['md5']
536 extended_info_pre_load += ['md5']
537 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
537 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
538
538
539 for __, dirs, files in commit.walk(top_node):
539 for __, dirs, files in commit.walk(top_node):
540
540
541 for f in files:
541 for f in files:
542 _content = None
542 _content = None
543 _data = f_name = f.str_path
543 _data = f_name = f.str_path
544
544
545 if not flat:
545 if not flat:
546 _data = {
546 _data = {
547 "name": h.escape(f_name),
547 "name": h.escape(f_name),
548 "type": "file",
548 "type": "file",
549 }
549 }
550 if extended_info:
550 if extended_info:
551 _data.update({
551 _data.update({
552 "md5": f.md5,
552 "md5": f.md5,
553 "binary": f.is_binary,
553 "binary": f.is_binary,
554 "size": f.size,
554 "size": f.size,
555 "extension": f.extension,
555 "extension": f.extension,
556 "mimetype": f.mimetype,
556 "mimetype": f.mimetype,
557 "lines": f.lines()[0]
557 "lines": f.lines()[0]
558 })
558 })
559
559
560 if content:
560 if content:
561 over_size_limit = (max_file_bytes is not None
561 over_size_limit = (max_file_bytes is not None
562 and f.size > max_file_bytes)
562 and f.size > max_file_bytes)
563 full_content = None
563 full_content = None
564 if not f.is_binary and not over_size_limit:
564 if not f.is_binary and not over_size_limit:
565 full_content = f.str_content
565 full_content = f.str_content
566
566
567 _data.update({
567 _data.update({
568 "content": full_content,
568 "content": full_content,
569 })
569 })
570 _files.append(_data)
570 _files.append(_data)
571
571
572 for d in dirs:
572 for d in dirs:
573 _data = d_name = d.str_path
573 _data = d_name = d.str_path
574 if not flat:
574 if not flat:
575 _data = {
575 _data = {
576 "name": h.escape(d_name),
576 "name": h.escape(d_name),
577 "type": "dir",
577 "type": "dir",
578 }
578 }
579 if extended_info:
579 if extended_info:
580 _data.update({
580 _data.update({
581 "md5": "",
581 "md5": "",
582 "binary": False,
582 "binary": False,
583 "size": 0,
583 "size": 0,
584 "extension": "",
584 "extension": "",
585 })
585 })
586 if content:
586 if content:
587 _data.update({
587 _data.update({
588 "content": None
588 "content": None
589 })
589 })
590 _dirs.append(_data)
590 _dirs.append(_data)
591 except RepositoryError:
591 except RepositoryError:
592 log.exception("Exception in get_nodes")
592 log.exception("Exception in get_nodes")
593 raise
593 raise
594
594
595 return _dirs, _files
595 return _dirs, _files
596
596
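An illustrative call (the repo name and commit id are hypothetical) showing the shape of get_nodes results:

    dirs, files = ScmModel().get_nodes(
        'group/my-repo', commit_id='deadbeef' * 5, root_path='/',
        flat=False, extended_info=True, content=False,
        max_file_bytes=1024 * 1024)
    # with flat=False every entry is a dict, e.g.
    # {'name': 'README.rst', 'type': 'file', 'md5': ..., 'binary': False, 'size': 42, ...}
    print(len(dirs), len(files))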
597 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
597 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
598 """
598 """
599 Generate files for quick filter in files view
599 Generate files for quick filter in files view
600 """
600 """
601
601
602 _files = list()
602 _files = list()
603 _dirs = list()
603 _dirs = list()
604 try:
604 try:
605 _repo = self._get_repo(repo_name)
605 _repo = self._get_repo(repo_name)
606 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
606 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
607 root_path = root_path.lstrip('/')
607 root_path = root_path.lstrip('/')
608
608
609 top_node = commit.get_node(root_path)
609 top_node = commit.get_node(root_path)
610 top_node.default_pre_load = []
610 top_node.default_pre_load = []
611
611
612 for __, dirs, files in commit.walk(top_node):
612 for __, dirs, files in commit.walk(top_node):
613 for f in files:
613 for f in files:
614
614
615 _data = {
615 _data = {
616 "name": h.escape(f.str_path),
616 "name": h.escape(f.str_path),
617 "type": "file",
617 "type": "file",
618 }
618 }
619
619
620 _files.append(_data)
620 _files.append(_data)
621
621
622 for d in dirs:
622 for d in dirs:
623
623
624 _data = {
624 _data = {
625 "name": h.escape(d.str_path),
625 "name": h.escape(d.str_path),
626 "type": "dir",
626 "type": "dir",
627 }
627 }
628
628
629 _dirs.append(_data)
629 _dirs.append(_data)
630 except RepositoryError:
630 except RepositoryError:
631 log.exception("Exception in get_quick_filter_nodes")
631 log.exception("Exception in get_quick_filter_nodes")
632 raise
632 raise
633
633
634 return _dirs, _files
634 return _dirs, _files
635
635
636 def get_node(self, repo_name, commit_id, file_path,
636 def get_node(self, repo_name, commit_id, file_path,
637 extended_info=False, content=False, max_file_bytes=None, cache=True):
637 extended_info=False, content=False, max_file_bytes=None, cache=True):
638 """
638 """
639 retrieve single node from commit
639 retrieve single node from commit
640 """
640 """
641
641
642 try:
642 try:
643
643
644 _repo = self._get_repo(repo_name)
644 _repo = self._get_repo(repo_name)
645 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
645 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
646
646
647 file_node = commit.get_node(file_path)
647 file_node = commit.get_node(file_path)
648 if file_node.is_dir():
648 if file_node.is_dir():
649 raise RepositoryError('The given path is a directory')
649 raise RepositoryError('The given path is a directory')
650
650
651 _content = None
651 _content = None
652 f_name = file_node.str_path
652 f_name = file_node.str_path
653
653
654 file_data = {
654 file_data = {
655 "name": h.escape(f_name),
655 "name": h.escape(f_name),
656 "type": "file",
656 "type": "file",
657 }
657 }
658
658
659 if extended_info:
659 if extended_info:
660 file_data.update({
660 file_data.update({
661 "extension": file_node.extension,
661 "extension": file_node.extension,
662 "mimetype": file_node.mimetype,
662 "mimetype": file_node.mimetype,
663 })
663 })
664
664
665 if cache:
665 if cache:
666 md5 = file_node.md5
666 md5 = file_node.md5
667 is_binary = file_node.is_binary
667 is_binary = file_node.is_binary
668 size = file_node.size
668 size = file_node.size
669 else:
669 else:
670 is_binary, md5, size, _content = file_node.metadata_uncached()
670 is_binary, md5, size, _content = file_node.metadata_uncached()
671
671
672 file_data.update({
672 file_data.update({
673 "md5": md5,
673 "md5": md5,
674 "binary": is_binary,
674 "binary": is_binary,
675 "size": size,
675 "size": size,
676 })
676 })
677
677
678 if content and cache:
678 if content and cache:
679 # get content + cache
679 # get content + cache
680 size = file_node.size
680 size = file_node.size
681 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
681 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
682 full_content = None
682 full_content = None
683 all_lines = 0
683 all_lines = 0
684 if not file_node.is_binary and not over_size_limit:
684 if not file_node.is_binary and not over_size_limit:
685 full_content = safe_str(file_node.content)
685 full_content = safe_str(file_node.content)
686 all_lines, empty_lines = file_node.count_lines(full_content)
686 all_lines, empty_lines = file_node.count_lines(full_content)
687
687
688 file_data.update({
688 file_data.update({
689 "content": full_content,
689 "content": full_content,
690 "lines": all_lines
690 "lines": all_lines
691 })
691 })
692 elif content:
692 elif content:
693 # get content *without* cache
693 # get content *without* cache
694 if _content is None:
694 if _content is None:
695 is_binary, md5, size, _content = file_node.metadata_uncached()
695 is_binary, md5, size, _content = file_node.metadata_uncached()
696
696
697 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
697 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
698 full_content = None
698 full_content = None
699 all_lines = 0
699 all_lines = 0
700 if not is_binary and not over_size_limit:
700 if not is_binary and not over_size_limit:
701 full_content = safe_str(_content)
701 full_content = safe_str(_content)
702 all_lines, empty_lines = file_node.count_lines(full_content)
702 all_lines, empty_lines = file_node.count_lines(full_content)
703
703
704 file_data.update({
704 file_data.update({
705 "content": full_content,
705 "content": full_content,
706 "lines": all_lines
706 "lines": all_lines
707 })
707 })
708
708
709 except RepositoryError:
709 except RepositoryError:
710 log.exception("Exception in get_node")
710 log.exception("Exception in get_node")
711 raise
711 raise
712
712
713 return file_data
713 return file_data
714
714
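A sketch of get_node for a single file (identifiers are hypothetical); with cache=False the metadata is recomputed via metadata_uncached():

    file_data = ScmModel().get_node(
        'group/my-repo', commit_id='deadbeef' * 5, file_path='README.rst',
        extended_info=True, content=True, max_file_bytes=64 * 1024, cache=False)
    print(file_data['binary'], file_data['size'], file_data['lines'])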
715 def get_fts_data(self, repo_name, commit_id, root_path='/'):
715 def get_fts_data(self, repo_name, commit_id, root_path='/'):
716 """
716 """
717 Fetch node tree for usage in full text search
717 Fetch node tree for usage in full text search
718 """
718 """
719
719
720 tree_info = list()
720 tree_info = list()
721
721
722 try:
722 try:
723 _repo = self._get_repo(repo_name)
723 _repo = self._get_repo(repo_name)
724 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
724 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
725 root_path = root_path.lstrip('/')
725 root_path = root_path.lstrip('/')
726 top_node = commit.get_node(root_path)
726 top_node = commit.get_node(root_path)
727 top_node.default_pre_load = []
727 top_node.default_pre_load = []
728
728
729 for __, dirs, files in commit.walk(top_node):
729 for __, dirs, files in commit.walk(top_node):
730
730
731 for f in files:
731 for f in files:
732 is_binary, md5, size, _content = f.metadata_uncached()
732 is_binary, md5, size, _content = f.metadata_uncached()
733 _data = {
733 _data = {
734 "name": f.str_path,
734 "name": f.str_path,
735 "md5": md5,
735 "md5": md5,
736 "extension": f.extension,
736 "extension": f.extension,
737 "binary": is_binary,
737 "binary": is_binary,
738 "size": size
738 "size": size
739 }
739 }
740
740
741 tree_info.append(_data)
741 tree_info.append(_data)
742
742
743 except RepositoryError:
743 except RepositoryError:
744 log.exception("Exception in get_nodes")
744 log.exception("Exception in get_nodes")
745 raise
745 raise
746
746
747 return tree_info
747 return tree_info
748
748
749 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
749 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
750 author=None, trigger_push_hook=True):
750 author=None, trigger_push_hook=True):
751 """
751 """
752 Commits given multiple nodes into repo
752 Commits given multiple nodes into repo
753
753
754 :param user: RhodeCode User object or user_id, the committer
754 :param user: RhodeCode User object or user_id, the committer
755 :param repo: RhodeCode Repository object
755 :param repo: RhodeCode Repository object
756 :param message: commit message
756 :param message: commit message
757 :param nodes: mapping {filename:{'content':content},...}
757 :param nodes: mapping {filename:{'content':content},...}
758 :param parent_commit: parent commit; if empty, this is the
758 :param parent_commit: parent commit; if empty, this is the
759 initial commit
759 initial commit
760 :param author: author of the commit, can be different than the committer
760 :param author: author of the commit, can be different than the committer
761 (git only)
761 (git only)
762 :param trigger_push_hook: trigger push hooks
762 :param trigger_push_hook: trigger push hooks
763
763
764 :returns: new committed commit
764 :returns: new committed commit
765 """
765 """
766
766
767 user = self._get_user(user)
767 user = self._get_user(user)
768 scm_instance = repo.scm_instance(cache=False)
768 scm_instance = repo.scm_instance(cache=False)
769
769
770 message = safe_str(message)
770 message = safe_str(message)
771 commiter = user.full_contact
771 commiter = user.full_contact
772 author = safe_str(author) if author else commiter
772 author = safe_str(author) if author else commiter
773
773
774 imc = scm_instance.in_memory_commit
774 imc = scm_instance.in_memory_commit
775
775
776 if not parent_commit:
776 if not parent_commit:
777 parent_commit = EmptyCommit(alias=scm_instance.alias)
777 parent_commit = EmptyCommit(alias=scm_instance.alias)
778
778
779 if isinstance(parent_commit, EmptyCommit):
779 if isinstance(parent_commit, EmptyCommit):
780 # EmptyCommit means we're editing an empty repository
780 # EmptyCommit means we're editing an empty repository
781 parents = None
781 parents = None
782 else:
782 else:
783 parents = [parent_commit]
783 parents = [parent_commit]
784
784
785 upload_file_types = (io.BytesIO, io.BufferedRandom)
785 upload_file_types = (io.BytesIO, io.BufferedRandom)
786 processed_nodes = []
786 processed_nodes = []
787 for filename, content_dict in nodes.items():
787 for filename, content_dict in nodes.items():
788 if not isinstance(filename, bytes):
788 if not isinstance(filename, bytes):
789 raise ValueError('filename key in nodes needs to be bytes')
789 raise ValueError('filename key in nodes needs to be bytes')
790 content = content_dict['content']
790 content = content_dict['content']
791 if not isinstance(content, upload_file_types + (bytes,)):
791 if not isinstance(content, upload_file_types + (bytes,)):
792 raise ValueError('content key value in nodes needs to be bytes')
792 raise ValueError('content key value in nodes needs to be bytes')
793
793
794 for f_path in nodes:
794 for f_path in nodes:
795 f_path = self._sanitize_path(f_path)
795 f_path = self._sanitize_path(f_path)
796 content = nodes[f_path]['content']
796 content = nodes[f_path]['content']
797
797
798 # decoding here ensures that we have properly encoded values
798 # decoding here ensures that we have properly encoded values
799 # in any other case this will raise exceptions and deny the commit
799 # in any other case this will raise exceptions and deny the commit
800
800
801 if isinstance(content, bytes):
801 if isinstance(content, bytes):
802 pass
802 pass
803 elif isinstance(content, upload_file_types):
803 elif isinstance(content, upload_file_types):
804 content = content.read()
804 content = content.read()
805 else:
805 else:
806 raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
806 raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
807 processed_nodes.append((f_path, content))
807 processed_nodes.append((f_path, content))
808
808
809 # add multiple nodes
809 # add multiple nodes
810 for path, content in processed_nodes:
810 for path, content in processed_nodes:
811 imc.add(FileNode(path, content=content))
811 imc.add(FileNode(path, content=content))
812
812
813 # TODO: handle pre push scenario
813 # TODO: handle pre push scenario
814 tip = imc.commit(message=message,
814 tip = imc.commit(message=message,
815 author=author,
815 author=author,
816 parents=parents,
816 parents=parents,
817 branch=parent_commit.branch)
817 branch=parent_commit.branch)
818
818
819 self.mark_for_invalidation(repo.repo_name)
819 self.mark_for_invalidation(repo.repo_name)
820 if trigger_push_hook:
820 if trigger_push_hook:
821 hooks_utils.trigger_post_push_hook(
821 hooks_utils.trigger_post_push_hook(
822 username=user.username, action='push_local',
822 username=user.username, action='push_local',
823 repo_name=repo.repo_name, repo_type=scm_instance.alias,
823 repo_name=repo.repo_name, repo_type=scm_instance.alias,
824 hook_type='post_push',
824 hook_type='post_push',
825 commit_ids=[tip.raw_id])
825 commit_ids=[tip.raw_id])
826 return tip
826 return tip
827
827
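The nodes mapping for create_nodes uses bytes filenames and bytes or file-like content; a hedged sketch (db_repo and admin are hypothetical ORM objects):

    nodes = {
        b'docs/index.rst': {'content': b'Welcome\n'},
        b'setup.py': {'content': io.BytesIO(b'print("hello")\n')},  # BytesIO/BufferedRandom also accepted
    }
    tip = ScmModel().create_nodes(
        user=admin, repo=db_repo, message='initial import',
        nodes=nodes, parent_commit=None,        # None -> EmptyCommit, i.e. the first commit
        trigger_push_hook=False)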
828 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
828 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
829 author=None, trigger_push_hook=True):
829 author=None, trigger_push_hook=True):
830 user = self._get_user(user)
830 user = self._get_user(user)
831 scm_instance = repo.scm_instance(cache=False)
831 scm_instance = repo.scm_instance(cache=False)
832
832
833 message = safe_str(message)
833 message = safe_str(message)
834 commiter = user.full_contact
834 commiter = user.full_contact
835 author = safe_str(author) if author else commiter
835 author = safe_str(author) if author else commiter
836
836
837 imc = scm_instance.in_memory_commit
837 imc = scm_instance.in_memory_commit
838
838
839 if not parent_commit:
839 if not parent_commit:
840 parent_commit = EmptyCommit(alias=scm_instance.alias)
840 parent_commit = EmptyCommit(alias=scm_instance.alias)
841
841
842 if isinstance(parent_commit, EmptyCommit):
842 if isinstance(parent_commit, EmptyCommit):
843 # EmptyCommit means we're editing an empty repository
843 # EmptyCommit means we're editing an empty repository
844 parents = None
844 parents = None
845 else:
845 else:
846 parents = [parent_commit]
846 parents = [parent_commit]
847
847
848 # add multiple nodes
848 # add multiple nodes
849 for _filename, data in nodes.items():
849 for _filename, data in nodes.items():
850 # the new filename may be a rename of the old one; also sanitize
850 # the new filename may be a rename of the old one; also sanitize
851 # the path against any hack around relative paths like ../../ etc.
851 # the path against any hack around relative paths like ../../ etc.
852 filename = self._sanitize_path(data['filename'])
852 filename = self._sanitize_path(data['filename'])
853 old_filename = self._sanitize_path(_filename)
853 old_filename = self._sanitize_path(_filename)
854 content = data['content']
854 content = data['content']
855 file_mode = data.get('mode')
855 file_mode = data.get('mode')
856 filenode = FileNode(old_filename, content=content, mode=file_mode)
856 filenode = FileNode(old_filename, content=content, mode=file_mode)
857 op = data['op']
857 op = data['op']
858 if op == 'add':
858 if op == 'add':
859 imc.add(filenode)
859 imc.add(filenode)
860 elif op == 'del':
860 elif op == 'del':
861 imc.remove(filenode)
861 imc.remove(filenode)
862 elif op == 'mod':
862 elif op == 'mod':
863 if filename != old_filename:
863 if filename != old_filename:
864 # TODO: handle renames more efficiently, needs vcs lib changes
864 # TODO: handle renames more efficiently, needs vcs lib changes
865 imc.remove(filenode)
865 imc.remove(filenode)
866 imc.add(FileNode(filename, content=content, mode=file_mode))
866 imc.add(FileNode(filename, content=content, mode=file_mode))
867 else:
867 else:
868 imc.change(filenode)
868 imc.change(filenode)
869
869
870 try:
870 try:
871 # TODO: handle pre push scenario commit changes
871 # TODO: handle pre push scenario commit changes
872 tip = imc.commit(message=message,
872 tip = imc.commit(message=message,
873 author=author,
873 author=author,
874 parents=parents,
874 parents=parents,
875 branch=parent_commit.branch)
875 branch=parent_commit.branch)
876 except NodeNotChangedError:
876 except NodeNotChangedError:
877 raise
877 raise
878 except Exception as e:
878 except Exception as e:
879 log.exception("Unexpected exception during call to imc.commit")
879 log.exception("Unexpected exception during call to imc.commit")
880 raise IMCCommitError(str(e))
880 raise IMCCommitError(str(e))
881 finally:
881 finally:
882 # always clear caches, if commit fails we want fresh object also
882 # always clear caches, if commit fails we want fresh object also
883 self.mark_for_invalidation(repo.repo_name)
883 self.mark_for_invalidation(repo.repo_name)
884
884
885 if trigger_push_hook:
885 if trigger_push_hook:
886 hooks_utils.trigger_post_push_hook(
886 hooks_utils.trigger_post_push_hook(
887 username=user.username, action='push_local', hook_type='post_push',
887 username=user.username, action='push_local', hook_type='post_push',
888 repo_name=repo.repo_name, repo_type=scm_instance.alias,
888 repo_name=repo.repo_name, repo_type=scm_instance.alias,
889 commit_ids=[tip.raw_id])
889 commit_ids=[tip.raw_id])
890
890
891 return tip
891 return tip
892
892
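update_nodes expects a different mapping, keyed by the old filename, where each value carries 'filename', 'content', 'op' and optionally 'mode'; a hedged sketch (admin, db_repo and parent are hypothetical):

    # op is one of 'add', 'mod', 'del'; a changed 'filename' under op='mod'
    # is handled as remove + add (a rename)
    nodes = {
        b'docs/old.rst': {'filename': b'docs/new.rst',
                          'content': b'updated text\n',
                          'op': 'mod'},
        b'obsolete.txt': {'filename': b'obsolete.txt',
                          'content': b'',
                          'op': 'del'},
    }
    tip = ScmModel().update_nodes(
        user=admin, repo=db_repo, message='reorganize docs',
        nodes=nodes, parent_commit=parent, trigger_push_hook=False)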
893 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
893 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
894 author=None, trigger_push_hook=True):
894 author=None, trigger_push_hook=True):
895 """
895 """
896 Deletes the given nodes from `repo`
896 Deletes the given nodes from `repo`
897
897
898 :param user: RhodeCode User object or user_id, the committer
898 :param user: RhodeCode User object or user_id, the committer
899 :param repo: RhodeCode Repository object
899 :param repo: RhodeCode Repository object
900 :param message: commit message
900 :param message: commit message
901 :param nodes: mapping {filename:{'content':content},...}
901 :param nodes: mapping {filename:{'content':content},...}
902 :param parent_commit: parent commit; if empty, this is the initial
902 :param parent_commit: parent commit; if empty, this is the initial
903 commit
903 commit
904 :param author: author of the commit, can be different than the committer
904 :param author: author of the commit, can be different than the committer
905 (git only)
905 (git only)
906 :param trigger_push_hook: trigger push hooks
906 :param trigger_push_hook: trigger push hooks
907
907
908 :returns: new commit after deletion
908 :returns: new commit after deletion
909 """
909 """
910
910
911 user = self._get_user(user)
911 user = self._get_user(user)
912 scm_instance = repo.scm_instance(cache=False)
912 scm_instance = repo.scm_instance(cache=False)
913
913
914 processed_nodes = []
914 processed_nodes = []
915 for f_path in nodes:
915 for f_path in nodes:
916 f_path = self._sanitize_path(f_path)
916 f_path = self._sanitize_path(f_path)
917 # content can be empty, but for compatibility this allows the same dict
917 # content can be empty, but for compatibility this allows the same dict
918 # structure as add_nodes
918 # structure as add_nodes
919 content = nodes[f_path].get('content')
919 content = nodes[f_path].get('content')
920 processed_nodes.append((safe_bytes(f_path), content))
920 processed_nodes.append((safe_bytes(f_path), content))
921
921
922 message = safe_str(message)
922 message = safe_str(message)
923 commiter = user.full_contact
923 commiter = user.full_contact
924 author = safe_str(author) if author else commiter
924 author = safe_str(author) if author else commiter
925
925
926 imc = scm_instance.in_memory_commit
926 imc = scm_instance.in_memory_commit
927
927
928 if not parent_commit:
928 if not parent_commit:
929 parent_commit = EmptyCommit(alias=scm_instance.alias)
929 parent_commit = EmptyCommit(alias=scm_instance.alias)
930
930
931 if isinstance(parent_commit, EmptyCommit):
931 if isinstance(parent_commit, EmptyCommit):
932 # EmptyCommit means we're editing an empty repository
932 # EmptyCommit means we're editing an empty repository
933 parents = None
933 parents = None
934 else:
934 else:
935 parents = [parent_commit]
935 parents = [parent_commit]
936 # add multiple nodes
936 # add multiple nodes
937 for path, content in processed_nodes:
937 for path, content in processed_nodes:
938 imc.remove(FileNode(path, content=content))
938 imc.remove(FileNode(path, content=content))
939
939
940 # TODO: handle pre push scenario
940 # TODO: handle pre push scenario
941 tip = imc.commit(message=message,
941 tip = imc.commit(message=message,
942 author=author,
942 author=author,
943 parents=parents,
943 parents=parents,
944 branch=parent_commit.branch)
944 branch=parent_commit.branch)
945
945
946 self.mark_for_invalidation(repo.repo_name)
946 self.mark_for_invalidation(repo.repo_name)
947 if trigger_push_hook:
947 if trigger_push_hook:
948 hooks_utils.trigger_post_push_hook(
948 hooks_utils.trigger_post_push_hook(
949 username=user.username, action='push_local', hook_type='post_push',
949 username=user.username, action='push_local', hook_type='post_push',
950 repo_name=repo.repo_name, repo_type=scm_instance.alias,
950 repo_name=repo.repo_name, repo_type=scm_instance.alias,
951 commit_ids=[tip.raw_id])
951 commit_ids=[tip.raw_id])
952 return tip
952 return tip
953
953
954 def strip(self, repo, commit_id, branch):
954 def strip(self, repo, commit_id, branch):
955 scm_instance = repo.scm_instance(cache=False)
955 scm_instance = repo.scm_instance(cache=False)
956 scm_instance.config.clear_section('hooks')
956 scm_instance.config.clear_section('hooks')
957 scm_instance.strip(commit_id, branch)
957 scm_instance.strip(commit_id, branch)
958 self.mark_for_invalidation(repo.repo_name)
958 self.mark_for_invalidation(repo.repo_name)
959
959
960 def get_unread_journal(self):
960 def get_unread_journal(self):
961 return self.sa.query(UserLog).count()
961 return self.sa.query(UserLog).count()
962
962
963 @classmethod
963 @classmethod
964 def backend_landing_ref(cls, repo_type):
964 def backend_landing_ref(cls, repo_type):
965 """
965 """
966 Return a default landing ref based on a repository type.
966 Return a default landing ref based on a repository type.
967 """
967 """
968
968
969 landing_ref = {
969 landing_ref = {
970 'hg': ('branch:default', 'default'),
970 'hg': ('branch:default', 'default'),
971 'git': ('branch:master', 'master'),
971 'git': ('branch:master', 'master'),
972 'svn': ('rev:tip', 'latest tip'),
972 'svn': ('rev:tip', 'latest tip'),
973 'default': ('rev:tip', 'latest tip'),
973 'default': ('rev:tip', 'latest tip'),
974 }
974 }
975
975
976 return landing_ref.get(repo_type) or landing_ref['default']
976 return landing_ref.get(repo_type) or landing_ref['default']
977
977
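The landing ref table above can be exercised directly, since backend_landing_ref is a classmethod:

    assert ScmModel.backend_landing_ref('hg') == ('branch:default', 'default')
    assert ScmModel.backend_landing_ref('git') == ('branch:master', 'master')
    assert ScmModel.backend_landing_ref('unknown') == ('rev:tip', 'latest tip')  # falls back to 'default'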
978 def get_repo_landing_revs(self, translator, repo=None):
978 def get_repo_landing_revs(self, translator, repo=None):
979 """
979 """
980 Generates select options with tags, branches and bookmarks (the latter
980 Generates select options with tags, branches and bookmarks (the latter
981 for hg only), grouped by type
981 for hg only), grouped by type
982
982
983 :param repo:
983 :param repo:
984 """
984 """
985 from rhodecode.lib.vcs.backends.git import GitRepository
985 from rhodecode.lib.vcs.backends.git import GitRepository
986
986
987 _ = translator
987 _ = translator
988 repo = self._get_repo(repo)
988 repo = self._get_repo(repo)
989
989
990 if repo:
990 if repo:
991 repo_type = repo.repo_type
991 repo_type = repo.repo_type
992 else:
992 else:
993 repo_type = 'default'
993 repo_type = 'default'
994
994
995 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
995 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
996
996
997 default_ref_options = [
997 default_ref_options = [
998 [default_landing_ref, landing_ref_lbl]
998 [default_landing_ref, landing_ref_lbl]
999 ]
999 ]
1000 default_choices = [
1000 default_choices = [
1001 default_landing_ref
1001 default_landing_ref
1002 ]
1002 ]
1003
1003
1004 if not repo:
1004 if not repo:
1005 # presented at NEW repo creation
1005 # presented at NEW repo creation
1006 return default_choices, default_ref_options
1006 return default_choices, default_ref_options
1007
1007
1008 repo = repo.scm_instance()
1008 repo = repo.scm_instance()
1009
1009
1010 ref_options = [(default_landing_ref, landing_ref_lbl)]
1010 ref_options = [(default_landing_ref, landing_ref_lbl)]
1011 choices = [default_landing_ref]
1011 choices = [default_landing_ref]
1012
1012
1013 # branches
1013 # branches
1014 branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
1014 branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
1015 if not branch_group:
1015 if not branch_group:
1016 # new repo, or a repo without any branches yet
1016 # new repo, or a repo without any branches yet
1017 branch_group = default_ref_options
1017 branch_group = default_ref_options
1018
1018
1019 branches_group = (branch_group, _("Branches"))
1019 branches_group = (branch_group, _("Branches"))
1020 ref_options.append(branches_group)
1020 ref_options.append(branches_group)
1021 choices.extend([x[0] for x in branches_group[0]])
1021 choices.extend([x[0] for x in branches_group[0]])
1022
1022
1023 # bookmarks for HG
1023 # bookmarks for HG
1024 if repo.alias == 'hg':
1024 if repo.alias == 'hg':
1025 bookmarks_group = (
1025 bookmarks_group = (
1026 [(f'book:{safe_str(b)}', safe_str(b))
1026 [(f'book:{safe_str(b)}', safe_str(b))
1027 for b in repo.bookmarks],
1027 for b in repo.bookmarks],
1028 _("Bookmarks"))
1028 _("Bookmarks"))
1029 ref_options.append(bookmarks_group)
1029 ref_options.append(bookmarks_group)
1030 choices.extend([x[0] for x in bookmarks_group[0]])
1030 choices.extend([x[0] for x in bookmarks_group[0]])
1031
1031
1032 # tags
1032 # tags
1033 tags_group = (
1033 tags_group = (
1034 [(f'tag:{safe_str(t)}', safe_str(t))
1034 [(f'tag:{safe_str(t)}', safe_str(t))
1035 for t in repo.tags],
1035 for t in repo.tags],
1036 _("Tags"))
1036 _("Tags"))
1037 ref_options.append(tags_group)
1037 ref_options.append(tags_group)
1038 choices.extend([x[0] for x in tags_group[0]])
1038 choices.extend([x[0] for x in tags_group[0]])
1039
1039
1040 return choices, ref_options
1040 return choices, ref_options
1041
1041
1042 def get_server_info(self, environ=None):
1042 def get_server_info(self, environ=None):
1043 server_info = get_system_info(environ)
1043 server_info = get_system_info(environ)
1044 return server_info
1044 return server_info
@@ -1,1735 +1,1750 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import collections
20 import collections
21 import datetime
21 import datetime
22 import os
22 import os
23 import re
23 import re
24 import pprint
24 import pprint
25 import shutil
25 import shutil
26 import socket
26 import socket
27 import subprocess
27 import subprocess
28 import time
28 import time
29 import uuid
29 import uuid
30 import dateutil.tz
30 import dateutil.tz
31 import logging
31 import logging
32 import functools
32 import functools
33
33
34 import mock
34 import mock
35 import pyramid.testing
35 import pyramid.testing
36 import pytest
36 import pytest
37 import colander
37 import colander
38 import requests
38 import requests
39 import pyramid.paster
39 import pyramid.paster
40
40
41 import rhodecode
41 import rhodecode
42 import rhodecode.lib
42 import rhodecode.lib
43 from rhodecode.model.changeset_status import ChangesetStatusModel
43 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.comment import CommentsModel
44 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.db import (
45 from rhodecode.model.db import (
46 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
46 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
47 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
47 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 from rhodecode.model.meta import Session
48 from rhodecode.model.meta import Session
49 from rhodecode.model.pull_request import PullRequestModel
49 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.repo import RepoModel
50 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo_group import RepoGroupModel
51 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.user import UserModel
52 from rhodecode.model.user import UserModel
53 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.user_group import UserGroupModel
54 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.integration import IntegrationModel
55 from rhodecode.model.integration import IntegrationModel
56 from rhodecode.integrations import integration_type_registry
56 from rhodecode.integrations import integration_type_registry
57 from rhodecode.integrations.types.base import IntegrationTypeBase
57 from rhodecode.integrations.types.base import IntegrationTypeBase
58 from rhodecode.lib.utils import repo2db_mapper
58 from rhodecode.lib.utils import repo2db_mapper
59 from rhodecode.lib.str_utils import safe_bytes
59 from rhodecode.lib.str_utils import safe_bytes
60 from rhodecode.lib.hash_utils import sha1_safe
60 from rhodecode.lib.hash_utils import sha1_safe
61 from rhodecode.lib.vcs.backends import get_backend
61 from rhodecode.lib.vcs.backends import get_backend
62 from rhodecode.lib.vcs.nodes import FileNode
62 from rhodecode.lib.vcs.nodes import FileNode
63 from rhodecode.tests import (
63 from rhodecode.tests import (
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 TEST_USER_REGULAR_PASS)
66 TEST_USER_REGULAR_PASS)
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 from rhodecode.tests.fixture import Fixture
68 from rhodecode.tests.fixture import Fixture
69 from rhodecode.config import utils as config_utils
69 from rhodecode.config import utils as config_utils
70
70
71 log = logging.getLogger(__name__)
71 log = logging.getLogger(__name__)
72
72
73
73
74 def cmp(a, b):
74 def cmp(a, b):
75 # backport cmp from python2 so we can still use it in the custom code in this module
75 # backport cmp from python2 so we can still use it in the custom code in this module
76 return (a > b) - (a < b)
76 return (a > b) - (a < b)
77
77
78
78
79 @pytest.fixture(scope='session', autouse=True)
79 @pytest.fixture(scope='session', autouse=True)
80 def activate_example_rcextensions(request):
80 def activate_example_rcextensions(request):
81 """
81 """
82 Patch in an example rcextensions module which verifies passed in kwargs.
82 Patch in an example rcextensions module which verifies passed in kwargs.
83 """
83 """
84 from rhodecode.config import rcextensions
84 from rhodecode.config import rcextensions
85
85
86 old_extensions = rhodecode.EXTENSIONS
86 old_extensions = rhodecode.EXTENSIONS
87 rhodecode.EXTENSIONS = rcextensions
87 rhodecode.EXTENSIONS = rcextensions
88 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
88 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
89
89
90 @request.addfinalizer
90 @request.addfinalizer
91 def cleanup():
91 def cleanup():
92 rhodecode.EXTENSIONS = old_extensions
92 rhodecode.EXTENSIONS = old_extensions
93
93
94
94
95 @pytest.fixture()
95 @pytest.fixture()
96 def capture_rcextensions():
96 def capture_rcextensions():
97 """
97 """
98 Returns the recorded calls to entry points in rcextensions.
98 Returns the recorded calls to entry points in rcextensions.
99 """
99 """
100 calls = rhodecode.EXTENSIONS.calls
100 calls = rhodecode.EXTENSIONS.calls
101 calls.clear()
101 calls.clear()
102 # Note: at this point the dict is still empty; it will be filled during the
102 # Note: at this point the dict is still empty; it will be filled during the
103 # test run, and because it is returned by reference that is enough for
103 # test run, and because it is returned by reference that is enough for
104 # callers to see the recorded calls.
104 # callers to see the recorded calls.
105 return calls
105 return calls
106
106
107
107
108 @pytest.fixture(scope='session')
108 @pytest.fixture(scope='session')
109 def http_environ_session():
109 def http_environ_session():
110 """
110 """
111 Allows using "http_environ" in session scope.
111 Allows using "http_environ" in session scope.
112 """
112 """
113 return plain_http_environ()
113 return plain_http_environ()
114
114
115
115
116 def plain_http_host_stub():
116 def plain_http_host_stub():
117 """
117 """
118 Value of HTTP_HOST in the test run.
118 Value of HTTP_HOST in the test run.
119 """
119 """
120 return 'example.com:80'
120 return 'example.com:80'
121
121
122
122
123 @pytest.fixture()
123 @pytest.fixture()
124 def http_host_stub():
124 def http_host_stub():
125 """
125 """
126 Value of HTTP_HOST in the test run.
126 Value of HTTP_HOST in the test run.
127 """
127 """
128 return plain_http_host_stub()
128 return plain_http_host_stub()
129
129
130
130
131 def plain_http_host_only_stub():
131 def plain_http_host_only_stub():
132 """
132 """
133 Value of HTTP_HOST in the test run.
133 Value of HTTP_HOST in the test run.
134 """
134 """
135 return plain_http_host_stub().split(':')[0]
135 return plain_http_host_stub().split(':')[0]
136
136
137
137
138 @pytest.fixture()
138 @pytest.fixture()
139 def http_host_only_stub():
139 def http_host_only_stub():
140 """
140 """
141 Value of HTTP_HOST in the test run.
141 Value of HTTP_HOST in the test run.
142 """
142 """
143 return plain_http_host_only_stub()
143 return plain_http_host_only_stub()
144
144
145
145
146 def plain_http_environ():
146 def plain_http_environ():
147 """
147 """
148 HTTP extra environ keys.
148 HTTP extra environ keys.
149
149
150 Used by the test application and for setting up the pylons
150 Used by the test application and for setting up the pylons
151 environment. In the case of the fixture "app" it should be possible
151 environment. In the case of the fixture "app" it should be possible
152 to override this for a specific test case.
152 to override this for a specific test case.
153 """
153 """
154 return {
154 return {
155 'SERVER_NAME': plain_http_host_only_stub(),
155 'SERVER_NAME': plain_http_host_only_stub(),
156 'SERVER_PORT': plain_http_host_stub().split(':')[1],
156 'SERVER_PORT': plain_http_host_stub().split(':')[1],
157 'HTTP_HOST': plain_http_host_stub(),
157 'HTTP_HOST': plain_http_host_stub(),
158 'HTTP_USER_AGENT': 'rc-test-agent',
158 'HTTP_USER_AGENT': 'rc-test-agent',
159 'REQUEST_METHOD': 'GET'
159 'REQUEST_METHOD': 'GET'
160 }
160 }
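
# Editor's sketch (not part of this changeset): a test module's conftest could
# shadow the `http_environ` fixture above to simulate a different host; the
# host and port values below are made-up assumptions for illustration only.
import pytest

@pytest.fixture()
def http_environ():
    # mirror plain_http_environ() above, but pretend the request arrived
    # at a different host/port
    return {
        'SERVER_NAME': 'custom.example.org',
        'SERVER_PORT': '8080',
        'HTTP_HOST': 'custom.example.org:8080',
        'HTTP_USER_AGENT': 'rc-test-agent',
        'REQUEST_METHOD': 'GET',
    }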
161
161
162
162
163 @pytest.fixture()
163 @pytest.fixture()
164 def http_environ():
164 def http_environ():
165 """
165 """
166 HTTP extra environ keys.
166 HTTP extra environ keys.
167
167
168 Used by the test application and for setting up the pylons
168 Used by the test application and for setting up the pylons
169 environment. In the case of the fixture "app" it should be possible
169 environment. In the case of the fixture "app" it should be possible
170 to override this for a specific test case.
170 to override this for a specific test case.
171 """
171 """
172 return plain_http_environ()
172 return plain_http_environ()
173
173
174
174
175 @pytest.fixture(scope='session')
175 @pytest.fixture(scope='session')
176 def baseapp(ini_config, vcsserver, http_environ_session):
176 def baseapp(ini_config, vcsserver, http_environ_session):
177 from rhodecode.lib.pyramid_utils import get_app_config
177 from rhodecode.lib.pyramid_utils import get_app_config
178 from rhodecode.config.middleware import make_pyramid_app
178 from rhodecode.config.middleware import make_pyramid_app
179
179
180 log.info("Using the RhodeCode configuration: {}".format(ini_config))
180 log.info("Using the RhodeCode configuration: {}".format(ini_config))
181 pyramid.paster.setup_logging(ini_config)
181 pyramid.paster.setup_logging(ini_config)
182
182
183 settings = get_app_config(ini_config)
183 settings = get_app_config(ini_config)
184 app = make_pyramid_app({'__file__': ini_config}, **settings)
184 app = make_pyramid_app({'__file__': ini_config}, **settings)
185
185
186 return app
186 return app
187
187
188
188
189 @pytest.fixture(scope='function')
189 @pytest.fixture(scope='function')
190 def app(request, config_stub, baseapp, http_environ):
190 def app(request, config_stub, baseapp, http_environ):
191 app = CustomTestApp(
191 app = CustomTestApp(
192 baseapp,
192 baseapp,
193 extra_environ=http_environ)
193 extra_environ=http_environ)
194 if request.cls:
194 if request.cls:
195 request.cls.app = app
195 request.cls.app = app
196 return app
196 return app
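
# Editor's sketch (not part of this changeset): a minimal functional test built
# on the `app` fixture above. Anonymous access may redirect to the login page,
# so both 200 and 302 are accepted here.
def test_home_page_responds(app):
    response = app.get('/')
    assert response.status_int in (200, 302)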
197
197
198
198
199 @pytest.fixture(scope='session')
199 @pytest.fixture(scope='session')
200 def app_settings(baseapp, ini_config):
200 def app_settings(baseapp, ini_config):
201 """
201 """
202 Settings dictionary used to create the app.
202 Settings dictionary used to create the app.
203
203
204 Parses the ini file and passes the result through the sanitize and apply
204 Parses the ini file and passes the result through the sanitize and apply
205 defaults mechanism in `rhodecode.config.middleware`.
205 defaults mechanism in `rhodecode.config.middleware`.
206 """
206 """
207 return baseapp.config.get_settings()
207 return baseapp.config.get_settings()
208
208
209
209
210 @pytest.fixture(scope='session')
210 @pytest.fixture(scope='session')
211 def db_connection(ini_settings):
211 def db_connection(ini_settings):
212 # Initialize the database connection.
212 # Initialize the database connection.
213 config_utils.initialize_database(ini_settings)
213 config_utils.initialize_database(ini_settings)
214
214
215
215
216 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
216 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
217
217
218
218
219 def _autologin_user(app, *args):
219 def _autologin_user(app, *args):
220 session = login_user_session(app, *args)
220 session = login_user_session(app, *args)
221 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
221 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
222 return LoginData(csrf_token, session['rhodecode_user'])
222 return LoginData(csrf_token, session['rhodecode_user'])
223
223
224
224
225 @pytest.fixture()
225 @pytest.fixture()
226 def autologin_user(app):
226 def autologin_user(app):
227 """
227 """
228 Utility fixture which makes sure that the admin user is logged in
228 Utility fixture which makes sure that the admin user is logged in
229 """
229 """
230 return _autologin_user(app)
230 return _autologin_user(app)
231
231
232
232
233 @pytest.fixture()
233 @pytest.fixture()
234 def autologin_regular_user(app):
234 def autologin_regular_user(app):
235 """
235 """
236 Utility fixture which makes sure that the regular user is logged in
236 Utility fixture which makes sure that the regular user is logged in
237 """
237 """
238 return _autologin_user(
238 return _autologin_user(
239 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
239 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
240
240
241
241
242 @pytest.fixture(scope='function')
242 @pytest.fixture(scope='function')
243 def csrf_token(request, autologin_user):
243 def csrf_token(request, autologin_user):
244 return autologin_user.csrf_token
244 return autologin_user.csrf_token
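
# Editor's sketch (not part of this changeset): form POSTs in functional tests
# are expected to carry the CSRF token from the fixture above. The URL and any
# field other than 'csrf_token' are placeholder assumptions, not real routes.
def test_post_with_csrf_token(app, csrf_token):
    params = {'csrf_token': csrf_token, 'example_field': 'example value'}
    # status='*' accepts any response code, since the URL is hypothetical
    app.post('/some/hypothetical/url', params=params, status='*')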
245
245
246
246
247 @pytest.fixture(scope='function')
247 @pytest.fixture(scope='function')
248 def xhr_header(request):
248 def xhr_header(request):
249 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
249 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
250
250
251
251
252 @pytest.fixture()
252 @pytest.fixture()
253 def real_crypto_backend(monkeypatch):
253 def real_crypto_backend(monkeypatch):
254 """
254 """
255 Switch the production crypto backend on for this test.
255 Switch the production crypto backend on for this test.
256
256
257 During the test run the crypto backend is replaced with a faster
257 During the test run the crypto backend is replaced with a faster
258 implementation based on the MD5 algorithm.
258 implementation based on the MD5 algorithm.
259 """
259 """
260 monkeypatch.setattr(rhodecode, 'is_test', False)
260 monkeypatch.setattr(rhodecode, 'is_test', False)
261
261
262
262
263 @pytest.fixture(scope='class')
263 @pytest.fixture(scope='class')
264 def index_location(request, baseapp):
264 def index_location(request, baseapp):
265 index_location = baseapp.config.get_settings()['search.location']
265 index_location = baseapp.config.get_settings()['search.location']
266 if request.cls:
266 if request.cls:
267 request.cls.index_location = index_location
267 request.cls.index_location = index_location
268 return index_location
268 return index_location
269
269
270
270
271 @pytest.fixture(scope='session', autouse=True)
271 @pytest.fixture(scope='session', autouse=True)
272 def tests_tmp_path(request):
272 def tests_tmp_path(request):
273 """
273 """
274 Create temporary directory to be used during the test session.
274 Create temporary directory to be used during the test session.
275 """
275 """
276 if not os.path.exists(TESTS_TMP_PATH):
276 if not os.path.exists(TESTS_TMP_PATH):
277 os.makedirs(TESTS_TMP_PATH)
277 os.makedirs(TESTS_TMP_PATH)
278
278
279 if not request.config.getoption('--keep-tmp-path'):
279 if not request.config.getoption('--keep-tmp-path'):
280 @request.addfinalizer
280 @request.addfinalizer
281 def remove_tmp_path():
281 def remove_tmp_path():
282 shutil.rmtree(TESTS_TMP_PATH)
282 shutil.rmtree(TESTS_TMP_PATH)
283
283
284 return TESTS_TMP_PATH
284 return TESTS_TMP_PATH
285
285
286
286
287 @pytest.fixture()
287 @pytest.fixture()
288 def test_repo_group(request):
288 def test_repo_group(request):
289 """
289 """
290 Create a temporary repository group and destroy it automatically
290 Create a temporary repository group and destroy it automatically
291 after use.
291 after use.
292 """
292 """
293 fixture = Fixture()
293 fixture = Fixture()
294 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
294 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
295 repo_group = fixture.create_repo_group(repogroupid)
295 repo_group = fixture.create_repo_group(repogroupid)
296
296
297 def _cleanup():
297 def _cleanup():
298 fixture.destroy_repo_group(repogroupid)
298 fixture.destroy_repo_group(repogroupid)
299
299
300 request.addfinalizer(_cleanup)
300 request.addfinalizer(_cleanup)
301 return repo_group
301 return repo_group
302
302
303
303
304 @pytest.fixture()
304 @pytest.fixture()
305 def test_user_group(request):
305 def test_user_group(request):
306 """
306 """
307 Create a temporary user group and destroy it automatically
307 Create a temporary user group and destroy it automatically
308 after use.
308 after use.
309 """
309 """
310 fixture = Fixture()
310 fixture = Fixture()
311 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
311 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
312 user_group = fixture.create_user_group(usergroupid)
312 user_group = fixture.create_user_group(usergroupid)
313
313
314 def _cleanup():
314 def _cleanup():
315 fixture.destroy_user_group(user_group)
315 fixture.destroy_user_group(user_group)
316
316
317 request.addfinalizer(_cleanup)
317 request.addfinalizer(_cleanup)
318 return user_group
318 return user_group
319
319
320
320
321 @pytest.fixture(scope='session')
321 @pytest.fixture(scope='session')
322 def test_repo(request):
322 def test_repo(request):
323 container = TestRepoContainer()
323 container = TestRepoContainer()
324 request.addfinalizer(container._cleanup)
324 request.addfinalizer(container._cleanup)
325 return container
325 return container
326
326
327
327
328 class TestRepoContainer(object):
328 class TestRepoContainer(object):
329 """
329 """
330 Container for test repositories which are used read-only.
330 Container for test repositories which are used read-only.
331
331
332 Repositories will be created on demand and re-used during the lifetime
332 Repositories will be created on demand and re-used during the lifetime
333 of this object.
333 of this object.
334
334
335 Usage to get the svn test repository "minimal"::
335 Usage to get the svn test repository "minimal"::
336
336
337 test_repo = TestRepoContainer()
337 test_repo = TestRepoContainer()
338 repo = test_repo('minimal', 'svn')
338 repo = test_repo('minimal', 'svn')
339
339
340 """
340 """
341
341
342 dump_extractors = {
342 dump_extractors = {
343 'git': utils.extract_git_repo_from_dump,
343 'git': utils.extract_git_repo_from_dump,
344 'hg': utils.extract_hg_repo_from_dump,
344 'hg': utils.extract_hg_repo_from_dump,
345 'svn': utils.extract_svn_repo_from_dump,
345 'svn': utils.extract_svn_repo_from_dump,
346 }
346 }
347
347
348 def __init__(self):
348 def __init__(self):
349 self._cleanup_repos = []
349 self._cleanup_repos = []
350 self._fixture = Fixture()
350 self._fixture = Fixture()
351 self._repos = {}
351 self._repos = {}
352
352
353 def __call__(self, dump_name, backend_alias, config=None):
353 def __call__(self, dump_name, backend_alias, config=None):
354 key = (dump_name, backend_alias)
354 key = (dump_name, backend_alias)
355 if key not in self._repos:
355 if key not in self._repos:
356 repo = self._create_repo(dump_name, backend_alias, config)
356 repo = self._create_repo(dump_name, backend_alias, config)
357 self._repos[key] = repo.repo_id
357 self._repos[key] = repo.repo_id
358 return Repository.get(self._repos[key])
358 return Repository.get(self._repos[key])
359
359
360 def _create_repo(self, dump_name, backend_alias, config):
360 def _create_repo(self, dump_name, backend_alias, config):
361 repo_name = '%s-%s' % (backend_alias, dump_name)
361 repo_name = '%s-%s' % (backend_alias, dump_name)
362 backend = get_backend(backend_alias)
362 backend = get_backend(backend_alias)
363 dump_extractor = self.dump_extractors[backend_alias]
363 dump_extractor = self.dump_extractors[backend_alias]
364 repo_path = dump_extractor(dump_name, repo_name)
364 repo_path = dump_extractor(dump_name, repo_name)
365
365
366 vcs_repo = backend(repo_path, config=config)
366 vcs_repo = backend(repo_path, config=config)
367 repo2db_mapper({repo_name: vcs_repo})
367 repo2db_mapper({repo_name: vcs_repo})
368
368
369 repo = RepoModel().get_by_repo_name(repo_name)
369 repo = RepoModel().get_by_repo_name(repo_name)
370 self._cleanup_repos.append(repo_name)
370 self._cleanup_repos.append(repo_name)
371 return repo
371 return repo
372
372
373 def _cleanup(self):
373 def _cleanup(self):
374 for repo_name in reversed(self._cleanup_repos):
374 for repo_name in reversed(self._cleanup_repos):
375 self._fixture.destroy_repo(repo_name)
375 self._fixture.destroy_repo(repo_name)
376
376
377
377
378 def backend_base(request, backend_alias, baseapp, test_repo):
378 def backend_base(request, backend_alias, baseapp, test_repo):
379 if backend_alias not in request.config.getoption('--backends'):
379 if backend_alias not in request.config.getoption('--backends'):
380 pytest.skip("Backend %s not selected." % (backend_alias, ))
380 pytest.skip("Backend %s not selected." % (backend_alias, ))
381
381
382 utils.check_xfail_backends(request.node, backend_alias)
382 utils.check_xfail_backends(request.node, backend_alias)
383 utils.check_skip_backends(request.node, backend_alias)
383 utils.check_skip_backends(request.node, backend_alias)
384
384
385 repo_name = 'vcs_test_%s' % (backend_alias, )
385 repo_name = 'vcs_test_%s' % (backend_alias, )
386 backend = Backend(
386 backend = Backend(
387 alias=backend_alias,
387 alias=backend_alias,
388 repo_name=repo_name,
388 repo_name=repo_name,
389 test_name=request.node.name,
389 test_name=request.node.name,
390 test_repo_container=test_repo)
390 test_repo_container=test_repo)
391 request.addfinalizer(backend.cleanup)
391 request.addfinalizer(backend.cleanup)
392 return backend
392 return backend
393
393
394
394
395 @pytest.fixture()
395 @pytest.fixture()
396 def backend(request, backend_alias, baseapp, test_repo):
396 def backend(request, backend_alias, baseapp, test_repo):
397 """
397 """
398 Parametrized fixture which represents a single backend implementation.
398 Parametrized fixture which represents a single backend implementation.
399
399
400 It respects the option `--backends` to focus the test run on specific
400 It respects the option `--backends` to focus the test run on specific
401 backend implementations.
401 backend implementations.
402
402
403 It also supports `pytest.mark.xfail_backends` to mark tests as failing
403 It also supports `pytest.mark.xfail_backends` to mark tests as failing
404 for specific backends. This is intended as a utility for incremental
404 for specific backends. This is intended as a utility for incremental
405 development of a new backend implementation.
405 development of a new backend implementation.
406 """
406 """
407 return backend_base(request, backend_alias, baseapp, test_repo)
407 return backend_base(request, backend_alias, baseapp, test_repo)
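
# Editor's sketch (not part of this changeset): a test using the parametrized
# `backend` fixture above; it runs once per backend selected via `--backends`.
def test_repo_gets_requested_commits(backend):
    repo = backend.create_repo(number_of_commits=2)
    vcs_repo = repo.scm_instance()
    assert len(vcs_repo.commit_ids) == 2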
408
408
409
409
410 @pytest.fixture()
410 @pytest.fixture()
411 def backend_git(request, baseapp, test_repo):
411 def backend_git(request, baseapp, test_repo):
412 return backend_base(request, 'git', baseapp, test_repo)
412 return backend_base(request, 'git', baseapp, test_repo)
413
413
414
414
415 @pytest.fixture()
415 @pytest.fixture()
416 def backend_hg(request, baseapp, test_repo):
416 def backend_hg(request, baseapp, test_repo):
417 return backend_base(request, 'hg', baseapp, test_repo)
417 return backend_base(request, 'hg', baseapp, test_repo)
418
418
419
419
420 @pytest.fixture()
420 @pytest.fixture()
421 def backend_svn(request, baseapp, test_repo):
421 def backend_svn(request, baseapp, test_repo):
422 return backend_base(request, 'svn', baseapp, test_repo)
422 return backend_base(request, 'svn', baseapp, test_repo)
423
423
424
424
425 @pytest.fixture()
425 @pytest.fixture()
426 def backend_random(backend_git):
426 def backend_random(backend_git):
427 """
427 """
428 Use this to express that your tests need "a backend".
428 Use this to express that your tests need "a backend".
429
429
430 A few of our tests need a backend, so that we can run the code. This
430 A few of our tests need a backend, so that we can run the code. This
431 fixture is intended to be used for such cases. It will pick one of the
431 fixture is intended to be used for such cases. It will pick one of the
432 backends and run the tests.
432 backends and run the tests.
433
433
434 The fixture `backend` would run the test multiple times for each
434 The fixture `backend` would run the test multiple times for each
435 available backend, which is a pure waste of time if the test is
435 available backend, which is a pure waste of time if the test is
436 independent of the backend type.
436 independent of the backend type.
437 """
437 """
438 # TODO: johbo: Change this to pick a random backend
438 # TODO: johbo: Change this to pick a random backend
439 return backend_git
439 return backend_git
440
440
441
441
442 @pytest.fixture()
442 @pytest.fixture()
443 def backend_stub(backend_git):
443 def backend_stub(backend_git):
444 """
444 """
445 Use this to express that your tests need a backend stub
445 Use this to express that your tests need a backend stub
446
446
447 TODO: mikhail: Implement a real stub logic instead of returning
447 TODO: mikhail: Implement a real stub logic instead of returning
448 a git backend
448 a git backend
449 """
449 """
450 return backend_git
450 return backend_git
451
451
452
452
453 @pytest.fixture()
453 @pytest.fixture()
454 def repo_stub(backend_stub):
454 def repo_stub(backend_stub):
455 """
455 """
456 Use this to express that your tests need a repository stub
456 Use this to express that your tests need a repository stub
457 """
457 """
458 return backend_stub.create_repo()
458 return backend_stub.create_repo()
459
459
460
460
461 class Backend(object):
461 class Backend(object):
462 """
462 """
463 Represents the test configuration for one supported backend
463 Represents the test configuration for one supported backend
464
464
465 Provides easy access to different test repositories based on
465 Provides easy access to different test repositories based on
466 `__getitem__`. Such repositories will only be created once per test
466 `__getitem__`. Such repositories will only be created once per test
467 session.
467 session.
468 """
468 """
469
469
470 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
470 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
471 _master_repo = None
471 _master_repo = None
472 _master_repo_path = ''
472 _master_repo_path = ''
473 _commit_ids = {}
473 _commit_ids = {}
474
474
475 def __init__(self, alias, repo_name, test_name, test_repo_container):
475 def __init__(self, alias, repo_name, test_name, test_repo_container):
476 self.alias = alias
476 self.alias = alias
477 self.repo_name = repo_name
477 self.repo_name = repo_name
478 self._cleanup_repos = []
478 self._cleanup_repos = []
479 self._test_name = test_name
479 self._test_name = test_name
480 self._test_repo_container = test_repo_container
480 self._test_repo_container = test_repo_container
481 # TODO: johbo: Used as a delegate in the interim. Not yet sure if Backend or
481 # TODO: johbo: Used as a delegate in the interim. Not yet sure if Backend or
482 # Fixture will survive in the end.
482 # Fixture will survive in the end.
483 self._fixture = Fixture()
483 self._fixture = Fixture()
484
484
485 def __getitem__(self, key):
485 def __getitem__(self, key):
486 return self._test_repo_container(key, self.alias)
486 return self._test_repo_container(key, self.alias)
487
487
488 def create_test_repo(self, key, config=None):
488 def create_test_repo(self, key, config=None):
489 return self._test_repo_container(key, self.alias, config)
489 return self._test_repo_container(key, self.alias, config)
490
490
491 @property
491 @property
492 def repo_id(self):
492 def repo_id(self):
493 # just fake some repo_id
493 # just fake some repo_id
494 return self.repo.repo_id
494 return self.repo.repo_id
495
495
496 @property
496 @property
497 def repo(self):
497 def repo(self):
498 """
498 """
499 Returns the "current" repository. This is the vcs_test repo or the
499 Returns the "current" repository. This is the vcs_test repo or the
500 last repo which has been created with `create_repo`.
500 last repo which has been created with `create_repo`.
501 """
501 """
502 from rhodecode.model.db import Repository
502 from rhodecode.model.db import Repository
503 return Repository.get_by_repo_name(self.repo_name)
503 return Repository.get_by_repo_name(self.repo_name)
504
504
505 @property
505 @property
506 def default_branch_name(self):
506 def default_branch_name(self):
507 VcsRepository = get_backend(self.alias)
507 VcsRepository = get_backend(self.alias)
508 return VcsRepository.DEFAULT_BRANCH_NAME
508 return VcsRepository.DEFAULT_BRANCH_NAME
509
509
510 @property
510 @property
511 def default_head_id(self):
511 def default_head_id(self):
512 """
512 """
513 Returns the default head id of the underlying backend.
513 Returns the default head id of the underlying backend.
514
514
515 This will be the default branch name if the backend has a
515 This will be the default branch name if the backend has a
516 default branch. Otherwise it will point to a valid head
516 default branch. Otherwise it will point to a valid head
517 which can serve as the base for creating a new commit on top of it.
517 which can serve as the base for creating a new commit on top of it.
518 """
518 """
519 vcsrepo = self.repo.scm_instance()
519 vcsrepo = self.repo.scm_instance()
520 head_id = (
520 head_id = (
521 vcsrepo.DEFAULT_BRANCH_NAME or
521 vcsrepo.DEFAULT_BRANCH_NAME or
522 vcsrepo.commit_ids[-1])
522 vcsrepo.commit_ids[-1])
523 return head_id
523 return head_id
524
524
525 @property
525 @property
526 def commit_ids(self):
526 def commit_ids(self):
527 """
527 """
528 Returns the list of commits for the last created repository
528 Returns the list of commits for the last created repository
529 """
529 """
530 return self._commit_ids
530 return self._commit_ids
531
531
532 def create_master_repo(self, commits):
532 def create_master_repo(self, commits):
533 """
533 """
534 Create a repository and remember it as a template.
534 Create a repository and remember it as a template.
535
535
536 This makes it easy to create derived repositories to construct
536 This makes it easy to create derived repositories to construct
537 more complex scenarios for diff, compare and pull requests.
537 more complex scenarios for diff, compare and pull requests.
538
538
539 Returns a commit map which maps from commit message to raw_id.
539 Returns a commit map which maps from commit message to raw_id.
540 """
540 """
541 self._master_repo = self.create_repo(commits=commits)
541 self._master_repo = self.create_repo(commits=commits)
542 self._master_repo_path = self._master_repo.repo_full_path
542 self._master_repo_path = self._master_repo.repo_full_path
543
543
544 return self._commit_ids
544 return self._commit_ids
545
545
546 def create_repo(
546 def create_repo(
547 self, commits=None, number_of_commits=0, heads=None,
547 self, commits=None, number_of_commits=0, heads=None,
548 name_suffix='', bare=False, **kwargs):
548 name_suffix='', bare=False, **kwargs):
549 """
549 """
550 Create a repository and record it for later cleanup.
550 Create a repository and record it for later cleanup.
551
551
552 :param commits: Optional. A sequence of dict instances.
552 :param commits: Optional. A sequence of dict instances.
553 Will add a commit per entry to the new repository.
553 Will add a commit per entry to the new repository.
554 :param number_of_commits: Optional. If set to a number, this number of
554 :param number_of_commits: Optional. If set to a number, this number of
555 commits will be added to the new repository.
555 commits will be added to the new repository.
556 :param heads: Optional. Can be set to a sequence of commit
556 :param heads: Optional. Can be set to a sequence of commit
557 names which shall be pulled in from the master repository.
557 names which shall be pulled in from the master repository.
558 :param name_suffix: adds special suffix to generated repo name
558 :param name_suffix: adds special suffix to generated repo name
559 :param bare: set a repo as bare (no checkout)
559 :param bare: set a repo as bare (no checkout)
560 """
560 """
561 self.repo_name = self._next_repo_name() + name_suffix
561 self.repo_name = self._next_repo_name() + name_suffix
562 repo = self._fixture.create_repo(
562 repo = self._fixture.create_repo(
563 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
563 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
564 self._cleanup_repos.append(repo.repo_name)
564 self._cleanup_repos.append(repo.repo_name)
565
565
566 commits = commits or [
566 commits = commits or [
567 {'message': 'Commit %s of %s' % (x, self.repo_name)}
567 {'message': f'Commit {x} of {self.repo_name}'}
568 for x in range(number_of_commits)]
568 for x in range(number_of_commits)]
569 vcs_repo = repo.scm_instance()
569 vcs_repo = repo.scm_instance()
570 vcs_repo.count()
570 vcs_repo.count()
571 self._add_commits_to_repo(vcs_repo, commits)
571 self._add_commits_to_repo(vcs_repo, commits)
572 if heads:
572 if heads:
573 self.pull_heads(repo, heads)
573 self.pull_heads(repo, heads)
574
574
575 return repo
575 return repo
576
576
577 def pull_heads(self, repo, heads):
577 def pull_heads(self, repo, heads, do_fetch=False):
578 """
578 """
579 Make sure that repo contains all commits mentioned in `heads`
579 Make sure that repo contains all commits mentioned in `heads`
580 """
580 """
581 vcsrepo = repo.scm_instance()
581 vcsrepo = repo.scm_instance()
582 vcsrepo.config.clear_section('hooks')
582 vcsrepo.config.clear_section('hooks')
583 commit_ids = [self._commit_ids[h] for h in heads]
583 commit_ids = [self._commit_ids[h] for h in heads]
584 if do_fetch:
585 vcsrepo.fetch(self._master_repo_path, commit_ids=commit_ids)
584 vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)
586 vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)
585
587
586 def create_fork(self):
588 def create_fork(self):
587 repo_to_fork = self.repo_name
589 repo_to_fork = self.repo_name
588 self.repo_name = self._next_repo_name()
590 self.repo_name = self._next_repo_name()
589 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
591 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
590 self._cleanup_repos.append(self.repo_name)
592 self._cleanup_repos.append(self.repo_name)
591 return repo
593 return repo
592
594
593 def new_repo_name(self, suffix=''):
595 def new_repo_name(self, suffix=''):
594 self.repo_name = self._next_repo_name() + suffix
596 self.repo_name = self._next_repo_name() + suffix
595 self._cleanup_repos.append(self.repo_name)
597 self._cleanup_repos.append(self.repo_name)
596 return self.repo_name
598 return self.repo_name
597
599
598 def _next_repo_name(self):
600 def _next_repo_name(self):
599 return u"%s_%s" % (
601 return "%s_%s" % (
600 self.invalid_repo_name.sub('_', self._test_name), len(self._cleanup_repos))
602 self.invalid_repo_name.sub('_', self._test_name), len(self._cleanup_repos))
601
603
602 def ensure_file(self, filename, content='Test content\n'):
604 def ensure_file(self, filename, content=b'Test content\n'):
603 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
605 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
604 commits = [
606 commits = [
605 {'added': [
607 {'added': [
606 FileNode(filename, content=content),
608 FileNode(filename, content=content),
607 ]},
609 ]},
608 ]
610 ]
609 self._add_commits_to_repo(self.repo.scm_instance(), commits)
611 self._add_commits_to_repo(self.repo.scm_instance(), commits)
610
612
611 def enable_downloads(self):
613 def enable_downloads(self):
612 repo = self.repo
614 repo = self.repo
613 repo.enable_downloads = True
615 repo.enable_downloads = True
614 Session().add(repo)
616 Session().add(repo)
615 Session().commit()
617 Session().commit()
616
618
617 def cleanup(self):
619 def cleanup(self):
618 for repo_name in reversed(self._cleanup_repos):
620 for repo_name in reversed(self._cleanup_repos):
619 self._fixture.destroy_repo(repo_name)
621 self._fixture.destroy_repo(repo_name)
620
622
621 def _add_commits_to_repo(self, repo, commits):
623 def _add_commits_to_repo(self, repo, commits):
622 commit_ids = _add_commits_to_repo(repo, commits)
624 commit_ids = _add_commits_to_repo(repo, commits)
623 if not commit_ids:
625 if not commit_ids:
624 return
626 return
625 self._commit_ids = commit_ids
627 self._commit_ids = commit_ids
626
628
627 # Creating refs for Git to allow fetching them from remote repository
629 # Creating refs for Git to allow fetching them from remote repository
628 if self.alias == 'git':
630 if self.alias == 'git':
629 refs = {}
631 refs = {}
630 for message in self._commit_ids:
632 for message in self._commit_ids:
631 # TODO: mikhail: do more special chars replacements
633 cleanup_message = message.replace(' ', '')
632 ref_name = 'refs/test-refs/{}'.format(
634 ref_name = f'refs/test-refs/{cleanup_message}'
633 message.replace(' ', ''))
634 refs[ref_name] = self._commit_ids[message]
635 refs[ref_name] = self._commit_ids[message]
635 self._create_refs(repo, refs)
636 self._create_refs(repo, refs)
636
637
637 def _create_refs(self, repo, refs):
638 def _create_refs(self, repo, refs):
638 for ref_name in refs:
639 for ref_name, ref_val in refs.items():
639 repo.set_refs(ref_name, refs[ref_name])
640 repo.set_refs(ref_name, ref_val)
640
641
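# Editor's sketch (not part of this changeset): the intended master-repo
# workflow of the Backend helper above. A template repository is created once,
# derived repositories pull selected heads from it, and further heads can be
# pulled in later; this is how the pull-request fixtures below build their
# source and target repositories.
def test_master_repo_workflow(backend):
    commit_map = backend.create_master_repo([
        {'message': 'c1'},
        {'message': 'c2'},
        {'message': 'c3'},
    ])
    target = backend.create_repo(heads=['c1'], name_suffix='_target')
    source = backend.create_repo(heads=['c2'], name_suffix='_source')
    # later on, bring one more head from the master repo into the source repo
    backend.pull_heads(source, heads=['c3'])
    assert set(commit_map) == {'c1', 'c2', 'c3'}
    assert target.repo_name != source.repo_name
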
641
642
642 class VcsBackend(object):
643 class VcsBackend(object):
643 """
644 """
644 Represents the test configuration for one supported vcs backend.
645 Represents the test configuration for one supported vcs backend.
645 """
646 """
646
647
647 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
648 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
648
649
649 def __init__(self, alias, repo_path, test_name, test_repo_container):
650 def __init__(self, alias, repo_path, test_name, test_repo_container):
650 self.alias = alias
651 self.alias = alias
651 self._repo_path = repo_path
652 self._repo_path = repo_path
652 self._cleanup_repos = []
653 self._cleanup_repos = []
653 self._test_name = test_name
654 self._test_name = test_name
654 self._test_repo_container = test_repo_container
655 self._test_repo_container = test_repo_container
655
656
656 def __getitem__(self, key):
657 def __getitem__(self, key):
657 return self._test_repo_container(key, self.alias).scm_instance()
658 return self._test_repo_container(key, self.alias).scm_instance()
658
659
659 def __repr__(self):
660 def __repr__(self):
660 return f'{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})'
661 return f'{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})'
661
662
662 @property
663 @property
663 def repo(self):
664 def repo(self):
664 """
665 """
665 Returns the "current" repository. This is the vcs_test repo or the last
666 Returns the "current" repository. This is the vcs_test repo or the last
666 repo which has been created.
667 repo which has been created.
667 """
668 """
668 Repository = get_backend(self.alias)
669 Repository = get_backend(self.alias)
669 return Repository(self._repo_path)
670 return Repository(self._repo_path)
670
671
671 @property
672 @property
672 def backend(self):
673 def backend(self):
673 """
674 """
674 Returns the backend implementation class.
675 Returns the backend implementation class.
675 """
676 """
676 return get_backend(self.alias)
677 return get_backend(self.alias)
677
678
678 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
679 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
679 bare=False):
680 bare=False):
680 repo_name = self._next_repo_name()
681 repo_name = self._next_repo_name()
681 self._repo_path = get_new_dir(repo_name)
682 self._repo_path = get_new_dir(repo_name)
682 repo_class = get_backend(self.alias)
683 repo_class = get_backend(self.alias)
683 src_url = None
684 src_url = None
684 if _clone_repo:
685 if _clone_repo:
685 src_url = _clone_repo.path
686 src_url = _clone_repo.path
686 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
687 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
687 self._cleanup_repos.append(repo)
688 self._cleanup_repos.append(repo)
688
689
689 commits = commits or [
690 commits = commits or [
690 {'message': 'Commit %s of %s' % (x, repo_name)}
691 {'message': 'Commit %s of %s' % (x, repo_name)}
691 for x in range(number_of_commits)]
692 for x in range(number_of_commits)]
692 _add_commits_to_repo(repo, commits)
693 _add_commits_to_repo(repo, commits)
693 return repo
694 return repo
694
695
695 def clone_repo(self, repo):
696 def clone_repo(self, repo):
696 return self.create_repo(_clone_repo=repo)
697 return self.create_repo(_clone_repo=repo)
697
698
698 def cleanup(self):
699 def cleanup(self):
699 for repo in self._cleanup_repos:
700 for repo in self._cleanup_repos:
700 shutil.rmtree(repo.path)
701 shutil.rmtree(repo.path)
701
702
702 def new_repo_path(self):
703 def new_repo_path(self):
703 repo_name = self._next_repo_name()
704 repo_name = self._next_repo_name()
704 self._repo_path = get_new_dir(repo_name)
705 self._repo_path = get_new_dir(repo_name)
705 return self._repo_path
706 return self._repo_path
706
707
707 def _next_repo_name(self):
708 def _next_repo_name(self):
708
709
709 return "{}_{}".format(
710 return "{}_{}".format(
710 self.invalid_repo_name.sub('_', self._test_name),
711 self.invalid_repo_name.sub('_', self._test_name),
711 len(self._cleanup_repos)
712 len(self._cleanup_repos)
712 )
713 )
713
714
714 def add_file(self, repo, filename, content='Test content\n'):
715 def add_file(self, repo, filename, content='Test content\n'):
715 imc = repo.in_memory_commit
716 imc = repo.in_memory_commit
716 imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content)))
717 imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content)))
717 imc.commit(
718 imc.commit(
718 message='Automatic commit from vcsbackend fixture',
719 message='Automatic commit from vcsbackend fixture',
719 author='Automatic <automatic@rhodecode.com>')
720 author='Automatic <automatic@rhodecode.com>')
720
721
721 def ensure_file(self, filename, content='Test content\n'):
722 def ensure_file(self, filename, content='Test content\n'):
722 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
723 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
723 self.add_file(self.repo, filename, content)
724 self.add_file(self.repo, filename, content)
724
725
725
726
726 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend:
727 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend:
727 if backend_alias not in request.config.getoption('--backends'):
728 if backend_alias not in request.config.getoption('--backends'):
728 pytest.skip("Backend %s not selected." % (backend_alias, ))
729 pytest.skip("Backend %s not selected." % (backend_alias, ))
729
730
730 utils.check_xfail_backends(request.node, backend_alias)
731 utils.check_xfail_backends(request.node, backend_alias)
731 utils.check_skip_backends(request.node, backend_alias)
732 utils.check_skip_backends(request.node, backend_alias)
732
733
733 repo_name = f'vcs_test_{backend_alias}'
734 repo_name = f'vcs_test_{backend_alias}'
734 repo_path = os.path.join(tests_tmp_path, repo_name)
735 repo_path = os.path.join(tests_tmp_path, repo_name)
735 backend = VcsBackend(
736 backend = VcsBackend(
736 alias=backend_alias,
737 alias=backend_alias,
737 repo_path=repo_path,
738 repo_path=repo_path,
738 test_name=request.node.name,
739 test_name=request.node.name,
739 test_repo_container=test_repo)
740 test_repo_container=test_repo)
740 request.addfinalizer(backend.cleanup)
741 request.addfinalizer(backend.cleanup)
741 return backend
742 return backend
742
743
743
744
744 @pytest.fixture()
745 @pytest.fixture()
745 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
746 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
746 """
747 """
747 Parametrized fixture which represents a single vcs backend implementation.
748 Parametrized fixture which represents a single vcs backend implementation.
748
749
749 See the fixture `backend` for more details. This one implements the same
750 See the fixture `backend` for more details. This one implements the same
750 concept, but on vcs level. So it does not provide model instances etc.
751 concept, but on vcs level. So it does not provide model instances etc.
751
752
752 Parameters are generated dynamically, see :func:`pytest_generate_tests`
753 Parameters are generated dynamically, see :func:`pytest_generate_tests`
753 for how this works.
754 for how this works.
754 """
755 """
755 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
756 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
756
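# Editor's sketch (not part of this changeset): a vcs-level test built on the
# `vcsbackend` fixture above; no model or database objects are involved, only
# the low-level repository API.
def test_vcs_repo_has_commits(vcsbackend):
    vcs_repo = vcsbackend.create_repo(number_of_commits=3)
    assert len(vcs_repo.commit_ids) == 3
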
757
757
758
758 @pytest.fixture()
759 @pytest.fixture()
759 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
760 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
760 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
761 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
761
762
762
763
763 @pytest.fixture()
764 @pytest.fixture()
764 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
765 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
765 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
766 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
766
767
767
768
768 @pytest.fixture()
769 @pytest.fixture()
769 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
770 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
770 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
771 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
771
772
772
773
773 @pytest.fixture()
774 @pytest.fixture()
774 def vcsbackend_stub(vcsbackend_git):
775 def vcsbackend_stub(vcsbackend_git):
775 """
776 """
776 Use this to express that your test just needs a stub of a vcsbackend.
777 Use this to express that your test just needs a stub of a vcsbackend.
777
778
778 Plan is to eventually implement an in-memory stub to speed tests up.
779 Plan is to eventually implement an in-memory stub to speed tests up.
779 """
780 """
780 return vcsbackend_git
781 return vcsbackend_git
781
782
782
783
783 def _add_commits_to_repo(vcs_repo, commits):
784 def _add_commits_to_repo(vcs_repo, commits):
784 commit_ids = {}
785 commit_ids = {}
785 if not commits:
786 if not commits:
786 return commit_ids
787 return commit_ids
787
788
788 imc = vcs_repo.in_memory_commit
789 imc = vcs_repo.in_memory_commit
789 commit = None
790
790
791 for idx, commit in enumerate(commits):
791 for idx, commit in enumerate(commits):
792 message = str(commit.get('message', 'Commit %s' % idx))
792 message = str(commit.get('message', f'Commit {idx}'))
793
793
794 for node in commit.get('added', []):
794 for node in commit.get('added', []):
795 imc.add(FileNode(safe_bytes(node.path), content=node.content))
795 imc.add(FileNode(safe_bytes(node.path), content=node.content))
796 for node in commit.get('changed', []):
796 for node in commit.get('changed', []):
797 imc.change(FileNode(safe_bytes(node.path), content=node.content))
797 imc.change(FileNode(safe_bytes(node.path), content=node.content))
798 for node in commit.get('removed', []):
798 for node in commit.get('removed', []):
799 imc.remove(FileNode(safe_bytes(node.path)))
799 imc.remove(FileNode(safe_bytes(node.path)))
800
800
801 parents = [
801 parents = [
802 vcs_repo.get_commit(commit_id=commit_ids[p])
802 vcs_repo.get_commit(commit_id=commit_ids[p])
803 for p in commit.get('parents', [])]
803 for p in commit.get('parents', [])]
804
804
805 operations = ('added', 'changed', 'removed')
805 operations = ('added', 'changed', 'removed')
806 if not any((commit.get(o) for o in operations)):
806 if not any((commit.get(o) for o in operations)):
807 imc.add(FileNode(b'file_%b' % safe_bytes(str(idx)), content=safe_bytes(message)))
807 imc.add(FileNode(b'file_%b' % safe_bytes(str(idx)), content=safe_bytes(message)))
808
808
809 commit = imc.commit(
809 commit = imc.commit(
810 message=message,
810 message=message,
811 author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
811 author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
812 date=commit.get('date'),
812 date=commit.get('date'),
813 branch=commit.get('branch'),
813 branch=commit.get('branch'),
814 parents=parents)
814 parents=parents)
815
815
816 commit_ids[commit.message] = commit.raw_id
816 commit_ids[commit.message] = commit.raw_id
817
817
818 return commit_ids
818 return commit_ids
819
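# Editor's sketch (not part of this changeset): the commit specification format
# consumed by _add_commits_to_repo above. 'parents' refers back to earlier
# commits by message, which is what lets tests build non-linear (branched)
# histories; entries without added/changed/removed get an auto-generated file.
EXAMPLE_NON_LINEAR_COMMITS = [
    {'message': 'root'},
    {'message': 'feature', 'parents': ['root']},
    {'message': 'bugfix', 'parents': ['root']},  # second child of 'root' -> branched history
]
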
819
820
820
821 @pytest.fixture()
821 @pytest.fixture()
822 def reposerver(request):
822 def reposerver(request):
823 """
823 """
824 Allows serving a backend repository
824 Allows serving a backend repository
825 """
825 """
826
826
827 repo_server = RepoServer()
827 repo_server = RepoServer()
828 request.addfinalizer(repo_server.cleanup)
828 request.addfinalizer(repo_server.cleanup)
829 return repo_server
829 return repo_server
830
830
831
831
832 class RepoServer(object):
832 class RepoServer(object):
833 """
833 """
834 Utility to serve a local repository for the duration of a test case.
834 Utility to serve a local repository for the duration of a test case.
835
835
836 Supports only Subversion so far.
836 Supports only Subversion so far.
837 """
837 """
838
838
839 url = None
839 url = None
840
840
841 def __init__(self):
841 def __init__(self):
842 self._cleanup_servers = []
842 self._cleanup_servers = []
843
843
844 def serve(self, vcsrepo):
844 def serve(self, vcsrepo):
845 if vcsrepo.alias != 'svn':
845 if vcsrepo.alias != 'svn':
846 raise TypeError("Backend %s not supported" % vcsrepo.alias)
846 raise TypeError("Backend %s not supported" % vcsrepo.alias)
847
847
848 proc = subprocess.Popen(
848 proc = subprocess.Popen(
849 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
849 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
850 '--root', vcsrepo.path])
850 '--root', vcsrepo.path])
851 self._cleanup_servers.append(proc)
851 self._cleanup_servers.append(proc)
852 self.url = 'svn://localhost'
852 self.url = 'svn://localhost'
853
853
854 def cleanup(self):
854 def cleanup(self):
855 for proc in self._cleanup_servers:
855 for proc in self._cleanup_servers:
856 proc.terminate()
856 proc.terminate()
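
# Editor's sketch (not part of this changeset): serving a Subversion test
# repository through the `reposerver` fixture above, together with the
# vcs-level svn backend fixture.
def test_serve_svn_repository(reposerver, vcsbackend_svn):
    vcs_repo = vcsbackend_svn.create_repo(number_of_commits=1)
    reposerver.serve(vcs_repo)
    assert reposerver.url == 'svn://localhost'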
857
857
858
858
859 @pytest.fixture()
859 @pytest.fixture()
860 def pr_util(backend, request, config_stub):
860 def pr_util(backend, request, config_stub):
861 """
861 """
862 Utility for tests of models and for functional tests around pull requests.
862 Utility for tests of models and for functional tests around pull requests.
863
863
864 It gives an instance of :class:`PRTestUtility` which provides various
864 It gives an instance of :class:`PRTestUtility` which provides various
865 utility methods around one pull request.
865 utility methods around one pull request.
866
866
867 This fixture uses `backend` and inherits its parameterization.
867 This fixture uses `backend` and inherits its parameterization.
868 """
868 """
869
869
870 util = PRTestUtility(backend)
870 util = PRTestUtility(backend)
871 request.addfinalizer(util.cleanup)
871 request.addfinalizer(util.cleanup)
872
872
873 return util
873 return util
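
# Editor's sketch (not part of this changeset): a model-level test built on the
# `pr_util` fixture above. By default the helper creates commits c1..c3 and
# opens a pull request containing only 'c2'.
def test_pull_request_default_revisions(pr_util):
    pull_request = pr_util.create_pull_request(mergeable=True)
    assert pull_request.revisions == [pr_util.commit_ids['c2']]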
874
874
875
875
876 class PRTestUtility(object):
876 class PRTestUtility(object):
877
877
878 pull_request = None
878 pull_request = None
879 pull_request_id = None
879 pull_request_id = None
880 mergeable_patcher = None
880 mergeable_patcher = None
881 mergeable_mock = None
881 mergeable_mock = None
882 notification_patcher = None
882 notification_patcher = None
883 commit_ids: dict
883
884
884 def __init__(self, backend):
885 def __init__(self, backend):
885 self.backend = backend
886 self.backend = backend
886
887
887 def create_pull_request(
888 def create_pull_request(
888 self, commits=None, target_head=None, source_head=None,
889 self, commits=None, target_head=None, source_head=None,
889 revisions=None, approved=False, author=None, mergeable=False,
890 revisions=None, approved=False, author=None, mergeable=False,
890 enable_notifications=True, name_suffix='', reviewers=None, observers=None,
891 enable_notifications=True, name_suffix='', reviewers=None, observers=None,
891 title=u"Test", description=u"Description"):
892 title="Test", description="Description"):
892 self.set_mergeable(mergeable)
893 self.set_mergeable(mergeable)
893 if not enable_notifications:
894 if not enable_notifications:
894 # mock notification side effect
895 # mock notification side effect
895 self.notification_patcher = mock.patch(
896 self.notification_patcher = mock.patch(
896 'rhodecode.model.notification.NotificationModel.create')
897 'rhodecode.model.notification.NotificationModel.create')
897 self.notification_patcher.start()
898 self.notification_patcher.start()
898
899
899 if not self.pull_request:
900 if not self.pull_request:
900 if not commits:
901 if not commits:
901 commits = [
902 commits = [
902 {'message': 'c1'},
903 {'message': 'c1'},
903 {'message': 'c2'},
904 {'message': 'c2'},
904 {'message': 'c3'},
905 {'message': 'c3'},
905 ]
906 ]
906 target_head = 'c1'
907 target_head = 'c1'
907 source_head = 'c2'
908 source_head = 'c2'
908 revisions = ['c2']
909 revisions = ['c2']
909
910
910 self.commit_ids = self.backend.create_master_repo(commits)
911 self.commit_ids = self.backend.create_master_repo(commits)
911 self.target_repository = self.backend.create_repo(
912 self.target_repository = self.backend.create_repo(
912 heads=[target_head], name_suffix=name_suffix)
913 heads=[target_head], name_suffix=name_suffix)
913 self.source_repository = self.backend.create_repo(
914 self.source_repository = self.backend.create_repo(
914 heads=[source_head], name_suffix=name_suffix)
915 heads=[source_head], name_suffix=name_suffix)
915 self.author = author or UserModel().get_by_username(
916 self.author = author or UserModel().get_by_username(
916 TEST_USER_ADMIN_LOGIN)
917 TEST_USER_ADMIN_LOGIN)
917
918
918 model = PullRequestModel()
919 model = PullRequestModel()
919 self.create_parameters = {
920 self.create_parameters = {
920 'created_by': self.author,
921 'created_by': self.author,
921 'source_repo': self.source_repository.repo_name,
922 'source_repo': self.source_repository.repo_name,
922 'source_ref': self._default_branch_reference(source_head),
923 'source_ref': self._default_branch_reference(source_head),
923 'target_repo': self.target_repository.repo_name,
924 'target_repo': self.target_repository.repo_name,
924 'target_ref': self._default_branch_reference(target_head),
925 'target_ref': self._default_branch_reference(target_head),
925 'revisions': [self.commit_ids[r] for r in revisions],
926 'revisions': [self.commit_ids[r] for r in revisions],
926 'reviewers': reviewers or self._get_reviewers(),
927 'reviewers': reviewers or self._get_reviewers(),
927 'observers': observers or self._get_observers(),
928 'observers': observers or self._get_observers(),
928 'title': title,
929 'title': title,
929 'description': description,
930 'description': description,
930 }
931 }
931 self.pull_request = model.create(**self.create_parameters)
932 self.pull_request = model.create(**self.create_parameters)
932 assert model.get_versions(self.pull_request) == []
933 assert model.get_versions(self.pull_request) == []
933
934
934 self.pull_request_id = self.pull_request.pull_request_id
935 self.pull_request_id = self.pull_request.pull_request_id
935
936
936 if approved:
937 if approved:
937 self.approve()
938 self.approve()
938
939
939 Session().add(self.pull_request)
940 Session().add(self.pull_request)
940 Session().commit()
941 Session().commit()
941
942
942 return self.pull_request
943 return self.pull_request
943
944
944 def approve(self):
945 def approve(self):
945 self.create_status_votes(
946 self.create_status_votes(
946 ChangesetStatus.STATUS_APPROVED,
947 ChangesetStatus.STATUS_APPROVED,
947 *self.pull_request.reviewers)
948 *self.pull_request.reviewers)
948
949
949 def close(self):
950 def close(self):
950 PullRequestModel().close_pull_request(self.pull_request, self.author)
951 PullRequestModel().close_pull_request(self.pull_request, self.author)
951
952
952 def _default_branch_reference(self, commit_message):
953 def _default_branch_reference(self, commit_message, branch: str = None) -> str:
953 reference = '%s:%s:%s' % (
954 default_branch = branch or self.backend.default_branch_name
954 'branch',
955 message = self.commit_ids[commit_message]
955 self.backend.default_branch_name,
956 reference = f'branch:{default_branch}:{message}'
956 self.commit_ids[commit_message])
957
957 return reference
958 return reference
958
959
959 def _get_reviewers(self):
960 def _get_reviewers(self):
960 role = PullRequestReviewers.ROLE_REVIEWER
961 role = PullRequestReviewers.ROLE_REVIEWER
961 return [
962 return [
962 (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
963 (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
963 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
964 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
964 ]
965 ]
965
966
966 def _get_observers(self):
967 def _get_observers(self):
967 return [
968 return [
968
969
969 ]
970 ]
970
971
971 def update_source_repository(self, head=None):
972 def update_source_repository(self, head=None, do_fetch=False):
973 heads = [head or 'c3']
974 self.backend.pull_heads(self.source_repository, heads=heads, do_fetch=do_fetch)
975
976 def update_target_repository(self, head=None, do_fetch=False):
972 heads = [head or 'c3']
977 heads = [head or 'c3']
973 self.backend.pull_heads(self.source_repository, heads=heads)
978 self.backend.pull_heads(self.target_repository, heads=heads, do_fetch=do_fetch)
979
980 def set_pr_target_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
981 full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
982 self.pull_request.target_ref = full_ref
983 return full_ref
984
985 def set_pr_source_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
986 full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
987 self.pull_request.source_ref = full_ref
988 return full_ref
974
989
975 def add_one_commit(self, head=None):
990 def add_one_commit(self, head=None):
976 self.update_source_repository(head=head)
991 self.update_source_repository(head=head)
977 old_commit_ids = set(self.pull_request.revisions)
992 old_commit_ids = set(self.pull_request.revisions)
978 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
993 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
979 commit_ids = set(self.pull_request.revisions)
994 commit_ids = set(self.pull_request.revisions)
980 new_commit_ids = commit_ids - old_commit_ids
995 new_commit_ids = commit_ids - old_commit_ids
981 assert len(new_commit_ids) == 1
996 assert len(new_commit_ids) == 1
982 return new_commit_ids.pop()
997 return new_commit_ids.pop()
983
998
984 def remove_one_commit(self):
999 def remove_one_commit(self):
985 assert len(self.pull_request.revisions) == 2
1000 assert len(self.pull_request.revisions) == 2
986 source_vcs = self.source_repository.scm_instance()
1001 source_vcs = self.source_repository.scm_instance()
987 removed_commit_id = source_vcs.commit_ids[-1]
1002 removed_commit_id = source_vcs.commit_ids[-1]
988
1003
989 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1004 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
990 # remove the if once that's sorted out.
1005 # remove the if once that's sorted out.
991 if self.backend.alias == "git":
1006 if self.backend.alias == "git":
992 kwargs = {'branch_name': self.backend.default_branch_name}
1007 kwargs = {'branch_name': self.backend.default_branch_name}
993 else:
1008 else:
994 kwargs = {}
1009 kwargs = {}
995 source_vcs.strip(removed_commit_id, **kwargs)
1010 source_vcs.strip(removed_commit_id, **kwargs)
996
1011
997 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
1012 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
998 assert len(self.pull_request.revisions) == 1
1013 assert len(self.pull_request.revisions) == 1
999 return removed_commit_id
1014 return removed_commit_id
1000
1015
1001 def create_comment(self, linked_to=None):
1016 def create_comment(self, linked_to=None):
1002 comment = CommentsModel().create(
1017 comment = CommentsModel().create(
1003 text=u"Test comment",
1018 text="Test comment",
1004 repo=self.target_repository.repo_name,
1019 repo=self.target_repository.repo_name,
1005 user=self.author,
1020 user=self.author,
1006 pull_request=self.pull_request)
1021 pull_request=self.pull_request)
1007 assert comment.pull_request_version_id is None
1022 assert comment.pull_request_version_id is None
1008
1023
1009 if linked_to:
1024 if linked_to:
1010 PullRequestModel()._link_comments_to_version(linked_to)
1025 PullRequestModel()._link_comments_to_version(linked_to)
1011
1026
1012 return comment
1027 return comment
1013
1028
1014 def create_inline_comment(
1029 def create_inline_comment(
1015 self, linked_to=None, line_no='n1', file_path='file_1'):
1030 self, linked_to=None, line_no='n1', file_path='file_1'):
1016 comment = CommentsModel().create(
1031 comment = CommentsModel().create(
1017 text=u"Test comment",
1032 text="Test comment",
1018 repo=self.target_repository.repo_name,
1033 repo=self.target_repository.repo_name,
1019 user=self.author,
1034 user=self.author,
1020 line_no=line_no,
1035 line_no=line_no,
1021 f_path=file_path,
1036 f_path=file_path,
1022 pull_request=self.pull_request)
1037 pull_request=self.pull_request)
1023 assert comment.pull_request_version_id is None
1038 assert comment.pull_request_version_id is None
1024
1039
1025 if linked_to:
1040 if linked_to:
1026 PullRequestModel()._link_comments_to_version(linked_to)
1041 PullRequestModel()._link_comments_to_version(linked_to)
1027
1042
1028 return comment
1043 return comment
1029
1044
1030 def create_version_of_pull_request(self):
1045 def create_version_of_pull_request(self):
1031 pull_request = self.create_pull_request()
1046 pull_request = self.create_pull_request()
1032 version = PullRequestModel()._create_version_from_snapshot(
1047 version = PullRequestModel()._create_version_from_snapshot(
1033 pull_request)
1048 pull_request)
1034 return version
1049 return version
1035
1050
1036 def create_status_votes(self, status, *reviewers):
1051 def create_status_votes(self, status, *reviewers):
1037 for reviewer in reviewers:
1052 for reviewer in reviewers:
1038 ChangesetStatusModel().set_status(
1053 ChangesetStatusModel().set_status(
1039 repo=self.pull_request.target_repo,
1054 repo=self.pull_request.target_repo,
1040 status=status,
1055 status=status,
1041 user=reviewer.user_id,
1056 user=reviewer.user_id,
1042 pull_request=self.pull_request)
1057 pull_request=self.pull_request)
1043
1058
1044 def set_mergeable(self, value):
1059 def set_mergeable(self, value):
1045 if not self.mergeable_patcher:
1060 if not self.mergeable_patcher:
1046 self.mergeable_patcher = mock.patch.object(
1061 self.mergeable_patcher = mock.patch.object(
1047 VcsSettingsModel, 'get_general_settings')
1062 VcsSettingsModel, 'get_general_settings')
1048 self.mergeable_mock = self.mergeable_patcher.start()
1063 self.mergeable_mock = self.mergeable_patcher.start()
1049 self.mergeable_mock.return_value = {
1064 self.mergeable_mock.return_value = {
1050 'rhodecode_pr_merge_enabled': value}
1065 'rhodecode_pr_merge_enabled': value}
1051
1066
1052 def cleanup(self):
1067 def cleanup(self):
1053 # In case the source repository is already cleaned up, the pull
1068 # In case the source repository is already cleaned up, the pull
1054 # request will already be deleted.
1069 # request will already be deleted.
1055 pull_request = PullRequest().get(self.pull_request_id)
1070 pull_request = PullRequest().get(self.pull_request_id)
1056 if pull_request:
1071 if pull_request:
1057 PullRequestModel().delete(pull_request, pull_request.author)
1072 PullRequestModel().delete(pull_request, pull_request.author)
1058 Session().commit()
1073 Session().commit()
1059
1074
1060 if self.notification_patcher:
1075 if self.notification_patcher:
1061 self.notification_patcher.stop()
1076 self.notification_patcher.stop()
1062
1077
1063 if self.mergeable_patcher:
1078 if self.mergeable_patcher:
1064 self.mergeable_patcher.stop()
1079 self.mergeable_patcher.stop()
1065
1080
1066
1081
1067 @pytest.fixture()
1082 @pytest.fixture()
1068 def user_admin(baseapp):
1083 def user_admin(baseapp):
1069 """
1084 """
1070 Provides the default admin test user as an instance of `db.User`.
1085 Provides the default admin test user as an instance of `db.User`.
1071 """
1086 """
1072 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1087 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1073 return user
1088 return user
1074
1089
1075
1090
1076 @pytest.fixture()
1091 @pytest.fixture()
1077 def user_regular(baseapp):
1092 def user_regular(baseapp):
1078 """
1093 """
1079 Provides the default regular test user as an instance of `db.User`.
1094 Provides the default regular test user as an instance of `db.User`.
1080 """
1095 """
1081 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1096 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1082 return user
1097 return user
1083
1098
1084
1099
1085 @pytest.fixture()
1100 @pytest.fixture()
1086 def user_util(request, db_connection):
1101 def user_util(request, db_connection):
1087 """
1102 """
1088 Provides a wired instance of `UserUtility` with integrated cleanup.
1103 Provides a wired instance of `UserUtility` with integrated cleanup.
1089 """
1104 """
1090 utility = UserUtility(test_name=request.node.name)
1105 utility = UserUtility(test_name=request.node.name)
1091 request.addfinalizer(utility.cleanup)
1106 request.addfinalizer(utility.cleanup)
1092 return utility
1107 return utility
1093
1108
1094
1109
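# A hedged usage sketch for the `user_util` fixture above; the method names come
# from the UserUtility class below, the email and permission values are assumed.
def _example_user_util_usage(user_util):
    user = user_util.create_user(email='sketch@example.com')
    repo = user_util.create_repo(owner=user.username, repo_type='hg')
    # the grant is recorded so UserUtility.cleanup() can revoke it afterwards
    user_util.grant_user_permission_to_repo(repo, user, 'repository.read')
    return user, repo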
1095 # TODO: johbo: Split this up into utilities per domain or something similar
1110 # TODO: johbo: Split this up into utilities per domain or something similar
1096 class UserUtility(object):
1111 class UserUtility(object):
1097
1112
1098 def __init__(self, test_name="test"):
1113 def __init__(self, test_name="test"):
1099 self._test_name = self._sanitize_name(test_name)
1114 self._test_name = self._sanitize_name(test_name)
1100 self.fixture = Fixture()
1115 self.fixture = Fixture()
1101 self.repo_group_ids = []
1116 self.repo_group_ids = []
1102 self.repos_ids = []
1117 self.repos_ids = []
1103 self.user_ids = []
1118 self.user_ids = []
1104 self.user_group_ids = []
1119 self.user_group_ids = []
1105 self.user_repo_permission_ids = []
1120 self.user_repo_permission_ids = []
1106 self.user_group_repo_permission_ids = []
1121 self.user_group_repo_permission_ids = []
1107 self.user_repo_group_permission_ids = []
1122 self.user_repo_group_permission_ids = []
1108 self.user_group_repo_group_permission_ids = []
1123 self.user_group_repo_group_permission_ids = []
1109 self.user_user_group_permission_ids = []
1124 self.user_user_group_permission_ids = []
1110 self.user_group_user_group_permission_ids = []
1125 self.user_group_user_group_permission_ids = []
1111 self.user_permissions = []
1126 self.user_permissions = []
1112
1127
1113 def _sanitize_name(self, name):
1128 def _sanitize_name(self, name):
1114 for char in ['[', ']']:
1129 for char in ['[', ']']:
1115 name = name.replace(char, '_')
1130 name = name.replace(char, '_')
1116 return name
1131 return name
1117
1132
1118 def create_repo_group(
1133 def create_repo_group(
1119 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1134 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1120 group_name = "{prefix}_repogroup_{count}".format(
1135 group_name = "{prefix}_repogroup_{count}".format(
1121 prefix=self._test_name,
1136 prefix=self._test_name,
1122 count=len(self.repo_group_ids))
1137 count=len(self.repo_group_ids))
1123 repo_group = self.fixture.create_repo_group(
1138 repo_group = self.fixture.create_repo_group(
1124 group_name, cur_user=owner)
1139 group_name, cur_user=owner)
1125 if auto_cleanup:
1140 if auto_cleanup:
1126 self.repo_group_ids.append(repo_group.group_id)
1141 self.repo_group_ids.append(repo_group.group_id)
1127 return repo_group
1142 return repo_group
1128
1143
1129 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1144 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1130 auto_cleanup=True, repo_type='hg', bare=False):
1145 auto_cleanup=True, repo_type='hg', bare=False):
1131 repo_name = "{prefix}_repository_{count}".format(
1146 repo_name = "{prefix}_repository_{count}".format(
1132 prefix=self._test_name,
1147 prefix=self._test_name,
1133 count=len(self.repos_ids))
1148 count=len(self.repos_ids))
1134
1149
1135 repository = self.fixture.create_repo(
1150 repository = self.fixture.create_repo(
1136 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1151 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1137 if auto_cleanup:
1152 if auto_cleanup:
1138 self.repos_ids.append(repository.repo_id)
1153 self.repos_ids.append(repository.repo_id)
1139 return repository
1154 return repository
1140
1155
1141 def create_user(self, auto_cleanup=True, **kwargs):
1156 def create_user(self, auto_cleanup=True, **kwargs):
1142 user_name = "{prefix}_user_{count}".format(
1157 user_name = "{prefix}_user_{count}".format(
1143 prefix=self._test_name,
1158 prefix=self._test_name,
1144 count=len(self.user_ids))
1159 count=len(self.user_ids))
1145 user = self.fixture.create_user(user_name, **kwargs)
1160 user = self.fixture.create_user(user_name, **kwargs)
1146 if auto_cleanup:
1161 if auto_cleanup:
1147 self.user_ids.append(user.user_id)
1162 self.user_ids.append(user.user_id)
1148 return user
1163 return user
1149
1164
1150 def create_additional_user_email(self, user, email):
1165 def create_additional_user_email(self, user, email):
1151 uem = self.fixture.create_additional_user_email(user=user, email=email)
1166 uem = self.fixture.create_additional_user_email(user=user, email=email)
1152 return uem
1167 return uem
1153
1168
1154 def create_user_with_group(self):
1169 def create_user_with_group(self):
1155 user = self.create_user()
1170 user = self.create_user()
1156 user_group = self.create_user_group(members=[user])
1171 user_group = self.create_user_group(members=[user])
1157 return user, user_group
1172 return user, user_group
1158
1173
1159 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1174 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1160 auto_cleanup=True, **kwargs):
1175 auto_cleanup=True, **kwargs):
1161 group_name = "{prefix}_usergroup_{count}".format(
1176 group_name = "{prefix}_usergroup_{count}".format(
1162 prefix=self._test_name,
1177 prefix=self._test_name,
1163 count=len(self.user_group_ids))
1178 count=len(self.user_group_ids))
1164 user_group = self.fixture.create_user_group(
1179 user_group = self.fixture.create_user_group(
1165 group_name, cur_user=owner, **kwargs)
1180 group_name, cur_user=owner, **kwargs)
1166
1181
1167 if auto_cleanup:
1182 if auto_cleanup:
1168 self.user_group_ids.append(user_group.users_group_id)
1183 self.user_group_ids.append(user_group.users_group_id)
1169 if members:
1184 if members:
1170 for user in members:
1185 for user in members:
1171 UserGroupModel().add_user_to_group(user_group, user)
1186 UserGroupModel().add_user_to_group(user_group, user)
1172 return user_group
1187 return user_group
1173
1188
1174 def grant_user_permission(self, user_name, permission_name):
1189 def grant_user_permission(self, user_name, permission_name):
1175 self.inherit_default_user_permissions(user_name, False)
1190 self.inherit_default_user_permissions(user_name, False)
1176 self.user_permissions.append((user_name, permission_name))
1191 self.user_permissions.append((user_name, permission_name))
1177
1192
1178 def grant_user_permission_to_repo_group(
1193 def grant_user_permission_to_repo_group(
1179 self, repo_group, user, permission_name):
1194 self, repo_group, user, permission_name):
1180 permission = RepoGroupModel().grant_user_permission(
1195 permission = RepoGroupModel().grant_user_permission(
1181 repo_group, user, permission_name)
1196 repo_group, user, permission_name)
1182 self.user_repo_group_permission_ids.append(
1197 self.user_repo_group_permission_ids.append(
1183 (repo_group.group_id, user.user_id))
1198 (repo_group.group_id, user.user_id))
1184 return permission
1199 return permission
1185
1200
1186 def grant_user_group_permission_to_repo_group(
1201 def grant_user_group_permission_to_repo_group(
1187 self, repo_group, user_group, permission_name):
1202 self, repo_group, user_group, permission_name):
1188 permission = RepoGroupModel().grant_user_group_permission(
1203 permission = RepoGroupModel().grant_user_group_permission(
1189 repo_group, user_group, permission_name)
1204 repo_group, user_group, permission_name)
1190 self.user_group_repo_group_permission_ids.append(
1205 self.user_group_repo_group_permission_ids.append(
1191 (repo_group.group_id, user_group.users_group_id))
1206 (repo_group.group_id, user_group.users_group_id))
1192 return permission
1207 return permission
1193
1208
1194 def grant_user_permission_to_repo(
1209 def grant_user_permission_to_repo(
1195 self, repo, user, permission_name):
1210 self, repo, user, permission_name):
1196 permission = RepoModel().grant_user_permission(
1211 permission = RepoModel().grant_user_permission(
1197 repo, user, permission_name)
1212 repo, user, permission_name)
1198 self.user_repo_permission_ids.append(
1213 self.user_repo_permission_ids.append(
1199 (repo.repo_id, user.user_id))
1214 (repo.repo_id, user.user_id))
1200 return permission
1215 return permission
1201
1216
1202 def grant_user_group_permission_to_repo(
1217 def grant_user_group_permission_to_repo(
1203 self, repo, user_group, permission_name):
1218 self, repo, user_group, permission_name):
1204 permission = RepoModel().grant_user_group_permission(
1219 permission = RepoModel().grant_user_group_permission(
1205 repo, user_group, permission_name)
1220 repo, user_group, permission_name)
1206 self.user_group_repo_permission_ids.append(
1221 self.user_group_repo_permission_ids.append(
1207 (repo.repo_id, user_group.users_group_id))
1222 (repo.repo_id, user_group.users_group_id))
1208 return permission
1223 return permission
1209
1224
1210 def grant_user_permission_to_user_group(
1225 def grant_user_permission_to_user_group(
1211 self, target_user_group, user, permission_name):
1226 self, target_user_group, user, permission_name):
1212 permission = UserGroupModel().grant_user_permission(
1227 permission = UserGroupModel().grant_user_permission(
1213 target_user_group, user, permission_name)
1228 target_user_group, user, permission_name)
1214 self.user_user_group_permission_ids.append(
1229 self.user_user_group_permission_ids.append(
1215 (target_user_group.users_group_id, user.user_id))
1230 (target_user_group.users_group_id, user.user_id))
1216 return permission
1231 return permission
1217
1232
1218 def grant_user_group_permission_to_user_group(
1233 def grant_user_group_permission_to_user_group(
1219 self, target_user_group, user_group, permission_name):
1234 self, target_user_group, user_group, permission_name):
1220 permission = UserGroupModel().grant_user_group_permission(
1235 permission = UserGroupModel().grant_user_group_permission(
1221 target_user_group, user_group, permission_name)
1236 target_user_group, user_group, permission_name)
1222 self.user_group_user_group_permission_ids.append(
1237 self.user_group_user_group_permission_ids.append(
1223 (target_user_group.users_group_id, user_group.users_group_id))
1238 (target_user_group.users_group_id, user_group.users_group_id))
1224 return permission
1239 return permission
1225
1240
1226 def revoke_user_permission(self, user_name, permission_name):
1241 def revoke_user_permission(self, user_name, permission_name):
1227 self.inherit_default_user_permissions(user_name, True)
1242 self.inherit_default_user_permissions(user_name, True)
1228 UserModel().revoke_perm(user_name, permission_name)
1243 UserModel().revoke_perm(user_name, permission_name)
1229
1244
1230 def inherit_default_user_permissions(self, user_name, value):
1245 def inherit_default_user_permissions(self, user_name, value):
1231 user = UserModel().get_by_username(user_name)
1246 user = UserModel().get_by_username(user_name)
1232 user.inherit_default_permissions = value
1247 user.inherit_default_permissions = value
1233 Session().add(user)
1248 Session().add(user)
1234 Session().commit()
1249 Session().commit()
1235
1250
1236 def cleanup(self):
1251 def cleanup(self):
1237 self._cleanup_permissions()
1252 self._cleanup_permissions()
1238 self._cleanup_repos()
1253 self._cleanup_repos()
1239 self._cleanup_repo_groups()
1254 self._cleanup_repo_groups()
1240 self._cleanup_user_groups()
1255 self._cleanup_user_groups()
1241 self._cleanup_users()
1256 self._cleanup_users()
1242
1257
1243 def _cleanup_permissions(self):
1258 def _cleanup_permissions(self):
1244 if self.user_permissions:
1259 if self.user_permissions:
1245 for user_name, permission_name in self.user_permissions:
1260 for user_name, permission_name in self.user_permissions:
1246 self.revoke_user_permission(user_name, permission_name)
1261 self.revoke_user_permission(user_name, permission_name)
1247
1262
1248 for permission in self.user_repo_permission_ids:
1263 for permission in self.user_repo_permission_ids:
1249 RepoModel().revoke_user_permission(*permission)
1264 RepoModel().revoke_user_permission(*permission)
1250
1265
1251 for permission in self.user_group_repo_permission_ids:
1266 for permission in self.user_group_repo_permission_ids:
1252 RepoModel().revoke_user_group_permission(*permission)
1267 RepoModel().revoke_user_group_permission(*permission)
1253
1268
1254 for permission in self.user_repo_group_permission_ids:
1269 for permission in self.user_repo_group_permission_ids:
1255 RepoGroupModel().revoke_user_permission(*permission)
1270 RepoGroupModel().revoke_user_permission(*permission)
1256
1271
1257 for permission in self.user_group_repo_group_permission_ids:
1272 for permission in self.user_group_repo_group_permission_ids:
1258 RepoGroupModel().revoke_user_group_permission(*permission)
1273 RepoGroupModel().revoke_user_group_permission(*permission)
1259
1274
1260 for permission in self.user_user_group_permission_ids:
1275 for permission in self.user_user_group_permission_ids:
1261 UserGroupModel().revoke_user_permission(*permission)
1276 UserGroupModel().revoke_user_permission(*permission)
1262
1277
1263 for permission in self.user_group_user_group_permission_ids:
1278 for permission in self.user_group_user_group_permission_ids:
1264 UserGroupModel().revoke_user_group_permission(*permission)
1279 UserGroupModel().revoke_user_group_permission(*permission)
1265
1280
1266 def _cleanup_repo_groups(self):
1281 def _cleanup_repo_groups(self):
1267 def _repo_group_compare(first_group_id, second_group_id):
1282 def _repo_group_compare(first_group_id, second_group_id):
1268 """
1283 """
1269 Gives higher priority to the groups with the most complex paths
1284 Gives higher priority to the groups with the most complex paths
1270 """
1285 """
1271 first_group = RepoGroup.get(first_group_id)
1286 first_group = RepoGroup.get(first_group_id)
1272 second_group = RepoGroup.get(second_group_id)
1287 second_group = RepoGroup.get(second_group_id)
1273 first_group_parts = (
1288 first_group_parts = (
1274 len(first_group.group_name.split('/')) if first_group else 0)
1289 len(first_group.group_name.split('/')) if first_group else 0)
1275 second_group_parts = (
1290 second_group_parts = (
1276 len(second_group.group_name.split('/')) if second_group else 0)
1291 len(second_group.group_name.split('/')) if second_group else 0)
1277 return cmp(second_group_parts, first_group_parts)
1292 return cmp(second_group_parts, first_group_parts)
1278
1293
1279 sorted_repo_group_ids = sorted(
1294 sorted_repo_group_ids = sorted(
1280 self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
1295 self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
1281 for repo_group_id in sorted_repo_group_ids:
1296 for repo_group_id in sorted_repo_group_ids:
1282 self.fixture.destroy_repo_group(repo_group_id)
1297 self.fixture.destroy_repo_group(repo_group_id)
1283
1298
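# A hedged note on the ordering above: `cmp` is not a Python 3 builtin, so it is
# assumed to be a small helper defined elsewhere in this module; cmp_to_key then
# makes deeper group paths sort first so children are destroyed before parents.
def _example_cleanup_order():
    import functools

    def by_depth_desc(first, second):
        # same idea as _repo_group_compare, but on plain path strings
        return len(second.split('/')) - len(first.split('/'))

    names = ['parent', 'parent/child', 'zombie']
    ordered = sorted(names, key=functools.cmp_to_key(by_depth_desc))
    assert ordered == ['parent/child', 'parent', 'zombie']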
1284 def _cleanup_repos(self):
1299 def _cleanup_repos(self):
1285 sorted_repos_ids = sorted(self.repos_ids)
1300 sorted_repos_ids = sorted(self.repos_ids)
1286 for repo_id in sorted_repos_ids:
1301 for repo_id in sorted_repos_ids:
1287 self.fixture.destroy_repo(repo_id)
1302 self.fixture.destroy_repo(repo_id)
1288
1303
1289 def _cleanup_user_groups(self):
1304 def _cleanup_user_groups(self):
1290 def _user_group_compare(first_group_id, second_group_id):
1305 def _user_group_compare(first_group_id, second_group_id):
1291 """
1306 """
1292 Gives higher priority to the groups with the most complex paths
1307 Gives higher priority to the groups with the most complex paths
1293 """
1308 """
1294 first_group = UserGroup.get(first_group_id)
1309 first_group = UserGroup.get(first_group_id)
1295 second_group = UserGroup.get(second_group_id)
1310 second_group = UserGroup.get(second_group_id)
1296 first_group_parts = (
1311 first_group_parts = (
1297 len(first_group.users_group_name.split('/'))
1312 len(first_group.users_group_name.split('/'))
1298 if first_group else 0)
1313 if first_group else 0)
1299 second_group_parts = (
1314 second_group_parts = (
1300 len(second_group.users_group_name.split('/'))
1315 len(second_group.users_group_name.split('/'))
1301 if second_group else 0)
1316 if second_group else 0)
1302 return cmp(second_group_parts, first_group_parts)
1317 return cmp(second_group_parts, first_group_parts)
1303
1318
1304 sorted_user_group_ids = sorted(
1319 sorted_user_group_ids = sorted(
1305 self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
1320 self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
1306 for user_group_id in sorted_user_group_ids:
1321 for user_group_id in sorted_user_group_ids:
1307 self.fixture.destroy_user_group(user_group_id)
1322 self.fixture.destroy_user_group(user_group_id)
1308
1323
1309 def _cleanup_users(self):
1324 def _cleanup_users(self):
1310 for user_id in self.user_ids:
1325 for user_id in self.user_ids:
1311 self.fixture.destroy_user(user_id)
1326 self.fixture.destroy_user(user_id)
1312
1327
1313
1328
1314 @pytest.fixture(scope='session')
1329 @pytest.fixture(scope='session')
1315 def testrun():
1330 def testrun():
1316 return {
1331 return {
1317 'uuid': uuid.uuid4(),
1332 'uuid': uuid.uuid4(),
1318 'start': datetime.datetime.utcnow().isoformat(),
1333 'start': datetime.datetime.utcnow().isoformat(),
1319 'timestamp': int(time.time()),
1334 'timestamp': int(time.time()),
1320 }
1335 }
1321
1336
1322
1337
1323 class AppenlightClient(object):
1338 class AppenlightClient(object):
1324
1339
1325 url_template = '{url}?protocol_version=0.5'
1340 url_template = '{url}?protocol_version=0.5'
1326
1341
1327 def __init__(
1342 def __init__(
1328 self, url, api_key, add_server=True, add_timestamp=True,
1343 self, url, api_key, add_server=True, add_timestamp=True,
1329 namespace=None, request=None, testrun=None):
1344 namespace=None, request=None, testrun=None):
1330 self.url = self.url_template.format(url=url)
1345 self.url = self.url_template.format(url=url)
1331 self.api_key = api_key
1346 self.api_key = api_key
1332 self.add_server = add_server
1347 self.add_server = add_server
1333 self.add_timestamp = add_timestamp
1348 self.add_timestamp = add_timestamp
1334 self.namespace = namespace
1349 self.namespace = namespace
1335 self.request = request
1350 self.request = request
1336 self.server = socket.getfqdn(socket.gethostname())
1351 self.server = socket.getfqdn(socket.gethostname())
1337 self.tags_before = {}
1352 self.tags_before = {}
1338 self.tags_after = {}
1353 self.tags_after = {}
1339 self.stats = []
1354 self.stats = []
1340 self.testrun = testrun or {}
1355 self.testrun = testrun or {}
1341
1356
1342 def tag_before(self, tag, value):
1357 def tag_before(self, tag, value):
1343 self.tags_before[tag] = value
1358 self.tags_before[tag] = value
1344
1359
1345 def tag_after(self, tag, value):
1360 def tag_after(self, tag, value):
1346 self.tags_after[tag] = value
1361 self.tags_after[tag] = value
1347
1362
1348 def collect(self, data):
1363 def collect(self, data):
1349 if self.add_server:
1364 if self.add_server:
1350 data.setdefault('server', self.server)
1365 data.setdefault('server', self.server)
1351 if self.add_timestamp:
1366 if self.add_timestamp:
1352 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1367 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1353 if self.namespace:
1368 if self.namespace:
1354 data.setdefault('namespace', self.namespace)
1369 data.setdefault('namespace', self.namespace)
1355 if self.request:
1370 if self.request:
1356 data.setdefault('request', self.request)
1371 data.setdefault('request', self.request)
1357 self.stats.append(data)
1372 self.stats.append(data)
1358
1373
1359 def send_stats(self):
1374 def send_stats(self):
1360 tags = [
1375 tags = [
1361 ('testrun', self.request),
1376 ('testrun', self.request),
1362 ('testrun.start', self.testrun['start']),
1377 ('testrun.start', self.testrun['start']),
1363 ('testrun.timestamp', self.testrun['timestamp']),
1378 ('testrun.timestamp', self.testrun['timestamp']),
1364 ('test', self.namespace),
1379 ('test', self.namespace),
1365 ]
1380 ]
1366 for key, value in self.tags_before.items():
1381 for key, value in self.tags_before.items():
1367 tags.append((key + '.before', value))
1382 tags.append((key + '.before', value))
1368 try:
1383 try:
1369 delta = self.tags_after[key] - value
1384 delta = self.tags_after[key] - value
1370 tags.append((key + '.delta', delta))
1385 tags.append((key + '.delta', delta))
1371 except Exception:
1386 except Exception:
1372 pass
1387 pass
1373 for key, value in self.tags_after.items():
1388 for key, value in self.tags_after.items():
1374 tags.append((key + '.after', value))
1389 tags.append((key + '.after', value))
1375 self.collect({
1390 self.collect({
1376 'message': "Collected tags",
1391 'message': "Collected tags",
1377 'tags': tags,
1392 'tags': tags,
1378 })
1393 })
1379
1394
1380 response = requests.post(
1395 response = requests.post(
1381 self.url,
1396 self.url,
1382 headers={
1397 headers={
1383 'X-appenlight-api-key': self.api_key},
1398 'X-appenlight-api-key': self.api_key},
1384 json=self.stats,
1399 json=self.stats,
1385 )
1400 )
1386
1401
1387 if not response.status_code == 200:
1402 if not response.status_code == 200:
1388 pprint.pprint(self.stats)
1403 pprint.pprint(self.stats)
1389 print(response.headers)
1404 print(response.headers)
1390 print(response.text)
1405 print(response.text)
1391 raise Exception('Sending to appenlight failed')
1406 raise Exception('Sending to appenlight failed')
1392
1407
1393
1408
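# A hedged usage sketch for AppenlightClient above; the URL, API key and the
# tagged metric are placeholders.
def _example_appenlight_report(testrun):
    client = AppenlightClient(
        url='https://appenlight.example.com/api/general',
        api_key='stub-api-key', namespace='vcs_operations', testrun=testrun)
    client.tag_before('commits', 10)
    client.tag_after('commits', 12)  # send_stats() also reports a 'commits.delta' of 2
    client.send_stats()              # POSTs the collected stats, raises on non-200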
1394 @pytest.fixture()
1409 @pytest.fixture()
1395 def gist_util(request, db_connection):
1410 def gist_util(request, db_connection):
1396 """
1411 """
1397 Provides a wired instance of `GistUtility` with integrated cleanup.
1412 Provides a wired instance of `GistUtility` with integrated cleanup.
1398 """
1413 """
1399 utility = GistUtility()
1414 utility = GistUtility()
1400 request.addfinalizer(utility.cleanup)
1415 request.addfinalizer(utility.cleanup)
1401 return utility
1416 return utility
1402
1417
1403
1418
1404 class GistUtility(object):
1419 class GistUtility(object):
1405 def __init__(self):
1420 def __init__(self):
1406 self.fixture = Fixture()
1421 self.fixture = Fixture()
1407 self.gist_ids = []
1422 self.gist_ids = []
1408
1423
1409 def create_gist(self, **kwargs):
1424 def create_gist(self, **kwargs):
1410 gist = self.fixture.create_gist(**kwargs)
1425 gist = self.fixture.create_gist(**kwargs)
1411 self.gist_ids.append(gist.gist_id)
1426 self.gist_ids.append(gist.gist_id)
1412 return gist
1427 return gist
1413
1428
1414 def cleanup(self):
1429 def cleanup(self):
1415 for id_ in self.gist_ids:
1430 for id_ in self.gist_ids:
1416 self.fixture.destroy_gists(str(id_))
1431 self.fixture.destroy_gists(str(id_))
1417
1432
1418
1433
1419 @pytest.fixture()
1434 @pytest.fixture()
1420 def enabled_backends(request):
1435 def enabled_backends(request):
1421 backends = request.config.option.backends
1436 backends = request.config.option.backends
1422 return backends[:]
1437 return backends[:]
1423
1438
1424
1439
1425 @pytest.fixture()
1440 @pytest.fixture()
1426 def settings_util(request, db_connection):
1441 def settings_util(request, db_connection):
1427 """
1442 """
1428 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1443 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1429 """
1444 """
1430 utility = SettingsUtility()
1445 utility = SettingsUtility()
1431 request.addfinalizer(utility.cleanup)
1446 request.addfinalizer(utility.cleanup)
1432 return utility
1447 return utility
1433
1448
1434
1449
1435 class SettingsUtility(object):
1450 class SettingsUtility(object):
1436 def __init__(self):
1451 def __init__(self):
1437 self.rhodecode_ui_ids = []
1452 self.rhodecode_ui_ids = []
1438 self.rhodecode_setting_ids = []
1453 self.rhodecode_setting_ids = []
1439 self.repo_rhodecode_ui_ids = []
1454 self.repo_rhodecode_ui_ids = []
1440 self.repo_rhodecode_setting_ids = []
1455 self.repo_rhodecode_setting_ids = []
1441
1456
1442 def create_repo_rhodecode_ui(
1457 def create_repo_rhodecode_ui(
1443 self, repo, section, value, key=None, active=True, cleanup=True):
1458 self, repo, section, value, key=None, active=True, cleanup=True):
1444 key = key or sha1_safe(f'{section}{value}{repo.repo_id}')
1459 key = key or sha1_safe(f'{section}{value}{repo.repo_id}')
1445
1460
1446 setting = RepoRhodeCodeUi()
1461 setting = RepoRhodeCodeUi()
1447 setting.repository_id = repo.repo_id
1462 setting.repository_id = repo.repo_id
1448 setting.ui_section = section
1463 setting.ui_section = section
1449 setting.ui_value = value
1464 setting.ui_value = value
1450 setting.ui_key = key
1465 setting.ui_key = key
1451 setting.ui_active = active
1466 setting.ui_active = active
1452 Session().add(setting)
1467 Session().add(setting)
1453 Session().commit()
1468 Session().commit()
1454
1469
1455 if cleanup:
1470 if cleanup:
1456 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1471 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1457 return setting
1472 return setting
1458
1473
1459 def create_rhodecode_ui(
1474 def create_rhodecode_ui(
1460 self, section, value, key=None, active=True, cleanup=True):
1475 self, section, value, key=None, active=True, cleanup=True):
1461 key = key or sha1_safe(f'{section}{value}')
1476 key = key or sha1_safe(f'{section}{value}')
1462
1477
1463 setting = RhodeCodeUi()
1478 setting = RhodeCodeUi()
1464 setting.ui_section = section
1479 setting.ui_section = section
1465 setting.ui_value = value
1480 setting.ui_value = value
1466 setting.ui_key = key
1481 setting.ui_key = key
1467 setting.ui_active = active
1482 setting.ui_active = active
1468 Session().add(setting)
1483 Session().add(setting)
1469 Session().commit()
1484 Session().commit()
1470
1485
1471 if cleanup:
1486 if cleanup:
1472 self.rhodecode_ui_ids.append(setting.ui_id)
1487 self.rhodecode_ui_ids.append(setting.ui_id)
1473 return setting
1488 return setting
1474
1489
1475 def create_repo_rhodecode_setting(
1490 def create_repo_rhodecode_setting(
1476 self, repo, name, value, type_, cleanup=True):
1491 self, repo, name, value, type_, cleanup=True):
1477 setting = RepoRhodeCodeSetting(
1492 setting = RepoRhodeCodeSetting(
1478 repo.repo_id, key=name, val=value, type=type_)
1493 repo.repo_id, key=name, val=value, type=type_)
1479 Session().add(setting)
1494 Session().add(setting)
1480 Session().commit()
1495 Session().commit()
1481
1496
1482 if cleanup:
1497 if cleanup:
1483 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1498 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1484 return setting
1499 return setting
1485
1500
1486 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1501 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1487 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1502 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1488 Session().add(setting)
1503 Session().add(setting)
1489 Session().commit()
1504 Session().commit()
1490
1505
1491 if cleanup:
1506 if cleanup:
1492 self.rhodecode_setting_ids.append(setting.app_settings_id)
1507 self.rhodecode_setting_ids.append(setting.app_settings_id)
1493
1508
1494 return setting
1509 return setting
1495
1510
1496 def cleanup(self):
1511 def cleanup(self):
1497 for id_ in self.rhodecode_ui_ids:
1512 for id_ in self.rhodecode_ui_ids:
1498 setting = RhodeCodeUi.get(id_)
1513 setting = RhodeCodeUi.get(id_)
1499 Session().delete(setting)
1514 Session().delete(setting)
1500
1515
1501 for id_ in self.rhodecode_setting_ids:
1516 for id_ in self.rhodecode_setting_ids:
1502 setting = RhodeCodeSetting.get(id_)
1517 setting = RhodeCodeSetting.get(id_)
1503 Session().delete(setting)
1518 Session().delete(setting)
1504
1519
1505 for id_ in self.repo_rhodecode_ui_ids:
1520 for id_ in self.repo_rhodecode_ui_ids:
1506 setting = RepoRhodeCodeUi.get(id_)
1521 setting = RepoRhodeCodeUi.get(id_)
1507 Session().delete(setting)
1522 Session().delete(setting)
1508
1523
1509 for id_ in self.repo_rhodecode_setting_ids:
1524 for id_ in self.repo_rhodecode_setting_ids:
1510 setting = RepoRhodeCodeSetting.get(id_)
1525 setting = RepoRhodeCodeSetting.get(id_)
1511 Session().delete(setting)
1526 Session().delete(setting)
1512
1527
1513 Session().commit()
1528 Session().commit()
1514
1529
1515
1530
1516 @pytest.fixture()
1531 @pytest.fixture()
1517 def no_notifications(request):
1532 def no_notifications(request):
1518 notification_patcher = mock.patch(
1533 notification_patcher = mock.patch(
1519 'rhodecode.model.notification.NotificationModel.create')
1534 'rhodecode.model.notification.NotificationModel.create')
1520 notification_patcher.start()
1535 notification_patcher.start()
1521 request.addfinalizer(notification_patcher.stop)
1536 request.addfinalizer(notification_patcher.stop)
1522
1537
1523
1538
1524 @pytest.fixture(scope='session')
1539 @pytest.fixture(scope='session')
1525 def repeat(request):
1540 def repeat(request):
1526 """
1541 """
1527 The number of repetitions is based on this fixture.
1542 The number of repetitions is based on this fixture.
1528
1543
1529 Slower calls may divide it by 10 or 100. The value is chosen so that the
1544 Slower calls may divide it by 10 or 100. The value is chosen so that the
1530 tests are not too slow in our default test suite.
1545 tests are not too slow in our default test suite.
1531 """
1546 """
1532 return request.config.getoption('--repeat')
1547 return request.config.getoption('--repeat')
1533
1548
1534
1549
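# A hedged sketch of consuming the `repeat` fixture above; the repository call
# and the divide-by-10 scaling for a slower operation are illustrative.
def _example_repeat_usage(repeat, backend):
    vcs_repo = backend.repo.scm_instance()
    for _ in range(int(repeat / 10)):  # slower call, so scale the loop down
        vcs_repo.get_commit()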
1535 @pytest.fixture()
1550 @pytest.fixture()
1536 def rhodecode_fixtures():
1551 def rhodecode_fixtures():
1537 return Fixture()
1552 return Fixture()
1538
1553
1539
1554
1540 @pytest.fixture()
1555 @pytest.fixture()
1541 def context_stub():
1556 def context_stub():
1542 """
1557 """
1543 Stub context object.
1558 Stub context object.
1544 """
1559 """
1545 context = pyramid.testing.DummyResource()
1560 context = pyramid.testing.DummyResource()
1546 return context
1561 return context
1547
1562
1548
1563
1549 @pytest.fixture()
1564 @pytest.fixture()
1550 def request_stub():
1565 def request_stub():
1551 """
1566 """
1552 Stub request object.
1567 Stub request object.
1553 """
1568 """
1554 from rhodecode.lib.base import bootstrap_request
1569 from rhodecode.lib.base import bootstrap_request
1555 request = bootstrap_request(scheme='https')
1570 request = bootstrap_request(scheme='https')
1556 return request
1571 return request
1557
1572
1558
1573
1559 @pytest.fixture()
1574 @pytest.fixture()
1560 def config_stub(request, request_stub):
1575 def config_stub(request, request_stub):
1561 """
1576 """
1562 Set up pyramid.testing and return the Configurator.
1577 Set up pyramid.testing and return the Configurator.
1563 """
1578 """
1564 from rhodecode.lib.base import bootstrap_config
1579 from rhodecode.lib.base import bootstrap_config
1565 config = bootstrap_config(request=request_stub)
1580 config = bootstrap_config(request=request_stub)
1566
1581
1567 @request.addfinalizer
1582 @request.addfinalizer
1568 def cleanup():
1583 def cleanup():
1569 pyramid.testing.tearDown()
1584 pyramid.testing.tearDown()
1570
1585
1571 return config
1586 return config
1572
1587
1573
1588
1574 @pytest.fixture()
1589 @pytest.fixture()
1575 def StubIntegrationType():
1590 def StubIntegrationType():
1576 class _StubIntegrationType(IntegrationTypeBase):
1591 class _StubIntegrationType(IntegrationTypeBase):
1577 """ Test integration type class """
1592 """ Test integration type class """
1578
1593
1579 key = 'test'
1594 key = 'test'
1580 display_name = 'Test integration type'
1595 display_name = 'Test integration type'
1581 description = 'A test integration type for testing'
1596 description = 'A test integration type for testing'
1582
1597
1583 @classmethod
1598 @classmethod
1584 def icon(cls):
1599 def icon(cls):
1585 return 'test_icon_html_image'
1600 return 'test_icon_html_image'
1586
1601
1587 def __init__(self, settings):
1602 def __init__(self, settings):
1588 super(_StubIntegrationType, self).__init__(settings)
1603 super(_StubIntegrationType, self).__init__(settings)
1589 self.sent_events = [] # for testing
1604 self.sent_events = [] # for testing
1590
1605
1591 def send_event(self, event):
1606 def send_event(self, event):
1592 self.sent_events.append(event)
1607 self.sent_events.append(event)
1593
1608
1594 def settings_schema(self):
1609 def settings_schema(self):
1595 class SettingsSchema(colander.Schema):
1610 class SettingsSchema(colander.Schema):
1596 test_string_field = colander.SchemaNode(
1611 test_string_field = colander.SchemaNode(
1597 colander.String(),
1612 colander.String(),
1598 missing=colander.required,
1613 missing=colander.required,
1599 title='test string field',
1614 title='test string field',
1600 )
1615 )
1601 test_int_field = colander.SchemaNode(
1616 test_int_field = colander.SchemaNode(
1602 colander.Int(),
1617 colander.Int(),
1603 title='some integer setting',
1618 title='some integer setting',
1604 )
1619 )
1605 return SettingsSchema()
1620 return SettingsSchema()
1606
1621
1607
1622
1608 integration_type_registry.register_integration_type(_StubIntegrationType)
1623 integration_type_registry.register_integration_type(_StubIntegrationType)
1609 return _StubIntegrationType
1624 return _StubIntegrationType
1610
1625
1611
1626
1612 @pytest.fixture()
1627 @pytest.fixture()
1613 def stub_integration_settings():
1628 def stub_integration_settings():
1614 return {
1629 return {
1615 'test_string_field': 'some data',
1630 'test_string_field': 'some data',
1616 'test_int_field': 100,
1631 'test_int_field': 100,
1617 }
1632 }
1618
1633
1619
1634
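# A hedged sketch combining the two fixtures above; constructing the stub type
# directly with the stub settings is assumed to be valid here.
def _example_stub_integration(StubIntegrationType, stub_integration_settings):
    integration_type = StubIntegrationType(stub_integration_settings)
    integration_type.send_event({'name': 'repo-push'})  # any event payload
    assert integration_type.sent_events == [{'name': 'repo-push'}]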
1620 @pytest.fixture()
1635 @pytest.fixture()
1621 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1636 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1622 stub_integration_settings):
1637 stub_integration_settings):
1623 integration = IntegrationModel().create(
1638 integration = IntegrationModel().create(
1624 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1639 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1625 name='test repo integration',
1640 name='test repo integration',
1626 repo=repo_stub, repo_group=None, child_repos_only=None)
1641 repo=repo_stub, repo_group=None, child_repos_only=None)
1627
1642
1628 @request.addfinalizer
1643 @request.addfinalizer
1629 def cleanup():
1644 def cleanup():
1630 IntegrationModel().delete(integration)
1645 IntegrationModel().delete(integration)
1631
1646
1632 return integration
1647 return integration
1633
1648
1634
1649
1635 @pytest.fixture()
1650 @pytest.fixture()
1636 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1651 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1637 stub_integration_settings):
1652 stub_integration_settings):
1638 integration = IntegrationModel().create(
1653 integration = IntegrationModel().create(
1639 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1654 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1640 name='test repogroup integration',
1655 name='test repogroup integration',
1641 repo=None, repo_group=test_repo_group, child_repos_only=True)
1656 repo=None, repo_group=test_repo_group, child_repos_only=True)
1642
1657
1643 @request.addfinalizer
1658 @request.addfinalizer
1644 def cleanup():
1659 def cleanup():
1645 IntegrationModel().delete(integration)
1660 IntegrationModel().delete(integration)
1646
1661
1647 return integration
1662 return integration
1648
1663
1649
1664
1650 @pytest.fixture()
1665 @pytest.fixture()
1651 def repogroup_recursive_integration_stub(request, test_repo_group,
1666 def repogroup_recursive_integration_stub(request, test_repo_group,
1652 StubIntegrationType, stub_integration_settings):
1667 StubIntegrationType, stub_integration_settings):
1653 integration = IntegrationModel().create(
1668 integration = IntegrationModel().create(
1654 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1669 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1655 name='test recursive repogroup integration',
1670 name='test recursive repogroup integration',
1656 repo=None, repo_group=test_repo_group, child_repos_only=False)
1671 repo=None, repo_group=test_repo_group, child_repos_only=False)
1657
1672
1658 @request.addfinalizer
1673 @request.addfinalizer
1659 def cleanup():
1674 def cleanup():
1660 IntegrationModel().delete(integration)
1675 IntegrationModel().delete(integration)
1661
1676
1662 return integration
1677 return integration
1663
1678
1664
1679
1665 @pytest.fixture()
1680 @pytest.fixture()
1666 def global_integration_stub(request, StubIntegrationType,
1681 def global_integration_stub(request, StubIntegrationType,
1667 stub_integration_settings):
1682 stub_integration_settings):
1668 integration = IntegrationModel().create(
1683 integration = IntegrationModel().create(
1669 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1684 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1670 name='test global integration',
1685 name='test global integration',
1671 repo=None, repo_group=None, child_repos_only=None)
1686 repo=None, repo_group=None, child_repos_only=None)
1672
1687
1673 @request.addfinalizer
1688 @request.addfinalizer
1674 def cleanup():
1689 def cleanup():
1675 IntegrationModel().delete(integration)
1690 IntegrationModel().delete(integration)
1676
1691
1677 return integration
1692 return integration
1678
1693
1679
1694
1680 @pytest.fixture()
1695 @pytest.fixture()
1681 def root_repos_integration_stub(request, StubIntegrationType,
1696 def root_repos_integration_stub(request, StubIntegrationType,
1682 stub_integration_settings):
1697 stub_integration_settings):
1683 integration = IntegrationModel().create(
1698 integration = IntegrationModel().create(
1684 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1699 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1685 name='test global integration',
1700 name='test global integration',
1686 repo=None, repo_group=None, child_repos_only=True)
1701 repo=None, repo_group=None, child_repos_only=True)
1687
1702
1688 @request.addfinalizer
1703 @request.addfinalizer
1689 def cleanup():
1704 def cleanup():
1690 IntegrationModel().delete(integration)
1705 IntegrationModel().delete(integration)
1691
1706
1692 return integration
1707 return integration
1693
1708
1694
1709
1695 @pytest.fixture()
1710 @pytest.fixture()
1696 def local_dt_to_utc():
1711 def local_dt_to_utc():
1697 def _factory(dt):
1712 def _factory(dt):
1698 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1713 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1699 dateutil.tz.tzutc()).replace(tzinfo=None)
1714 dateutil.tz.tzutc()).replace(tzinfo=None)
1700 return _factory
1715 return _factory
1701
1716
1702
1717
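# A hedged example for `local_dt_to_utc` above: the returned factory converts a
# naive local datetime to naive UTC, so the exact result depends on the machine.
def _example_local_dt_to_utc(local_dt_to_utc):
    naive_local = datetime.datetime(2023, 6, 1, 12, 0)
    naive_utc = local_dt_to_utc(naive_local)
    assert naive_utc.tzinfo is None  # tzinfo is attached, shifted, then stripped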
1703 @pytest.fixture()
1718 @pytest.fixture()
1704 def disable_anonymous_user(request, baseapp):
1719 def disable_anonymous_user(request, baseapp):
1705 set_anonymous_access(False)
1720 set_anonymous_access(False)
1706
1721
1707 @request.addfinalizer
1722 @request.addfinalizer
1708 def cleanup():
1723 def cleanup():
1709 set_anonymous_access(True)
1724 set_anonymous_access(True)
1710
1725
1711
1726
1712 @pytest.fixture(scope='module')
1727 @pytest.fixture(scope='module')
1713 def rc_fixture(request):
1728 def rc_fixture(request):
1714 return Fixture()
1729 return Fixture()
1715
1730
1716
1731
1717 @pytest.fixture()
1732 @pytest.fixture()
1718 def repo_groups(request):
1733 def repo_groups(request):
1719 fixture = Fixture()
1734 fixture = Fixture()
1720
1735
1721 session = Session()
1736 session = Session()
1722 zombie_group = fixture.create_repo_group('zombie')
1737 zombie_group = fixture.create_repo_group('zombie')
1723 parent_group = fixture.create_repo_group('parent')
1738 parent_group = fixture.create_repo_group('parent')
1724 child_group = fixture.create_repo_group('parent/child')
1739 child_group = fixture.create_repo_group('parent/child')
1725 groups_in_db = session.query(RepoGroup).all()
1740 groups_in_db = session.query(RepoGroup).all()
1726 assert len(groups_in_db) == 3
1741 assert len(groups_in_db) == 3
1727 assert child_group.group_parent_id == parent_group.group_id
1742 assert child_group.group_parent_id == parent_group.group_id
1728
1743
1729 @request.addfinalizer
1744 @request.addfinalizer
1730 def cleanup():
1745 def cleanup():
1731 fixture.destroy_repo_group(zombie_group)
1746 fixture.destroy_repo_group(zombie_group)
1732 fixture.destroy_repo_group(child_group)
1747 fixture.destroy_repo_group(child_group)
1733 fixture.destroy_repo_group(parent_group)
1748 fixture.destroy_repo_group(parent_group)
1734
1749
1735 return zombie_group, parent_group, child_group
1750 return zombie_group, parent_group, child_group
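# A hedged sketch of consuming the `repo_groups` fixture above; the assertions
# restate what the fixture already verified before returning the groups.
def _example_repo_groups_usage(repo_groups):
    zombie_group, parent_group, child_group = repo_groups
    assert child_group.group_parent_id == parent_group.group_id
    assert zombie_group.group_parent_id is None  # top-level group, assumed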
@@ -1,189 +1,189 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import pytest
20 import pytest
21 from mock import Mock, patch, DEFAULT
21 from mock import Mock, patch, DEFAULT
22
22
23 import rhodecode
23 import rhodecode
24 from rhodecode.model import db, scm
24 from rhodecode.model import db, scm
25
25
26
26
27 def test_scm_instance_config(backend):
27 def test_scm_instance_config(backend):
28 repo = backend.create_repo()
28 repo = backend.create_repo()
29 with patch.multiple('rhodecode.model.db.Repository',
29 with patch.multiple('rhodecode.model.db.Repository',
30 _get_instance=DEFAULT,
30 _get_instance=DEFAULT,
31 _get_instance_cached=DEFAULT) as mocks:
31 _get_instance_cached=DEFAULT) as mocks:
32
32
33 repo.scm_instance()
33 repo.scm_instance()
34 mocks['_get_instance'].assert_called_with(
34 mocks['_get_instance'].assert_called_with(
35 config=None, cache=False)
35 config=None, cache=False)
36
36
37 repo.scm_instance(vcs_full_cache=False)
37 repo.scm_instance(vcs_full_cache=False)
38 mocks['_get_instance'].assert_called_with(
38 mocks['_get_instance'].assert_called_with(
39 config=None, cache=False)
39 config=None, cache=False)
40
40
41 repo.scm_instance(vcs_full_cache=True)
41 repo.scm_instance(vcs_full_cache=True)
42 mocks['_get_instance_cached'].assert_called()
42 mocks['_get_instance_cached'].assert_called()
43
43
44
44
45 def test_get_instance_config(backend):
45 def test_get_instance_config(backend):
46 repo = backend.create_repo()
46 repo = backend.create_repo()
47 vcs_class = Mock()
47 vcs_class = Mock()
48 with patch.multiple('rhodecode.lib.vcs.backends',
48 with patch.multiple('rhodecode.lib.vcs.backends',
49 get_scm=DEFAULT,
49 get_scm=DEFAULT,
50 get_backend=DEFAULT) as mocks:
50 get_backend=DEFAULT) as mocks:
51 mocks['get_scm'].return_value = backend.alias
51 mocks['get_scm'].return_value = backend.alias
52 mocks['get_backend'].return_value = vcs_class
52 mocks['get_backend'].return_value = vcs_class
53 with patch('rhodecode.model.db.Repository._config') as config_mock:
53 with patch('rhodecode.model.db.Repository._config') as config_mock:
54 repo._get_instance()
54 repo._get_instance()
55 vcs_class.assert_called_with(
55 vcs_class.assert_called_with(
56 repo_path=repo.repo_full_path, config=config_mock,
56 repo_path=repo.repo_full_path, config=config_mock,
57 create=False, with_wire={'cache': True, 'repo_state_uid': None})
57 create=False, with_wire={'cache': True, 'repo_state_uid': None})
58
58
59 new_config = {'override': 'old_config'}
59 new_config = {'override': 'old_config'}
60 repo._get_instance(config=new_config)
60 repo._get_instance(config=new_config)
61 vcs_class.assert_called_with(
61 vcs_class.assert_called_with(
62 repo_path=repo.repo_full_path, config=new_config, create=False,
62 repo_path=repo.repo_full_path, config=new_config, create=False,
63 with_wire={'cache': True, 'repo_state_uid': None})
63 with_wire={'cache': True, 'repo_state_uid': None})
64
64
65
65
66 def test_mark_for_invalidation_config(backend):
66 def test_mark_for_invalidation_config(backend):
67 repo = backend.create_repo()
67 repo = backend.create_repo()
68 with patch('rhodecode.model.db.Repository.update_commit_cache') as _mock:
68 with patch('rhodecode.model.db.Repository.update_commit_cache') as _mock:
69 scm.ScmModel().mark_for_invalidation(repo.repo_name)
69 scm.ScmModel().mark_for_invalidation(repo.repo_name)
70 _, kwargs = _mock.call_args
70 _, kwargs = _mock.call_args
71 assert kwargs['config'].__dict__ == repo._config.__dict__
71 assert kwargs['config'].__dict__ == repo._config.__dict__
72
72
73
73
74 def test_mark_for_invalidation_with_delete_updates_last_commit(backend):
74 def test_mark_for_invalidation_with_delete_updates_last_commit(backend):
75 commits = [{'message': 'A'}, {'message': 'B'}]
75 commits = [{'message': 'A'}, {'message': 'B'}]
76 repo = backend.create_repo(commits=commits)
76 repo = backend.create_repo(commits=commits)
77 scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
77 scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
78 assert repo.changeset_cache['revision'] == 1
78 assert repo.changeset_cache['revision'] == 1
79
79
80
80
81 def test_mark_for_invalidation_with_delete_updates_last_commit_empty(backend):
81 def test_mark_for_invalidation_with_delete_updates_last_commit_empty(backend):
82 repo = backend.create_repo()
82 repo = backend.create_repo()
83 scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
83 scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
84 assert repo.changeset_cache['revision'] == -1
84 assert repo.changeset_cache['revision'] == -1
85
85
86
86
87 def test_strip_with_multiple_heads(backend_hg):
87 def test_strip_with_multiple_heads(backend_hg):
88 commits = [
88 commits = [
89 {'message': 'A'},
89 {'message': 'A'},
90 {'message': 'a'},
90 {'message': 'a'},
91 {'message': 'b'},
91 {'message': 'b'},
92 {'message': 'B', 'parents': ['A']},
92 {'message': 'B', 'parents': ['A'], 'branch': 'feature'},
93 {'message': 'a1'},
93 {'message': 'a1', 'branch': 'feature'},
94 ]
94 ]
95 repo = backend_hg.create_repo(commits=commits)
95 repo = backend_hg.create_repo(commits=commits)
96 commit_ids = backend_hg.commit_ids
96 commit_ids = backend_hg.commit_ids
97
97
98 model = scm.ScmModel()
98 model = scm.ScmModel()
99 model.strip(repo, commit_ids['b'], branch=None)
99 model.strip(repo, commit_ids['b'], branch=None)
100
100
101 vcs_repo = repo.scm_instance()
101 vcs_repo = repo.scm_instance()
102 rest_commit_ids = [c.raw_id for c in vcs_repo.get_commits()]
102 rest_commit_ids = [c.raw_id for c in vcs_repo.get_commits()]
103 assert len(rest_commit_ids) == 4
103 assert len(rest_commit_ids) == 4
104 assert commit_ids['b'] not in rest_commit_ids
104 assert commit_ids['b'] not in rest_commit_ids
105
105
106
106
107 def test_strip_with_single_heads(backend_hg):
107 def test_strip_with_single_heads(backend_hg):
108 commits = [
108 commits = [
109 {'message': 'A'},
109 {'message': 'A'},
110 {'message': 'a'},
110 {'message': 'a'},
111 {'message': 'b'},
111 {'message': 'b'},
112 ]
112 ]
113 repo = backend_hg.create_repo(commits=commits)
113 repo = backend_hg.create_repo(commits=commits)
114 commit_ids = backend_hg.commit_ids
114 commit_ids = backend_hg.commit_ids
115
115
116 model = scm.ScmModel()
116 model = scm.ScmModel()
117 model.strip(repo, commit_ids['b'], branch=None)
117 model.strip(repo, commit_ids['b'], branch=None)
118
118
119 vcs_repo = repo.scm_instance()
119 vcs_repo = repo.scm_instance()
120 rest_commit_ids = [c.raw_id for c in vcs_repo.get_commits()]
120 rest_commit_ids = [c.raw_id for c in vcs_repo.get_commits()]
121 assert len(rest_commit_ids) == 2
121 assert len(rest_commit_ids) == 2
122 assert commit_ids['b'] not in rest_commit_ids
122 assert commit_ids['b'] not in rest_commit_ids
123
123
124
124
125 def test_get_nodes_returns_unicode_flat(backend):
125 def test_get_nodes_returns_unicode_flat(backend):
126 repo = backend.repo
126 repo = backend.repo
127 commit_id = repo.get_commit(commit_idx=0).raw_id
127 commit_id = repo.get_commit(commit_idx=0).raw_id
128 directories, files = scm.ScmModel().get_nodes(repo.repo_name, commit_id, flat=True)
128 directories, files = scm.ScmModel().get_nodes(repo.repo_name, commit_id, flat=True)
129 assert_contains_only_str_chars(directories)
129 assert_contains_only_str_chars(directories)
130 assert_contains_only_str_chars(files)
130 assert_contains_only_str_chars(files)
131
131
132
132
133 def test_get_nodes_returns_unicode_non_flat(backend):
133 def test_get_nodes_returns_unicode_non_flat(backend):
134 repo = backend.repo
134 repo = backend.repo
135 commit_id = repo.get_commit(commit_idx=0).raw_id
135 commit_id = repo.get_commit(commit_idx=0).raw_id
136
136
137 directories, files = scm.ScmModel().get_nodes(repo.repo_name, commit_id, flat=False)
137 directories, files = scm.ScmModel().get_nodes(repo.repo_name, commit_id, flat=False)
138 # johbo: Checking only the names for now, since that is the critical
138 # johbo: Checking only the names for now, since that is the critical
139 # part.
139 # part.
140 assert_contains_only_str_chars([d['name'] for d in directories])
140 assert_contains_only_str_chars([d['name'] for d in directories])
141 assert_contains_only_str_chars([f['name'] for f in files])
141 assert_contains_only_str_chars([f['name'] for f in files])
142
142
143
143
144 def test_get_nodes_max_file_bytes(backend_random):
144 def test_get_nodes_max_file_bytes(backend_random):
145 repo = backend_random.repo
145 repo = backend_random.repo
146 max_file_bytes = 10
146 max_file_bytes = 10
147 directories, files = scm.ScmModel().get_nodes(
147 directories, files = scm.ScmModel().get_nodes(
148 repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
148 repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
149 extended_info=True, flat=False)
149 extended_info=True, flat=False)
150 assert any(file['content'] and len(file['content']) > max_file_bytes
150 assert any(file['content'] and len(file['content']) > max_file_bytes
151 for file in files)
151 for file in files)
152
152
153 directories, files = scm.ScmModel().get_nodes(
153 directories, files = scm.ScmModel().get_nodes(
154 repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
154 repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
155 extended_info=True, flat=False, max_file_bytes=max_file_bytes)
155 extended_info=True, flat=False, max_file_bytes=max_file_bytes)
156 assert all(
156 assert all(
157 file['content'] is None if file['size'] > max_file_bytes else True
157 file['content'] is None if file['size'] > max_file_bytes else True
158 for file in files)
158 for file in files)
159
159
160
160
161 def assert_contains_only_str_chars(structure):
161 def assert_contains_only_str_chars(structure):
162 assert structure
162 assert structure
163 for value in structure:
163 for value in structure:
164 assert isinstance(value, str)
164 assert isinstance(value, str)
165
165
166
166
167 @pytest.mark.backends("hg", "git")
167 @pytest.mark.backends("hg", "git")
168 def test_get_non_str_reference(backend):
168 def test_get_non_str_reference(backend):
169 model = scm.ScmModel()
169 model = scm.ScmModel()
170 special_name = "Adını"
170 special_name = "Adını"
171 non_str_list = [special_name]
171 non_str_list = [special_name]
172
172
173 def scm_instance():
173 def scm_instance():
174 return Mock(
174 return Mock(
175 branches=non_str_list, bookmarks=non_str_list,
175 branches=non_str_list, bookmarks=non_str_list,
176 tags=non_str_list, alias=backend.alias)
176 tags=non_str_list, alias=backend.alias)
177
177
178 repo = Mock(__class__=db.Repository, scm_instance=scm_instance)
178 repo = Mock(__class__=db.Repository, scm_instance=scm_instance)
179 choices, __ = model.get_repo_landing_revs(translator=lambda s: s, repo=repo)
179 choices, __ = model.get_repo_landing_revs(translator=lambda s: s, repo=repo)
180 if backend.alias == 'hg':
180 if backend.alias == 'hg':
181 valid_choices = [
181 valid_choices = [
182 'rev:tip', f'branch:{special_name}',
182 'rev:tip', f'branch:{special_name}',
183 f'book:{special_name}', f'tag:{special_name}']
183 f'book:{special_name}', f'tag:{special_name}']
184 else:
184 else:
185 valid_choices = [
185 valid_choices = [
186 'rev:tip', f'branch:{special_name}',
186 'rev:tip', f'branch:{special_name}',
187 f'tag:{special_name}']
187 f'tag:{special_name}']
188
188
189 assert choices == valid_choices
189 assert choices == valid_choices
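The 'parents' and 'branch' keys added in the hunk above are what make the fixture history genuinely non-linear: commit 'B' is created on top of 'A' on a separate named branch instead of extending 'b', so the repository ends up with two heads and stripping 'b' leaves the other head intact. Below is a minimal, self-contained sketch (plain Python, not RhodeCode code) of how that commits spec maps onto a DAG with two heads; the rule that a commit without an explicit 'parents' key extends the previously listed commit is an assumption read off the tests above.

# Illustration only: count the heads implied by a commits spec in the
# format used by the tests in this diff.
def count_heads(commits):
    parents = {}
    previous = None
    for spec in commits:
        message = spec['message']
        # assumption: with no explicit 'parents' key a commit extends the
        # previously listed commit, as the linear fixtures above suggest
        parents[message] = spec.get('parents', [previous] if previous else [])
        previous = message
    referenced = {p for plist in parents.values() for p in plist}
    return sum(1 for message in parents if message not in referenced)

commits = [
    {'message': 'A'},
    {'message': 'a'},
    {'message': 'b'},
    {'message': 'B', 'parents': ['A'], 'branch': 'feature'},
    {'message': 'a1', 'branch': 'feature'},
]
assert count_heads(commits) == 2  # the heads are 'b' and 'a1'

With that graph, stripping 'b' removes exactly one commit, which is why the multi-head test above expects four commits to remain.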
@@ -1,493 +1,494 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import threading
19 import threading
20 import time
20 import time
21 import sys
21 import sys
22 import logging
22 import logging
23 import os.path
23 import os.path
24 import subprocess
24 import subprocess
25 import tempfile
25 import tempfile
26 import urllib.request
26 import urllib.request
27 import urllib.error
27 import urllib.error
28 import urllib.parse
28 import urllib.parse
29 from lxml.html import fromstring, tostring
29 from lxml.html import fromstring, tostring
30 from lxml.cssselect import CSSSelector
30 from lxml.cssselect import CSSSelector
31 from urllib.parse import unquote_plus
31 from urllib.parse import unquote_plus
32 import webob
32 import webob
33
33
34 from webtest.app import TestResponse, TestApp
34 from webtest.app import TestResponse, TestApp
35
35
36
36
37 import pytest
37 import pytest
38
38
39 try:
39 try:
40 import rc_testdata
40 import rc_testdata
41 except ImportError:
41 except ImportError:
42 raise ImportError('Failed to import rc_testdata, '
42 raise ImportError('Failed to import rc_testdata, '
43 'please make sure this package is installed from requirements_test.txt')
43 'please make sure this package is installed from requirements_test.txt')
44
44
45 from rhodecode.model.db import User, Repository
45 from rhodecode.model.db import User, Repository
46 from rhodecode.model.meta import Session
46 from rhodecode.model.meta import Session
47 from rhodecode.model.scm import ScmModel
47 from rhodecode.model.scm import ScmModel
48 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
48 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
49 from rhodecode.lib.vcs.backends.base import EmptyCommit
49 from rhodecode.lib.vcs.backends.base import EmptyCommit
50 from rhodecode.tests import login_user_session
50 from rhodecode.tests import login_user_session
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54
54
55 def print_to_func(value, print_to=sys.stderr):
55 def print_to_func(value, print_to=sys.stderr):
56 print(value, file=print_to)
56 print(value, file=print_to)
57
57
58
58
59 class CustomTestResponse(TestResponse):
59 class CustomTestResponse(TestResponse):
60
60
61 def _save_output(self, out):
61 def _save_output(self, out):
62 f = tempfile.NamedTemporaryFile(mode='w', delete=False, prefix='rc-test-', suffix='.html')
62 f = tempfile.NamedTemporaryFile(mode='w', delete=False, prefix='rc-test-', suffix='.html')
63 f.write(out)
63 f.write(out)
64 return f.name
64 return f.name
65
65
66 def mustcontain(self, *strings, **kw):
66 def mustcontain(self, *strings, **kw):
67 """
67 """
68 Assert that the response contains all the strings passed
68 Assert that the response contains all the strings passed
69 in as arguments.
69 in as arguments.
70
70
71 Equivalent to::
71 Equivalent to::
72
72
73 assert string in res
73 assert string in res
74 """
74 """
75 print_body = kw.pop('print_body', False)
75 print_body = kw.pop('print_body', False)
76 print_to = kw.pop('print_to', sys.stderr)
76 print_to = kw.pop('print_to', sys.stderr)
77
77
78 if 'no' in kw:
78 if 'no' in kw:
79 no = kw['no']
79 no = kw['no']
80 del kw['no']
80 del kw['no']
81 if isinstance(no, str):
81 if isinstance(no, str):
82 no = [no]
82 no = [no]
83 else:
83 else:
84 no = []
84 no = []
85 if kw:
85 if kw:
86 raise TypeError(f"The only keyword argument allowed is 'no', got {kw}")
86 raise TypeError(f"The only keyword argument allowed is 'no', got {kw}")
87
87
88 f = self._save_output(str(self))
88 f = self._save_output(str(self))
89
89
90 for s in strings:
90 for s in strings:
91 if s not in self:
91 if s not in self:
92 print_to_func(f"Actual response (no {s!r}):", print_to=print_to)
92 print_to_func(f"Actual response (no {s!r}):", print_to=print_to)
93 print_to_func(f"body output saved as `{f}`", print_to=print_to)
93 print_to_func(f"body output saved as `{f}`", print_to=print_to)
94 if print_body:
94 if print_body:
95 print_to_func(str(self), print_to=print_to)
95 print_to_func(str(self), print_to=print_to)
96 raise IndexError(f"Body does not contain string {s!r}, body output saved as {f}")
96 raise IndexError(f"Body does not contain string {s!r}, body output saved as {f}")
97
97
98 for no_s in no:
98 for no_s in no:
99 if no_s in self:
99 if no_s in self:
100 print_to_func(f"Actual response (has {no_s!r})", print_to=print_to)
100 print_to_func(f"Actual response (has {no_s!r})", print_to=print_to)
101 print_to_func(f"body output saved as `{f}`", print_to=print_to)
101 print_to_func(f"body output saved as `{f}`", print_to=print_to)
102 if print_body:
102 if print_body:
103 print_to_func(str(self), print_to=print_to)
103 print_to_func(str(self), print_to=print_to)
104 raise IndexError(f"Body contains bad string {no_s!r}, body output saved as {f}")
104 raise IndexError(f"Body contains bad string {no_s!r}, body output saved as {f}")
105
105
106 def assert_response(self):
106 def assert_response(self):
107 return AssertResponse(self)
107 return AssertResponse(self)
108
108
109 def get_session_from_response(self):
109 def get_session_from_response(self):
110 """
110 """
111 This returns the session from a response object.
111 This returns the session from a response object.
112 """
112 """
113 from rhodecode.lib.rc_beaker import session_factory_from_settings
113 from rhodecode.lib.rc_beaker import session_factory_from_settings
114 session = session_factory_from_settings(self.test_app._pyramid_settings)
114 session = session_factory_from_settings(self.test_app._pyramid_settings)
115 return session(self.request)
115 return session(self.request)
116
116
117
117
118 class TestRequest(webob.BaseRequest):
118 class TestRequest(webob.BaseRequest):
119
119
120 # for py.test, so it doesn't try to run this class because its name starts with Test...
120 # for py.test, so it doesn't try to run this class because its name starts with Test...
121 disabled = True
121 disabled = True
122 ResponseClass = CustomTestResponse
122 ResponseClass = CustomTestResponse
123
123
124 def add_response_callback(self, callback):
124 def add_response_callback(self, callback):
125 pass
125 pass
126
126
127 @classmethod
127 @classmethod
128 def blank(cls, path, environ=None, base_url=None,
128 def blank(cls, path, environ=None, base_url=None,
129 headers=None, POST=None, **kw):
129 headers=None, POST=None, **kw):
130
130
131 if not path.isascii():
131 if not path.isascii():
132 # custom-quote the path if it contains non-ascii chars
132 # custom-quote the path if it contains non-ascii chars
133 path = urllib.parse.quote(path)
133 path = urllib.parse.quote(path)
134
134
135 return super(TestRequest, cls).blank(
135 return super(TestRequest, cls).blank(
136 path, environ=environ, base_url=base_url, headers=headers, POST=POST, **kw)
136 path, environ=environ, base_url=base_url, headers=headers, POST=POST, **kw)
137
137
138
138
139 class CustomTestApp(TestApp):
139 class CustomTestApp(TestApp):
140 """
140 """
141 Custom app that makes mustcontain more useful and exposes some extra helper methods
141 Custom app that makes mustcontain more useful and exposes some extra helper methods
142 """
142 """
143 RequestClass = TestRequest
143 RequestClass = TestRequest
144 rc_login_data = {}
144 rc_login_data = {}
145 rc_current_session = None
145 rc_current_session = None
146
146
147 def login(self, username=None, password=None):
147 def login(self, username=None, password=None):
148 from rhodecode.lib import auth
148 from rhodecode.lib import auth
149
149
150 if username and password:
150 if username and password:
151 session = login_user_session(self, username, password)
151 session = login_user_session(self, username, password)
152 else:
152 else:
153 session = login_user_session(self)
153 session = login_user_session(self)
154
154
155 self.rc_login_data['csrf_token'] = auth.get_csrf_token(session)
155 self.rc_login_data['csrf_token'] = auth.get_csrf_token(session)
156 self.rc_current_session = session
156 self.rc_current_session = session
157 return session['rhodecode_user']
157 return session['rhodecode_user']
158
158
159 @property
159 @property
160 def csrf_token(self):
160 def csrf_token(self):
161 return self.rc_login_data['csrf_token']
161 return self.rc_login_data['csrf_token']
162
162
163 @property
163 @property
164 def _pyramid_registry(self):
164 def _pyramid_registry(self):
165 return self.app.config.registry
165 return self.app.config.registry
166
166
167 @property
167 @property
168 def _pyramid_settings(self):
168 def _pyramid_settings(self):
169 return self._pyramid_registry.settings
169 return self._pyramid_registry.settings
170
170
171 def do_request(self, req, status=None, expect_errors=None):
171 def do_request(self, req, status=None, expect_errors=None):
172 # you can put custom code here
172 # you can put custom code here
173 return super().do_request(req, status, expect_errors)
173 return super().do_request(req, status, expect_errors)
174
174
175
175
176 def set_anonymous_access(enabled):
176 def set_anonymous_access(enabled):
177 """(Dis)allows anonymous access depending on parameter `enabled`"""
177 """(Dis)allows anonymous access depending on parameter `enabled`"""
178 user = User.get_default_user()
178 user = User.get_default_user()
179 user.active = enabled
179 user.active = enabled
180 Session().add(user)
180 Session().add(user)
181 Session().commit()
181 Session().commit()
182 time.sleep(1.5) # must sleep for cache (1s to expire)
182 time.sleep(1.5) # must sleep for cache (1s to expire)
183 log.info('anonymous access is now: %s', enabled)
183 log.info('anonymous access is now: %s', enabled)
184 assert enabled == User.get_default_user().active, (
184 assert enabled == User.get_default_user().active, (
185 'Cannot set anonymous access')
185 'Cannot set anonymous access')
186
186
187
187
188 def check_xfail_backends(node, backend_alias):
188 def check_xfail_backends(node, backend_alias):
189 # Using "xfail_backends" here intentionally, since this marks work
189 # Using "xfail_backends" here intentionally, since this marks work
190 # which is "to be done" soon.
190 # which is "to be done" soon.
191 skip_marker = node.get_closest_marker('xfail_backends')
191 skip_marker = node.get_closest_marker('xfail_backends')
192 if skip_marker and backend_alias in skip_marker.args:
192 if skip_marker and backend_alias in skip_marker.args:
193 msg = "Support for backend %s to be developed." % (backend_alias, )
193 msg = "Support for backend %s to be developed." % (backend_alias, )
194 msg = skip_marker.kwargs.get('reason', msg)
194 msg = skip_marker.kwargs.get('reason', msg)
195 pytest.xfail(msg)
195 pytest.xfail(msg)
196
196
197
197
198 def check_skip_backends(node, backend_alias):
198 def check_skip_backends(node, backend_alias):
199 # Using "skip_backends" here intentionally, since this marks work which is
199 # Using "skip_backends" here intentionally, since this marks work which is
200 # not supported.
200 # not supported.
201 skip_marker = node.get_closest_marker('skip_backends')
201 skip_marker = node.get_closest_marker('skip_backends')
202 if skip_marker and backend_alias in skip_marker.args:
202 if skip_marker and backend_alias in skip_marker.args:
203 msg = "Feature not supported for backend %s." % (backend_alias, )
203 msg = "Feature not supported for backend %s." % (backend_alias, )
204 msg = skip_marker.kwargs.get('reason', msg)
204 msg = skip_marker.kwargs.get('reason', msg)
205 pytest.skip(msg)
205 pytest.skip(msg)
206
206
207
207
208 def extract_git_repo_from_dump(dump_name, repo_name):
208 def extract_git_repo_from_dump(dump_name, repo_name):
209 """Create git repo `repo_name` from dump `dump_name`."""
209 """Create git repo `repo_name` from dump `dump_name`."""
210 repos_path = ScmModel().repos_path
210 repos_path = ScmModel().repos_path
211 target_path = os.path.join(repos_path, repo_name)
211 target_path = os.path.join(repos_path, repo_name)
212 rc_testdata.extract_git_dump(dump_name, target_path)
212 rc_testdata.extract_git_dump(dump_name, target_path)
213 return target_path
213 return target_path
214
214
215
215
216 def extract_hg_repo_from_dump(dump_name, repo_name):
216 def extract_hg_repo_from_dump(dump_name, repo_name):
217 """Create hg repo `repo_name` from dump `dump_name`."""
217 """Create hg repo `repo_name` from dump `dump_name`."""
218 repos_path = ScmModel().repos_path
218 repos_path = ScmModel().repos_path
219 target_path = os.path.join(repos_path, repo_name)
219 target_path = os.path.join(repos_path, repo_name)
220 rc_testdata.extract_hg_dump(dump_name, target_path)
220 rc_testdata.extract_hg_dump(dump_name, target_path)
221 return target_path
221 return target_path
222
222
223
223
224 def extract_svn_repo_from_dump(dump_name, repo_name):
224 def extract_svn_repo_from_dump(dump_name, repo_name):
225 """Create a svn repo `repo_name` from dump `dump_name`."""
225 """Create a svn repo `repo_name` from dump `dump_name`."""
226 repos_path = ScmModel().repos_path
226 repos_path = ScmModel().repos_path
227 target_path = os.path.join(repos_path, repo_name)
227 target_path = os.path.join(repos_path, repo_name)
228 SubversionRepository(target_path, create=True)
228 SubversionRepository(target_path, create=True)
229 _load_svn_dump_into_repo(dump_name, target_path)
229 _load_svn_dump_into_repo(dump_name, target_path)
230 return target_path
230 return target_path
231
231
232
232
233 def assert_message_in_log(log_records, message, levelno, module):
233 def assert_message_in_log(log_records, message, levelno, module):
234 messages = [
234 messages = [
235 r.message for r in log_records
235 r.message for r in log_records
236 if r.module == module and r.levelno == levelno
236 if r.module == module and r.levelno == levelno
237 ]
237 ]
238 assert message in messages
238 assert message in messages
239
239
240
240
241 def _load_svn_dump_into_repo(dump_name, repo_path):
241 def _load_svn_dump_into_repo(dump_name, repo_path):
242 """
242 """
243 Utility to populate a svn repository with a named dump
243 Utility to populate a svn repository with a named dump
244
244
245 Currently the dumps are in rc_testdata. They might later on be
245 Currently the dumps are in rc_testdata. They might later on be
246 integrated with the main repository once they stabilize more.
246 integrated with the main repository once they stabilize more.
247 """
247 """
248 dump = rc_testdata.load_svn_dump(dump_name)
248 dump = rc_testdata.load_svn_dump(dump_name)
249 load_dump = subprocess.Popen(
249 load_dump = subprocess.Popen(
250 ['svnadmin', 'load', repo_path],
250 ['svnadmin', 'load', repo_path],
251 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
251 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
252 stderr=subprocess.PIPE)
252 stderr=subprocess.PIPE)
253 out, err = load_dump.communicate(dump)
253 out, err = load_dump.communicate(dump)
254 if load_dump.returncode != 0:
254 if load_dump.returncode != 0:
255 log.error("Output of load_dump command: %s", out)
255 log.error("Output of load_dump command: %s", out)
256 log.error("Error output of load_dump command: %s", err)
256 log.error("Error output of load_dump command: %s", err)
257 raise Exception(
257 raise Exception(
258 'Failed to load dump "%s" into repository at path "%s".'
258 'Failed to load dump "%s" into repository at path "%s".'
259 % (dump_name, repo_path))
259 % (dump_name, repo_path))
260
260
261
261
262 class AssertResponse(object):
262 class AssertResponse(object):
263 """
263 """
264 Utility that helps to assert things about a given HTML response.
264 Utility that helps to assert things about a given HTML response.
265 """
265 """
266
266
267 def __init__(self, response):
267 def __init__(self, response):
268 self.response = response
268 self.response = response
269
269
270 def get_imports(self):
270 def get_imports(self):
271 return fromstring, tostring, CSSSelector
271 return fromstring, tostring, CSSSelector
272
272
273 def one_element_exists(self, css_selector):
273 def one_element_exists(self, css_selector):
274 self.get_element(css_selector)
274 self.get_element(css_selector)
275
275
276 def no_element_exists(self, css_selector):
276 def no_element_exists(self, css_selector):
277 assert not self._get_elements(css_selector)
277 assert not self._get_elements(css_selector)
278
278
279 def element_equals_to(self, css_selector, expected_content):
279 def element_equals_to(self, css_selector, expected_content):
280 element = self.get_element(css_selector)
280 element = self.get_element(css_selector)
281 element_text = self._element_to_string(element)
281 element_text = self._element_to_string(element)
282
282
283 assert expected_content in element_text
283 assert expected_content in element_text
284
284
285 def element_contains(self, css_selector, expected_content):
285 def element_contains(self, css_selector, expected_content):
286 element = self.get_element(css_selector)
286 element = self.get_element(css_selector)
287 assert expected_content in element.text_content()
287 assert expected_content in element.text_content()
288
288
289 def element_value_contains(self, css_selector, expected_content):
289 def element_value_contains(self, css_selector, expected_content):
290 element = self.get_element(css_selector)
290 element = self.get_element(css_selector)
291 assert expected_content in element.value
291 assert expected_content in element.value
292
292
293 def contains_one_link(self, link_text, href):
293 def contains_one_link(self, link_text, href):
294 fromstring, tostring, CSSSelector = self.get_imports()
294 fromstring, tostring, CSSSelector = self.get_imports()
295 doc = fromstring(self.response.body)
295 doc = fromstring(self.response.body)
296 sel = CSSSelector('a[href]')
296 sel = CSSSelector('a[href]')
297 elements = [
297 elements = [
298 e for e in sel(doc) if e.text_content().strip() == link_text]
298 e for e in sel(doc) if e.text_content().strip() == link_text]
299 assert len(elements) == 1, "Did not find link or found multiple links"
299 assert len(elements) == 1, "Did not find link or found multiple links"
300 self._ensure_url_equal(elements[0].attrib.get('href'), href)
300 self._ensure_url_equal(elements[0].attrib.get('href'), href)
301
301
302 def contains_one_anchor(self, anchor_id):
302 def contains_one_anchor(self, anchor_id):
303 fromstring, tostring, CSSSelector = self.get_imports()
303 fromstring, tostring, CSSSelector = self.get_imports()
304 doc = fromstring(self.response.body)
304 doc = fromstring(self.response.body)
305 sel = CSSSelector('#' + anchor_id)
305 sel = CSSSelector('#' + anchor_id)
306 elements = sel(doc)
306 elements = sel(doc)
307 assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)
307 assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)
308
308
309 def _ensure_url_equal(self, found, expected):
309 def _ensure_url_equal(self, found, expected):
310 assert _Url(found) == _Url(expected)
310 assert _Url(found) == _Url(expected)
311
311
312 def get_element(self, css_selector):
312 def get_element(self, css_selector):
313 elements = self._get_elements(css_selector)
313 elements = self._get_elements(css_selector)
314 assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
314 assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
315 return elements[0]
315 return elements[0]
316
316
317 def get_elements(self, css_selector):
317 def get_elements(self, css_selector):
318 return self._get_elements(css_selector)
318 return self._get_elements(css_selector)
319
319
320 def _get_elements(self, css_selector):
320 def _get_elements(self, css_selector):
321 fromstring, tostring, CSSSelector = self.get_imports()
321 fromstring, tostring, CSSSelector = self.get_imports()
322 doc = fromstring(self.response.body)
322 doc = fromstring(self.response.body)
323 sel = CSSSelector(css_selector)
323 sel = CSSSelector(css_selector)
324 elements = sel(doc)
324 elements = sel(doc)
325 return elements
325 return elements
326
326
327 def _element_to_string(self, element):
327 def _element_to_string(self, element):
328 fromstring, tostring, CSSSelector = self.get_imports()
328 fromstring, tostring, CSSSelector = self.get_imports()
329 return tostring(element, encoding='unicode')
329 return tostring(element, encoding='unicode')
330
330
331
331
332 class _Url(object):
332 class _Url(object):
333 """
333 """
334 A url object that can be compared with other url objects
334 A url object that can be compared with other url objects
335 without regard to the vagaries of encoding, escaping, and ordering
335 without regard to the vagaries of encoding, escaping, and ordering
336 of parameters in query strings.
336 of parameters in query strings.
337
337
338 Inspired by
338 Inspired by
339 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
339 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
340 """
340 """
341
341
342 def __init__(self, url):
342 def __init__(self, url):
343 parts = urllib.parse.urlparse(url)
343 parts = urllib.parse.urlparse(url)
344 _query = frozenset(urllib.parse.parse_qsl(parts.query))
344 _query = frozenset(urllib.parse.parse_qsl(parts.query))
345 _path = unquote_plus(parts.path)
345 _path = unquote_plus(parts.path)
346 parts = parts._replace(query=_query, path=_path)
346 parts = parts._replace(query=_query, path=_path)
347 self.parts = parts
347 self.parts = parts
348
348
349 def __eq__(self, other):
349 def __eq__(self, other):
350 return self.parts == other.parts
350 return self.parts == other.parts
351
351
352 def __hash__(self):
352 def __hash__(self):
353 return hash(self.parts)
353 return hash(self.parts)
354
354
355
355
356 def run_test_concurrently(times, raise_catched_exc=True):
356 def run_test_concurrently(times, raise_catched_exc=True):
357 """
357 """
358 Add this decorator to small pieces of code that you want to test
358 Add this decorator to small pieces of code that you want to test
359 concurrently
359 concurrently
360
360
361 ex:
361 ex:
362
362
363 @run_test_concurrently(25)
363 @run_test_concurrently(25)
364 def my_test_function():
364 def my_test_function():
365 ...
365 ...
366 """
366 """
367 def test_concurrently_decorator(test_func):
367 def test_concurrently_decorator(test_func):
368 def wrapper(*args, **kwargs):
368 def wrapper(*args, **kwargs):
369 exceptions = []
369 exceptions = []
370
370
371 def call_test_func():
371 def call_test_func():
372 try:
372 try:
373 test_func(*args, **kwargs)
373 test_func(*args, **kwargs)
374 except Exception as e:
374 except Exception as e:
375 exceptions.append(e)
375 exceptions.append(e)
376 if raise_catched_exc:
376 if raise_catched_exc:
377 raise
377 raise
378 threads = []
378 threads = []
379 for i in range(times):
379 for i in range(times):
380 threads.append(threading.Thread(target=call_test_func))
380 threads.append(threading.Thread(target=call_test_func))
381 for t in threads:
381 for t in threads:
382 t.start()
382 t.start()
383 for t in threads:
383 for t in threads:
384 t.join()
384 t.join()
385 if exceptions:
385 if exceptions:
386 raise Exception(
386 raise Exception(
387 'test_concurrently intercepted %s exceptions: %s' % (
387 'test_concurrently intercepted %s exceptions: %s' % (
388 len(exceptions), exceptions))
388 len(exceptions), exceptions))
389 return wrapper
389 return wrapper
390 return test_concurrently_decorator
390 return test_concurrently_decorator
391
391
392
392
393 def wait_for_url(url, timeout=10):
393 def wait_for_url(url, timeout=10):
394 """
394 """
395 Wait until URL becomes reachable.
395 Wait until URL becomes reachable.
396
396
397 It polls the URL until the timeout is reached or the URL becomes reachable.
397 It polls the URL until the timeout is reached or the URL becomes reachable.
398 It calls `pytest.fail` if the URL does not become reachable in time.
398 It calls `pytest.fail` if the URL does not become reachable in time.
399 """
399 """
400 timeout = time.time() + timeout
400 timeout = time.time() + timeout
401 last = 0
401 last = 0
402 wait = 0.1
402 wait = 0.1
403
403
404 while timeout > last:
404 while timeout > last:
405 last = time.time()
405 last = time.time()
406 if is_url_reachable(url, log_exc=False):
406 if is_url_reachable(url, log_exc=False):
407 break
407 break
408 elif (last + wait) > time.time():
408 elif (last + wait) > time.time():
409 # Go to sleep because not enough time has passed since last check.
409 # Go to sleep because not enough time has passed since last check.
410 time.sleep(wait)
410 time.sleep(wait)
411 else:
411 else:
412 pytest.fail(f"Timeout while waiting for URL {url}")
412 pytest.fail(f"Timeout while waiting for URL {url}")
413
413
414
414
415 def is_url_reachable(url: str, log_exc: bool = False) -> bool:
415 def is_url_reachable(url: str, log_exc: bool = False) -> bool:
416 try:
416 try:
417 urllib.request.urlopen(url)
417 urllib.request.urlopen(url)
418 except urllib.error.URLError:
418 except urllib.error.URLError:
419 if log_exc:
419 if log_exc:
420 log.exception(f'URL `{url}` is not reachable')
420 log.exception(f'URL `{url}` is not reachable')
421 return False
421 return False
422 return True
422 return True
423
423
424
424
425 def repo_on_filesystem(repo_name):
425 def repo_on_filesystem(repo_name):
426 from rhodecode.lib import vcs
426 from rhodecode.lib import vcs
427 from rhodecode.tests import TESTS_TMP_PATH
427 from rhodecode.tests import TESTS_TMP_PATH
428 repo = vcs.get_vcs_instance(
428 repo = vcs.get_vcs_instance(
429 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
429 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
430 return repo is not None
430 return repo is not None
431
431
432
432
433 def commit_change(
433 def commit_change(
434 repo, filename: bytes, content: bytes, message, vcs_type, parent=None, newfile=False):
434 repo, filename: bytes, content: bytes, message, vcs_type, parent=None, branch=None, newfile=False):
435 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
435 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
436
436
437 repo = Repository.get_by_repo_name(repo)
437 repo = Repository.get_by_repo_name(repo)
438 _commit = parent
438 _commit = parent
439 if not parent:
439 if not parent:
440 _commit = EmptyCommit(alias=vcs_type)
440 _commit = EmptyCommit(alias=vcs_type)
441
441
442 if newfile:
442 if newfile:
443 nodes = {
443 nodes = {
444 filename: {
444 filename: {
445 'content': content
445 'content': content
446 }
446 }
447 }
447 }
448 commit = ScmModel().create_nodes(
448 commit = ScmModel().create_nodes(
449 user=TEST_USER_ADMIN_LOGIN, repo=repo,
449 user=TEST_USER_ADMIN_LOGIN, repo=repo,
450 message=message,
450 message=message,
451 nodes=nodes,
451 nodes=nodes,
452 parent_commit=_commit,
452 parent_commit=_commit,
453 author=f'{TEST_USER_ADMIN_LOGIN} <admin@rhodecode.com>',
453 author=f'{TEST_USER_ADMIN_LOGIN} <admin@rhodecode.com>',
454 )
454 )
455 else:
455 else:
456 commit = ScmModel().commit_change(
456 commit = ScmModel().commit_change(
457 repo=repo.scm_instance(), repo_name=repo.repo_name,
457 repo=repo.scm_instance(), repo_name=repo.repo_name,
458 commit=parent, user=TEST_USER_ADMIN_LOGIN,
458 commit=parent, user=TEST_USER_ADMIN_LOGIN,
459 author=f'{TEST_USER_ADMIN_LOGIN} <admin@rhodecode.com>',
459 author=f'{TEST_USER_ADMIN_LOGIN} <admin@rhodecode.com>',
460 message=message,
460 message=message,
461 content=content,
461 content=content,
462 f_path=filename
462 f_path=filename,
463 branch=branch
463 )
464 )
464 return commit
465 return commit
465
466
466
467
467 def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None):
468 def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None):
468 if not default:
469 if not default:
469 raise ValueError('Permission for default user must be given')
470 raise ValueError('Permission for default user must be given')
470 form_data = [(
471 form_data = [(
471 'csrf_token', csrf_token
472 'csrf_token', csrf_token
472 )]
473 )]
473 # add default
474 # add default
474 form_data.extend([
475 form_data.extend([
475 ('u_perm_1', default)
476 ('u_perm_1', default)
476 ])
477 ])
477
478
478 if grant:
479 if grant:
479 for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1):
480 for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1):
480 form_data.extend([
481 form_data.extend([
481 ('perm_new_member_perm_new{}'.format(cnt), perm),
482 ('perm_new_member_perm_new{}'.format(cnt), perm),
482 ('perm_new_member_id_new{}'.format(cnt), obj_id),
483 ('perm_new_member_id_new{}'.format(cnt), obj_id),
483 ('perm_new_member_name_new{}'.format(cnt), obj_name),
484 ('perm_new_member_name_new{}'.format(cnt), obj_name),
484 ('perm_new_member_type_new{}'.format(cnt), obj_type),
485 ('perm_new_member_type_new{}'.format(cnt), obj_type),
485
486
486 ])
487 ])
487 if revoke:
488 if revoke:
488 for obj_id, obj_type in revoke:
489 for obj_id, obj_type in revoke:
489 form_data.extend([
490 form_data.extend([
490 ('perm_del_member_id_{}'.format(obj_id), obj_id),
491 ('perm_del_member_id_{}'.format(obj_id), obj_id),
491 ('perm_del_member_type_{}'.format(obj_id), obj_type),
492 ('perm_del_member_type_{}'.format(obj_id), obj_type),
492 ])
493 ])
493 return form_data
494 return form_data
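The other half of the change is the new `branch` argument on the `commit_change` helper above, which is simply forwarded to `ScmModel().commit_change()`. A hedged usage sketch follows; it only runs inside the RhodeCode test suite, the import path is assumed from the file shown in this diff, and the repository name, file name, contents and parent commit are illustrative, not taken from the source.

# Sketch under the assumptions stated above, not a definitive recipe.
from rhodecode.tests.utils import commit_change  # module path assumed from this diff

def make_two_heads(repo_name, base_commit):
    # first child of `base_commit`, left on the default branch
    default_head = commit_change(
        repo_name, filename=b'file_a', content=b'default content\n',
        message='commit on default', vcs_type='hg', parent=base_commit)
    # second child of the same parent, placed on a named branch via the new
    # `branch` argument, so the repository ends up with two heads
    feature_head = commit_change(
        repo_name, filename=b'file_a', content=b'feature content\n',
        message='commit on feature', vcs_type='hg', parent=base_commit,
        branch='feature')
    return default_head, feature_head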