##// END OF EJS Templates
observers: code cleanups and fixed tests.
marcink -
r4519:ea50ffa9 stable
parent child Browse files
Show More
@@ -1,368 +1,368 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.model.db import User
24 24 from rhodecode.model.pull_request import PullRequestModel
25 25 from rhodecode.model.repo import RepoModel
26 26 from rhodecode.model.user import UserModel
27 27 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
28 28 from rhodecode.api.tests.utils import build_data, api_call, assert_error
29 29
30 30
31 31 @pytest.mark.usefixtures("testuser_api", "app")
32 32 class TestCreatePullRequestApi(object):
33 33 finalizers = []
34 34
35 35 def teardown_method(self, method):
36 36 if self.finalizers:
37 37 for finalizer in self.finalizers:
38 38 finalizer()
39 39 self.finalizers = []
40 40
41 41 def test_create_with_wrong_data(self):
42 42 required_data = {
43 43 'source_repo': 'tests/source_repo',
44 44 'target_repo': 'tests/target_repo',
45 45 'source_ref': 'branch:default:initial',
46 46 'target_ref': 'branch:default:new-feature',
47 47 }
48 48 for key in required_data:
49 49 data = required_data.copy()
50 50 data.pop(key)
51 51 id_, params = build_data(
52 52 self.apikey, 'create_pull_request', **data)
53 53 response = api_call(self.app, params)
54 54
55 55 expected = 'Missing non optional `{}` arg in JSON DATA'.format(key)
56 56 assert_error(id_, expected, given=response.body)
57 57
58 58 @pytest.mark.backends("git", "hg")
59 59 @pytest.mark.parametrize('source_ref', [
60 60 'bookmarg:default:initial'
61 61 ])
62 62 def test_create_with_wrong_refs_data(self, backend, source_ref):
63 63
64 64 data = self._prepare_data(backend)
65 65 data['source_ref'] = source_ref
66 66
67 67 id_, params = build_data(
68 68 self.apikey_regular, 'create_pull_request', **data)
69 69
70 70 response = api_call(self.app, params)
71 71
72 72 expected = "Ref `{}` type is not allowed. " \
73 73 "Only:['bookmark', 'book', 'tag', 'branch'] " \
74 74 "are possible.".format(source_ref)
75 75 assert_error(id_, expected, given=response.body)
76 76
77 77 @pytest.mark.backends("git", "hg")
78 78 def test_create_with_correct_data(self, backend):
79 79 data = self._prepare_data(backend)
80 80 RepoModel().revoke_user_permission(
81 81 self.source.repo_name, User.DEFAULT_USER)
82 82 id_, params = build_data(
83 83 self.apikey_regular, 'create_pull_request', **data)
84 84 response = api_call(self.app, params)
85 85 expected_message = "Created new pull request `{title}`".format(
86 86 title=data['title'])
87 87 result = response.json
88 88 assert result['error'] is None
89 89 assert result['result']['msg'] == expected_message
90 90 pull_request_id = result['result']['pull_request_id']
91 91 pull_request = PullRequestModel().get(pull_request_id)
92 92 assert pull_request.title == data['title']
93 93 assert pull_request.description == data['description']
94 94 assert pull_request.source_ref == data['source_ref']
95 95 assert pull_request.target_ref == data['target_ref']
96 96 assert pull_request.source_repo.repo_name == data['source_repo']
97 97 assert pull_request.target_repo.repo_name == data['target_repo']
98 98 assert pull_request.revisions == [self.commit_ids['change']]
99 99 assert len(pull_request.reviewers) == 1
100 100
101 101 @pytest.mark.backends("git", "hg")
102 102 def test_create_with_empty_description(self, backend):
103 103 data = self._prepare_data(backend)
104 104 data.pop('description')
105 105 id_, params = build_data(
106 106 self.apikey_regular, 'create_pull_request', **data)
107 107 response = api_call(self.app, params)
108 108 expected_message = "Created new pull request `{title}`".format(
109 109 title=data['title'])
110 110 result = response.json
111 111 assert result['error'] is None
112 112 assert result['result']['msg'] == expected_message
113 113 pull_request_id = result['result']['pull_request_id']
114 114 pull_request = PullRequestModel().get(pull_request_id)
115 115 assert pull_request.description == ''
116 116
117 117 @pytest.mark.backends("git", "hg")
118 118 def test_create_with_empty_title(self, backend):
119 119 data = self._prepare_data(backend)
120 120 data.pop('title')
121 121 id_, params = build_data(
122 122 self.apikey_regular, 'create_pull_request', **data)
123 123 response = api_call(self.app, params)
124 124 result = response.json
125 125 pull_request_id = result['result']['pull_request_id']
126 126 pull_request = PullRequestModel().get(pull_request_id)
127 127 data['ref'] = backend.default_branch_name
128 128 title = '{source_repo}#{ref} to {target_repo}'.format(**data)
129 129 assert pull_request.title == title
130 130
131 131 @pytest.mark.backends("git", "hg")
132 132 def test_create_with_reviewers_specified_by_names(
133 133 self, backend, no_notifications):
134 134 data = self._prepare_data(backend)
135 135 reviewers = [
136 136 {'username': TEST_USER_REGULAR_LOGIN,
137 137 'reasons': ['{} added manually'.format(TEST_USER_REGULAR_LOGIN)]},
138 138 {'username': TEST_USER_ADMIN_LOGIN,
139 139 'reasons': ['{} added manually'.format(TEST_USER_ADMIN_LOGIN)],
140 140 'mandatory': True},
141 141 ]
142 142 data['reviewers'] = reviewers
143 143
144 144 id_, params = build_data(
145 145 self.apikey_regular, 'create_pull_request', **data)
146 146 response = api_call(self.app, params)
147 147
148 148 expected_message = "Created new pull request `{title}`".format(
149 149 title=data['title'])
150 150 result = response.json
151 151 assert result['error'] is None
152 152 assert result['result']['msg'] == expected_message
153 153 pull_request_id = result['result']['pull_request_id']
154 154 pull_request = PullRequestModel().get(pull_request_id)
155 155
156 156 actual_reviewers = []
157 157 for rev in pull_request.reviewers:
158 158 entry = {
159 159 'username': rev.user.username,
160 160 'reasons': rev.reasons,
161 161 }
162 162 if rev.mandatory:
163 163 entry['mandatory'] = rev.mandatory
164 164 actual_reviewers.append(entry)
165 165
166 166 owner_username = pull_request.target_repo.user.username
167 167 for spec_reviewer in reviewers[::]:
168 168 # default reviewer will be added who is an owner of the repo
169 169 # this get's overridden by a add owner to reviewers rule
170 170 if spec_reviewer['username'] == owner_username:
171 171 spec_reviewer['reasons'] = [u'Default reviewer', u'Repository owner']
172 172 # since owner is more important, we don't inherit mandatory flag
173 173 del spec_reviewer['mandatory']
174 174
175 175 assert sorted(actual_reviewers, key=lambda e: e['username']) \
176 176 == sorted(reviewers, key=lambda e: e['username'])
177 177
178 178 @pytest.mark.backends("git", "hg")
179 179 def test_create_with_reviewers_specified_by_ids(
180 180 self, backend, no_notifications):
181 181 data = self._prepare_data(backend)
182 182 reviewers = [
183 183 {'username': UserModel().get_by_username(
184 184 TEST_USER_REGULAR_LOGIN).user_id,
185 185 'reasons': ['added manually']},
186 186 {'username': UserModel().get_by_username(
187 187 TEST_USER_ADMIN_LOGIN).user_id,
188 188 'reasons': ['added manually']},
189 189 ]
190 190
191 191 data['reviewers'] = reviewers
192 192 id_, params = build_data(
193 193 self.apikey_regular, 'create_pull_request', **data)
194 194 response = api_call(self.app, params)
195 195
196 196 expected_message = "Created new pull request `{title}`".format(
197 197 title=data['title'])
198 198 result = response.json
199 199 assert result['error'] is None
200 200 assert result['result']['msg'] == expected_message
201 201 pull_request_id = result['result']['pull_request_id']
202 202 pull_request = PullRequestModel().get(pull_request_id)
203 203
204 204 actual_reviewers = []
205 205 for rev in pull_request.reviewers:
206 206 entry = {
207 207 'username': rev.user.user_id,
208 208 'reasons': rev.reasons,
209 209 }
210 210 if rev.mandatory:
211 211 entry['mandatory'] = rev.mandatory
212 212 actual_reviewers.append(entry)
213 213
214 214 owner_user_id = pull_request.target_repo.user.user_id
215 215 for spec_reviewer in reviewers[::]:
216 216 # default reviewer will be added who is an owner of the repo
217 217 # this get's overridden by a add owner to reviewers rule
218 218 if spec_reviewer['username'] == owner_user_id:
219 219 spec_reviewer['reasons'] = [u'Default reviewer', u'Repository owner']
220 220
221 221 assert sorted(actual_reviewers, key=lambda e: e['username']) \
222 222 == sorted(reviewers, key=lambda e: e['username'])
223 223
224 224 @pytest.mark.backends("git", "hg")
225 225 def test_create_fails_when_the_reviewer_is_not_found(self, backend):
226 226 data = self._prepare_data(backend)
227 227 data['reviewers'] = [{'username': 'somebody'}]
228 228 id_, params = build_data(
229 229 self.apikey_regular, 'create_pull_request', **data)
230 230 response = api_call(self.app, params)
231 231 expected_message = 'user `somebody` does not exist'
232 232 assert_error(id_, expected_message, given=response.body)
233 233
234 234 @pytest.mark.backends("git", "hg")
235 235 def test_cannot_create_with_reviewers_in_wrong_format(self, backend):
236 236 data = self._prepare_data(backend)
237 237 reviewers = ','.join([TEST_USER_REGULAR_LOGIN, TEST_USER_ADMIN_LOGIN])
238 238 data['reviewers'] = reviewers
239 239 id_, params = build_data(
240 240 self.apikey_regular, 'create_pull_request', **data)
241 241 response = api_call(self.app, params)
242 242 expected_message = {u'': '"test_regular,test_admin" is not iterable'}
243 243 assert_error(id_, expected_message, given=response.body)
244 244
245 245 @pytest.mark.backends("git", "hg")
246 246 def test_create_with_no_commit_hashes(self, backend):
247 247 data = self._prepare_data(backend)
248 248 expected_source_ref = data['source_ref']
249 249 expected_target_ref = data['target_ref']
250 250 data['source_ref'] = 'branch:{}'.format(backend.default_branch_name)
251 251 data['target_ref'] = 'branch:{}'.format(backend.default_branch_name)
252 252 id_, params = build_data(
253 253 self.apikey_regular, 'create_pull_request', **data)
254 254 response = api_call(self.app, params)
255 255 expected_message = "Created new pull request `{title}`".format(
256 256 title=data['title'])
257 257 result = response.json
258 258 assert result['result']['msg'] == expected_message
259 259 pull_request_id = result['result']['pull_request_id']
260 260 pull_request = PullRequestModel().get(pull_request_id)
261 261 assert pull_request.source_ref == expected_source_ref
262 262 assert pull_request.target_ref == expected_target_ref
263 263
264 264 @pytest.mark.backends("git", "hg")
265 265 @pytest.mark.parametrize("data_key", ["source_repo", "target_repo"])
266 266 def test_create_fails_with_wrong_repo(self, backend, data_key):
267 267 repo_name = 'fake-repo'
268 268 data = self._prepare_data(backend)
269 269 data[data_key] = repo_name
270 270 id_, params = build_data(
271 271 self.apikey_regular, 'create_pull_request', **data)
272 272 response = api_call(self.app, params)
273 273 expected_message = 'repository `{}` does not exist'.format(repo_name)
274 274 assert_error(id_, expected_message, given=response.body)
275 275
276 276 @pytest.mark.backends("git", "hg")
277 277 @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"])
278 278 def test_create_fails_with_non_existing_branch(self, backend, data_key):
279 279 branch_name = 'test-branch'
280 280 data = self._prepare_data(backend)
281 281 data[data_key] = "branch:{}".format(branch_name)
282 282 id_, params = build_data(
283 283 self.apikey_regular, 'create_pull_request', **data)
284 284 response = api_call(self.app, params)
285 285 expected_message = 'The specified value:{type}:`{name}` ' \
286 286 'does not exist, or is not allowed.'.format(type='branch',
287 287 name=branch_name)
288 288 assert_error(id_, expected_message, given=response.body)
289 289
290 290 @pytest.mark.backends("git", "hg")
291 291 @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"])
292 292 def test_create_fails_with_ref_in_a_wrong_format(self, backend, data_key):
293 293 data = self._prepare_data(backend)
294 294 ref = 'stange-ref'
295 295 data[data_key] = ref
296 296 id_, params = build_data(
297 297 self.apikey_regular, 'create_pull_request', **data)
298 298 response = api_call(self.app, params)
299 299 expected_message = (
300 300 'Ref `{ref}` given in a wrong format. Please check the API'
301 301 ' documentation for more details'.format(ref=ref))
302 302 assert_error(id_, expected_message, given=response.body)
303 303
304 304 @pytest.mark.backends("git", "hg")
305 305 @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"])
306 306 def test_create_fails_with_non_existing_ref(self, backend, data_key):
307 307 commit_id = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa10'
308 308 ref = self._get_full_ref(backend, commit_id)
309 309 data = self._prepare_data(backend)
310 310 data[data_key] = ref
311 311 id_, params = build_data(
312 312 self.apikey_regular, 'create_pull_request', **data)
313 313 response = api_call(self.app, params)
314 314 expected_message = 'Ref `{}` does not exist'.format(ref)
315 315 assert_error(id_, expected_message, given=response.body)
316 316
317 317 @pytest.mark.backends("git", "hg")
318 318 def test_create_fails_when_no_revisions(self, backend):
319 319 data = self._prepare_data(backend, source_head='initial')
320 320 id_, params = build_data(
321 321 self.apikey_regular, 'create_pull_request', **data)
322 322 response = api_call(self.app, params)
323 expected_message = 'no commits found'
323 expected_message = 'no commits found for merge between specified references'
324 324 assert_error(id_, expected_message, given=response.body)
325 325
326 326 @pytest.mark.backends("git", "hg")
327 327 def test_create_fails_when_no_permissions(self, backend):
328 328 data = self._prepare_data(backend)
329 329 RepoModel().revoke_user_permission(
330 330 self.source.repo_name, self.test_user)
331 331 RepoModel().revoke_user_permission(
332 332 self.source.repo_name, User.DEFAULT_USER)
333 333
334 334 id_, params = build_data(
335 335 self.apikey_regular, 'create_pull_request', **data)
336 336 response = api_call(self.app, params)
337 337 expected_message = 'repository `{}` does not exist'.format(
338 338 self.source.repo_name)
339 339 assert_error(id_, expected_message, given=response.body)
340 340
341 341 def _prepare_data(
342 342 self, backend, source_head='change', target_head='initial'):
343 343 commits = [
344 344 {'message': 'initial'},
345 345 {'message': 'change'},
346 346 {'message': 'new-feature', 'parents': ['initial']},
347 347 ]
348 348 self.commit_ids = backend.create_master_repo(commits)
349 349 self.source = backend.create_repo(heads=[source_head])
350 350 self.target = backend.create_repo(heads=[target_head])
351 351
352 352 data = {
353 353 'source_repo': self.source.repo_name,
354 354 'target_repo': self.target.repo_name,
355 355 'source_ref': self._get_full_ref(
356 356 backend, self.commit_ids[source_head]),
357 357 'target_ref': self._get_full_ref(
358 358 backend, self.commit_ids[target_head]),
359 359 'title': 'Test PR 1',
360 360 'description': 'Test'
361 361 }
362 362 RepoModel().grant_user_permission(
363 363 self.source.repo_name, self.TEST_USER_LOGIN, 'repository.read')
364 364 return data
365 365
366 366 def _get_full_ref(self, backend, commit_id):
367 367 return 'branch:{branch}:{commit_id}'.format(
368 368 branch=backend.default_branch_name, commit_id=commit_id)
@@ -1,80 +1,82 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import pytest
23 23
24 24 from rhodecode.model.meta import Session
25 25 from rhodecode.model.pull_request import PullRequestModel
26 26 from rhodecode.api.tests.utils import (
27 27 build_data, api_call, assert_error)
28 28
29 29
30 30 @pytest.mark.usefixtures("testuser_api", "app")
31 31 class TestGetPullRequest(object):
32
32 33 @pytest.mark.backends("git", "hg")
33 34 def test_api_get_pull_requests(self, pr_util):
34 35 pull_request = pr_util.create_pull_request()
35 36 pull_request_2 = PullRequestModel().create(
36 37 created_by=pull_request.author,
37 38 source_repo=pull_request.source_repo,
38 39 source_ref=pull_request.source_ref,
39 40 target_repo=pull_request.target_repo,
40 41 target_ref=pull_request.target_ref,
41 42 revisions=pull_request.revisions,
42 43 reviewers=(),
44 observers=(),
43 45 title=pull_request.title,
44 46 description=pull_request.description,
45 47 )
46 48 Session().commit()
47 49 id_, params = build_data(
48 50 self.apikey, 'get_pull_requests',
49 51 repoid=pull_request.target_repo.repo_name)
50 52 response = api_call(self.app, params)
51 53 assert response.status == '200 OK'
52 54 assert len(response.json['result']) == 2
53 55
54 56 PullRequestModel().close_pull_request(
55 57 pull_request_2, pull_request_2.author)
56 58 Session().commit()
57 59
58 60 id_, params = build_data(
59 61 self.apikey, 'get_pull_requests',
60 62 repoid=pull_request.target_repo.repo_name,
61 63 status='new')
62 64 response = api_call(self.app, params)
63 65 assert response.status == '200 OK'
64 66 assert len(response.json['result']) == 1
65 67
66 68 id_, params = build_data(
67 69 self.apikey, 'get_pull_requests',
68 70 repoid=pull_request.target_repo.repo_name,
69 71 status='closed')
70 72 response = api_call(self.app, params)
71 73 assert response.status == '200 OK'
72 74 assert len(response.json['result']) == 1
73 75
74 76 @pytest.mark.backends("git", "hg")
75 77 def test_api_get_pull_requests_repo_error(self):
76 78 id_, params = build_data(self.apikey, 'get_pull_requests', repoid=666)
77 79 response = api_call(self.app, params)
78 80
79 81 expected = 'repository `666` does not exist'
80 82 assert_error(id_, expected, given=response.body)
@@ -1,212 +1,215 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.lib.vcs.nodes import FileNode
24 24 from rhodecode.model.db import User
25 25 from rhodecode.model.pull_request import PullRequestModel
26 26 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
27 27 from rhodecode.api.tests.utils import (
28 28 build_data, api_call, assert_ok, assert_error)
29 29
30 30
31 31 @pytest.mark.usefixtures("testuser_api", "app")
32 32 class TestUpdatePullRequest(object):
33 33
34 34 @pytest.mark.backends("git", "hg")
35 35 def test_api_update_pull_request_title_or_description(
36 36 self, pr_util, no_notifications):
37 37 pull_request = pr_util.create_pull_request()
38 38
39 39 id_, params = build_data(
40 40 self.apikey, 'update_pull_request',
41 41 repoid=pull_request.target_repo.repo_name,
42 42 pullrequestid=pull_request.pull_request_id,
43 43 title='New TITLE OF A PR',
44 44 description='New DESC OF A PR',
45 45 )
46 46 response = api_call(self.app, params)
47 47
48 48 expected = {
49 49 "msg": "Updated pull request `{}`".format(
50 50 pull_request.pull_request_id),
51 51 "pull_request": response.json['result']['pull_request'],
52 52 "updated_commits": {"added": [], "common": [], "removed": []},
53 53 "updated_reviewers": {"added": [], "removed": []},
54 "updated_observers": {"added": [], "removed": []},
54 55 }
55 56
56 57 response_json = response.json['result']
57 58 assert response_json == expected
58 59 pr = response_json['pull_request']
59 60 assert pr['title'] == 'New TITLE OF A PR'
60 61 assert pr['description'] == 'New DESC OF A PR'
61 62
62 63 @pytest.mark.backends("git", "hg")
63 64 def test_api_try_update_closed_pull_request(
64 65 self, pr_util, no_notifications):
65 66 pull_request = pr_util.create_pull_request()
66 67 PullRequestModel().close_pull_request(
67 68 pull_request, TEST_USER_ADMIN_LOGIN)
68 69
69 70 id_, params = build_data(
70 71 self.apikey, 'update_pull_request',
71 72 repoid=pull_request.target_repo.repo_name,
72 73 pullrequestid=pull_request.pull_request_id)
73 74 response = api_call(self.app, params)
74 75
75 76 expected = 'pull request `{}` update failed, pull request ' \
76 77 'is closed'.format(pull_request.pull_request_id)
77 78
78 79 assert_error(id_, expected, response.body)
79 80
80 81 @pytest.mark.backends("git", "hg")
81 82 def test_api_update_update_commits(self, pr_util, no_notifications):
82 83 commits = [
83 84 {'message': 'a'},
84 85 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
85 86 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
86 87 ]
87 88 pull_request = pr_util.create_pull_request(
88 89 commits=commits, target_head='a', source_head='b', revisions=['b'])
89 90 pr_util.update_source_repository(head='c')
90 91 repo = pull_request.source_repo.scm_instance()
91 92 commits = [x for x in repo.get_commits()]
92 93
93 94 added_commit_id = commits[-1].raw_id # c commit
94 95 common_commit_id = commits[1].raw_id # b commit is common ancestor
95 96 total_commits = [added_commit_id, common_commit_id]
96 97
97 98 id_, params = build_data(
98 99 self.apikey, 'update_pull_request',
99 100 repoid=pull_request.target_repo.repo_name,
100 101 pullrequestid=pull_request.pull_request_id,
101 102 update_commits=True
102 103 )
103 104 response = api_call(self.app, params)
104 105
105 106 expected = {
106 107 "msg": "Updated pull request `{}`".format(
107 108 pull_request.pull_request_id),
108 109 "pull_request": response.json['result']['pull_request'],
109 110 "updated_commits": {"added": [added_commit_id],
110 111 "common": [common_commit_id],
111 112 "total": total_commits,
112 113 "removed": []},
113 114 "updated_reviewers": {"added": [], "removed": []},
115 "updated_observers": {"added": [], "removed": []},
114 116 }
115 117
116 118 assert_ok(id_, expected, response.body)
117 119
118 120 @pytest.mark.backends("git", "hg")
119 121 def test_api_update_change_reviewers(
120 122 self, user_util, pr_util, no_notifications):
121 123 a = user_util.create_user()
122 124 b = user_util.create_user()
123 125 c = user_util.create_user()
124 126 new_reviewers = [
125 {'username': b.username,'reasons': ['updated via API'],
127 {'username': b.username, 'reasons': ['updated via API'],
126 128 'mandatory':False},
127 129 {'username': c.username, 'reasons': ['updated via API'],
128 130 'mandatory':False},
129 131 ]
130 132
131 133 added = [b.username, c.username]
132 134 removed = [a.username]
133 135
134 136 pull_request = pr_util.create_pull_request(
135 reviewers=[(a.username, ['added via API'], False, [])])
137 reviewers=[(a.username, ['added via API'], False, 'reviewer', [])])
136 138
137 139 id_, params = build_data(
138 140 self.apikey, 'update_pull_request',
139 141 repoid=pull_request.target_repo.repo_name,
140 142 pullrequestid=pull_request.pull_request_id,
141 143 reviewers=new_reviewers)
142 144 response = api_call(self.app, params)
143 145 expected = {
144 146 "msg": "Updated pull request `{}`".format(
145 147 pull_request.pull_request_id),
146 148 "pull_request": response.json['result']['pull_request'],
147 149 "updated_commits": {"added": [], "common": [], "removed": []},
148 150 "updated_reviewers": {"added": added, "removed": removed},
151 "updated_observers": {"added": [], "removed": []},
149 152 }
150 153
151 154 assert_ok(id_, expected, response.body)
152 155
153 156 @pytest.mark.backends("git", "hg")
154 157 def test_api_update_bad_user_in_reviewers(self, pr_util):
155 158 pull_request = pr_util.create_pull_request()
156 159
157 160 id_, params = build_data(
158 161 self.apikey, 'update_pull_request',
159 162 repoid=pull_request.target_repo.repo_name,
160 163 pullrequestid=pull_request.pull_request_id,
161 164 reviewers=[{'username': 'bad_name'}])
162 165 response = api_call(self.app, params)
163 166
164 167 expected = 'user `bad_name` does not exist'
165 168
166 169 assert_error(id_, expected, response.body)
167 170
168 171 @pytest.mark.backends("git", "hg")
169 172 def test_api_update_repo_error(self, pr_util):
170 173 pull_request = pr_util.create_pull_request()
171 174 id_, params = build_data(
172 175 self.apikey, 'update_pull_request',
173 176 repoid='fake',
174 177 pullrequestid=pull_request.pull_request_id,
175 178 reviewers=[{'username': 'bad_name'}])
176 179 response = api_call(self.app, params)
177 180
178 181 expected = 'repository `fake` does not exist'
179 182
180 183 response_json = response.json['error']
181 184 assert response_json == expected
182 185
183 186 @pytest.mark.backends("git", "hg")
184 187 def test_api_update_pull_request_error(self, pr_util):
185 188 pull_request = pr_util.create_pull_request()
186 189
187 190 id_, params = build_data(
188 191 self.apikey, 'update_pull_request',
189 192 repoid=pull_request.target_repo.repo_name,
190 193 pullrequestid=999999,
191 194 reviewers=[{'username': 'bad_name'}])
192 195 response = api_call(self.app, params)
193 196
194 197 expected = 'pull request `999999` does not exist'
195 198 assert_error(id_, expected, response.body)
196 199
197 200 @pytest.mark.backends("git", "hg")
198 201 def test_api_update_pull_request_no_perms_to_update(
199 202 self, user_util, pr_util):
200 203 user = user_util.create_user()
201 204 pull_request = pr_util.create_pull_request()
202 205
203 206 id_, params = build_data(
204 207 user.api_key, 'update_pull_request',
205 208 repoid=pull_request.target_repo.repo_name,
206 209 pullrequestid=pull_request.pull_request_id,)
207 210 response = api_call(self.app, params)
208 211
209 212 expected = ('pull request `%s` update failed, '
210 213 'no permission to update.') % pull_request.pull_request_id
211 214
212 215 assert_error(id_, expected, response.body)
@@ -1,1056 +1,1118 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23
24 24 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
25 25 from rhodecode.api.utils import (
26 26 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
27 27 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
28 28 validate_repo_permissions, resolve_ref_or_error, validate_set_owner_permissions)
29 29 from rhodecode.lib import channelstream
30 30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
31 31 from rhodecode.lib.base import vcs_operation_context
32 32 from rhodecode.lib.utils2 import str2bool
33 from rhodecode.lib.vcs.backends.base import unicode_to_reference
33 34 from rhodecode.model.changeset_status import ChangesetStatusModel
34 35 from rhodecode.model.comment import CommentsModel
35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment, PullRequest
36 from rhodecode.model.db import (
37 Session, ChangesetStatus, ChangesetComment, PullRequest, PullRequestReviewers)
36 38 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
37 39 from rhodecode.model.settings import SettingsModel
38 40 from rhodecode.model.validation_schema import Invalid
39 41 from rhodecode.model.validation_schema.schemas.reviewer_schema import ReviewerListSchema
40 42
41 43 log = logging.getLogger(__name__)
42 44
43 45
44 46 @jsonrpc_method()
45 47 def get_pull_request(request, apiuser, pullrequestid, repoid=Optional(None),
46 48 merge_state=Optional(False)):
47 49 """
48 50 Get a pull request based on the given ID.
49 51
50 52 :param apiuser: This is filled automatically from the |authtoken|.
51 53 :type apiuser: AuthUser
52 54 :param repoid: Optional, repository name or repository ID from where
53 55 the pull request was opened.
54 56 :type repoid: str or int
55 57 :param pullrequestid: ID of the requested pull request.
56 58 :type pullrequestid: int
57 59 :param merge_state: Optional calculate merge state for each repository.
58 60 This could result in longer time to fetch the data
59 61 :type merge_state: bool
60 62
61 63 Example output:
62 64
63 65 .. code-block:: bash
64 66
65 67 "id": <id_given_in_input>,
66 68 "result":
67 69 {
68 70 "pull_request_id": "<pull_request_id>",
69 71 "url": "<url>",
70 72 "title": "<title>",
71 73 "description": "<description>",
72 74 "status" : "<status>",
73 75 "created_on": "<date_time_created>",
74 76 "updated_on": "<date_time_updated>",
75 77 "versions": "<number_or_versions_of_pr>",
76 78 "commit_ids": [
77 79 ...
78 80 "<commit_id>",
79 81 "<commit_id>",
80 82 ...
81 83 ],
82 84 "review_status": "<review_status>",
83 85 "mergeable": {
84 86 "status": "<bool>",
85 87 "message": "<message>",
86 88 },
87 89 "source": {
88 90 "clone_url": "<clone_url>",
89 91 "repository": "<repository_name>",
90 92 "reference":
91 93 {
92 94 "name": "<name>",
93 95 "type": "<type>",
94 96 "commit_id": "<commit_id>",
95 97 }
96 98 },
97 99 "target": {
98 100 "clone_url": "<clone_url>",
99 101 "repository": "<repository_name>",
100 102 "reference":
101 103 {
102 104 "name": "<name>",
103 105 "type": "<type>",
104 106 "commit_id": "<commit_id>",
105 107 }
106 108 },
107 109 "merge": {
108 110 "clone_url": "<clone_url>",
109 111 "reference":
110 112 {
111 113 "name": "<name>",
112 114 "type": "<type>",
113 115 "commit_id": "<commit_id>",
114 116 }
115 117 },
116 118 "author": <user_obj>,
117 119 "reviewers": [
118 120 ...
119 121 {
120 122 "user": "<user_obj>",
121 123 "review_status": "<review_status>",
122 124 }
123 125 ...
124 126 ]
125 127 },
126 128 "error": null
127 129 """
128 130
129 131 pull_request = get_pull_request_or_error(pullrequestid)
130 132 if Optional.extract(repoid):
131 133 repo = get_repo_or_error(repoid)
132 134 else:
133 135 repo = pull_request.target_repo
134 136
135 137 if not PullRequestModel().check_user_read(pull_request, apiuser, api=True):
136 138 raise JSONRPCError('repository `%s` or pull request `%s` '
137 139 'does not exist' % (repoid, pullrequestid))
138 140
139 141 # NOTE(marcink): only calculate and return merge state if the pr state is 'created'
140 142 # otherwise we can lock the repo on calculation of merge state while update/merge
141 143 # is happening.
142 144 pr_created = pull_request.pull_request_state == pull_request.STATE_CREATED
143 145 merge_state = Optional.extract(merge_state, binary=True) and pr_created
144 146 data = pull_request.get_api_data(with_merge_state=merge_state)
145 147 return data
146 148
147 149
148 150 @jsonrpc_method()
149 151 def get_pull_requests(request, apiuser, repoid, status=Optional('new'),
150 152 merge_state=Optional(False)):
151 153 """
152 154 Get all pull requests from the repository specified in `repoid`.
153 155
154 156 :param apiuser: This is filled automatically from the |authtoken|.
155 157 :type apiuser: AuthUser
156 158 :param repoid: Optional repository name or repository ID.
157 159 :type repoid: str or int
158 160 :param status: Only return pull requests with the specified status.
159 161 Valid options are.
160 162 * ``new`` (default)
161 163 * ``open``
162 164 * ``closed``
163 165 :type status: str
164 166 :param merge_state: Optional calculate merge state for each repository.
165 167 This could result in longer time to fetch the data
166 168 :type merge_state: bool
167 169
168 170 Example output:
169 171
170 172 .. code-block:: bash
171 173
172 174 "id": <id_given_in_input>,
173 175 "result":
174 176 [
175 177 ...
176 178 {
177 179 "pull_request_id": "<pull_request_id>",
178 180 "url": "<url>",
179 181 "title" : "<title>",
180 182 "description": "<description>",
181 183 "status": "<status>",
182 184 "created_on": "<date_time_created>",
183 185 "updated_on": "<date_time_updated>",
184 186 "commit_ids": [
185 187 ...
186 188 "<commit_id>",
187 189 "<commit_id>",
188 190 ...
189 191 ],
190 192 "review_status": "<review_status>",
191 193 "mergeable": {
192 194 "status": "<bool>",
193 195 "message: "<message>",
194 196 },
195 197 "source": {
196 198 "clone_url": "<clone_url>",
197 199 "reference":
198 200 {
199 201 "name": "<name>",
200 202 "type": "<type>",
201 203 "commit_id": "<commit_id>",
202 204 }
203 205 },
204 206 "target": {
205 207 "clone_url": "<clone_url>",
206 208 "reference":
207 209 {
208 210 "name": "<name>",
209 211 "type": "<type>",
210 212 "commit_id": "<commit_id>",
211 213 }
212 214 },
213 215 "merge": {
214 216 "clone_url": "<clone_url>",
215 217 "reference":
216 218 {
217 219 "name": "<name>",
218 220 "type": "<type>",
219 221 "commit_id": "<commit_id>",
220 222 }
221 223 },
222 224 "author": <user_obj>,
223 225 "reviewers": [
224 226 ...
225 227 {
226 228 "user": "<user_obj>",
227 229 "review_status": "<review_status>",
228 230 }
229 231 ...
230 232 ]
231 233 }
232 234 ...
233 235 ],
234 236 "error": null
235 237
236 238 """
237 239 repo = get_repo_or_error(repoid)
238 240 if not has_superadmin_permission(apiuser):
239 241 _perms = (
240 242 'repository.admin', 'repository.write', 'repository.read',)
241 243 validate_repo_permissions(apiuser, repoid, repo, _perms)
242 244
243 245 status = Optional.extract(status)
244 246 merge_state = Optional.extract(merge_state, binary=True)
245 247 pull_requests = PullRequestModel().get_all(repo, statuses=[status],
246 248 order_by='id', order_dir='desc')
247 249 data = [pr.get_api_data(with_merge_state=merge_state) for pr in pull_requests]
248 250 return data
249 251
250 252
251 253 @jsonrpc_method()
252 254 def merge_pull_request(
253 255 request, apiuser, pullrequestid, repoid=Optional(None),
254 256 userid=Optional(OAttr('apiuser'))):
255 257 """
256 258 Merge the pull request specified by `pullrequestid` into its target
257 259 repository.
258 260
259 261 :param apiuser: This is filled automatically from the |authtoken|.
260 262 :type apiuser: AuthUser
261 263 :param repoid: Optional, repository name or repository ID of the
262 264 target repository to which the |pr| is to be merged.
263 265 :type repoid: str or int
264 266 :param pullrequestid: ID of the pull request which shall be merged.
265 267 :type pullrequestid: int
266 268 :param userid: Merge the pull request as this user.
267 269 :type userid: Optional(str or int)
268 270
269 271 Example output:
270 272
271 273 .. code-block:: bash
272 274
273 275 "id": <id_given_in_input>,
274 276 "result": {
275 277 "executed": "<bool>",
276 278 "failure_reason": "<int>",
277 279 "merge_status_message": "<str>",
278 280 "merge_commit_id": "<merge_commit_id>",
279 281 "possible": "<bool>",
280 282 "merge_ref": {
281 283 "commit_id": "<commit_id>",
282 284 "type": "<type>",
283 285 "name": "<name>"
284 286 }
285 287 },
286 288 "error": null
287 289 """
288 290 pull_request = get_pull_request_or_error(pullrequestid)
289 291 if Optional.extract(repoid):
290 292 repo = get_repo_or_error(repoid)
291 293 else:
292 294 repo = pull_request.target_repo
293 295 auth_user = apiuser
294 296
295 297 if not isinstance(userid, Optional):
296 298 is_repo_admin = HasRepoPermissionAnyApi('repository.admin')(
297 299 user=apiuser, repo_name=repo.repo_name)
298 300 if has_superadmin_permission(apiuser) or is_repo_admin:
299 301 apiuser = get_user_or_error(userid)
300 302 auth_user = apiuser.AuthUser()
301 303 else:
302 304 raise JSONRPCError('userid is not the same as your user')
303 305
304 306 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
305 307 raise JSONRPCError(
306 308 'Operation forbidden because pull request is in state {}, '
307 309 'only state {} is allowed.'.format(
308 310 pull_request.pull_request_state, PullRequest.STATE_CREATED))
309 311
310 312 with pull_request.set_state(PullRequest.STATE_UPDATING):
311 313 check = MergeCheck.validate(pull_request, auth_user=auth_user,
312 314 translator=request.translate)
313 315 merge_possible = not check.failed
314 316
315 317 if not merge_possible:
316 318 error_messages = []
317 319 for err_type, error_msg in check.errors:
318 320 error_msg = request.translate(error_msg)
319 321 error_messages.append(error_msg)
320 322
321 323 reasons = ','.join(error_messages)
322 324 raise JSONRPCError(
323 325 'merge not possible for following reasons: {}'.format(reasons))
324 326
325 327 target_repo = pull_request.target_repo
326 328 extras = vcs_operation_context(
327 329 request.environ, repo_name=target_repo.repo_name,
328 330 username=auth_user.username, action='push',
329 331 scm=target_repo.repo_type)
330 332 with pull_request.set_state(PullRequest.STATE_UPDATING):
331 333 merge_response = PullRequestModel().merge_repo(
332 334 pull_request, apiuser, extras=extras)
333 335 if merge_response.executed:
334 336 PullRequestModel().close_pull_request(pull_request.pull_request_id, auth_user)
335 337
336 338 Session().commit()
337 339
338 340 # In previous versions the merge response directly contained the merge
339 341 # commit id. It is now contained in the merge reference object. To be
340 342 # backwards compatible we have to extract it again.
341 343 merge_response = merge_response.asdict()
342 344 merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id
343 345
344 346 return merge_response
345 347
346 348
347 349 @jsonrpc_method()
348 350 def get_pull_request_comments(
349 351 request, apiuser, pullrequestid, repoid=Optional(None)):
350 352 """
351 353 Get all comments of pull request specified with the `pullrequestid`
352 354
353 355 :param apiuser: This is filled automatically from the |authtoken|.
354 356 :type apiuser: AuthUser
355 357 :param repoid: Optional repository name or repository ID.
356 358 :type repoid: str or int
357 359 :param pullrequestid: The pull request ID.
358 360 :type pullrequestid: int
359 361
360 362 Example output:
361 363
362 364 .. code-block:: bash
363 365
364 366 id : <id_given_in_input>
365 367 result : [
366 368 {
367 369 "comment_author": {
368 370 "active": true,
369 371 "full_name_or_username": "Tom Gore",
370 372 "username": "admin"
371 373 },
372 374 "comment_created_on": "2017-01-02T18:43:45.533",
373 375 "comment_f_path": null,
374 376 "comment_id": 25,
375 377 "comment_lineno": null,
376 378 "comment_status": {
377 379 "status": "under_review",
378 380 "status_lbl": "Under Review"
379 381 },
380 382 "comment_text": "Example text",
381 383 "comment_type": null,
382 384 "comment_last_version: 0,
383 385 "pull_request_version": null,
384 386 "comment_commit_id": None,
385 387 "comment_pull_request_id": <pull_request_id>
386 388 }
387 389 ],
388 390 error : null
389 391 """
390 392
391 393 pull_request = get_pull_request_or_error(pullrequestid)
392 394 if Optional.extract(repoid):
393 395 repo = get_repo_or_error(repoid)
394 396 else:
395 397 repo = pull_request.target_repo
396 398
397 399 if not PullRequestModel().check_user_read(
398 400 pull_request, apiuser, api=True):
399 401 raise JSONRPCError('repository `%s` or pull request `%s` '
400 402 'does not exist' % (repoid, pullrequestid))
401 403
402 404 (pull_request_latest,
403 405 pull_request_at_ver,
404 406 pull_request_display_obj,
405 407 at_version) = PullRequestModel().get_pr_version(
406 408 pull_request.pull_request_id, version=None)
407 409
408 410 versions = pull_request_display_obj.versions()
409 411 ver_map = {
410 412 ver.pull_request_version_id: cnt
411 413 for cnt, ver in enumerate(versions, 1)
412 414 }
413 415
414 416 # GENERAL COMMENTS with versions #
415 417 q = CommentsModel()._all_general_comments_of_pull_request(pull_request)
416 418 q = q.order_by(ChangesetComment.comment_id.asc())
417 419 general_comments = q.all()
418 420
419 421 # INLINE COMMENTS with versions #
420 422 q = CommentsModel()._all_inline_comments_of_pull_request(pull_request)
421 423 q = q.order_by(ChangesetComment.comment_id.asc())
422 424 inline_comments = q.all()
423 425
424 426 data = []
425 427 for comment in inline_comments + general_comments:
426 428 full_data = comment.get_api_data()
427 429 pr_version_id = None
428 430 if comment.pull_request_version_id:
429 431 pr_version_id = 'v{}'.format(
430 432 ver_map[comment.pull_request_version_id])
431 433
432 434 # sanitize some entries
433 435
434 436 full_data['pull_request_version'] = pr_version_id
435 437 full_data['comment_author'] = {
436 438 'username': full_data['comment_author'].username,
437 439 'full_name_or_username': full_data['comment_author'].full_name_or_username,
438 440 'active': full_data['comment_author'].active,
439 441 }
440 442
441 443 if full_data['comment_status']:
442 444 full_data['comment_status'] = {
443 445 'status': full_data['comment_status'][0].status,
444 446 'status_lbl': full_data['comment_status'][0].status_lbl,
445 447 }
446 448 else:
447 449 full_data['comment_status'] = {}
448 450
449 451 data.append(full_data)
450 452 return data
451 453
452 454
453 455 @jsonrpc_method()
454 456 def comment_pull_request(
455 457 request, apiuser, pullrequestid, repoid=Optional(None),
456 458 message=Optional(None), commit_id=Optional(None), status=Optional(None),
457 459 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
458 460 resolves_comment_id=Optional(None), extra_recipients=Optional([]),
459 461 userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
460 462 """
461 463 Comment on the pull request specified with the `pullrequestid`,
462 464 in the |repo| specified by the `repoid`, and optionally change the
463 465 review status.
464 466
465 467 :param apiuser: This is filled automatically from the |authtoken|.
466 468 :type apiuser: AuthUser
467 469 :param repoid: Optional repository name or repository ID.
468 470 :type repoid: str or int
469 471 :param pullrequestid: The pull request ID.
470 472 :type pullrequestid: int
471 473 :param commit_id: Specify the commit_id for which to set a comment. If
472 474 given commit_id is different than latest in the PR status
473 475 change won't be performed.
474 476 :type commit_id: str
475 477 :param message: The text content of the comment.
476 478 :type message: str
477 479 :param status: (**Optional**) Set the approval status of the pull
478 480 request. One of: 'not_reviewed', 'approved', 'rejected',
479 481 'under_review'
480 482 :type status: str
481 483 :param comment_type: Comment type, one of: 'note', 'todo'
482 484 :type comment_type: Optional(str), default: 'note'
483 485 :param resolves_comment_id: id of comment which this one will resolve
484 486 :type resolves_comment_id: Optional(int)
485 487 :param extra_recipients: list of user ids or usernames to add
486 488 notifications for this comment. Acts like a CC for notification
487 489 :type extra_recipients: Optional(list)
488 490 :param userid: Comment on the pull request as this user
489 491 :type userid: Optional(str or int)
490 492 :param send_email: Define if this comment should also send email notification
491 493 :type send_email: Optional(bool)
492 494
493 495 Example output:
494 496
495 497 .. code-block:: bash
496 498
497 499 id : <id_given_in_input>
498 500 result : {
499 501 "pull_request_id": "<Integer>",
500 502 "comment_id": "<Integer>",
501 503 "status": {"given": <given_status>,
502 504 "was_changed": <bool status_was_actually_changed> },
503 505 },
504 506 error : null
505 507 """
506 508 _ = request.translate
507 509
508 510 pull_request = get_pull_request_or_error(pullrequestid)
509 511 if Optional.extract(repoid):
510 512 repo = get_repo_or_error(repoid)
511 513 else:
512 514 repo = pull_request.target_repo
513 515
514 516 db_repo_name = repo.repo_name
515 517 auth_user = apiuser
516 518 if not isinstance(userid, Optional):
517 519 is_repo_admin = HasRepoPermissionAnyApi('repository.admin')(
518 520 user=apiuser, repo_name=db_repo_name)
519 521 if has_superadmin_permission(apiuser) or is_repo_admin:
520 522 apiuser = get_user_or_error(userid)
521 523 auth_user = apiuser.AuthUser()
522 524 else:
523 525 raise JSONRPCError('userid is not the same as your user')
524 526
525 527 if pull_request.is_closed():
526 528 raise JSONRPCError(
527 529 'pull request `%s` comment failed, pull request is closed' % (
528 530 pullrequestid,))
529 531
530 532 if not PullRequestModel().check_user_read(
531 533 pull_request, apiuser, api=True):
532 534 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
533 535 message = Optional.extract(message)
534 536 status = Optional.extract(status)
535 537 commit_id = Optional.extract(commit_id)
536 538 comment_type = Optional.extract(comment_type)
537 539 resolves_comment_id = Optional.extract(resolves_comment_id)
538 540 extra_recipients = Optional.extract(extra_recipients)
539 541 send_email = Optional.extract(send_email, binary=True)
540 542
541 543 if not message and not status:
542 544 raise JSONRPCError(
543 545 'Both message and status parameters are missing. '
544 546 'At least one is required.')
545 547
546 548 if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
547 549 status is not None):
548 550 raise JSONRPCError('Unknown comment status: `%s`' % status)
549 551
550 552 if commit_id and commit_id not in pull_request.revisions:
551 553 raise JSONRPCError(
552 554 'Invalid commit_id `%s` for this pull request.' % commit_id)
553 555
554 556 allowed_to_change_status = PullRequestModel().check_user_change_status(
555 557 pull_request, apiuser)
556 558
557 559 # if commit_id is passed re-validated if user is allowed to change status
558 560 # based on latest commit_id from the PR
559 561 if commit_id:
560 562 commit_idx = pull_request.revisions.index(commit_id)
561 563 if commit_idx != 0:
562 564 allowed_to_change_status = False
563 565
564 566 if resolves_comment_id:
565 567 comment = ChangesetComment.get(resolves_comment_id)
566 568 if not comment:
567 569 raise JSONRPCError(
568 570 'Invalid resolves_comment_id `%s` for this pull request.'
569 571 % resolves_comment_id)
570 572 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
571 573 raise JSONRPCError(
572 574 'Comment `%s` is wrong type for setting status to resolved.'
573 575 % resolves_comment_id)
574 576
575 577 text = message
576 578 status_label = ChangesetStatus.get_status_lbl(status)
577 579 if status and allowed_to_change_status:
578 580 st_message = ('Status change %(transition_icon)s %(status)s'
579 581 % {'transition_icon': '>', 'status': status_label})
580 582 text = message or st_message
581 583
582 584 rc_config = SettingsModel().get_all_settings()
583 585 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
584 586
585 587 status_change = status and allowed_to_change_status
586 588 comment = CommentsModel().create(
587 589 text=text,
588 590 repo=pull_request.target_repo.repo_id,
589 591 user=apiuser.user_id,
590 592 pull_request=pull_request.pull_request_id,
591 593 f_path=None,
592 594 line_no=None,
593 595 status_change=(status_label if status_change else None),
594 596 status_change_type=(status if status_change else None),
595 597 closing_pr=False,
596 598 renderer=renderer,
597 599 comment_type=comment_type,
598 600 resolves_comment_id=resolves_comment_id,
599 601 auth_user=auth_user,
600 602 extra_recipients=extra_recipients,
601 603 send_email=send_email
602 604 )
603 is_inline = bool(comment.f_path and comment.line_no)
605 is_inline = comment.is_inline
604 606
605 607 if allowed_to_change_status and status:
606 608 old_calculated_status = pull_request.calculated_review_status()
607 609 ChangesetStatusModel().set_status(
608 610 pull_request.target_repo.repo_id,
609 611 status,
610 612 apiuser.user_id,
611 613 comment,
612 614 pull_request=pull_request.pull_request_id
613 615 )
614 616 Session().flush()
615 617
616 618 Session().commit()
617 619
618 620 PullRequestModel().trigger_pull_request_hook(
619 621 pull_request, apiuser, 'comment',
620 622 data={'comment': comment})
621 623
622 624 if allowed_to_change_status and status:
623 625 # we now calculate the status of pull request, and based on that
624 626 # calculation we set the commits status
625 627 calculated_status = pull_request.calculated_review_status()
626 628 if old_calculated_status != calculated_status:
627 629 PullRequestModel().trigger_pull_request_hook(
628 630 pull_request, apiuser, 'review_status_change',
629 631 data={'status': calculated_status})
630 632
631 633 data = {
632 634 'pull_request_id': pull_request.pull_request_id,
633 635 'comment_id': comment.comment_id if comment else None,
634 636 'status': {'given': status, 'was_changed': status_change},
635 637 }
636 638
637 639 comment_broadcast_channel = channelstream.comment_channel(
638 640 db_repo_name, pull_request_obj=pull_request)
639 641
640 642 comment_data = data
641 643 comment_type = 'inline' if is_inline else 'general'
642 644 channelstream.comment_channelstream_push(
643 645 request, comment_broadcast_channel, apiuser,
644 646 _('posted a new {} comment').format(comment_type),
645 647 comment_data=comment_data)
646 648
647 649 return data
648 650
651 def _reviewers_validation(obj_list):
652 schema = ReviewerListSchema()
653 try:
654 reviewer_objects = schema.deserialize(obj_list)
655 except Invalid as err:
656 raise JSONRPCValidationError(colander_exc=err)
657
658 # validate users
659 for reviewer_object in reviewer_objects:
660 user = get_user_or_error(reviewer_object['username'])
661 reviewer_object['user_id'] = user.user_id
662 return reviewer_objects
663
649 664
650 665 @jsonrpc_method()
651 666 def create_pull_request(
652 667 request, apiuser, source_repo, target_repo, source_ref, target_ref,
653 668 owner=Optional(OAttr('apiuser')), title=Optional(''), description=Optional(''),
654 description_renderer=Optional(''), reviewers=Optional(None)):
669 description_renderer=Optional(''),
670 reviewers=Optional(None), observers=Optional(None)):
655 671 """
656 672 Creates a new pull request.
657 673
658 674 Accepts refs in the following formats:
659 675
660 676 * branch:<branch_name>:<sha>
661 677 * branch:<branch_name>
662 678 * bookmark:<bookmark_name>:<sha> (Mercurial only)
663 679 * bookmark:<bookmark_name> (Mercurial only)
664 680
665 681 :param apiuser: This is filled automatically from the |authtoken|.
666 682 :type apiuser: AuthUser
667 683 :param source_repo: Set the source repository name.
668 684 :type source_repo: str
669 685 :param target_repo: Set the target repository name.
670 686 :type target_repo: str
671 687 :param source_ref: Set the source ref name.
672 688 :type source_ref: str
673 689 :param target_ref: Set the target ref name.
674 690 :type target_ref: str
675 691 :param owner: user_id or username
676 692 :type owner: Optional(str)
677 693 :param title: Optionally Set the pull request title, it's generated otherwise
678 694 :type title: str
679 695 :param description: Set the pull request description.
680 696 :type description: Optional(str)
681 697 :type description_renderer: Optional(str)
682 698 :param description_renderer: Set pull request renderer for the description.
683 699 It should be 'rst', 'markdown' or 'plain'. If not give default
684 700 system renderer will be used
685 701 :param reviewers: Set the new pull request reviewers list.
686 702 Reviewer defined by review rules will be added automatically to the
687 703 defined list.
688 704 :type reviewers: Optional(list)
689 705 Accepts username strings or objects of the format:
690 706
691 707 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
708 :param observers: Set the new pull request observers list.
709 Reviewer defined by review rules will be added automatically to the
710 defined list. This feature is only available in RhodeCode EE
711 :type observers: Optional(list)
712 Accepts username strings or objects of the format:
713
714 [{'username': 'nick', 'reasons': ['original author']}]
692 715 """
693 716
694 717 source_db_repo = get_repo_or_error(source_repo)
695 718 target_db_repo = get_repo_or_error(target_repo)
696 719 if not has_superadmin_permission(apiuser):
697 720 _perms = ('repository.admin', 'repository.write', 'repository.read',)
698 721 validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms)
699 722
700 723 owner = validate_set_owner_permissions(apiuser, owner)
701 724
702 725 full_source_ref = resolve_ref_or_error(source_ref, source_db_repo)
703 726 full_target_ref = resolve_ref_or_error(target_ref, target_db_repo)
704 727
705 source_commit = get_commit_or_error(full_source_ref, source_db_repo)
706 target_commit = get_commit_or_error(full_target_ref, target_db_repo)
728 get_commit_or_error(full_source_ref, source_db_repo)
729 get_commit_or_error(full_target_ref, target_db_repo)
707 730
708 731 reviewer_objects = Optional.extract(reviewers) or []
732 observer_objects = Optional.extract(observers) or []
709 733
710 734 # serialize and validate passed in given reviewers
711 735 if reviewer_objects:
712 schema = ReviewerListSchema()
713 try:
714 reviewer_objects = schema.deserialize(reviewer_objects)
715 except Invalid as err:
716 raise JSONRPCValidationError(colander_exc=err)
736 reviewer_objects = _reviewers_validation(reviewer_objects)
717 737
718 # validate users
719 for reviewer_object in reviewer_objects:
720 user = get_user_or_error(reviewer_object['username'])
721 reviewer_object['user_id'] = user.user_id
738 if observer_objects:
739 observer_objects = _reviewers_validation(reviewer_objects)
722 740
723 741 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
724 742 PullRequestModel().get_reviewer_functions()
725 743
744 source_ref_obj = unicode_to_reference(full_source_ref)
745 target_ref_obj = unicode_to_reference(full_target_ref)
746
726 747 # recalculate reviewers logic, to make sure we can validate this
727 748 default_reviewers_data = get_default_reviewers_data(
728 749 owner,
729 source_repo,
730 Reference(source_type, source_name, source_commit_id),
731 target_repo,
732 Reference(target_type, target_name, target_commit_id)
750 source_db_repo,
751 source_ref_obj,
752 target_db_repo,
753 target_ref_obj,
733 754 )
734 755
735 # now MERGE our given with the calculated
736 reviewer_objects = default_reviewers_data['reviewers'] + reviewer_objects
756 # now MERGE our given with the calculated from the default rules
757 just_reviewers = [
758 x for x in default_reviewers_data['reviewers']
759 if x['role'] == PullRequestReviewers.ROLE_REVIEWER]
760 reviewer_objects = just_reviewers + reviewer_objects
737 761
738 762 try:
739 763 reviewers = validate_default_reviewers(
740 764 reviewer_objects, default_reviewers_data)
741 765 except ValueError as e:
742 766 raise JSONRPCError('Reviewers Validation: {}'.format(e))
743 767
768 # now MERGE our given with the calculated from the default rules
769 just_observers = [
770 x for x in default_reviewers_data['reviewers']
771 if x['role'] == PullRequestReviewers.ROLE_OBSERVER]
772 observer_objects = just_observers + observer_objects
773
774 try:
775 observers = validate_observers(
776 observer_objects, default_reviewers_data)
777 except ValueError as e:
778 raise JSONRPCError('Observer Validation: {}'.format(e))
779
744 780 title = Optional.extract(title)
745 781 if not title:
746 title_source_ref = source_ref.split(':', 2)[1]
782 title_source_ref = source_ref_obj.name
747 783 title = PullRequestModel().generate_pullrequest_title(
748 784 source=source_repo,
749 785 source_ref=title_source_ref,
750 786 target=target_repo
751 787 )
752 788
753 789 diff_info = default_reviewers_data['diff_info']
754 790 common_ancestor_id = diff_info['ancestor']
755 commits = diff_info['commits']
791 # NOTE(marcink): reversed is consistent with how we open it in the WEB interface
792 commits = [commit['commit_id'] for commit in reversed(diff_info['commits'])]
756 793
757 794 if not common_ancestor_id:
758 raise JSONRPCError('no common ancestor found')
795 raise JSONRPCError('no common ancestor found between specified references')
759 796
760 797 if not commits:
761 raise JSONRPCError('no commits found')
762
763 # NOTE(marcink): reversed is consistent with how we open it in the WEB interface
764 revisions = [commit.raw_id for commit in reversed(commits)]
798 raise JSONRPCError('no commits found for merge between specified references')
765 799
766 800 # recalculate target ref based on ancestor
767 target_ref_type, target_ref_name, __ = full_target_ref.split(':')
768 full_target_ref = ':'.join((target_ref_type, target_ref_name, common_ancestor_id))
801 full_target_ref = ':'.join((target_ref_obj.type, target_ref_obj.name, common_ancestor_id))
769 802
770 803 # fetch renderer, if set fallback to plain in case of PR
771 804 rc_config = SettingsModel().get_all_settings()
772 805 default_system_renderer = rc_config.get('rhodecode_markup_renderer', 'plain')
773 806 description = Optional.extract(description)
774 807 description_renderer = Optional.extract(description_renderer) or default_system_renderer
775 808
776 809 pull_request = PullRequestModel().create(
777 810 created_by=owner.user_id,
778 811 source_repo=source_repo,
779 812 source_ref=full_source_ref,
780 813 target_repo=target_repo,
781 814 target_ref=full_target_ref,
782 815 common_ancestor_id=common_ancestor_id,
783 revisions=revisions,
816 revisions=commits,
784 817 reviewers=reviewers,
818 observers=observers,
785 819 title=title,
786 820 description=description,
787 821 description_renderer=description_renderer,
788 822 reviewer_data=default_reviewers_data,
789 823 auth_user=apiuser
790 824 )
791 825
792 826 Session().commit()
793 827 data = {
794 828 'msg': 'Created new pull request `{}`'.format(title),
795 829 'pull_request_id': pull_request.pull_request_id,
796 830 }
797 831 return data
798 832
799 833
800 834 @jsonrpc_method()
801 835 def update_pull_request(
802 836 request, apiuser, pullrequestid, repoid=Optional(None),
803 837 title=Optional(''), description=Optional(''), description_renderer=Optional(''),
804 reviewers=Optional(None), update_commits=Optional(None)):
838 reviewers=Optional(None), observers=Optional(None), update_commits=Optional(None)):
805 839 """
806 840 Updates a pull request.
807 841
808 842 :param apiuser: This is filled automatically from the |authtoken|.
809 843 :type apiuser: AuthUser
810 844 :param repoid: Optional repository name or repository ID.
811 845 :type repoid: str or int
812 846 :param pullrequestid: The pull request ID.
813 847 :type pullrequestid: int
814 848 :param title: Set the pull request title.
815 849 :type title: str
816 850 :param description: Update pull request description.
817 851 :type description: Optional(str)
818 852 :type description_renderer: Optional(str)
819 853 :param description_renderer: Update pull request renderer for the description.
820 854 It should be 'rst', 'markdown' or 'plain'
821 855 :param reviewers: Update pull request reviewers list with new value.
822 856 :type reviewers: Optional(list)
823 857 Accepts username strings or objects of the format:
824 858
825 859 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
860 :param observers: Update pull request observers list with new value.
861 :type observers: Optional(list)
862 Accepts username strings or objects of the format:
826 863
864 [{'username': 'nick', 'reasons': ['should be aware about this PR']}]
827 865 :param update_commits: Trigger update of commits for this pull request
828 866 :type: update_commits: Optional(bool)
829 867
830 868 Example output:
831 869
832 870 .. code-block:: bash
833 871
834 872 id : <id_given_in_input>
835 873 result : {
836 874 "msg": "Updated pull request `63`",
837 875 "pull_request": <pull_request_object>,
838 876 "updated_reviewers": {
839 877 "added": [
840 878 "username"
841 879 ],
842 880 "removed": []
843 881 },
882 "updated_observers": {
883 "added": [
884 "username"
885 ],
886 "removed": []
887 },
844 888 "updated_commits": {
845 889 "added": [
846 890 "<sha1_hash>"
847 891 ],
848 892 "common": [
849 893 "<sha1_hash>",
850 894 "<sha1_hash>",
851 895 ],
852 896 "removed": []
853 897 }
854 898 }
855 899 error : null
856 900 """
857 901
858 902 pull_request = get_pull_request_or_error(pullrequestid)
859 903 if Optional.extract(repoid):
860 904 repo = get_repo_or_error(repoid)
861 905 else:
862 906 repo = pull_request.target_repo
863 907
864 908 if not PullRequestModel().check_user_update(
865 909 pull_request, apiuser, api=True):
866 910 raise JSONRPCError(
867 911 'pull request `%s` update failed, no permission to update.' % (
868 912 pullrequestid,))
869 913 if pull_request.is_closed():
870 914 raise JSONRPCError(
871 915 'pull request `%s` update failed, pull request is closed' % (
872 916 pullrequestid,))
873 917
874 918 reviewer_objects = Optional.extract(reviewers) or []
875
876 if reviewer_objects:
877 schema = ReviewerListSchema()
878 try:
879 reviewer_objects = schema.deserialize(reviewer_objects)
880 except Invalid as err:
881 raise JSONRPCValidationError(colander_exc=err)
882
883 # validate users
884 for reviewer_object in reviewer_objects:
885 user = get_user_or_error(reviewer_object['username'])
886 reviewer_object['user_id'] = user.user_id
887
888 get_default_reviewers_data, get_validated_reviewers, validate_observers = \
889 PullRequestModel().get_reviewer_functions()
890
891 # re-use stored rules
892 reviewer_rules = pull_request.reviewer_data
893 try:
894 reviewers = get_validated_reviewers(reviewer_objects, reviewer_rules)
895 except ValueError as e:
896 raise JSONRPCError('Reviewers Validation: {}'.format(e))
897 else:
898 reviewers = []
919 observer_objects = Optional.extract(observers) or []
899 920
900 921 title = Optional.extract(title)
901 922 description = Optional.extract(description)
902 923 description_renderer = Optional.extract(description_renderer)
903 924
904 925 # Update title/description
905 926 title_changed = False
906 927 if title or description:
907 928 PullRequestModel().edit(
908 929 pull_request,
909 930 title or pull_request.title,
910 931 description or pull_request.description,
911 932 description_renderer or pull_request.description_renderer,
912 933 apiuser)
913 934 Session().commit()
914 935 title_changed = True
915 936
916 937 commit_changes = {"added": [], "common": [], "removed": []}
917 938
918 939 # Update commits
919 940 commits_changed = False
920 941 if str2bool(Optional.extract(update_commits)):
921 942
922 943 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
923 944 raise JSONRPCError(
924 945 'Operation forbidden because pull request is in state {}, '
925 946 'only state {} is allowed.'.format(
926 947 pull_request.pull_request_state, PullRequest.STATE_CREATED))
927 948
928 949 with pull_request.set_state(PullRequest.STATE_UPDATING):
929 950 if PullRequestModel().has_valid_update_type(pull_request):
930 951 db_user = apiuser.get_instance()
931 952 update_response = PullRequestModel().update_commits(
932 953 pull_request, db_user)
933 954 commit_changes = update_response.changes or commit_changes
934 955 Session().commit()
935 956 commits_changed = True
936 957
937 958 # Update reviewers
959 # serialize and validate passed in given reviewers
960 if reviewer_objects:
961 reviewer_objects = _reviewers_validation(reviewer_objects)
962
963 if observer_objects:
964 observer_objects = _reviewers_validation(reviewer_objects)
965
966 # re-use stored rules
967 default_reviewers_data = pull_request.reviewer_data
968
969 __, validate_default_reviewers, validate_observers = \
970 PullRequestModel().get_reviewer_functions()
971
972 if reviewer_objects:
973 try:
974 reviewers = validate_default_reviewers(reviewer_objects, default_reviewers_data)
975 except ValueError as e:
976 raise JSONRPCError('Reviewers Validation: {}'.format(e))
977 else:
978 reviewers = []
979
980 if observer_objects:
981 try:
982 observers = validate_default_reviewers(reviewer_objects, default_reviewers_data)
983 except ValueError as e:
984 raise JSONRPCError('Observer Validation: {}'.format(e))
985 else:
986 observers = []
987
938 988 reviewers_changed = False
939 989 reviewers_changes = {"added": [], "removed": []}
940 990 if reviewers:
941 991 old_calculated_status = pull_request.calculated_review_status()
942 992 added_reviewers, removed_reviewers = \
943 PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)
993 PullRequestModel().update_reviewers(pull_request, reviewers, apiuser.get_instance())
944 994
945 995 reviewers_changes['added'] = sorted(
946 996 [get_user_or_error(n).username for n in added_reviewers])
947 997 reviewers_changes['removed'] = sorted(
948 998 [get_user_or_error(n).username for n in removed_reviewers])
949 999 Session().commit()
950 1000
951 1001 # trigger status changed if change in reviewers changes the status
952 1002 calculated_status = pull_request.calculated_review_status()
953 1003 if old_calculated_status != calculated_status:
954 1004 PullRequestModel().trigger_pull_request_hook(
955 1005 pull_request, apiuser, 'review_status_change',
956 1006 data={'status': calculated_status})
957 1007 reviewers_changed = True
958 1008
959 1009 observers_changed = False
1010 observers_changes = {"added": [], "removed": []}
1011 if observers:
1012 added_observers, removed_observers = \
1013 PullRequestModel().update_observers(pull_request, observers, apiuser.get_instance())
1014
1015 observers_changes['added'] = sorted(
1016 [get_user_or_error(n).username for n in added_observers])
1017 observers_changes['removed'] = sorted(
1018 [get_user_or_error(n).username for n in removed_observers])
1019 Session().commit()
1020
1021 reviewers_changed = True
960 1022
961 1023 # push changed to channelstream
962 1024 if commits_changed or reviewers_changed or observers_changed:
963 1025 pr_broadcast_channel = channelstream.pr_channel(pull_request)
964 1026 msg = 'Pull request was updated.'
965 1027 channelstream.pr_update_channelstream_push(
966 1028 request, pr_broadcast_channel, apiuser, msg)
967 1029
968 1030 data = {
969 'msg': 'Updated pull request `{}`'.format(
970 pull_request.pull_request_id),
1031 'msg': 'Updated pull request `{}`'.format(pull_request.pull_request_id),
971 1032 'pull_request': pull_request.get_api_data(),
972 1033 'updated_commits': commit_changes,
973 'updated_reviewers': reviewers_changes
1034 'updated_reviewers': reviewers_changes,
1035 'updated_observers': observers_changes,
974 1036 }
975 1037
976 1038 return data
977 1039
978 1040
979 1041 @jsonrpc_method()
980 1042 def close_pull_request(
981 1043 request, apiuser, pullrequestid, repoid=Optional(None),
982 1044 userid=Optional(OAttr('apiuser')), message=Optional('')):
983 1045 """
984 1046 Close the pull request specified by `pullrequestid`.
985 1047
986 1048 :param apiuser: This is filled automatically from the |authtoken|.
987 1049 :type apiuser: AuthUser
988 1050 :param repoid: Repository name or repository ID to which the pull
989 1051 request belongs.
990 1052 :type repoid: str or int
991 1053 :param pullrequestid: ID of the pull request to be closed.
992 1054 :type pullrequestid: int
993 1055 :param userid: Close the pull request as this user.
994 1056 :type userid: Optional(str or int)
995 1057 :param message: Optional message to close the Pull Request with. If not
996 1058 specified it will be generated automatically.
997 1059 :type message: Optional(str)
998 1060
999 1061 Example output:
1000 1062
1001 1063 .. code-block:: bash
1002 1064
1003 1065 "id": <id_given_in_input>,
1004 1066 "result": {
1005 1067 "pull_request_id": "<int>",
1006 1068 "close_status": "<str:status_lbl>,
1007 1069 "closed": "<bool>"
1008 1070 },
1009 1071 "error": null
1010 1072
1011 1073 """
1012 1074 _ = request.translate
1013 1075
1014 1076 pull_request = get_pull_request_or_error(pullrequestid)
1015 1077 if Optional.extract(repoid):
1016 1078 repo = get_repo_or_error(repoid)
1017 1079 else:
1018 1080 repo = pull_request.target_repo
1019 1081
1020 1082 is_repo_admin = HasRepoPermissionAnyApi('repository.admin')(
1021 1083 user=apiuser, repo_name=repo.repo_name)
1022 1084 if not isinstance(userid, Optional):
1023 1085 if has_superadmin_permission(apiuser) or is_repo_admin:
1024 1086 apiuser = get_user_or_error(userid)
1025 1087 else:
1026 1088 raise JSONRPCError('userid is not the same as your user')
1027 1089
1028 1090 if pull_request.is_closed():
1029 1091 raise JSONRPCError(
1030 1092 'pull request `%s` is already closed' % (pullrequestid,))
1031 1093
1032 1094 # only owner or admin or person with write permissions
1033 1095 allowed_to_close = PullRequestModel().check_user_update(
1034 1096 pull_request, apiuser, api=True)
1035 1097
1036 1098 if not allowed_to_close:
1037 1099 raise JSONRPCError(
1038 1100 'pull request `%s` close failed, no permission to close.' % (
1039 1101 pullrequestid,))
1040 1102
1041 1103 # message we're using to close the PR, else it's automatically generated
1042 1104 message = Optional.extract(message)
1043 1105
1044 1106 # finally close the PR, with proper message comment
1045 1107 comment, status = PullRequestModel().close_pull_request_with_comment(
1046 1108 pull_request, apiuser, repo, message=message, auth_user=apiuser)
1047 1109 status_lbl = ChangesetStatus.get_status_lbl(status)
1048 1110
1049 1111 Session().commit()
1050 1112
1051 1113 data = {
1052 1114 'pull_request_id': pull_request.pull_request_id,
1053 1115 'close_status': status_lbl,
1054 1116 'closed': True,
1055 1117 }
1056 1118 return data
@@ -1,2523 +1,2523 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import time
23 23
24 24 import rhodecode
25 25 from rhodecode.api import (
26 26 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
27 27 from rhodecode.api.utils import (
28 28 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
29 29 get_user_group_or_error, get_user_or_error, validate_repo_permissions,
30 30 get_perm_or_error, parse_args, get_origin, build_commit_data,
31 31 validate_set_owner_permissions)
32 32 from rhodecode.lib import audit_logger, rc_cache, channelstream
33 33 from rhodecode.lib import repo_maintenance
34 34 from rhodecode.lib.auth import (
35 35 HasPermissionAnyApi, HasUserGroupPermissionAnyApi,
36 36 HasRepoPermissionAnyApi)
37 37 from rhodecode.lib.celerylib.utils import get_task_id
38 38 from rhodecode.lib.utils2 import (
39 39 str2bool, time_to_datetime, safe_str, safe_int, safe_unicode)
40 40 from rhodecode.lib.ext_json import json
41 41 from rhodecode.lib.exceptions import (
42 42 StatusChangeOnClosedPullRequestError, CommentVersionMismatch)
43 43 from rhodecode.lib.vcs import RepositoryError
44 44 from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
45 45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 46 from rhodecode.model.comment import CommentsModel
47 47 from rhodecode.model.db import (
48 48 Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
49 49 ChangesetComment)
50 50 from rhodecode.model.permission import PermissionModel
51 51 from rhodecode.model.pull_request import PullRequestModel
52 52 from rhodecode.model.repo import RepoModel
53 53 from rhodecode.model.scm import ScmModel, RepoList
54 54 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
55 55 from rhodecode.model import validation_schema
56 56 from rhodecode.model.validation_schema.schemas import repo_schema
57 57
58 58 log = logging.getLogger(__name__)
59 59
60 60
61 61 @jsonrpc_method()
62 62 def get_repo(request, apiuser, repoid, cache=Optional(True)):
63 63 """
64 64 Gets an existing repository by its name or repository_id.
65 65
66 66 The members section so the output returns users groups or users
67 67 associated with that repository.
68 68
69 69 This command can only be run using an |authtoken| with admin rights,
70 70 or users with at least read rights to the |repo|.
71 71
72 72 :param apiuser: This is filled automatically from the |authtoken|.
73 73 :type apiuser: AuthUser
74 74 :param repoid: The repository name or repository id.
75 75 :type repoid: str or int
76 76 :param cache: use the cached value for last changeset
77 77 :type: cache: Optional(bool)
78 78
79 79 Example output:
80 80
81 81 .. code-block:: bash
82 82
83 83 {
84 84 "error": null,
85 85 "id": <repo_id>,
86 86 "result": {
87 87 "clone_uri": null,
88 88 "created_on": "timestamp",
89 89 "description": "repo description",
90 90 "enable_downloads": false,
91 91 "enable_locking": false,
92 92 "enable_statistics": false,
93 93 "followers": [
94 94 {
95 95 "active": true,
96 96 "admin": false,
97 97 "api_key": "****************************************",
98 98 "api_keys": [
99 99 "****************************************"
100 100 ],
101 101 "email": "user@example.com",
102 102 "emails": [
103 103 "user@example.com"
104 104 ],
105 105 "extern_name": "rhodecode",
106 106 "extern_type": "rhodecode",
107 107 "firstname": "username",
108 108 "ip_addresses": [],
109 109 "language": null,
110 110 "last_login": "2015-09-16T17:16:35.854",
111 111 "lastname": "surname",
112 112 "user_id": <user_id>,
113 113 "username": "name"
114 114 }
115 115 ],
116 116 "fork_of": "parent-repo",
117 117 "landing_rev": [
118 118 "rev",
119 119 "tip"
120 120 ],
121 121 "last_changeset": {
122 122 "author": "User <user@example.com>",
123 123 "branch": "default",
124 124 "date": "timestamp",
125 125 "message": "last commit message",
126 126 "parents": [
127 127 {
128 128 "raw_id": "commit-id"
129 129 }
130 130 ],
131 131 "raw_id": "commit-id",
132 132 "revision": <revision number>,
133 133 "short_id": "short id"
134 134 },
135 135 "lock_reason": null,
136 136 "locked_by": null,
137 137 "locked_date": null,
138 138 "owner": "owner-name",
139 139 "permissions": [
140 140 {
141 141 "name": "super-admin-name",
142 142 "origin": "super-admin",
143 143 "permission": "repository.admin",
144 144 "type": "user"
145 145 },
146 146 {
147 147 "name": "owner-name",
148 148 "origin": "owner",
149 149 "permission": "repository.admin",
150 150 "type": "user"
151 151 },
152 152 {
153 153 "name": "user-group-name",
154 154 "origin": "permission",
155 155 "permission": "repository.write",
156 156 "type": "user_group"
157 157 }
158 158 ],
159 159 "private": true,
160 160 "repo_id": 676,
161 161 "repo_name": "user-group/repo-name",
162 162 "repo_type": "hg"
163 163 }
164 164 }
165 165 """
166 166
167 167 repo = get_repo_or_error(repoid)
168 168 cache = Optional.extract(cache)
169 169
170 170 include_secrets = False
171 171 if has_superadmin_permission(apiuser):
172 172 include_secrets = True
173 173 else:
174 174 # check if we have at least read permission for this repo !
175 175 _perms = (
176 176 'repository.admin', 'repository.write', 'repository.read',)
177 177 validate_repo_permissions(apiuser, repoid, repo, _perms)
178 178
179 179 permissions = []
180 180 for _user in repo.permissions():
181 181 user_data = {
182 182 'name': _user.username,
183 183 'permission': _user.permission,
184 184 'origin': get_origin(_user),
185 185 'type': "user",
186 186 }
187 187 permissions.append(user_data)
188 188
189 189 for _user_group in repo.permission_user_groups():
190 190 user_group_data = {
191 191 'name': _user_group.users_group_name,
192 192 'permission': _user_group.permission,
193 193 'origin': get_origin(_user_group),
194 194 'type': "user_group",
195 195 }
196 196 permissions.append(user_group_data)
197 197
198 198 following_users = [
199 199 user.user.get_api_data(include_secrets=include_secrets)
200 200 for user in repo.followers]
201 201
202 202 if not cache:
203 203 repo.update_commit_cache()
204 204 data = repo.get_api_data(include_secrets=include_secrets)
205 205 data['permissions'] = permissions
206 206 data['followers'] = following_users
207 207 return data
208 208
209 209
210 210 @jsonrpc_method()
211 211 def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
212 212 """
213 213 Lists all existing repositories.
214 214
215 215 This command can only be run using an |authtoken| with admin rights,
216 216 or users with at least read rights to |repos|.
217 217
218 218 :param apiuser: This is filled automatically from the |authtoken|.
219 219 :type apiuser: AuthUser
220 220 :param root: specify root repository group to fetch repositories.
221 221 filters the returned repositories to be members of given root group.
222 222 :type root: Optional(None)
223 223 :param traverse: traverse given root into subrepositories. With this flag
224 224 set to False, it will only return top-level repositories from `root`.
225 225 if root is empty it will return just top-level repositories.
226 226 :type traverse: Optional(True)
227 227
228 228
229 229 Example output:
230 230
231 231 .. code-block:: bash
232 232
233 233 id : <id_given_in_input>
234 234 result: [
235 235 {
236 236 "repo_id" : "<repo_id>",
237 237 "repo_name" : "<reponame>"
238 238 "repo_type" : "<repo_type>",
239 239 "clone_uri" : "<clone_uri>",
240 240 "private": : "<bool>",
241 241 "created_on" : "<datetimecreated>",
242 242 "description" : "<description>",
243 243 "landing_rev": "<landing_rev>",
244 244 "owner": "<repo_owner>",
245 245 "fork_of": "<name_of_fork_parent>",
246 246 "enable_downloads": "<bool>",
247 247 "enable_locking": "<bool>",
248 248 "enable_statistics": "<bool>",
249 249 },
250 250 ...
251 251 ]
252 252 error: null
253 253 """
254 254
255 255 include_secrets = has_superadmin_permission(apiuser)
256 256 _perms = ('repository.read', 'repository.write', 'repository.admin',)
257 257 extras = {'user': apiuser}
258 258
259 259 root = Optional.extract(root)
260 260 traverse = Optional.extract(traverse, binary=True)
261 261
262 262 if root:
263 263 # verify parent existance, if it's empty return an error
264 264 parent = RepoGroup.get_by_group_name(root)
265 265 if not parent:
266 266 raise JSONRPCError(
267 267 'Root repository group `{}` does not exist'.format(root))
268 268
269 269 if traverse:
270 270 repos = RepoModel().get_repos_for_root(root=root, traverse=traverse)
271 271 else:
272 272 repos = RepoModel().get_repos_for_root(root=parent)
273 273 else:
274 274 if traverse:
275 275 repos = RepoModel().get_all()
276 276 else:
277 277 # return just top-level
278 278 repos = RepoModel().get_repos_for_root(root=None)
279 279
280 280 repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras)
281 281 return [repo.get_api_data(include_secrets=include_secrets)
282 282 for repo in repo_list]
283 283
284 284
285 285 @jsonrpc_method()
286 286 def get_repo_changeset(request, apiuser, repoid, revision,
287 287 details=Optional('basic')):
288 288 """
289 289 Returns information about a changeset.
290 290
291 291 Additionally parameters define the amount of details returned by
292 292 this function.
293 293
294 294 This command can only be run using an |authtoken| with admin rights,
295 295 or users with at least read rights to the |repo|.
296 296
297 297 :param apiuser: This is filled automatically from the |authtoken|.
298 298 :type apiuser: AuthUser
299 299 :param repoid: The repository name or repository id
300 300 :type repoid: str or int
301 301 :param revision: revision for which listing should be done
302 302 :type revision: str
303 303 :param details: details can be 'basic|extended|full' full gives diff
304 304 info details like the diff itself, and number of changed files etc.
305 305 :type details: Optional(str)
306 306
307 307 """
308 308 repo = get_repo_or_error(repoid)
309 309 if not has_superadmin_permission(apiuser):
310 310 _perms = ('repository.admin', 'repository.write', 'repository.read',)
311 311 validate_repo_permissions(apiuser, repoid, repo, _perms)
312 312
313 313 changes_details = Optional.extract(details)
314 314 _changes_details_types = ['basic', 'extended', 'full']
315 315 if changes_details not in _changes_details_types:
316 316 raise JSONRPCError(
317 317 'ret_type must be one of %s' % (
318 318 ','.join(_changes_details_types)))
319 319
320 320 pre_load = ['author', 'branch', 'date', 'message', 'parents',
321 321 'status', '_commit', '_file_paths']
322 322
323 323 try:
324 324 cs = repo.get_commit(commit_id=revision, pre_load=pre_load)
325 325 except TypeError as e:
326 326 raise JSONRPCError(safe_str(e))
327 327 _cs_json = cs.__json__()
328 328 _cs_json['diff'] = build_commit_data(cs, changes_details)
329 329 if changes_details == 'full':
330 330 _cs_json['refs'] = cs._get_refs()
331 331 return _cs_json
332 332
333 333
334 334 @jsonrpc_method()
335 335 def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
336 336 details=Optional('basic')):
337 337 """
338 338 Returns a set of commits limited by the number starting
339 339 from the `start_rev` option.
340 340
341 341 Additional parameters define the amount of details returned by this
342 342 function.
343 343
344 344 This command can only be run using an |authtoken| with admin rights,
345 345 or users with at least read rights to |repos|.
346 346
347 347 :param apiuser: This is filled automatically from the |authtoken|.
348 348 :type apiuser: AuthUser
349 349 :param repoid: The repository name or repository ID.
350 350 :type repoid: str or int
351 351 :param start_rev: The starting revision from where to get changesets.
352 352 :type start_rev: str
353 353 :param limit: Limit the number of commits to this amount
354 354 :type limit: str or int
355 355 :param details: Set the level of detail returned. Valid option are:
356 356 ``basic``, ``extended`` and ``full``.
357 357 :type details: Optional(str)
358 358
359 359 .. note::
360 360
361 361 Setting the parameter `details` to the value ``full`` is extensive
362 362 and returns details like the diff itself, and the number
363 363 of changed files.
364 364
365 365 """
366 366 repo = get_repo_or_error(repoid)
367 367 if not has_superadmin_permission(apiuser):
368 368 _perms = ('repository.admin', 'repository.write', 'repository.read',)
369 369 validate_repo_permissions(apiuser, repoid, repo, _perms)
370 370
371 371 changes_details = Optional.extract(details)
372 372 _changes_details_types = ['basic', 'extended', 'full']
373 373 if changes_details not in _changes_details_types:
374 374 raise JSONRPCError(
375 375 'ret_type must be one of %s' % (
376 376 ','.join(_changes_details_types)))
377 377
378 378 limit = int(limit)
379 379 pre_load = ['author', 'branch', 'date', 'message', 'parents',
380 380 'status', '_commit', '_file_paths']
381 381
382 382 vcs_repo = repo.scm_instance()
383 383 # SVN needs a special case to distinguish its index and commit id
384 384 if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
385 385 start_rev = vcs_repo.commit_ids[0]
386 386
387 387 try:
388 388 commits = vcs_repo.get_commits(
389 389 start_id=start_rev, pre_load=pre_load, translate_tags=False)
390 390 except TypeError as e:
391 391 raise JSONRPCError(safe_str(e))
392 392 except Exception:
393 393 log.exception('Fetching of commits failed')
394 394 raise JSONRPCError('Error occurred during commit fetching')
395 395
396 396 ret = []
397 397 for cnt, commit in enumerate(commits):
398 398 if cnt >= limit != -1:
399 399 break
400 400 _cs_json = commit.__json__()
401 401 _cs_json['diff'] = build_commit_data(commit, changes_details)
402 402 if changes_details == 'full':
403 403 _cs_json['refs'] = {
404 404 'branches': [commit.branch],
405 405 'bookmarks': getattr(commit, 'bookmarks', []),
406 406 'tags': commit.tags
407 407 }
408 408 ret.append(_cs_json)
409 409 return ret
410 410
411 411
412 412 @jsonrpc_method()
413 413 def get_repo_nodes(request, apiuser, repoid, revision, root_path,
414 414 ret_type=Optional('all'), details=Optional('basic'),
415 415 max_file_bytes=Optional(None)):
416 416 """
417 417 Returns a list of nodes and children in a flat list for a given
418 418 path at given revision.
419 419
420 420 It's possible to specify ret_type to show only `files` or `dirs`.
421 421
422 422 This command can only be run using an |authtoken| with admin rights,
423 423 or users with at least read rights to |repos|.
424 424
425 425 :param apiuser: This is filled automatically from the |authtoken|.
426 426 :type apiuser: AuthUser
427 427 :param repoid: The repository name or repository ID.
428 428 :type repoid: str or int
429 429 :param revision: The revision for which listing should be done.
430 430 :type revision: str
431 431 :param root_path: The path from which to start displaying.
432 432 :type root_path: str
433 433 :param ret_type: Set the return type. Valid options are
434 434 ``all`` (default), ``files`` and ``dirs``.
435 435 :type ret_type: Optional(str)
436 436 :param details: Returns extended information about nodes, such as
437 437 md5, binary, and or content.
438 438 The valid options are ``basic`` and ``full``.
439 439 :type details: Optional(str)
440 440 :param max_file_bytes: Only return file content under this file size bytes
441 441 :type details: Optional(int)
442 442
443 443 Example output:
444 444
445 445 .. code-block:: bash
446 446
447 447 id : <id_given_in_input>
448 448 result: [
449 449 {
450 450 "binary": false,
451 451 "content": "File line",
452 452 "extension": "md",
453 453 "lines": 2,
454 454 "md5": "059fa5d29b19c0657e384749480f6422",
455 455 "mimetype": "text/x-minidsrc",
456 456 "name": "file.md",
457 457 "size": 580,
458 458 "type": "file"
459 459 },
460 460 ...
461 461 ]
462 462 error: null
463 463 """
464 464
465 465 repo = get_repo_or_error(repoid)
466 466 if not has_superadmin_permission(apiuser):
467 467 _perms = ('repository.admin', 'repository.write', 'repository.read',)
468 468 validate_repo_permissions(apiuser, repoid, repo, _perms)
469 469
470 470 ret_type = Optional.extract(ret_type)
471 471 details = Optional.extract(details)
472 472 _extended_types = ['basic', 'full']
473 473 if details not in _extended_types:
474 474 raise JSONRPCError('ret_type must be one of %s' % (','.join(_extended_types)))
475 475 extended_info = False
476 476 content = False
477 477 if details == 'basic':
478 478 extended_info = True
479 479
480 480 if details == 'full':
481 481 extended_info = content = True
482 482
483 483 _map = {}
484 484 try:
485 485 # check if repo is not empty by any chance, skip quicker if it is.
486 486 _scm = repo.scm_instance()
487 487 if _scm.is_empty():
488 488 return []
489 489
490 490 _d, _f = ScmModel().get_nodes(
491 491 repo, revision, root_path, flat=False,
492 492 extended_info=extended_info, content=content,
493 493 max_file_bytes=max_file_bytes)
494 494 _map = {
495 495 'all': _d + _f,
496 496 'files': _f,
497 497 'dirs': _d,
498 498 }
499 499 return _map[ret_type]
500 500 except KeyError:
501 501 raise JSONRPCError(
502 502 'ret_type must be one of %s' % (','.join(sorted(_map.keys()))))
503 503 except Exception:
504 504 log.exception("Exception occurred while trying to get repo nodes")
505 505 raise JSONRPCError(
506 506 'failed to get repo: `%s` nodes' % repo.repo_name
507 507 )
508 508
509 509
@jsonrpc_method()
def get_repo_file(request, apiuser, repoid, commit_id, file_path,
                  max_file_bytes=Optional(None), details=Optional('basic'),
                  cache=Optional(True)):
    """
    Returns a single file from repository at given revision.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param commit_id: The revision for which listing should be done.
    :type commit_id: str
    :param file_path: The path from which to start displaying.
    :type file_path: str
    :param details: Returns different set of information about nodes.
        The valid options are ``minimal`` ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content under this file size bytes
    :type max_file_bytes: Optional(int)
    :param cache: Use internal caches for fetching files. If disabled fetching
        files is slower but more memory efficient
    :type cache: Optional(bool)

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
        "binary": false,
        "extension": "py",
        "lines": 35,
        "content": "....",
        "md5": "76318336366b0f17ee249e11b0c99c41",
        "mimetype": "text/x-python",
        "name": "python.py",
        "size": 817,
        "type": "file",
      }
      error: null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    cache = Optional.extract(cache, binary=True)
    details = Optional.extract(details)
    # 'minimal+search' is accepted in addition to the documented options
    _extended_types = ['minimal', 'minimal+search', 'basic', 'full']
    if details not in _extended_types:
        # FIX: the format string takes two values; previously only one was
        # supplied (and `details` leaked out as a stray positional argument
        # to JSONRPCError), so this line raised TypeError instead of the
        # intended JSON-RPC validation error.
        raise JSONRPCError(
            'ret_type must be one of %s, got %s' % (
                ','.join(_extended_types), details))

    # map the `details` level onto the two flags get_node() understands
    extended_info = False
    content = False

    if details == 'minimal':
        extended_info = False

    elif details == 'basic':
        extended_info = True

    elif details == 'full':
        extended_info = content = True

    file_path = safe_unicode(file_path)
    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return None

        node = ScmModel().get_node(
            repo, commit_id, file_path, extended_info=extended_info,
            content=content, max_file_bytes=max_file_bytes, cache=cache)
    except NodeDoesNotExistError:
        raise JSONRPCError(u'There is no file in repo: `{}` at path `{}` for commit: `{}`'.format(
            repo.repo_name, file_path, commit_id))
    except Exception:
        log.exception(u"Exception occurred while trying to get repo %s file",
                      repo.repo_name)
        raise JSONRPCError(u'failed to get repo: `{}` file at path {}'.format(
            repo.repo_name, file_path))

    return node
599 599
600 600
@jsonrpc_method()
def get_repo_fts_tree(request, apiuser, repoid, commit_id, root_path):
    """
    Returns a list of tree nodes for path at given revision. This api is built
    strictly for usage in full text search building, and shouldn't be consumed
    by regular API clients.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param commit_id: The revision (or a symbolic ref such as ``master``)
        to build the tree from.
    :type commit_id: str
    :param root_path: The path at which the tree walk starts.
    :type root_path: str
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    repo_id = repo.repo_id
    cache_seconds = safe_int(rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
    cache_on = cache_seconds > 0

    cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
    # NOTE(review): `region` is created but never used below -- the compute
    # function runs uncached every time; presumably kept for parity with a
    # cached variant. Confirm intent before removing.
    region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

    def compute_fts_tree(cache_ver, repo_id, commit_id, root_path):
        # `cache_ver` would only participate in a cache key; the computation
        # itself ignores it.
        return ScmModel().get_fts_data(repo_id, commit_id, root_path)

    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return []
    except RepositoryError:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)

    try:
        # we need to resolve commit_id to a FULL sha for cache to work correctly.
        # sending 'master' is a pointer that needs to be translated to current commit.
        commit_id = _scm.get_commit(commit_id=commit_id).raw_id
        log.debug(
            'Computing FTS REPO TREE for repo_id %s commit_id `%s` '
            'with caching: %s[TTL: %ss]' % (
                repo_id, commit_id, cache_on, cache_seconds or 0))

        tree_files = compute_fts_tree(rc_cache.FILE_TREE_CACHE_VER, repo_id, commit_id, root_path)
        return tree_files

    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)
651 651
652 652
@jsonrpc_method()
def get_repo_refs(request, apiuser, repoid):
    """
    Returns a dictionary of the current references of a repository:
    bookmarks, branches, closed branches and tags.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      "result": {
        "bookmarks": {
          "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
          "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
        },
        "branches": {
          "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
          "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
        },
        "branches_closed": {},
        "tags": {
          "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
          "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
          "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
          "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17",
        }
      }
      error: null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # any read-or-higher permission on the repo is enough
        read_or_higher = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, read_or_higher)

    try:
        return repo.scm_instance().refs()
    except Exception:
        log.exception("Exception occurred while trying to get repo refs")
        raise JSONRPCError(
            'failed to get repo: `%s` references' % repo.repo_name
        )
709 709
710 710
@jsonrpc_method()
def create_repo(
        request, apiuser, repo_name, repo_type,
        owner=Optional(OAttr('apiuser')),
        description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None),
        push_uri=Optional(None),
        landing_rev=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False),
        copy_permissions=Optional(False)):
    """
    Creates a repository.

    * If the repository name contains "/", repository will be created inside
      a repository group or nested repository groups

      For example "foo/bar/repo1" will create |repo| called "repo1" inside
      group "foo/bar". You have to have permissions to access and write to
      the last repository group ("bar" in this example)

    This command can only be run using an |authtoken| with at least
    permissions to create repositories, or write permissions to
    parent repository groups.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repo_name: Set the repository name.
    :type repo_name: str
    :param repo_type: Set the repository type; 'hg','git', or 'svn'.
    :type repo_type: str
    :param owner: user_id or username
    :type owner: Optional(str)
    :param description: Set the repository description.
    :type description: Optional(str)
    :param private: set repository as private
    :type private: bool
    :param clone_uri: set clone_uri
    :type clone_uri: str
    :param push_uri: set push_uri
    :type push_uri: str
    :param landing_rev: <rev_type>:<rev>, e.g branch:default, book:dev, rev:abcd
    :type landing_rev: str
    :param enable_locking:
    :type enable_locking: bool
    :param enable_downloads:
    :type enable_downloads: bool
    :param enable_statistics:
    :type enable_statistics: bool
    :param copy_permissions: Copy permission from group in which the
        repository is being created.
    :type copy_permissions: bool


    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
        "msg": "Created new repository `<reponame>`",
        "success": true,
        "task": "<celery task id or None if done sync>"
      }
      error: null


    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error : {
         'failed to create repository `<repo_name>`'
      }

    """

    # check if user can set owner parameter
    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)
    push_uri = Optional.extract(push_uri)

    # fill any settings the caller left unset from the instance-wide
    # default repo settings
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    if isinstance(private, Optional):
        # NOTE: a falsy default (repo_private=False) falls through to the
        # extracted argument default via the `or`
        private = defs.get('repo_private') or Optional.extract(private)
    if isinstance(repo_type, Optional):
        # NOTE(review): repo_type is a required positional argument, so this
        # branch can only trigger if a caller explicitly passes an Optional
        repo_type = defs.get('repo_type')
    if isinstance(enable_statistics, Optional):
        enable_statistics = defs.get('repo_enable_statistics')
    if isinstance(enable_locking, Optional):
        enable_locking = defs.get('repo_enable_locking')
    if isinstance(enable_downloads, Optional):
        enable_downloads = defs.get('repo_enable_downloads')

    # make sure the backend's default landing ref is always a valid choice
    landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
    ref_choices = list(set(ref_choices + [landing_ref]))

    landing_commit_ref = Optional.extract(landing_rev) or landing_ref

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo_type,
        # user caller
        user=apiuser)

    # validate all inputs via the colander repo schema; this also splits
    # `repo_name` into its repo-group part and the plain name
    try:
        schema_data = schema.deserialize(dict(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_push_uri=push_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions,
            repo_enable_statistics=enable_statistics,
            repo_enable_downloads=enable_downloads,
            repo_enable_locking=enable_locking))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        data = {
            'owner': owner,
            'repo_name': schema_data['repo_group']['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': schema_data['repo_group']['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'repo_description': schema_data['repo_description'],
            'repo_private': schema_data['repo_private'],
            'clone_uri': schema_data['repo_clone_uri'],
            'push_uri': schema_data['repo_push_uri'],
            'repo_landing_rev': schema_data['repo_landing_commit_ref'],
            'enable_statistics': schema_data['repo_enable_statistics'],
            'enable_locking': schema_data['repo_enable_locking'],
            'enable_downloads': schema_data['repo_enable_downloads'],
            'repo_copy_permissions': schema_data['repo_copy_permissions'],
        }

        task = RepoModel().create(form_data=data, cur_user=owner.user_id)
        task_id = get_task_id(task)
        # no commit, it's done in RepoModel, or async via celery
        return {
            'msg': "Created new repository `%s`" % (schema_data['repo_name'],),
            'success': True,  # cannot return the repo data here since fork
                              # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            u"Exception while trying to create the repository %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to create repository `%s`' % (schema_data['repo_name'],))
874 874
875 875
@jsonrpc_method()
def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
                      description=Optional('')):
    """
    Adds an extra field to a repository.

    This command can only be run using an |authtoken| with at least
    write permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository id.
    :type repoid: str or int
    :param key: Create a unique field key for this repository.
    :type key: str
    :param label: Human readable label for the field; defaults to the key.
    :type label: Optional(str)
    :param description: Free-form description of the field.
    :type description: Optional(str)
    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(
            apiuser, repoid, repo, ('repository.admin',))

    field_label = Optional.extract(label) or key
    field_description = Optional.extract(description)

    # field keys must be unique per repository; a duplicate is a hard error
    if RepositoryField.get_by_key_name(key, repo):
        raise JSONRPCError(
            'Field with key `%s` exists for repo `%s`' % (key, repoid))

    try:
        RepoModel().add_repo_field(
            repo, key, field_label=field_label, field_desc=field_description)
        Session().commit()
        return {
            'msg': "Added new repository field `%s`" % (key,),
            'success': True,
        }
    except Exception:
        log.exception("Exception occurred while trying to add field to repo")
        raise JSONRPCError(
            'failed to create new field for repository `%s`' % (repoid,))
921 921
922 922
@jsonrpc_method()
def remove_field_from_repo(request, apiuser, repoid, key):
    """
    Removes an extra field from a repository.

    This command can only be run using an |authtoken| with at least
    write permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param key: Set the unique field key for this repository.
    :type key: str
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(
            apiuser, repoid, repo, ('repository.admin',))

    # removing a key that was never added is an error, not a silent no-op
    if not RepositoryField.get_by_key_name(key, repo):
        raise JSONRPCError(
            'Field with key `%s` does not exists for repo `%s`' % (key, repoid))

    try:
        RepoModel().delete_repo_field(repo, field_key=key)
        Session().commit()
        return {
            'msg': "Deleted repository field `%s`" % (key,),
            'success': True,
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to delete field from repo")
        raise JSONRPCError(
            'failed to delete field for repository `%s`' % (repoid,))
961 961
962 962
@jsonrpc_method()
def update_repo(
        request, apiuser, repoid, repo_name=Optional(None),
        owner=Optional(OAttr('apiuser')), description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None), push_uri=Optional(None),
        landing_rev=Optional(None), fork_of=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False), fields=Optional('')):
    """
    Updates a repository with the given information.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    * If the repository name contains "/", repository will be updated
      accordingly with a repository group or nested repository groups

      For example repoid=repo-test name="foo/bar/repo-test" will update |repo|
      called "repo-test" and place it inside group "foo/bar".
      You have to have permissions to access and write to the last repository
      group ("bar" in this example)

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: repository name or repository ID.
    :type repoid: str or int
    :param repo_name: Update the |repo| name, including the
        repository group it's in.
    :type repo_name: str
    :param owner: Set the |repo| owner.
    :type owner: str
    :param fork_of: Set the |repo| as fork of another |repo|.
    :type fork_of: str
    :param description: Update the |repo| description.
    :type description: str
    :param private: Set the |repo| as private. (True | False)
    :type private: bool
    :param clone_uri: Update the |repo| clone URI.
    :type clone_uri: str
    :param landing_rev: Set the |repo| landing revision. e.g branch:default, book:dev, rev:abcd
    :type landing_rev: str
    :param enable_statistics: Enable statistics on the |repo|, (True | False).
    :type enable_statistics: bool
    :param enable_locking: Enable |repo| locking.
    :type enable_locking: bool
    :param enable_downloads: Enable downloads from the |repo|, (True | False).
    :type enable_downloads: bool
    :param fields: Add extra fields to the |repo|. Use the following
        example format: ``field_key=field_val,field_key2=fieldval2``.
        Escape ', ' with \,
    :type fields: str
    """

    repo = get_repo_or_error(repoid)

    # only superadmins get secret values (e.g. credentials inside URIs)
    # back in the returned repository data
    include_secrets = False
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)
    else:
        include_secrets = True

    # for every field: take the caller-supplied value, or fall back to the
    # repo's current value when the argument was left as an Optional marker
    updates = dict(
        repo_name=repo_name
        if not isinstance(repo_name, Optional) else repo.repo_name,

        fork_id=fork_of
        if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None,

        user=owner
        if not isinstance(owner, Optional) else repo.user.username,

        repo_description=description
        if not isinstance(description, Optional) else repo.description,

        repo_private=private
        if not isinstance(private, Optional) else repo.private,

        clone_uri=clone_uri
        if not isinstance(clone_uri, Optional) else repo.clone_uri,

        push_uri=push_uri
        if not isinstance(push_uri, Optional) else repo.push_uri,

        repo_landing_rev=landing_rev
        if not isinstance(landing_rev, Optional) else repo._landing_revision,

        repo_enable_statistics=enable_statistics
        if not isinstance(enable_statistics, Optional) else repo.enable_statistics,

        repo_enable_locking=enable_locking
        if not isinstance(enable_locking, Optional) else repo.enable_locking,

        repo_enable_downloads=enable_downloads
        if not isinstance(enable_downloads, Optional) else repo.enable_downloads)

    # make sure the backend's default landing ref is always a valid choice
    landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(
        request.translate, repo=repo)
    ref_choices = list(set(ref_choices + [landing_ref]))

    old_values = repo.get_api_data()
    repo_type = repo.repo_type
    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo_type,
        # user caller
        user=apiuser,
        old_values=old_values)
    try:
        schema_data = schema.deserialize(dict(
            # we save old value, users cannot change type
            repo_type=repo_type,

            repo_name=updates['repo_name'],
            repo_owner=updates['user'],
            repo_description=updates['repo_description'],
            repo_clone_uri=updates['clone_uri'],
            repo_push_uri=updates['push_uri'],
            repo_fork_of=updates['fork_id'],
            repo_private=updates['repo_private'],
            repo_landing_commit_ref=updates['repo_landing_rev'],
            repo_enable_statistics=updates['repo_enable_statistics'],
            repo_enable_downloads=updates['repo_enable_downloads'],
            repo_enable_locking=updates['repo_enable_locking']))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    # save validated data back into the updates dict
    validated_updates = dict(
        repo_name=schema_data['repo_group']['repo_name_without_group'],
        repo_group=schema_data['repo_group']['repo_group_id'],

        user=schema_data['repo_owner'],
        repo_description=schema_data['repo_description'],
        repo_private=schema_data['repo_private'],
        clone_uri=schema_data['repo_clone_uri'],
        push_uri=schema_data['repo_push_uri'],
        repo_landing_rev=schema_data['repo_landing_commit_ref'],
        repo_enable_statistics=schema_data['repo_enable_statistics'],
        repo_enable_locking=schema_data['repo_enable_locking'],
        repo_enable_downloads=schema_data['repo_enable_downloads'],
    )

    # resolve the fork target name/id into a concrete repo_id for the model
    if schema_data['repo_fork_of']:
        fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
        validated_updates['fork_id'] = fork_repo.repo_id

    # extra fields
    fields = parse_args(Optional.extract(fields), key_prefix='ex_')
    if fields:
        validated_updates.update(fields)

    try:
        RepoModel().update(repo, **validated_updates)
        audit_logger.store_api(
            'repo.edit', action_data={'old_data': old_values},
            user=apiuser, repo=repo)
        Session().commit()
        return {
            'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
            'repository': repo.get_api_data(include_secrets=include_secrets)
        }
    except Exception:
        log.exception(
            u"Exception while trying to update the repository %s",
            repoid)
        raise JSONRPCError('failed to update repo `%s`' % repoid)
1134 1134
1135 1135
@jsonrpc_method()
def fork_repo(request, apiuser, repoid, fork_name,
              owner=Optional(OAttr('apiuser')),
              description=Optional(''),
              private=Optional(False),
              clone_uri=Optional(None),
              landing_rev=Optional(None),
              copy_permissions=Optional(False)):
    """
    Creates a fork of the specified |repo|.

    * If the fork_name contains "/", fork will be created inside
      a repository group or nested repository groups

      For example "foo/bar/fork-repo" will create fork called "fork-repo"
      inside group "foo/bar". You have to have permissions to access and
      write to the last repository group ("bar" in this example)

    This command can only be run using an |authtoken| with minimum
    read permissions of the forked repo, create fork permissions for an user.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set repository name or repository ID.
    :type repoid: str or int
    :param fork_name: Set the fork name, including it's repository group membership.
    :type fork_name: str
    :param owner: Set the fork owner.
    :type owner: str
    :param description: Set the fork description.
    :type description: str
    :param copy_permissions: Copy permissions from parent |repo|. The
        default is False.
    :type copy_permissions: bool
    :param private: Make the fork private. The default is False.
    :type private: bool
    :param landing_rev: Set the landing revision. E.g branch:default, book:dev, rev:abcd
    :type landing_rev: str

    Example output:

    .. code-block:: bash

      id : <id_for_response>
      api_key : "<api_key>"
      args: {
        "repoid" : "<reponame or repo_id>",
        "fork_name": "<forkname>",
        "owner": "<username or user_id = Optional(=apiuser)>",
        "description": "<description>",
        "copy_permissions": "<bool>",
        "private": "<bool>",
        "landing_rev": "<landing_rev>"
      }

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
        "msg": "Created fork of `<reponame>` as `<forkname>`",
        "success": true,
        "task": "<celery task id or None if done sync>"
      }
      error: null

    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name

    if not has_superadmin_permission(apiuser):
        # check if we have at least read permission for
        # this repo that we fork !
        _perms = ('repository.admin', 'repository.write', 'repository.read')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

        # check if the regular user has at least fork permissions as well
        if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser):
            raise JSONRPCForbidden()

    # check if user can set owner parameter
    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)

    # make sure the backend's default landing ref is always a valid choice
    landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
    ref_choices = list(set(ref_choices + [landing_ref]))
    landing_commit_ref = Optional.extract(landing_rev) or landing_ref

    private = Optional.extract(private)

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo.repo_type,
        # user caller
        user=apiuser)

    # validate inputs; also splits `fork_name` into group + plain name
    try:
        schema_data = schema.deserialize(dict(
            repo_name=fork_name,
            repo_type=repo.repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        data = {
            'fork_parent_id': repo.repo_id,

            'repo_name': schema_data['repo_group']['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': schema_data['repo_group']['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'description': schema_data['repo_description'],
            'private': schema_data['repo_private'],
            'copy_permissions': schema_data['repo_copy_permissions'],
            'landing_rev': schema_data['repo_landing_commit_ref'],
        }

        task = RepoModel().create_fork(data, cur_user=owner.user_id)
        # no commit, it's done in RepoModel, or async via celery
        task_id = get_task_id(task)

        return {
            'msg': 'Created fork of `%s` as `%s`' % (
                repo.repo_name, schema_data['repo_name']),
            'success': True,  # cannot return the repo data here since fork
                              # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            u"Exception while trying to create fork %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to fork repository `%s` as `%s`' % (
                repo_name, schema_data['repo_name']))
1283 1283
1284 1284
@jsonrpc_method()
def delete_repo(request, apiuser, repoid, forks=Optional('')):
    """
    Deletes a repository.

    * When the `forks` parameter is set it's possible to detach or delete
      forks of deleted repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param forks: Set to `detach` or `delete` forks from the |repo|.
    :type forks: Optional(str)

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
        "msg": "Deleted repository `<reponame>`",
        "success": true
      }
      error: null
    """

    repo = get_repo_or_error(repoid)
    # keep the name around: `repo` is rebound to an audit wrapper below
    repo_name = repo.repo_name
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        handle_forks = Optional.extract(forks)
        _forks_msg = ''
        _forks = list(repo.forks)
        if handle_forks == 'detach':
            _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
        elif handle_forks == 'delete':
            _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
        elif _forks:
            # refuse to delete a repo that still has attached forks unless
            # the caller said what to do with them
            raise JSONRPCError(
                'Cannot delete `%s` it still contains attached forks' %
                (repo.repo_name,)
            )
        old_data = repo.get_api_data()
        # FIX: pass the extracted value; the raw `forks` argument is still an
        # Optional('') wrapper when the caller did not supply it
        RepoModel().delete(repo, forks=handle_forks)

        repo = audit_logger.RepoWrap(repo_id=None,
                                     repo_name=repo.repo_name)

        audit_logger.store_api(
            'repo.delete', action_data={'old_data': old_data},
            user=apiuser, repo=repo)

        # drop caches belonging to the now-removed repository
        ScmModel().mark_for_invalidation(repo_name, delete=True)
        Session().commit()
        return {
            'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg),
            'success': True
        }
    except JSONRPCError:
        # FIX: don't let the generic handler below swallow the deliberate
        # 'Cannot delete ... attached forks' error raised above
        raise
    except Exception:
        log.exception("Exception occurred while trying to delete repo")
        raise JSONRPCError(
            'failed to delete repository `%s`' % (repo_name,)
        )
1355 1355
1356 1356
1357 1357 #TODO: marcink, change name ?
@jsonrpc_method()
def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
    """
    Invalidates the cache for the specified repository.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    :param apiuser: This is filled automatically from |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param delete_keys: This deletes the invalidated keys instead of
        just flagging them.
    :type delete_keys: Optional(``True`` | ``False``)

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        'msg': Cache for repository `<repository name>` was invalidated,
        'repository': <repository name>
      }
      error : null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error : {
         'Error occurred during cache invalidation action'
      }

    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # write access is enough to flush caches
        write_or_admin = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, write_or_admin)

    drop_keys = Optional.extract(delete_keys)
    try:
        ScmModel().mark_for_invalidation(repo.repo_name, delete=drop_keys)
        return {
            'msg': 'Cache for repository `%s` was invalidated' % (repoid,),
            'repository': repo.repo_name
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to invalidate repo cache")
        raise JSONRPCError(
            'Error occurred during cache invalidation action'
        )
1417 1417
1418 1418
# TODO: marcink, change name ?
@jsonrpc_method()
def lock(request, apiuser, repoid, locked=Optional(None),
         userid=Optional(OAttr('apiuser'))):
    """
    Sets the lock state of the specified |repo| by the given user.
    For more information, see :ref:`repo-locking`.

    * If the ``userid`` option is not set, the repository is locked to the
      user who called the method.
    * If the ``locked`` parameter is not set, the current lock state of the
      repository is displayed.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param locked: Sets the lock state.
    :type locked: Optional(``True`` | ``False``)
    :param userid: Set the repository lock to this user.
    :type userid: Optional(str or int)

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
          'repo': '<reponame>',
          'locked': <bool: lock state>,
          'locked_since': <int: lock timestamp>,
          'locked_by': <username of person who made the lock>,
          'lock_reason': <str: reason for locking>,
          'lock_state_changed': <bool: True if lock state has been changed in this request>,
          'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
          or
          'msg': 'Repo `<repository name>` not locked.'
          or
          'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
            'Error occurred locking repository `<reponame>`'
        }
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # check if we have at least write permission for this repo !
        _perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # make sure normal user does not pass someone else userid,
    # he is not allowed to do that
    if not isinstance(userid, Optional) and userid != apiuser.user_id:
        raise JSONRPCError('userid is not the same as your user')

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)

    if isinstance(locked, Optional):
        # no explicit lock state given: just report the current state
        lockobj = repo.locked

        if lockobj[0] is None:
            _d = {
                'repo': repo.repo_name,
                'locked': False,
                'locked_since': None,
                'locked_by': None,
                'lock_reason': None,
                'lock_state_changed': False,
                'msg': 'Repo `%s` not locked.' % repo.repo_name
            }
            return _d
        else:
            _user_id, _time, _reason = lockobj
            # bugfix: resolve the user who actually holds the lock
            # (previously looked up `userid`, i.e. the requesting user,
            # so `locked_by` misreported the lock owner)
            lock_user = get_user_or_error(_user_id)
            _d = {
                'repo': repo.repo_name,
                'locked': True,
                'locked_since': _time,
                'locked_by': lock_user.username,
                'lock_reason': _reason,
                'lock_state_changed': False,
                'msg': ('Repo `%s` locked by `%s` on `%s`.'
                        % (repo.repo_name, lock_user.username,
                           json.dumps(time_to_datetime(_time))))
            }
            return _d

    # force locked state through a flag
    else:
        locked = str2bool(locked)
        lock_reason = Repository.LOCK_API
        try:
            if locked:
                lock_time = time.time()
                Repository.lock(repo, user.user_id, lock_time, lock_reason)
            else:
                lock_time = None
                Repository.unlock(repo)
            _d = {
                'repo': repo.repo_name,
                'locked': locked,
                'locked_since': lock_time,
                'locked_by': user.username,
                'lock_reason': lock_reason,
                'lock_state_changed': True,
                'msg': ('User `%s` set lock state for repo `%s` to `%s`'
                        % (user.username, repo.repo_name, locked))
            }
            return _d
        except Exception:
            log.exception(
                "Exception occurred while trying to lock repository")
            raise JSONRPCError(
                'Error occurred locking repository `%s`' % repo.repo_name
            )
1551 1551
1552 1552
@jsonrpc_method()
def comment_commit(
        request, apiuser, repoid, commit_id, message, status=Optional(None),
        comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
        resolves_comment_id=Optional(None), extra_recipients=Optional([]),
        userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
    """
    Set a commit comment, and optionally change the status of the commit.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param commit_id: Specify the commit_id for which to set a comment.
    :type commit_id: str
    :param message: The comment text.
    :type message: str
    :param status: (**Optional**) status of commit, one of: 'not_reviewed',
        'approved', 'rejected', 'under_review'
    :type status: str
    :param comment_type: Comment type, one of: 'note', 'todo'
    :type comment_type: Optional(str), default: 'note'
    :param resolves_comment_id: id of comment which this one will resolve
    :type resolves_comment_id: Optional(int)
    :param extra_recipients: list of user ids or usernames to add
        notifications for this comment. Acts like a CC for notification
    :type extra_recipients: Optional(list)
    :param userid: Set the user name of the comment creator.
    :type userid: Optional(str or int)
    :param send_email: Define if this comment should also send email notification
    :type send_email: Optional(bool)

    Example error output:

    .. code-block:: bash

        {
            "id" : <id_given_in_input>,
            "result" : {
                "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
                "status_change": null or <status>,
                "success": true
            },
            "error" :  null
        }

    """
    _ = request.translate

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.read', 'repository.write', 'repository.admin')
        validate_repo_permissions(apiuser, repoid, repo, _perms)
    db_repo_name = repo.repo_name

    try:
        commit = repo.scm_instance().get_commit(commit_id=commit_id)
        commit_id = commit.raw_id
    except Exception as e:
        log.exception('Failed to fetch commit')
        raise JSONRPCError(safe_str(e))

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)
    status = Optional.extract(status)
    comment_type = Optional.extract(comment_type)
    resolves_comment_id = Optional.extract(resolves_comment_id)
    extra_recipients = Optional.extract(extra_recipients)
    send_email = Optional.extract(send_email, binary=True)

    allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
    if status and status not in allowed_statuses:
        # bugfix: message previously read "must be on of" (typo)
        raise JSONRPCError('Bad status, must be one '
                           'of %s got %s' % (allowed_statuses, status,))

    if resolves_comment_id:
        comment = ChangesetComment.get(resolves_comment_id)
        if not comment:
            raise JSONRPCError(
                'Invalid resolves_comment_id `%s` for this commit.'
                % resolves_comment_id)
        # only TODO comments can be resolved
        if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
            raise JSONRPCError(
                'Comment `%s` is wrong type for setting status to resolved.'
                % resolves_comment_id)

    try:
        rc_config = SettingsModel().get_all_settings()
        renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
        status_change_label = ChangesetStatus.get_status_lbl(status)
        comment = CommentsModel().create(
            message, repo, user, commit_id=commit_id,
            status_change=status_change_label,
            status_change_type=status,
            renderer=renderer,
            comment_type=comment_type,
            resolves_comment_id=resolves_comment_id,
            auth_user=apiuser,
            extra_recipients=extra_recipients,
            send_email=send_email
        )
        is_inline = comment.is_inline

        if status:
            # also do a status change
            try:
                ChangesetStatusModel().set_status(
                    repo, status, user, comment, revision=commit_id,
                    dont_allow_on_closed_pull_request=True
                )
            except StatusChangeOnClosedPullRequestError:
                log.exception(
                    "Exception occurred while trying to change repo commit status")
                msg = ('Changing status on a commit associated with '
                       'a closed pull request is not allowed')
                raise JSONRPCError(msg)

        CommentsModel().trigger_commit_comment_hook(
            repo, apiuser, 'create',
            data={'comment': comment, 'commit': commit})

        Session().commit()

        comment_broadcast_channel = channelstream.comment_channel(
            db_repo_name, commit_obj=commit)

        comment_data = {'comment': comment, 'comment_id': comment.comment_id}
        comment_type = 'inline' if is_inline else 'general'
        channelstream.comment_channelstream_push(
            request, comment_broadcast_channel, apiuser,
            _('posted a new {} comment').format(comment_type),
            comment_data=comment_data)

        return {
            'msg': (
                'Commented on commit `%s` for repository `%s`' % (
                    comment.revision, repo.repo_name)),
            'status_change': status,
            'success': True,
        }
    except JSONRPCError:
        # catch any inside errors, and re-raise them to prevent from
        # below global catch to silence them
        raise
    except Exception:
        log.exception("Exception occurred while trying to comment on commit")
        raise JSONRPCError(
            'failed to set comment on repository `%s`' % (repo.repo_name,)
        )
1704 1704
1705 1705
@jsonrpc_method()
def get_repo_comments(request, apiuser, repoid,
                      commit_id=Optional(None), comment_type=Optional(None),
                      userid=Optional(None)):
    """
    Return every comment attached to a repository, with optional filters.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param commit_id: Optionally filter the comments by the commit_id
    :type commit_id: Optional(str), default: None
    :param comment_type: Optionally filter the comments by the comment_type
        one of: 'note', 'todo'
    :type comment_type: Optional(str), default: None
    :param userid: Optionally filter the comments by the author of comment
    :type userid: Optional(str or int), Default: None

    Example error output:

    .. code-block:: bash

        {
            "id" : <id_given_in_input>,
            "result" : [
                {
                  "comment_author": <USER_DETAILS>,
                  "comment_created_on": "2017-02-01T14:38:16.309",
                  "comment_f_path": "file.txt",
                  "comment_id": 282,
                  "comment_lineno": "n1",
                  "comment_resolved_by": null,
                  "comment_status": [],
                  "comment_text": "This file needs a header",
                  "comment_type": "todo",
                  "comment_last_version: 0
                }
            ],
            "error" :  null
        }

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # at least read access is required to list comments
        _perms = ('repository.read', 'repository.write', 'repository.admin')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    commit_id = Optional.extract(commit_id)

    userid = Optional.extract(userid)
    # resolve the author filter only when one was actually given
    user = get_user_or_error(userid) if userid else None

    comment_type = Optional.extract(comment_type)
    if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES:
        raise JSONRPCError(
            'comment_type must be one of `{}` got {}'.format(
                ChangesetComment.COMMENT_TYPES, comment_type)
        )

    return CommentsModel().get_repository_comments(
        repo=repo, comment_type=comment_type, user=user, commit_id=commit_id)
1772 1772
1773 1773
@jsonrpc_method()
def get_comment(request, apiuser, comment_id):
    """
    Fetch a single comment from a repository or pull request.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param comment_id: comment id found in the URL of comment
    :type comment_id: str or int

    Example error output:

    .. code-block:: bash

        {
            "id" : <id_given_in_input>,
            "result" : {
              "comment_author": <USER_DETAILS>,
              "comment_created_on": "2017-02-01T14:38:16.309",
              "comment_f_path": "file.txt",
              "comment_id": 282,
              "comment_lineno": "n1",
              "comment_resolved_by": null,
              "comment_status": [],
              "comment_text": "This file needs a header",
              "comment_type": "todo",
              "comment_last_version: 0
            },
            "error" :  null
        }

    """

    comment = ChangesetComment.get(comment_id)
    if comment is None:
        raise JSONRPCError('comment `%s` does not exist' % (comment_id,))

    # a caller without read access gets the same "does not exist" error,
    # so the existence of the comment is not leaked
    required_perms = ('repository.read', 'repository.write', 'repository.admin')
    can_read = HasRepoPermissionAnyApi(*required_perms)(
        user=apiuser, repo_name=comment.repo.repo_name)
    if not can_read:
        raise JSONRPCError('comment `%s` does not exist' % (comment_id,))

    return comment
1819 1819
1820 1820
@jsonrpc_method()
def edit_comment(request, apiuser, message, comment_id, version,
                 userid=Optional(OAttr('apiuser'))):
    """
    Edit a comment on a pull request or commit, identified by `comment_id`
    and `version`. Initially version should be 0.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param comment_id: Specify the comment_id for editing
    :type comment_id: int
    :param version: version of the comment that will be created, starts from 0
    :type version: int
    :param message: The text content of the comment.
    :type message: str
    :param userid: Comment on the pull request as this user
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            "comment": "<comment data>",
            "version": "<Integer>",
        },
        error :  null
    """

    auth_user = apiuser
    comment = ChangesetComment.get(comment_id)
    if not comment:
        raise JSONRPCError('comment `%s` does not exist' % (comment_id,))

    is_super_admin = has_superadmin_permission(apiuser)
    is_repo_admin = HasRepoPermissionAnyApi('repository.admin')(
        user=apiuser, repo_name=comment.repo.repo_name)

    if not isinstance(userid, Optional):
        # editing on behalf of another user requires admin rights
        if not (is_super_admin or is_repo_admin):
            raise JSONRPCError('userid is not the same as your user')
        apiuser = get_user_or_error(userid)
        auth_user = apiuser.AuthUser()

    # allowed: the comment author, or (for mutable comments) an admin
    is_author = comment.author.user_id == auth_user.user_id
    if not (comment.immutable is False and (is_super_admin or is_repo_admin) or is_author):
        raise JSONRPCError("you don't have access to edit this comment")

    try:
        comment_history = CommentsModel().edit(
            comment_id=comment_id,
            text=message,
            auth_user=auth_user,
            version=version,
        )
        Session().commit()
    except CommentVersionMismatch:
        raise JSONRPCError(
            'comment ({}) version ({}) mismatch'.format(comment_id, version)
        )
    if not comment_history and not message:
        raise JSONRPCError(
            "comment ({}) can't be changed with empty string".format(comment_id)
        )

    # fire the matching hook depending on where the comment lives
    if comment.pull_request:
        PullRequestModel().trigger_pull_request_hook(
            comment.pull_request, apiuser, 'comment_edit',
            data={'comment': comment})
    else:
        db_repo = comment.repo
        commit = db_repo.get_commit(comment.revision)
        CommentsModel().trigger_commit_comment_hook(
            db_repo, apiuser, 'edit',
            data={'comment': comment, 'commit': commit})

    return {
        'comment': comment,
        'version': comment_history.version if comment_history else None,
    }
1906 1906
1907 1907
1908 1908 # TODO(marcink): write this with all required logic for deleting a comments in PR or commits
1909 1909 # @jsonrpc_method()
1910 1910 # def delete_comment(request, apiuser, comment_id):
1911 1911 # auth_user = apiuser
1912 1912 #
1913 1913 # comment = ChangesetComment.get(comment_id)
1914 1914 # if not comment:
1915 1915 # raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1916 1916 #
1917 1917 # is_super_admin = has_superadmin_permission(apiuser)
1918 1918 # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1919 1919 # (user=apiuser, repo_name=comment.repo.repo_name)
1920 1920 #
1921 1921 # comment_author = comment.author.user_id == auth_user.user_id
1922 1922 # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
1923 1923 # raise JSONRPCError("you don't have access to edit this comment")
1924 1924
@jsonrpc_method()
def grant_user_permission(request, apiuser, repoid, userid, perm):
    """
    Grant permissions for the specified user on the given repository,
    or update existing permissions if found.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param userid: Set the user name.
    :type userid: str
    :param perm: Set the user permissions, using the following format
        ``(repository.(none|read|write|admin))``
    :type perm: str

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
                  "success": true
                }
        error:  null
    """

    repo = get_repo_or_error(repoid)
    user = get_user_or_error(userid)
    perm = get_perm_or_error(perm)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    perm_additions = [[user.user_id, perm.permission_name, "user"]]
    try:
        changes = RepoModel().update_permissions(
            repo=repo, perm_additions=perm_additions, cur_user=apiuser)

        action_data = {key: changes[key]
                       for key in ('added', 'updated', 'deleted')}
        audit_logger.store_api(
            'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
        Session().commit()
        PermissionModel().flush_user_permission_caches(changes)

        msg = 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
            perm.permission_name, user.username, repo.repo_name
        )
        return {'msg': msg, 'success': True}
    except Exception:
        log.exception("Exception occurred while trying edit permissions for repo")
        raise JSONRPCError(
            'failed to edit permission for user: `%s` in repo: `%s`' % (
                userid, repoid
            )
        )
1991 1991
1992 1992
@jsonrpc_method()
def revoke_user_permission(request, apiuser, repoid, userid):
    """
    Revoke permission for a user on the specified repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param userid: Set the user name of revoked user.
    :type userid: str or int

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
                  "success": true
                }
        error:  null
    """

    repo = get_repo_or_error(repoid)
    user = get_user_or_error(userid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    perm_deletions = [[user.user_id, None, "user"]]
    try:
        # bugfix: attribute the change to the calling user (`apiuser`),
        # not the user being revoked — consistent with
        # grant_user_permission and with the audit log entry below
        changes = RepoModel().update_permissions(
            repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)

        action_data = {
            'added': changes['added'],
            'updated': changes['updated'],
            'deleted': changes['deleted'],
        }
        audit_logger.store_api(
            'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
        Session().commit()
        PermissionModel().flush_user_permission_caches(changes)

        return {
            'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
                user.username, repo.repo_name
            ),
            'success': True
        }
    except Exception:
        log.exception("Exception occurred while trying revoke permissions to repo")
        raise JSONRPCError(
            'failed to edit permission for user: `%s` in repo: `%s`' % (
                userid, repoid
            )
        )
2054 2054
2055 2055
@jsonrpc_method()
def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
    """
    Grant permission for a user group on the specified repository,
    or update existing permissions.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param usergroupid: Specify the ID of the user group.
    :type usergroupid: str or int
    :param perm: Set the user group permissions using the following
        format: (repository.(none|read|write|admin))
    :type perm: str

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
          "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
          "success": true

        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
          "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
        }

    """

    repo = get_repo_or_error(repoid)
    perm = get_perm_or_error(perm)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    user_group = get_user_group_or_error(usergroupid)
    if not has_superadmin_permission(apiuser):
        # require at least read permission on the user group itself;
        # otherwise report it as missing so its existence is not leaked
        group_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
        has_group_access = HasUserGroupPermissionAnyApi(*group_perms)(
            user=apiuser, user_group_name=user_group.users_group_name)
        if not has_group_access:
            raise JSONRPCError(
                'user group `%s` does not exist' % (usergroupid,))

    perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]]
    try:
        changes = RepoModel().update_permissions(
            repo=repo, perm_additions=perm_additions, cur_user=apiuser)
        action_data = {key: changes[key]
                       for key in ('added', 'updated', 'deleted')}
        audit_logger.store_api(
            'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
        Session().commit()
        PermissionModel().flush_user_permission_caches(changes)

        msg = ('Granted perm: `%s` for user group: `%s` in '
               'repo: `%s`' % (
                   perm.permission_name, user_group.users_group_name,
                   repo.repo_name
               ))
        return {'msg': msg, 'success': True}
    except Exception:
        log.exception(
            "Exception occurred while trying change permission on repo")
        raise JSONRPCError(
            'failed to edit permission for user group: `%s` in '
            'repo: `%s`' % (
                usergroupid, repo.repo_name
            )
        )
2145 2145
2146 2146
@jsonrpc_method()
def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
    """
    Revoke the permissions of a user group on a given repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param usergroupid: Specify the user group ID.
    :type usergroupid: str or int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
                  "success": true
                }
        error:  null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    user_group = get_user_group_or_error(usergroupid)
    if not has_superadmin_permission(apiuser):
        # require at least read permission on the user group itself;
        # otherwise report it as missing so its existence is not leaked
        group_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
        has_group_access = HasUserGroupPermissionAnyApi(*group_perms)(
            user=apiuser, user_group_name=user_group.users_group_name)
        if not has_group_access:
            raise JSONRPCError(
                'user group `%s` does not exist' % (usergroupid,))

    perm_deletions = [[user_group.users_group_id, None, "user_group"]]
    try:
        changes = RepoModel().update_permissions(
            repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
        action_data = {key: changes[key]
                       for key in ('added', 'updated', 'deleted')}
        audit_logger.store_api(
            'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
        Session().commit()
        PermissionModel().flush_user_permission_caches(changes)

        msg = 'Revoked perm for user group: `%s` in repo: `%s`' % (
            user_group.users_group_name, repo.repo_name
        )
        return {'msg': msg, 'success': True}
    except Exception:
        log.exception("Exception occurred while trying revoke "
                      "user group permission on repo")
        raise JSONRPCError(
            'failed to edit permission for user group: `%s` in '
            'repo: `%s`' % (
                user_group.users_group_name, repo.repo_name
            )
        )
2217 2217
2218 2218
2219 2219 @jsonrpc_method()
2220 2220 def pull(request, apiuser, repoid, remote_uri=Optional(None)):
2221 2221 """
2222 2222 Triggers a pull on the given repository from a remote location. You
2223 2223 can use this to keep remote repositories up-to-date.
2224 2224
2225 2225 This command can only be run using an |authtoken| with admin
2226 2226 rights to the specified repository. For more information,
2227 2227 see :ref:`config-token-ref`.
2228 2228
2229 2229 This command takes the following options:
2230 2230
2231 2231 :param apiuser: This is filled automatically from the |authtoken|.
2232 2232 :type apiuser: AuthUser
2233 2233 :param repoid: The repository name or repository ID.
2234 2234 :type repoid: str or int
2235 2235 :param remote_uri: Optional remote URI to pass in for pull
2236 2236 :type remote_uri: str
2237 2237
2238 2238 Example output:
2239 2239
2240 2240 .. code-block:: bash
2241 2241
2242 2242 id : <id_given_in_input>
2243 2243 result : {
2244 2244 "msg": "Pulled from url `<remote_url>` on repo `<repository name>`"
2245 2245 "repository": "<repository name>"
2246 2246 }
2247 2247 error : null
2248 2248
2249 2249 Example error output:
2250 2250
2251 2251 .. code-block:: bash
2252 2252
2253 2253 id : <id_given_in_input>
2254 2254 result : null
2255 2255 error : {
2256 2256 "Unable to push changes from `<remote_url>`"
2257 2257 }
2258 2258
2259 2259 """
2260 2260
2261 2261 repo = get_repo_or_error(repoid)
2262 2262 remote_uri = Optional.extract(remote_uri)
2263 2263 remote_uri_display = remote_uri or repo.clone_uri_hidden
2264 2264 if not has_superadmin_permission(apiuser):
2265 2265 _perms = ('repository.admin',)
2266 2266 validate_repo_permissions(apiuser, repoid, repo, _perms)
2267 2267
2268 2268 try:
2269 2269 ScmModel().pull_changes(
2270 2270 repo.repo_name, apiuser.username, remote_uri=remote_uri)
2271 2271 return {
2272 2272 'msg': 'Pulled from url `%s` on repo `%s`' % (
2273 2273 remote_uri_display, repo.repo_name),
2274 2274 'repository': repo.repo_name
2275 2275 }
2276 2276 except Exception:
2277 2277 log.exception("Exception occurred while trying to "
2278 2278 "pull changes from remote location")
2279 2279 raise JSONRPCError(
2280 2280 'Unable to pull changes from `%s`' % remote_uri_display
2281 2281 )
2282 2282
2283 2283
2284 2284 @jsonrpc_method()
2285 2285 def strip(request, apiuser, repoid, revision, branch):
2286 2286 """
2287 2287 Strips the given revision from the specified repository.
2288 2288
2289 2289 * This will remove the revision and all of its decendants.
2290 2290
2291 2291 This command can only be run using an |authtoken| with admin rights to
2292 2292 the specified repository.
2293 2293
2294 2294 This command takes the following options:
2295 2295
2296 2296 :param apiuser: This is filled automatically from the |authtoken|.
2297 2297 :type apiuser: AuthUser
2298 2298 :param repoid: The repository name or repository ID.
2299 2299 :type repoid: str or int
2300 2300 :param revision: The revision you wish to strip.
2301 2301 :type revision: str
2302 2302 :param branch: The branch from which to strip the revision.
2303 2303 :type branch: str
2304 2304
2305 2305 Example output:
2306 2306
2307 2307 .. code-block:: bash
2308 2308
2309 2309 id : <id_given_in_input>
2310 2310 result : {
2311 2311 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
2312 2312 "repository": "<repository name>"
2313 2313 }
2314 2314 error : null
2315 2315
2316 2316 Example error output:
2317 2317
2318 2318 .. code-block:: bash
2319 2319
2320 2320 id : <id_given_in_input>
2321 2321 result : null
2322 2322 error : {
2323 2323 "Unable to strip commit <commit_hash> from repo `<repository name>`"
2324 2324 }
2325 2325
2326 2326 """
2327 2327
2328 2328 repo = get_repo_or_error(repoid)
2329 2329 if not has_superadmin_permission(apiuser):
2330 2330 _perms = ('repository.admin',)
2331 2331 validate_repo_permissions(apiuser, repoid, repo, _perms)
2332 2332
2333 2333 try:
2334 2334 ScmModel().strip(repo, revision, branch)
2335 2335 audit_logger.store_api(
2336 2336 'repo.commit.strip', action_data={'commit_id': revision},
2337 2337 repo=repo,
2338 2338 user=apiuser, commit=True)
2339 2339
2340 2340 return {
2341 2341 'msg': 'Stripped commit %s from repo `%s`' % (
2342 2342 revision, repo.repo_name),
2343 2343 'repository': repo.repo_name
2344 2344 }
2345 2345 except Exception:
2346 2346 log.exception("Exception while trying to strip")
2347 2347 raise JSONRPCError(
2348 2348 'Unable to strip commit %s from repo `%s`' % (
2349 2349 revision, repo.repo_name)
2350 2350 )
2351 2351
2352 2352
2353 2353 @jsonrpc_method()
2354 2354 def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
2355 2355 """
2356 2356 Returns all settings for a repository. If key is given it only returns the
2357 2357 setting identified by the key or null.
2358 2358
2359 2359 :param apiuser: This is filled automatically from the |authtoken|.
2360 2360 :type apiuser: AuthUser
2361 2361 :param repoid: The repository name or repository id.
2362 2362 :type repoid: str or int
2363 2363 :param key: Key of the setting to return.
2364 2364 :type: key: Optional(str)
2365 2365
2366 2366 Example output:
2367 2367
2368 2368 .. code-block:: bash
2369 2369
2370 2370 {
2371 2371 "error": null,
2372 2372 "id": 237,
2373 2373 "result": {
2374 2374 "extensions_largefiles": true,
2375 2375 "extensions_evolve": true,
2376 2376 "hooks_changegroup_push_logger": true,
2377 2377 "hooks_changegroup_repo_size": false,
2378 2378 "hooks_outgoing_pull_logger": true,
2379 2379 "phases_publish": "True",
2380 2380 "rhodecode_hg_use_rebase_for_merging": true,
2381 2381 "rhodecode_pr_merge_enabled": true,
2382 2382 "rhodecode_use_outdated_comments": true
2383 2383 }
2384 2384 }
2385 2385 """
2386 2386
2387 2387 # Restrict access to this api method to super-admins, and repo admins only.
2388 2388 repo = get_repo_or_error(repoid)
2389 2389 if not has_superadmin_permission(apiuser):
2390 2390 _perms = ('repository.admin',)
2391 2391 validate_repo_permissions(apiuser, repoid, repo, _perms)
2392 2392
2393 2393 try:
2394 2394 settings_model = VcsSettingsModel(repo=repo)
2395 2395 settings = settings_model.get_global_settings()
2396 2396 settings.update(settings_model.get_repo_settings())
2397 2397
2398 2398 # If only a single setting is requested fetch it from all settings.
2399 2399 key = Optional.extract(key)
2400 2400 if key is not None:
2401 2401 settings = settings.get(key, None)
2402 2402 except Exception:
2403 2403 msg = 'Failed to fetch settings for repository `{}`'.format(repoid)
2404 2404 log.exception(msg)
2405 2405 raise JSONRPCError(msg)
2406 2406
2407 2407 return settings
2408 2408
2409 2409
2410 2410 @jsonrpc_method()
2411 2411 def set_repo_settings(request, apiuser, repoid, settings):
2412 2412 """
2413 2413 Update repository settings. Returns true on success.
2414 2414
2415 2415 :param apiuser: This is filled automatically from the |authtoken|.
2416 2416 :type apiuser: AuthUser
2417 2417 :param repoid: The repository name or repository id.
2418 2418 :type repoid: str or int
2419 2419 :param settings: The new settings for the repository.
2420 2420 :type: settings: dict
2421 2421
2422 2422 Example output:
2423 2423
2424 2424 .. code-block:: bash
2425 2425
2426 2426 {
2427 2427 "error": null,
2428 2428 "id": 237,
2429 2429 "result": true
2430 2430 }
2431 2431 """
2432 2432 # Restrict access to this api method to super-admins, and repo admins only.
2433 2433 repo = get_repo_or_error(repoid)
2434 2434 if not has_superadmin_permission(apiuser):
2435 2435 _perms = ('repository.admin',)
2436 2436 validate_repo_permissions(apiuser, repoid, repo, _perms)
2437 2437
2438 2438 if type(settings) is not dict:
2439 2439 raise JSONRPCError('Settings have to be a JSON Object.')
2440 2440
2441 2441 try:
2442 2442 settings_model = VcsSettingsModel(repo=repoid)
2443 2443
2444 2444 # Merge global, repo and incoming settings.
2445 2445 new_settings = settings_model.get_global_settings()
2446 2446 new_settings.update(settings_model.get_repo_settings())
2447 2447 new_settings.update(settings)
2448 2448
2449 2449 # Update the settings.
2450 2450 inherit_global_settings = new_settings.get(
2451 2451 'inherit_global_settings', False)
2452 2452 settings_model.create_or_update_repo_settings(
2453 2453 new_settings, inherit_global_settings=inherit_global_settings)
2454 2454 Session().commit()
2455 2455 except Exception:
2456 2456 msg = 'Failed to update settings for repository `{}`'.format(repoid)
2457 2457 log.exception(msg)
2458 2458 raise JSONRPCError(msg)
2459 2459
2460 2460 # Indicate success.
2461 2461 return True
2462 2462
2463 2463
2464 2464 @jsonrpc_method()
2465 2465 def maintenance(request, apiuser, repoid):
2466 2466 """
2467 2467 Triggers a maintenance on the given repository.
2468 2468
2469 2469 This command can only be run using an |authtoken| with admin
2470 2470 rights to the specified repository. For more information,
2471 2471 see :ref:`config-token-ref`.
2472 2472
2473 2473 This command takes the following options:
2474 2474
2475 2475 :param apiuser: This is filled automatically from the |authtoken|.
2476 2476 :type apiuser: AuthUser
2477 2477 :param repoid: The repository name or repository ID.
2478 2478 :type repoid: str or int
2479 2479
2480 2480 Example output:
2481 2481
2482 2482 .. code-block:: bash
2483 2483
2484 2484 id : <id_given_in_input>
2485 2485 result : {
2486 2486 "msg": "executed maintenance command",
2487 2487 "executed_actions": [
2488 2488 <action_message>, <action_message2>...
2489 2489 ],
2490 2490 "repository": "<repository name>"
2491 2491 }
2492 2492 error : null
2493 2493
2494 2494 Example error output:
2495 2495
2496 2496 .. code-block:: bash
2497 2497
2498 2498 id : <id_given_in_input>
2499 2499 result : null
2500 2500 error : {
2501 2501 "Unable to execute maintenance on `<reponame>`"
2502 2502 }
2503 2503
2504 2504 """
2505 2505
2506 2506 repo = get_repo_or_error(repoid)
2507 2507 if not has_superadmin_permission(apiuser):
2508 2508 _perms = ('repository.admin',)
2509 2509 validate_repo_permissions(apiuser, repoid, repo, _perms)
2510 2510
2511 2511 try:
2512 2512 maintenance = repo_maintenance.RepoMaintenance()
2513 2513 executed_actions = maintenance.execute(repo)
2514 2514
2515 2515 return {
2516 2516 'msg': 'executed maintenance command',
2517 2517 'executed_actions': executed_actions,
2518 2518 'repository': repo.repo_name
2519 2519 }
2520 2520 except Exception:
2521 2521 log.exception("Exception occurred while trying to run maintenance")
2522 2522 raise JSONRPCError(
2523 2523 'Unable to execute maintenance on `%s`' % repo.repo_name)
@@ -1,1658 +1,1661 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import mock
21 21 import pytest
22 22
23 23 import rhodecode
24 24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 25 from rhodecode.lib.vcs.nodes import FileNode
26 26 from rhodecode.lib import helpers as h
27 27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 28 from rhodecode.model.db import (
29 29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 30 from rhodecode.model.meta import Session
31 31 from rhodecode.model.pull_request import PullRequestModel
32 32 from rhodecode.model.user import UserModel
33 33 from rhodecode.model.comment import CommentsModel
34 34 from rhodecode.tests import (
35 35 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
36 36
37 37
38 38 def route_path(name, params=None, **kwargs):
39 39 import urllib
40 40
41 41 base_url = {
42 42 'repo_changelog': '/{repo_name}/changelog',
43 43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 44 'repo_commits': '/{repo_name}/commits',
45 45 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
46 46 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
47 47 'pullrequest_show_all': '/{repo_name}/pull-request',
48 48 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
49 49 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
50 50 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
51 51 'pullrequest_new': '/{repo_name}/pull-request/new',
52 52 'pullrequest_create': '/{repo_name}/pull-request/create',
53 53 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
54 54 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
55 55 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
56 56 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
57 57 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
58 58 'pullrequest_comment_edit': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/edit',
59 59 }[name].format(**kwargs)
60 60
61 61 if params:
62 62 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
63 63 return base_url
64 64
65 65
66 66 @pytest.mark.usefixtures('app', 'autologin_user')
67 67 @pytest.mark.backends("git", "hg")
68 68 class TestPullrequestsView(object):
69 69
70 70 def test_index(self, backend):
71 71 self.app.get(route_path(
72 72 'pullrequest_new',
73 73 repo_name=backend.repo_name))
74 74
75 75 def test_option_menu_create_pull_request_exists(self, backend):
76 76 repo_name = backend.repo_name
77 77 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
78 78
79 79 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
80 80 'pullrequest_new', repo_name=repo_name)
81 81 response.mustcontain(create_pr_link)
82 82
83 83 def test_create_pr_form_with_raw_commit_id(self, backend):
84 84 repo = backend.repo
85 85
86 86 self.app.get(
87 87 route_path('pullrequest_new', repo_name=repo.repo_name,
88 88 commit=repo.get_commit().raw_id),
89 89 status=200)
90 90
91 91 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
92 92 @pytest.mark.parametrize('range_diff', ["0", "1"])
93 93 def test_show(self, pr_util, pr_merge_enabled, range_diff):
94 94 pull_request = pr_util.create_pull_request(
95 95 mergeable=pr_merge_enabled, enable_notifications=False)
96 96
97 97 response = self.app.get(route_path(
98 98 'pullrequest_show',
99 99 repo_name=pull_request.target_repo.scm_instance().name,
100 100 pull_request_id=pull_request.pull_request_id,
101 101 params={'range-diff': range_diff}))
102 102
103 103 for commit_id in pull_request.revisions:
104 104 response.mustcontain(commit_id)
105 105
106 106 response.mustcontain(pull_request.target_ref_parts.type)
107 107 response.mustcontain(pull_request.target_ref_parts.name)
108 108
109 109 response.mustcontain('class="pull-request-merge"')
110 110
111 111 if pr_merge_enabled:
112 112 response.mustcontain('Pull request reviewer approval is pending')
113 113 else:
114 114 response.mustcontain('Server-side pull request merging is disabled.')
115 115
116 116 if range_diff == "1":
117 117 response.mustcontain('Turn off: Show the diff as commit range')
118 118
119 119 def test_show_versions_of_pr(self, backend, csrf_token):
120 120 commits = [
121 121 {'message': 'initial-commit',
122 122 'added': [FileNode('test-file.txt', 'LINE1\n')]},
123 123
124 124 {'message': 'commit-1',
125 125 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\n')]},
126 126 # Above is the initial version of PR that changes a single line
127 127
128 128 # from now on we'll add 3x commit adding a nother line on each step
129 129 {'message': 'commit-2',
130 130 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\n')]},
131 131
132 132 {'message': 'commit-3',
133 133 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\n')]},
134 134
135 135 {'message': 'commit-4',
136 136 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n')]},
137 137 ]
138 138
139 139 commit_ids = backend.create_master_repo(commits)
140 140 target = backend.create_repo(heads=['initial-commit'])
141 141 source = backend.create_repo(heads=['commit-1'])
142 142 source_repo_name = source.repo_name
143 143 target_repo_name = target.repo_name
144 144
145 145 target_ref = 'branch:{branch}:{commit_id}'.format(
146 146 branch=backend.default_branch_name, commit_id=commit_ids['initial-commit'])
147 147 source_ref = 'branch:{branch}:{commit_id}'.format(
148 148 branch=backend.default_branch_name, commit_id=commit_ids['commit-1'])
149 149
150 150 response = self.app.post(
151 151 route_path('pullrequest_create', repo_name=source.repo_name),
152 152 [
153 153 ('source_repo', source_repo_name),
154 154 ('source_ref', source_ref),
155 155 ('target_repo', target_repo_name),
156 156 ('target_ref', target_ref),
157 157 ('common_ancestor', commit_ids['initial-commit']),
158 158 ('pullrequest_title', 'Title'),
159 159 ('pullrequest_desc', 'Description'),
160 160 ('description_renderer', 'markdown'),
161 161 ('__start__', 'review_members:sequence'),
162 162 ('__start__', 'reviewer:mapping'),
163 163 ('user_id', '1'),
164 164 ('__start__', 'reasons:sequence'),
165 165 ('reason', 'Some reason'),
166 166 ('__end__', 'reasons:sequence'),
167 167 ('__start__', 'rules:sequence'),
168 168 ('__end__', 'rules:sequence'),
169 169 ('mandatory', 'False'),
170 170 ('__end__', 'reviewer:mapping'),
171 171 ('__end__', 'review_members:sequence'),
172 172 ('__start__', 'revisions:sequence'),
173 173 ('revisions', commit_ids['commit-1']),
174 174 ('__end__', 'revisions:sequence'),
175 175 ('user', ''),
176 176 ('csrf_token', csrf_token),
177 177 ],
178 178 status=302)
179 179
180 180 location = response.headers['Location']
181 181
182 182 pull_request_id = location.rsplit('/', 1)[1]
183 183 assert pull_request_id != 'new'
184 184 pull_request = PullRequest.get(int(pull_request_id))
185 185
186 186 pull_request_id = pull_request.pull_request_id
187 187
188 188 # Show initial version of PR
189 189 response = self.app.get(
190 190 route_path('pullrequest_show',
191 191 repo_name=target_repo_name,
192 192 pull_request_id=pull_request_id))
193 193
194 194 response.mustcontain('commit-1')
195 195 response.mustcontain(no=['commit-2'])
196 196 response.mustcontain(no=['commit-3'])
197 197 response.mustcontain(no=['commit-4'])
198 198
199 199 response.mustcontain('cb-addition"></span><span>LINE2</span>')
200 200 response.mustcontain(no=['LINE3'])
201 201 response.mustcontain(no=['LINE4'])
202 202 response.mustcontain(no=['LINE5'])
203 203
204 204 # update PR #1
205 205 source_repo = Repository.get_by_repo_name(source_repo_name)
206 206 backend.pull_heads(source_repo, heads=['commit-2'])
207 207 response = self.app.post(
208 208 route_path('pullrequest_update',
209 209 repo_name=target_repo_name, pull_request_id=pull_request_id),
210 210 params={'update_commits': 'true', 'csrf_token': csrf_token})
211 211
212 212 # update PR #2
213 213 source_repo = Repository.get_by_repo_name(source_repo_name)
214 214 backend.pull_heads(source_repo, heads=['commit-3'])
215 215 response = self.app.post(
216 216 route_path('pullrequest_update',
217 217 repo_name=target_repo_name, pull_request_id=pull_request_id),
218 218 params={'update_commits': 'true', 'csrf_token': csrf_token})
219 219
220 220 # update PR #3
221 221 source_repo = Repository.get_by_repo_name(source_repo_name)
222 222 backend.pull_heads(source_repo, heads=['commit-4'])
223 223 response = self.app.post(
224 224 route_path('pullrequest_update',
225 225 repo_name=target_repo_name, pull_request_id=pull_request_id),
226 226 params={'update_commits': 'true', 'csrf_token': csrf_token})
227 227
228 228 # Show final version !
229 229 response = self.app.get(
230 230 route_path('pullrequest_show',
231 231 repo_name=target_repo_name,
232 232 pull_request_id=pull_request_id))
233 233
234 234 # 3 updates, and the latest == 4
235 235 response.mustcontain('4 versions available for this pull request')
236 236 response.mustcontain(no=['rhodecode diff rendering error'])
237 237
238 238 # initial show must have 3 commits, and 3 adds
239 239 response.mustcontain('commit-1')
240 240 response.mustcontain('commit-2')
241 241 response.mustcontain('commit-3')
242 242 response.mustcontain('commit-4')
243 243
244 244 response.mustcontain('cb-addition"></span><span>LINE2</span>')
245 245 response.mustcontain('cb-addition"></span><span>LINE3</span>')
246 246 response.mustcontain('cb-addition"></span><span>LINE4</span>')
247 247 response.mustcontain('cb-addition"></span><span>LINE5</span>')
248 248
249 249 # fetch versions
250 250 pr = PullRequest.get(pull_request_id)
251 251 versions = [x.pull_request_version_id for x in pr.versions.all()]
252 252 assert len(versions) == 3
253 253
254 254 # show v1,v2,v3,v4
255 255 def cb_line(text):
256 256 return 'cb-addition"></span><span>{}</span>'.format(text)
257 257
258 258 def cb_context(text):
259 259 return '<span class="cb-code"><span class="cb-action cb-context">' \
260 260 '</span><span>{}</span></span>'.format(text)
261 261
262 262 commit_tests = {
263 263 # in response, not in response
264 264 1: (['commit-1'], ['commit-2', 'commit-3', 'commit-4']),
265 265 2: (['commit-1', 'commit-2'], ['commit-3', 'commit-4']),
266 266 3: (['commit-1', 'commit-2', 'commit-3'], ['commit-4']),
267 267 4: (['commit-1', 'commit-2', 'commit-3', 'commit-4'], []),
268 268 }
269 269 diff_tests = {
270 270 1: (['LINE2'], ['LINE3', 'LINE4', 'LINE5']),
271 271 2: (['LINE2', 'LINE3'], ['LINE4', 'LINE5']),
272 272 3: (['LINE2', 'LINE3', 'LINE4'], ['LINE5']),
273 273 4: (['LINE2', 'LINE3', 'LINE4', 'LINE5'], []),
274 274 }
275 275 for idx, ver in enumerate(versions, 1):
276 276
277 277 response = self.app.get(
278 278 route_path('pullrequest_show',
279 279 repo_name=target_repo_name,
280 280 pull_request_id=pull_request_id,
281 281 params={'version': ver}))
282 282
283 283 response.mustcontain(no=['rhodecode diff rendering error'])
284 284 response.mustcontain('Showing changes at v{}'.format(idx))
285 285
286 286 yes, no = commit_tests[idx]
287 287 for y in yes:
288 288 response.mustcontain(y)
289 289 for n in no:
290 290 response.mustcontain(no=n)
291 291
292 292 yes, no = diff_tests[idx]
293 293 for y in yes:
294 294 response.mustcontain(cb_line(y))
295 295 for n in no:
296 296 response.mustcontain(no=n)
297 297
298 298 # show diff between versions
299 299 diff_compare_tests = {
300 300 1: (['LINE3'], ['LINE1', 'LINE2']),
301 301 2: (['LINE3', 'LINE4'], ['LINE1', 'LINE2']),
302 302 3: (['LINE3', 'LINE4', 'LINE5'], ['LINE1', 'LINE2']),
303 303 }
304 304 for idx, ver in enumerate(versions, 1):
305 305 adds, context = diff_compare_tests[idx]
306 306
307 307 to_ver = ver+1
308 308 if idx == 3:
309 309 to_ver = 'latest'
310 310
311 311 response = self.app.get(
312 312 route_path('pullrequest_show',
313 313 repo_name=target_repo_name,
314 314 pull_request_id=pull_request_id,
315 315 params={'from_version': versions[0], 'version': to_ver}))
316 316
317 317 response.mustcontain(no=['rhodecode diff rendering error'])
318 318
319 319 for a in adds:
320 320 response.mustcontain(cb_line(a))
321 321 for c in context:
322 322 response.mustcontain(cb_context(c))
323 323
324 324 # test version v2 -> v3
325 325 response = self.app.get(
326 326 route_path('pullrequest_show',
327 327 repo_name=target_repo_name,
328 328 pull_request_id=pull_request_id,
329 329 params={'from_version': versions[1], 'version': versions[2]}))
330 330
331 331 response.mustcontain(cb_context('LINE1'))
332 332 response.mustcontain(cb_context('LINE2'))
333 333 response.mustcontain(cb_context('LINE3'))
334 334 response.mustcontain(cb_line('LINE4'))
335 335
336 336 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
337 337 # Logout
338 338 response = self.app.post(
339 339 h.route_path('logout'),
340 340 params={'csrf_token': csrf_token})
341 341 # Login as regular user
342 342 response = self.app.post(h.route_path('login'),
343 343 {'username': TEST_USER_REGULAR_LOGIN,
344 344 'password': 'test12'})
345 345
346 346 pull_request = pr_util.create_pull_request(
347 347 author=TEST_USER_REGULAR_LOGIN)
348 348
349 349 response = self.app.get(route_path(
350 350 'pullrequest_show',
351 351 repo_name=pull_request.target_repo.scm_instance().name,
352 352 pull_request_id=pull_request.pull_request_id))
353 353
354 354 response.mustcontain('Server-side pull request merging is disabled.')
355 355
356 356 assert_response = response.assert_response()
357 357 # for regular user without a merge permissions, we don't see it
358 358 assert_response.no_element_exists('#close-pull-request-action')
359 359
360 360 user_util.grant_user_permission_to_repo(
361 361 pull_request.target_repo,
362 362 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
363 363 'repository.write')
364 364 response = self.app.get(route_path(
365 365 'pullrequest_show',
366 366 repo_name=pull_request.target_repo.scm_instance().name,
367 367 pull_request_id=pull_request.pull_request_id))
368 368
369 369 response.mustcontain('Server-side pull request merging is disabled.')
370 370
371 371 assert_response = response.assert_response()
372 372 # now regular user has a merge permissions, we have CLOSE button
373 373 assert_response.one_element_exists('#close-pull-request-action')
374 374
375 375 def test_show_invalid_commit_id(self, pr_util):
376 376 # Simulating invalid revisions which will cause a lookup error
377 377 pull_request = pr_util.create_pull_request()
378 378 pull_request.revisions = ['invalid']
379 379 Session().add(pull_request)
380 380 Session().commit()
381 381
382 382 response = self.app.get(route_path(
383 383 'pullrequest_show',
384 384 repo_name=pull_request.target_repo.scm_instance().name,
385 385 pull_request_id=pull_request.pull_request_id))
386 386
387 387 for commit_id in pull_request.revisions:
388 388 response.mustcontain(commit_id)
389 389
390 390 def test_show_invalid_source_reference(self, pr_util):
391 391 pull_request = pr_util.create_pull_request()
392 392 pull_request.source_ref = 'branch:b:invalid'
393 393 Session().add(pull_request)
394 394 Session().commit()
395 395
396 396 self.app.get(route_path(
397 397 'pullrequest_show',
398 398 repo_name=pull_request.target_repo.scm_instance().name,
399 399 pull_request_id=pull_request.pull_request_id))
400 400
401 401 def test_edit_title_description(self, pr_util, csrf_token):
402 402 pull_request = pr_util.create_pull_request()
403 403 pull_request_id = pull_request.pull_request_id
404 404
405 405 response = self.app.post(
406 406 route_path('pullrequest_update',
407 407 repo_name=pull_request.target_repo.repo_name,
408 408 pull_request_id=pull_request_id),
409 409 params={
410 410 'edit_pull_request': 'true',
411 411 'title': 'New title',
412 412 'description': 'New description',
413 413 'csrf_token': csrf_token})
414 414
415 415 assert_session_flash(
416 416 response, u'Pull request title & description updated.',
417 417 category='success')
418 418
419 419 pull_request = PullRequest.get(pull_request_id)
420 420 assert pull_request.title == 'New title'
421 421 assert pull_request.description == 'New description'
422 422
423 423 def test_edit_title_description_closed(self, pr_util, csrf_token):
424 424 pull_request = pr_util.create_pull_request()
425 425 pull_request_id = pull_request.pull_request_id
426 426 repo_name = pull_request.target_repo.repo_name
427 427 pr_util.close()
428 428
429 429 response = self.app.post(
430 430 route_path('pullrequest_update',
431 431 repo_name=repo_name, pull_request_id=pull_request_id),
432 432 params={
433 433 'edit_pull_request': 'true',
434 434 'title': 'New title',
435 435 'description': 'New description',
436 436 'csrf_token': csrf_token}, status=200)
437 437 assert_session_flash(
438 438 response, u'Cannot update closed pull requests.',
439 439 category='error')
440 440
441 441 def test_update_invalid_source_reference(self, pr_util, csrf_token):
442 442 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
443 443
444 444 pull_request = pr_util.create_pull_request()
445 445 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
446 446 Session().add(pull_request)
447 447 Session().commit()
448 448
449 449 pull_request_id = pull_request.pull_request_id
450 450
451 451 response = self.app.post(
452 452 route_path('pullrequest_update',
453 453 repo_name=pull_request.target_repo.repo_name,
454 454 pull_request_id=pull_request_id),
455 455 params={'update_commits': 'true', 'csrf_token': csrf_token})
456 456
457 457 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
458 458 UpdateFailureReason.MISSING_SOURCE_REF])
459 459 assert_session_flash(response, expected_msg, category='error')
460 460
461 461 def test_missing_target_reference(self, pr_util, csrf_token):
462 462 from rhodecode.lib.vcs.backends.base import MergeFailureReason
463 463 pull_request = pr_util.create_pull_request(
464 464 approved=True, mergeable=True)
465 465 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
466 466 pull_request.target_ref = unicode_reference
467 467 Session().add(pull_request)
468 468 Session().commit()
469 469
470 470 pull_request_id = pull_request.pull_request_id
471 471 pull_request_url = route_path(
472 472 'pullrequest_show',
473 473 repo_name=pull_request.target_repo.repo_name,
474 474 pull_request_id=pull_request_id)
475 475
476 476 response = self.app.get(pull_request_url)
477 477 target_ref_id = 'invalid-branch'
478 478 merge_resp = MergeResponse(
479 479 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
480 480 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
481 481 response.assert_response().element_contains(
482 482 'div[data-role="merge-message"]', merge_resp.merge_status_message)
483 483
484 484 def test_comment_and_close_pull_request_custom_message_approved(
485 485 self, pr_util, csrf_token, xhr_header):
486 486
487 487 pull_request = pr_util.create_pull_request(approved=True)
488 488 pull_request_id = pull_request.pull_request_id
489 489 author = pull_request.user_id
490 490 repo = pull_request.target_repo.repo_id
491 491
492 492 self.app.post(
493 493 route_path('pullrequest_comment_create',
494 494 repo_name=pull_request.target_repo.scm_instance().name,
495 495 pull_request_id=pull_request_id),
496 496 params={
497 497 'close_pull_request': '1',
498 498 'text': 'Closing a PR',
499 499 'csrf_token': csrf_token},
500 500 extra_environ=xhr_header,)
501 501
502 502 journal = UserLog.query()\
503 503 .filter(UserLog.user_id == author)\
504 504 .filter(UserLog.repository_id == repo) \
505 505 .order_by(UserLog.user_log_id.asc()) \
506 506 .all()
507 507 assert journal[-1].action == 'repo.pull_request.close'
508 508
509 509 pull_request = PullRequest.get(pull_request_id)
510 510 assert pull_request.is_closed()
511 511
512 512 status = ChangesetStatusModel().get_status(
513 513 pull_request.source_repo, pull_request=pull_request)
514 514 assert status == ChangesetStatus.STATUS_APPROVED
515 515 comments = ChangesetComment().query() \
516 516 .filter(ChangesetComment.pull_request == pull_request) \
517 517 .order_by(ChangesetComment.comment_id.asc())\
518 518 .all()
519 519 assert comments[-1].text == 'Closing a PR'
520 520
521 521 def test_comment_force_close_pull_request_rejected(
522 522 self, pr_util, csrf_token, xhr_header):
523 523 pull_request = pr_util.create_pull_request()
524 524 pull_request_id = pull_request.pull_request_id
525 525 PullRequestModel().update_reviewers(
526 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
526 pull_request_id, [
527 (1, ['reason'], False, 'reviewer', []),
528 (2, ['reason2'], False, 'reviewer', [])],
527 529 pull_request.author)
528 530 author = pull_request.user_id
529 531 repo = pull_request.target_repo.repo_id
530 532
531 533 self.app.post(
532 534 route_path('pullrequest_comment_create',
533 535 repo_name=pull_request.target_repo.scm_instance().name,
534 536 pull_request_id=pull_request_id),
535 537 params={
536 538 'close_pull_request': '1',
537 539 'csrf_token': csrf_token},
538 540 extra_environ=xhr_header)
539 541
540 542 pull_request = PullRequest.get(pull_request_id)
541 543
542 544 journal = UserLog.query()\
543 545 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
544 546 .order_by(UserLog.user_log_id.asc()) \
545 547 .all()
546 548 assert journal[-1].action == 'repo.pull_request.close'
547 549
548 550 # check only the latest status, not the review status
549 551 status = ChangesetStatusModel().get_status(
550 552 pull_request.source_repo, pull_request=pull_request)
551 553 assert status == ChangesetStatus.STATUS_REJECTED
552 554
553 555 def test_comment_and_close_pull_request(
554 556 self, pr_util, csrf_token, xhr_header):
555 557 pull_request = pr_util.create_pull_request()
556 558 pull_request_id = pull_request.pull_request_id
557 559
558 560 response = self.app.post(
559 561 route_path('pullrequest_comment_create',
560 562 repo_name=pull_request.target_repo.scm_instance().name,
561 563 pull_request_id=pull_request.pull_request_id),
562 564 params={
563 565 'close_pull_request': 'true',
564 566 'csrf_token': csrf_token},
565 567 extra_environ=xhr_header)
566 568
567 569 assert response.json
568 570
569 571 pull_request = PullRequest.get(pull_request_id)
570 572 assert pull_request.is_closed()
571 573
572 574 # check only the latest status, not the review status
573 575 status = ChangesetStatusModel().get_status(
574 576 pull_request.source_repo, pull_request=pull_request)
575 577 assert status == ChangesetStatus.STATUS_REJECTED
576 578
577 579 def test_comment_and_close_pull_request_try_edit_comment(
578 580 self, pr_util, csrf_token, xhr_header
579 581 ):
580 582 pull_request = pr_util.create_pull_request()
581 583 pull_request_id = pull_request.pull_request_id
582 584 target_scm = pull_request.target_repo.scm_instance()
583 585 target_scm_name = target_scm.name
584 586
585 587 response = self.app.post(
586 588 route_path(
587 589 'pullrequest_comment_create',
588 590 repo_name=target_scm_name,
589 591 pull_request_id=pull_request_id,
590 592 ),
591 593 params={
592 594 'close_pull_request': 'true',
593 595 'csrf_token': csrf_token,
594 596 },
595 597 extra_environ=xhr_header)
596 598
597 599 assert response.json
598 600
599 601 pull_request = PullRequest.get(pull_request_id)
600 602 target_scm = pull_request.target_repo.scm_instance()
601 603 target_scm_name = target_scm.name
602 604 assert pull_request.is_closed()
603 605
604 606 # check only the latest status, not the review status
605 607 status = ChangesetStatusModel().get_status(
606 608 pull_request.source_repo, pull_request=pull_request)
607 609 assert status == ChangesetStatus.STATUS_REJECTED
608 610
609 611 comment_id = response.json.get('comment_id', None)
610 612 test_text = 'test'
611 613 response = self.app.post(
612 614 route_path(
613 615 'pullrequest_comment_edit',
614 616 repo_name=target_scm_name,
615 617 pull_request_id=pull_request_id,
616 618 comment_id=comment_id,
617 619 ),
618 620 extra_environ=xhr_header,
619 621 params={
620 622 'csrf_token': csrf_token,
621 623 'text': test_text,
622 624 },
623 625 status=403,
624 626 )
625 627 assert response.status_int == 403
626 628
627 629 def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
628 630 pull_request = pr_util.create_pull_request()
629 631 target_scm = pull_request.target_repo.scm_instance()
630 632 target_scm_name = target_scm.name
631 633
632 634 response = self.app.post(
633 635 route_path(
634 636 'pullrequest_comment_create',
635 637 repo_name=target_scm_name,
636 638 pull_request_id=pull_request.pull_request_id),
637 639 params={
638 640 'csrf_token': csrf_token,
639 641 'text': 'init',
640 642 },
641 643 extra_environ=xhr_header,
642 644 )
643 645 assert response.json
644 646
645 647 comment_id = response.json.get('comment_id', None)
646 648 assert comment_id
647 649 test_text = 'test'
648 650 self.app.post(
649 651 route_path(
650 652 'pullrequest_comment_edit',
651 653 repo_name=target_scm_name,
652 654 pull_request_id=pull_request.pull_request_id,
653 655 comment_id=comment_id,
654 656 ),
655 657 extra_environ=xhr_header,
656 658 params={
657 659 'csrf_token': csrf_token,
658 660 'text': test_text,
659 661 'version': '0',
660 662 },
661 663
662 664 )
663 665 text_form_db = ChangesetComment.query().filter(
664 666 ChangesetComment.comment_id == comment_id).first().text
665 667 assert test_text == text_form_db
666 668
667 669 def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
668 670 pull_request = pr_util.create_pull_request()
669 671 target_scm = pull_request.target_repo.scm_instance()
670 672 target_scm_name = target_scm.name
671 673
672 674 response = self.app.post(
673 675 route_path(
674 676 'pullrequest_comment_create',
675 677 repo_name=target_scm_name,
676 678 pull_request_id=pull_request.pull_request_id),
677 679 params={
678 680 'csrf_token': csrf_token,
679 681 'text': 'init',
680 682 },
681 683 extra_environ=xhr_header,
682 684 )
683 685 assert response.json
684 686
685 687 comment_id = response.json.get('comment_id', None)
686 688 assert comment_id
687 689 test_text = 'init'
688 690 response = self.app.post(
689 691 route_path(
690 692 'pullrequest_comment_edit',
691 693 repo_name=target_scm_name,
692 694 pull_request_id=pull_request.pull_request_id,
693 695 comment_id=comment_id,
694 696 ),
695 697 extra_environ=xhr_header,
696 698 params={
697 699 'csrf_token': csrf_token,
698 700 'text': test_text,
699 701 'version': '0',
700 702 },
701 703 status=404,
702 704
703 705 )
704 706 assert response.status_int == 404
705 707
706 708 def test_comment_and_try_edit_already_edited(self, pr_util, csrf_token, xhr_header):
707 709 pull_request = pr_util.create_pull_request()
708 710 target_scm = pull_request.target_repo.scm_instance()
709 711 target_scm_name = target_scm.name
710 712
711 713 response = self.app.post(
712 714 route_path(
713 715 'pullrequest_comment_create',
714 716 repo_name=target_scm_name,
715 717 pull_request_id=pull_request.pull_request_id),
716 718 params={
717 719 'csrf_token': csrf_token,
718 720 'text': 'init',
719 721 },
720 722 extra_environ=xhr_header,
721 723 )
722 724 assert response.json
723 725 comment_id = response.json.get('comment_id', None)
724 726 assert comment_id
725 727
726 728 test_text = 'test'
727 729 self.app.post(
728 730 route_path(
729 731 'pullrequest_comment_edit',
730 732 repo_name=target_scm_name,
731 733 pull_request_id=pull_request.pull_request_id,
732 734 comment_id=comment_id,
733 735 ),
734 736 extra_environ=xhr_header,
735 737 params={
736 738 'csrf_token': csrf_token,
737 739 'text': test_text,
738 740 'version': '0',
739 741 },
740 742
741 743 )
742 744 test_text_v2 = 'test_v2'
743 745 response = self.app.post(
744 746 route_path(
745 747 'pullrequest_comment_edit',
746 748 repo_name=target_scm_name,
747 749 pull_request_id=pull_request.pull_request_id,
748 750 comment_id=comment_id,
749 751 ),
750 752 extra_environ=xhr_header,
751 753 params={
752 754 'csrf_token': csrf_token,
753 755 'text': test_text_v2,
754 756 'version': '0',
755 757 },
756 758 status=409,
757 759 )
758 760 assert response.status_int == 409
759 761
760 762 text_form_db = ChangesetComment.query().filter(
761 763 ChangesetComment.comment_id == comment_id).first().text
762 764
763 765 assert test_text == text_form_db
764 766 assert test_text_v2 != text_form_db
765 767
766 768 def test_comment_and_comment_edit_permissions_forbidden(
767 769 self, autologin_regular_user, user_regular, user_admin, pr_util,
768 770 csrf_token, xhr_header):
769 771 pull_request = pr_util.create_pull_request(
770 772 author=user_admin.username, enable_notifications=False)
771 773 comment = CommentsModel().create(
772 774 text='test',
773 775 repo=pull_request.target_repo.scm_instance().name,
774 776 user=user_admin,
775 777 pull_request=pull_request,
776 778 )
777 779 response = self.app.post(
778 780 route_path(
779 781 'pullrequest_comment_edit',
780 782 repo_name=pull_request.target_repo.scm_instance().name,
781 783 pull_request_id=pull_request.pull_request_id,
782 784 comment_id=comment.comment_id,
783 785 ),
784 786 extra_environ=xhr_header,
785 787 params={
786 788 'csrf_token': csrf_token,
787 789 'text': 'test_text',
788 790 },
789 791 status=403,
790 792 )
791 793 assert response.status_int == 403
792 794
793 795 def test_create_pull_request(self, backend, csrf_token):
794 796 commits = [
795 797 {'message': 'ancestor'},
796 798 {'message': 'change'},
797 799 {'message': 'change2'},
798 800 ]
799 801 commit_ids = backend.create_master_repo(commits)
800 802 target = backend.create_repo(heads=['ancestor'])
801 803 source = backend.create_repo(heads=['change2'])
802 804
803 805 response = self.app.post(
804 806 route_path('pullrequest_create', repo_name=source.repo_name),
805 807 [
806 808 ('source_repo', source.repo_name),
807 809 ('source_ref', 'branch:default:' + commit_ids['change2']),
808 810 ('target_repo', target.repo_name),
809 811 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
810 812 ('common_ancestor', commit_ids['ancestor']),
811 813 ('pullrequest_title', 'Title'),
812 814 ('pullrequest_desc', 'Description'),
813 815 ('description_renderer', 'markdown'),
814 816 ('__start__', 'review_members:sequence'),
815 817 ('__start__', 'reviewer:mapping'),
816 818 ('user_id', '1'),
817 819 ('__start__', 'reasons:sequence'),
818 820 ('reason', 'Some reason'),
819 821 ('__end__', 'reasons:sequence'),
820 822 ('__start__', 'rules:sequence'),
821 823 ('__end__', 'rules:sequence'),
822 824 ('mandatory', 'False'),
823 825 ('__end__', 'reviewer:mapping'),
824 826 ('__end__', 'review_members:sequence'),
825 827 ('__start__', 'revisions:sequence'),
826 828 ('revisions', commit_ids['change']),
827 829 ('revisions', commit_ids['change2']),
828 830 ('__end__', 'revisions:sequence'),
829 831 ('user', ''),
830 832 ('csrf_token', csrf_token),
831 833 ],
832 834 status=302)
833 835
834 836 location = response.headers['Location']
835 837 pull_request_id = location.rsplit('/', 1)[1]
836 838 assert pull_request_id != 'new'
837 839 pull_request = PullRequest.get(int(pull_request_id))
838 840
839 841 # check that we have now both revisions
840 842 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
841 843 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
842 844 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
843 845 assert pull_request.target_ref == expected_target_ref
844 846
845 847 def test_reviewer_notifications(self, backend, csrf_token):
846 848 # We have to use the app.post for this test so it will create the
847 849 # notifications properly with the new PR
848 850 commits = [
849 851 {'message': 'ancestor',
850 852 'added': [FileNode('file_A', content='content_of_ancestor')]},
851 853 {'message': 'change',
852 854 'added': [FileNode('file_a', content='content_of_change')]},
853 855 {'message': 'change-child'},
854 856 {'message': 'ancestor-child', 'parents': ['ancestor'],
855 857 'added': [
856 858 FileNode('file_B', content='content_of_ancestor_child')]},
857 859 {'message': 'ancestor-child-2'},
858 860 ]
859 861 commit_ids = backend.create_master_repo(commits)
860 862 target = backend.create_repo(heads=['ancestor-child'])
861 863 source = backend.create_repo(heads=['change'])
862 864
863 865 response = self.app.post(
864 866 route_path('pullrequest_create', repo_name=source.repo_name),
865 867 [
866 868 ('source_repo', source.repo_name),
867 869 ('source_ref', 'branch:default:' + commit_ids['change']),
868 870 ('target_repo', target.repo_name),
869 871 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
870 872 ('common_ancestor', commit_ids['ancestor']),
871 873 ('pullrequest_title', 'Title'),
872 874 ('pullrequest_desc', 'Description'),
873 875 ('description_renderer', 'markdown'),
874 876 ('__start__', 'review_members:sequence'),
875 877 ('__start__', 'reviewer:mapping'),
876 878 ('user_id', '2'),
877 879 ('__start__', 'reasons:sequence'),
878 880 ('reason', 'Some reason'),
879 881 ('__end__', 'reasons:sequence'),
880 882 ('__start__', 'rules:sequence'),
881 883 ('__end__', 'rules:sequence'),
882 884 ('mandatory', 'False'),
883 885 ('__end__', 'reviewer:mapping'),
884 886 ('__end__', 'review_members:sequence'),
885 887 ('__start__', 'revisions:sequence'),
886 888 ('revisions', commit_ids['change']),
887 889 ('__end__', 'revisions:sequence'),
888 890 ('user', ''),
889 891 ('csrf_token', csrf_token),
890 892 ],
891 893 status=302)
892 894
893 895 location = response.headers['Location']
894 896
895 897 pull_request_id = location.rsplit('/', 1)[1]
896 898 assert pull_request_id != 'new'
897 899 pull_request = PullRequest.get(int(pull_request_id))
898 900
899 901 # Check that a notification was made
900 902 notifications = Notification.query()\
901 903 .filter(Notification.created_by == pull_request.author.user_id,
902 904 Notification.type_ == Notification.TYPE_PULL_REQUEST,
903 905 Notification.subject.contains(
904 906 "requested a pull request review. !%s" % pull_request_id))
905 907 assert len(notifications.all()) == 1
906 908
907 909 # Change reviewers and check that a notification was made
908 910 PullRequestModel().update_reviewers(
909 pull_request.pull_request_id, [(1, [], False, [])],
911 pull_request.pull_request_id, [
912 (1, [], False, 'reviewer', [])
913 ],
910 914 pull_request.author)
911 915 assert len(notifications.all()) == 2
912 916
913 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
914 csrf_token):
917 def test_create_pull_request_stores_ancestor_commit_id(self, backend, csrf_token):
915 918 commits = [
916 919 {'message': 'ancestor',
917 920 'added': [FileNode('file_A', content='content_of_ancestor')]},
918 921 {'message': 'change',
919 922 'added': [FileNode('file_a', content='content_of_change')]},
920 923 {'message': 'change-child'},
921 924 {'message': 'ancestor-child', 'parents': ['ancestor'],
922 925 'added': [
923 926 FileNode('file_B', content='content_of_ancestor_child')]},
924 927 {'message': 'ancestor-child-2'},
925 928 ]
926 929 commit_ids = backend.create_master_repo(commits)
927 930 target = backend.create_repo(heads=['ancestor-child'])
928 931 source = backend.create_repo(heads=['change'])
929 932
930 933 response = self.app.post(
931 934 route_path('pullrequest_create', repo_name=source.repo_name),
932 935 [
933 936 ('source_repo', source.repo_name),
934 937 ('source_ref', 'branch:default:' + commit_ids['change']),
935 938 ('target_repo', target.repo_name),
936 939 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
937 940 ('common_ancestor', commit_ids['ancestor']),
938 941 ('pullrequest_title', 'Title'),
939 942 ('pullrequest_desc', 'Description'),
940 943 ('description_renderer', 'markdown'),
941 944 ('__start__', 'review_members:sequence'),
942 945 ('__start__', 'reviewer:mapping'),
943 946 ('user_id', '1'),
944 947 ('__start__', 'reasons:sequence'),
945 948 ('reason', 'Some reason'),
946 949 ('__end__', 'reasons:sequence'),
947 950 ('__start__', 'rules:sequence'),
948 951 ('__end__', 'rules:sequence'),
949 952 ('mandatory', 'False'),
950 953 ('__end__', 'reviewer:mapping'),
951 954 ('__end__', 'review_members:sequence'),
952 955 ('__start__', 'revisions:sequence'),
953 956 ('revisions', commit_ids['change']),
954 957 ('__end__', 'revisions:sequence'),
955 958 ('user', ''),
956 959 ('csrf_token', csrf_token),
957 960 ],
958 961 status=302)
959 962
960 963 location = response.headers['Location']
961 964
962 965 pull_request_id = location.rsplit('/', 1)[1]
963 966 assert pull_request_id != 'new'
964 967 pull_request = PullRequest.get(int(pull_request_id))
965 968
966 969 # target_ref has to point to the ancestor's commit_id in order to
967 970 # show the correct diff
968 971 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
969 972 assert pull_request.target_ref == expected_target_ref
970 973
971 974 # Check generated diff contents
972 975 response = response.follow()
973 976 response.mustcontain(no=['content_of_ancestor'])
974 977 response.mustcontain(no=['content_of_ancestor-child'])
975 978 response.mustcontain('content_of_change')
976 979
977 980 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
978 981 # Clear any previous calls to rcextensions
979 982 rhodecode.EXTENSIONS.calls.clear()
980 983
981 984 pull_request = pr_util.create_pull_request(
982 985 approved=True, mergeable=True)
983 986 pull_request_id = pull_request.pull_request_id
984 987 repo_name = pull_request.target_repo.scm_instance().name,
985 988
986 989 url = route_path('pullrequest_merge',
987 990 repo_name=str(repo_name[0]),
988 991 pull_request_id=pull_request_id)
989 992 response = self.app.post(url, params={'csrf_token': csrf_token}).follow()
990 993
991 994 pull_request = PullRequest.get(pull_request_id)
992 995
993 996 assert response.status_int == 200
994 997 assert pull_request.is_closed()
995 998 assert_pull_request_status(
996 999 pull_request, ChangesetStatus.STATUS_APPROVED)
997 1000
998 1001 # Check the relevant log entries were added
999 1002 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
1000 1003 actions = [log.action for log in user_logs]
1001 1004 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
1002 1005 expected_actions = [
1003 1006 u'repo.pull_request.close',
1004 1007 u'repo.pull_request.merge',
1005 1008 u'repo.pull_request.comment.create'
1006 1009 ]
1007 1010 assert actions == expected_actions
1008 1011
1009 1012 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
1010 1013 actions = [log for log in user_logs]
1011 1014 assert actions[-1].action == 'user.push'
1012 1015 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
1013 1016
1014 1017 # Check post_push rcextension was really executed
1015 1018 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
1016 1019 assert len(push_calls) == 1
1017 1020 unused_last_call_args, last_call_kwargs = push_calls[0]
1018 1021 assert last_call_kwargs['action'] == 'push'
1019 1022 assert last_call_kwargs['commit_ids'] == pr_commit_ids
1020 1023
1021 1024 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
1022 1025 pull_request = pr_util.create_pull_request(mergeable=False)
1023 1026 pull_request_id = pull_request.pull_request_id
1024 1027 pull_request = PullRequest.get(pull_request_id)
1025 1028
1026 1029 response = self.app.post(
1027 1030 route_path('pullrequest_merge',
1028 1031 repo_name=pull_request.target_repo.scm_instance().name,
1029 1032 pull_request_id=pull_request.pull_request_id),
1030 1033 params={'csrf_token': csrf_token}).follow()
1031 1034
1032 1035 assert response.status_int == 200
1033 1036 response.mustcontain(
1034 1037 'Merge is not currently possible because of below failed checks.')
1035 1038 response.mustcontain('Server-side pull request merging is disabled.')
1036 1039
1037 1040 @pytest.mark.skip_backends('svn')
1038 1041 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
1039 1042 pull_request = pr_util.create_pull_request(mergeable=True)
1040 1043 pull_request_id = pull_request.pull_request_id
1041 1044 repo_name = pull_request.target_repo.scm_instance().name
1042 1045
1043 1046 response = self.app.post(
1044 1047 route_path('pullrequest_merge',
1045 1048 repo_name=repo_name, pull_request_id=pull_request_id),
1046 1049 params={'csrf_token': csrf_token}).follow()
1047 1050
1048 1051 assert response.status_int == 200
1049 1052
1050 1053 response.mustcontain(
1051 1054 'Merge is not currently possible because of below failed checks.')
1052 1055 response.mustcontain('Pull request reviewer approval is pending.')
1053 1056
1054 1057 def test_merge_pull_request_renders_failure_reason(
1055 1058 self, user_regular, csrf_token, pr_util):
1056 1059 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
1057 1060 pull_request_id = pull_request.pull_request_id
1058 1061 repo_name = pull_request.target_repo.scm_instance().name
1059 1062
1060 1063 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
1061 1064 MergeFailureReason.PUSH_FAILED,
1062 1065 metadata={'target': 'shadow repo',
1063 1066 'merge_commit': 'xxx'})
1064 1067 model_patcher = mock.patch.multiple(
1065 1068 PullRequestModel,
1066 1069 merge_repo=mock.Mock(return_value=merge_resp),
1067 1070 merge_status=mock.Mock(return_value=(None, True, 'WRONG_MESSAGE')))
1068 1071
1069 1072 with model_patcher:
1070 1073 response = self.app.post(
1071 1074 route_path('pullrequest_merge',
1072 1075 repo_name=repo_name,
1073 1076 pull_request_id=pull_request_id),
1074 1077 params={'csrf_token': csrf_token}, status=302)
1075 1078
1076 1079 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
1077 1080 metadata={'target': 'shadow repo',
1078 1081 'merge_commit': 'xxx'})
1079 1082 assert_session_flash(response, merge_resp.merge_status_message)
1080 1083
1081 1084 def test_update_source_revision(self, backend, csrf_token):
1082 1085 commits = [
1083 1086 {'message': 'ancestor'},
1084 1087 {'message': 'change'},
1085 1088 {'message': 'change-2'},
1086 1089 ]
1087 1090 commit_ids = backend.create_master_repo(commits)
1088 1091 target = backend.create_repo(heads=['ancestor'])
1089 1092 source = backend.create_repo(heads=['change'])
1090 1093
1091 1094 # create pr from a in source to A in target
1092 1095 pull_request = PullRequest()
1093 1096
1094 1097 pull_request.source_repo = source
1095 1098 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1096 1099 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1097 1100
1098 1101 pull_request.target_repo = target
1099 1102 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1100 1103 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1101 1104
1102 1105 pull_request.revisions = [commit_ids['change']]
1103 1106 pull_request.title = u"Test"
1104 1107 pull_request.description = u"Description"
1105 1108 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1106 1109 pull_request.pull_request_state = PullRequest.STATE_CREATED
1107 1110 Session().add(pull_request)
1108 1111 Session().commit()
1109 1112 pull_request_id = pull_request.pull_request_id
1110 1113
1111 1114 # source has ancestor - change - change-2
1112 1115 backend.pull_heads(source, heads=['change-2'])
1113 1116 target_repo_name = target.repo_name
1114 1117
1115 1118 # update PR
1116 1119 self.app.post(
1117 1120 route_path('pullrequest_update',
1118 1121 repo_name=target_repo_name, pull_request_id=pull_request_id),
1119 1122 params={'update_commits': 'true', 'csrf_token': csrf_token})
1120 1123
1121 1124 response = self.app.get(
1122 1125 route_path('pullrequest_show',
1123 1126 repo_name=target_repo_name,
1124 1127 pull_request_id=pull_request.pull_request_id))
1125 1128
1126 1129 assert response.status_int == 200
1127 1130 response.mustcontain('Pull request updated to')
1128 1131 response.mustcontain('with 1 added, 0 removed commits.')
1129 1132
1130 1133 # check that we have now both revisions
1131 1134 pull_request = PullRequest.get(pull_request_id)
1132 1135 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
1133 1136
1134 1137 def test_update_target_revision(self, backend, csrf_token):
1135 1138 commits = [
1136 1139 {'message': 'ancestor'},
1137 1140 {'message': 'change'},
1138 1141 {'message': 'ancestor-new', 'parents': ['ancestor']},
1139 1142 {'message': 'change-rebased'},
1140 1143 ]
1141 1144 commit_ids = backend.create_master_repo(commits)
1142 1145 target = backend.create_repo(heads=['ancestor'])
1143 1146 source = backend.create_repo(heads=['change'])
1144 1147
1145 1148 # create pr from a in source to A in target
1146 1149 pull_request = PullRequest()
1147 1150
1148 1151 pull_request.source_repo = source
1149 1152 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1150 1153 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1151 1154
1152 1155 pull_request.target_repo = target
1153 1156 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1154 1157 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1155 1158
1156 1159 pull_request.revisions = [commit_ids['change']]
1157 1160 pull_request.title = u"Test"
1158 1161 pull_request.description = u"Description"
1159 1162 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1160 1163 pull_request.pull_request_state = PullRequest.STATE_CREATED
1161 1164
1162 1165 Session().add(pull_request)
1163 1166 Session().commit()
1164 1167 pull_request_id = pull_request.pull_request_id
1165 1168
1166 1169 # target has ancestor - ancestor-new
1167 1170 # source has ancestor - ancestor-new - change-rebased
1168 1171 backend.pull_heads(target, heads=['ancestor-new'])
1169 1172 backend.pull_heads(source, heads=['change-rebased'])
1170 1173 target_repo_name = target.repo_name
1171 1174
1172 1175 # update PR
1173 1176 url = route_path('pullrequest_update',
1174 1177 repo_name=target_repo_name,
1175 1178 pull_request_id=pull_request_id)
1176 1179 self.app.post(url,
1177 1180 params={'update_commits': 'true', 'csrf_token': csrf_token},
1178 1181 status=200)
1179 1182
1180 1183 # check that we have now both revisions
1181 1184 pull_request = PullRequest.get(pull_request_id)
1182 1185 assert pull_request.revisions == [commit_ids['change-rebased']]
1183 1186 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
1184 1187 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
1185 1188
1186 1189 response = self.app.get(
1187 1190 route_path('pullrequest_show',
1188 1191 repo_name=target_repo_name,
1189 1192 pull_request_id=pull_request.pull_request_id))
1190 1193 assert response.status_int == 200
1191 1194 response.mustcontain('Pull request updated to')
1192 1195 response.mustcontain('with 1 added, 1 removed commits.')
1193 1196
1194 1197 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
1195 1198 backend = backend_git
1196 1199 commits = [
1197 1200 {'message': 'master-commit-1'},
1198 1201 {'message': 'master-commit-2-change-1'},
1199 1202 {'message': 'master-commit-3-change-2'},
1200 1203
1201 1204 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
1202 1205 {'message': 'feat-commit-2'},
1203 1206 ]
1204 1207 commit_ids = backend.create_master_repo(commits)
1205 1208 target = backend.create_repo(heads=['master-commit-3-change-2'])
1206 1209 source = backend.create_repo(heads=['feat-commit-2'])
1207 1210
1208 1211 # create pr from a in source to A in target
1209 1212 pull_request = PullRequest()
1210 1213 pull_request.source_repo = source
1211 1214
1212 1215 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1213 1216 branch=backend.default_branch_name,
1214 1217 commit_id=commit_ids['master-commit-3-change-2'])
1215 1218
1216 1219 pull_request.target_repo = target
1217 1220 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1218 1221 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
1219 1222
1220 1223 pull_request.revisions = [
1221 1224 commit_ids['feat-commit-1'],
1222 1225 commit_ids['feat-commit-2']
1223 1226 ]
1224 1227 pull_request.title = u"Test"
1225 1228 pull_request.description = u"Description"
1226 1229 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1227 1230 pull_request.pull_request_state = PullRequest.STATE_CREATED
1228 1231 Session().add(pull_request)
1229 1232 Session().commit()
1230 1233 pull_request_id = pull_request.pull_request_id
1231 1234
1232 1235 # PR is created, now we simulate a force-push into target,
1233 1236 # that drops a 2 last commits
1234 1237 vcsrepo = target.scm_instance()
1235 1238 vcsrepo.config.clear_section('hooks')
1236 1239 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
1237 1240 target_repo_name = target.repo_name
1238 1241
1239 1242 # update PR
1240 1243 url = route_path('pullrequest_update',
1241 1244 repo_name=target_repo_name,
1242 1245 pull_request_id=pull_request_id)
1243 1246 self.app.post(url,
1244 1247 params={'update_commits': 'true', 'csrf_token': csrf_token},
1245 1248 status=200)
1246 1249
1247 1250 response = self.app.get(route_path('pullrequest_new', repo_name=target_repo_name))
1248 1251 assert response.status_int == 200
1249 1252 response.mustcontain('Pull request updated to')
1250 1253 response.mustcontain('with 0 added, 0 removed commits.')
1251 1254
1252 1255 def test_update_of_ancestor_reference(self, backend, csrf_token):
1253 1256 commits = [
1254 1257 {'message': 'ancestor'},
1255 1258 {'message': 'change'},
1256 1259 {'message': 'change-2'},
1257 1260 {'message': 'ancestor-new', 'parents': ['ancestor']},
1258 1261 {'message': 'change-rebased'},
1259 1262 ]
1260 1263 commit_ids = backend.create_master_repo(commits)
1261 1264 target = backend.create_repo(heads=['ancestor'])
1262 1265 source = backend.create_repo(heads=['change'])
1263 1266
1264 1267 # create pr from a in source to A in target
1265 1268 pull_request = PullRequest()
1266 1269 pull_request.source_repo = source
1267 1270
1268 1271 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1269 1272 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1270 1273 pull_request.target_repo = target
1271 1274 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1272 1275 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1273 1276 pull_request.revisions = [commit_ids['change']]
1274 1277 pull_request.title = u"Test"
1275 1278 pull_request.description = u"Description"
1276 1279 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1277 1280 pull_request.pull_request_state = PullRequest.STATE_CREATED
1278 1281 Session().add(pull_request)
1279 1282 Session().commit()
1280 1283 pull_request_id = pull_request.pull_request_id
1281 1284
1282 1285 # target has ancestor - ancestor-new
1283 1286 # source has ancestor - ancestor-new - change-rebased
1284 1287 backend.pull_heads(target, heads=['ancestor-new'])
1285 1288 backend.pull_heads(source, heads=['change-rebased'])
1286 1289 target_repo_name = target.repo_name
1287 1290
1288 1291 # update PR
1289 1292 self.app.post(
1290 1293 route_path('pullrequest_update',
1291 1294 repo_name=target_repo_name, pull_request_id=pull_request_id),
1292 1295 params={'update_commits': 'true', 'csrf_token': csrf_token},
1293 1296 status=200)
1294 1297
1295 1298 # Expect the target reference to be updated correctly
1296 1299 pull_request = PullRequest.get(pull_request_id)
1297 1300 assert pull_request.revisions == [commit_ids['change-rebased']]
1298 1301 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
1299 1302 branch=backend.default_branch_name,
1300 1303 commit_id=commit_ids['ancestor-new'])
1301 1304 assert pull_request.target_ref == expected_target_ref
1302 1305
1303 1306 def test_remove_pull_request_branch(self, backend_git, csrf_token):
1304 1307 branch_name = 'development'
1305 1308 commits = [
1306 1309 {'message': 'initial-commit'},
1307 1310 {'message': 'old-feature'},
1308 1311 {'message': 'new-feature', 'branch': branch_name},
1309 1312 ]
1310 1313 repo = backend_git.create_repo(commits)
1311 1314 repo_name = repo.repo_name
1312 1315 commit_ids = backend_git.commit_ids
1313 1316
1314 1317 pull_request = PullRequest()
1315 1318 pull_request.source_repo = repo
1316 1319 pull_request.target_repo = repo
1317 1320 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1318 1321 branch=branch_name, commit_id=commit_ids['new-feature'])
1319 1322 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1320 1323 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
1321 1324 pull_request.revisions = [commit_ids['new-feature']]
1322 1325 pull_request.title = u"Test"
1323 1326 pull_request.description = u"Description"
1324 1327 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1325 1328 pull_request.pull_request_state = PullRequest.STATE_CREATED
1326 1329 Session().add(pull_request)
1327 1330 Session().commit()
1328 1331
1329 1332 pull_request_id = pull_request.pull_request_id
1330 1333
1331 1334 vcs = repo.scm_instance()
1332 1335 vcs.remove_ref('refs/heads/{}'.format(branch_name))
1333 1336 # NOTE(marcink): run GC to ensure the commits are gone
1334 1337 vcs.run_gc()
1335 1338
1336 1339 response = self.app.get(route_path(
1337 1340 'pullrequest_show',
1338 1341 repo_name=repo_name,
1339 1342 pull_request_id=pull_request_id))
1340 1343
1341 1344 assert response.status_int == 200
1342 1345
1343 1346 response.assert_response().element_contains(
1344 1347 '#changeset_compare_view_content .alert strong',
1345 1348 'Missing commits')
1346 1349 response.assert_response().element_contains(
1347 1350 '#changeset_compare_view_content .alert',
1348 1351 'This pull request cannot be displayed, because one or more'
1349 1352 ' commits no longer exist in the source repository.')
1350 1353
1351 1354 def test_strip_commits_from_pull_request(
1352 1355 self, backend, pr_util, csrf_token):
1353 1356 commits = [
1354 1357 {'message': 'initial-commit'},
1355 1358 {'message': 'old-feature'},
1356 1359 {'message': 'new-feature', 'parents': ['initial-commit']},
1357 1360 ]
1358 1361 pull_request = pr_util.create_pull_request(
1359 1362 commits, target_head='initial-commit', source_head='new-feature',
1360 1363 revisions=['new-feature'])
1361 1364
1362 1365 vcs = pr_util.source_repository.scm_instance()
1363 1366 if backend.alias == 'git':
1364 1367 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
1365 1368 else:
1366 1369 vcs.strip(pr_util.commit_ids['new-feature'])
1367 1370
1368 1371 response = self.app.get(route_path(
1369 1372 'pullrequest_show',
1370 1373 repo_name=pr_util.target_repository.repo_name,
1371 1374 pull_request_id=pull_request.pull_request_id))
1372 1375
1373 1376 assert response.status_int == 200
1374 1377
1375 1378 response.assert_response().element_contains(
1376 1379 '#changeset_compare_view_content .alert strong',
1377 1380 'Missing commits')
1378 1381 response.assert_response().element_contains(
1379 1382 '#changeset_compare_view_content .alert',
1380 1383 'This pull request cannot be displayed, because one or more'
1381 1384 ' commits no longer exist in the source repository.')
1382 1385 response.assert_response().element_contains(
1383 1386 '#update_commits',
1384 1387 'Update commits')
1385 1388
1386 1389 def test_strip_commits_and_update(
1387 1390 self, backend, pr_util, csrf_token):
1388 1391 commits = [
1389 1392 {'message': 'initial-commit'},
1390 1393 {'message': 'old-feature'},
1391 1394 {'message': 'new-feature', 'parents': ['old-feature']},
1392 1395 ]
1393 1396 pull_request = pr_util.create_pull_request(
1394 1397 commits, target_head='old-feature', source_head='new-feature',
1395 1398 revisions=['new-feature'], mergeable=True)
1396 1399 pr_id = pull_request.pull_request_id
1397 1400 target_repo_name = pull_request.target_repo.repo_name
1398 1401
1399 1402 vcs = pr_util.source_repository.scm_instance()
1400 1403 if backend.alias == 'git':
1401 1404 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
1402 1405 else:
1403 1406 vcs.strip(pr_util.commit_ids['new-feature'])
1404 1407
1405 1408 url = route_path('pullrequest_update',
1406 1409 repo_name=target_repo_name,
1407 1410 pull_request_id=pr_id)
1408 1411 response = self.app.post(url,
1409 1412 params={'update_commits': 'true',
1410 1413 'csrf_token': csrf_token})
1411 1414
1412 1415 assert response.status_int == 200
1413 1416 assert response.body == '{"response": true, "redirect_url": null}'
1414 1417
1415 1418 # Make sure that after update, it won't raise 500 errors
1416 1419 response = self.app.get(route_path(
1417 1420 'pullrequest_show',
1418 1421 repo_name=target_repo_name,
1419 1422 pull_request_id=pr_id))
1420 1423
1421 1424 assert response.status_int == 200
1422 1425 response.assert_response().element_contains(
1423 1426 '#changeset_compare_view_content .alert strong',
1424 1427 'Missing commits')
1425 1428
1426 1429 def test_branch_is_a_link(self, pr_util):
1427 1430 pull_request = pr_util.create_pull_request()
1428 1431 pull_request.source_ref = 'branch:origin:1234567890abcdef'
1429 1432 pull_request.target_ref = 'branch:target:abcdef1234567890'
1430 1433 Session().add(pull_request)
1431 1434 Session().commit()
1432 1435
1433 1436 response = self.app.get(route_path(
1434 1437 'pullrequest_show',
1435 1438 repo_name=pull_request.target_repo.scm_instance().name,
1436 1439 pull_request_id=pull_request.pull_request_id))
1437 1440 assert response.status_int == 200
1438 1441
1439 1442 source = response.assert_response().get_element('.pr-source-info')
1440 1443 source_parent = source.getparent()
1441 1444 assert len(source_parent) == 1
1442 1445
1443 1446 target = response.assert_response().get_element('.pr-target-info')
1444 1447 target_parent = target.getparent()
1445 1448 assert len(target_parent) == 1
1446 1449
1447 1450 expected_origin_link = route_path(
1448 1451 'repo_commits',
1449 1452 repo_name=pull_request.source_repo.scm_instance().name,
1450 1453 params=dict(branch='origin'))
1451 1454 expected_target_link = route_path(
1452 1455 'repo_commits',
1453 1456 repo_name=pull_request.target_repo.scm_instance().name,
1454 1457 params=dict(branch='target'))
1455 1458 assert source_parent.attrib['href'] == expected_origin_link
1456 1459 assert target_parent.attrib['href'] == expected_target_link
1457 1460
1458 1461 def test_bookmark_is_not_a_link(self, pr_util):
1459 1462 pull_request = pr_util.create_pull_request()
1460 1463 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1461 1464 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1462 1465 Session().add(pull_request)
1463 1466 Session().commit()
1464 1467
1465 1468 response = self.app.get(route_path(
1466 1469 'pullrequest_show',
1467 1470 repo_name=pull_request.target_repo.scm_instance().name,
1468 1471 pull_request_id=pull_request.pull_request_id))
1469 1472 assert response.status_int == 200
1470 1473
1471 1474 source = response.assert_response().get_element('.pr-source-info')
1472 1475 assert source.text.strip() == 'bookmark:origin'
1473 1476 assert source.getparent().attrib.get('href') is None
1474 1477
1475 1478 target = response.assert_response().get_element('.pr-target-info')
1476 1479 assert target.text.strip() == 'bookmark:target'
1477 1480 assert target.getparent().attrib.get('href') is None
1478 1481
1479 1482 def test_tag_is_not_a_link(self, pr_util):
1480 1483 pull_request = pr_util.create_pull_request()
1481 1484 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1482 1485 pull_request.target_ref = 'tag:target:abcdef1234567890'
1483 1486 Session().add(pull_request)
1484 1487 Session().commit()
1485 1488
1486 1489 response = self.app.get(route_path(
1487 1490 'pullrequest_show',
1488 1491 repo_name=pull_request.target_repo.scm_instance().name,
1489 1492 pull_request_id=pull_request.pull_request_id))
1490 1493 assert response.status_int == 200
1491 1494
1492 1495 source = response.assert_response().get_element('.pr-source-info')
1493 1496 assert source.text.strip() == 'tag:origin'
1494 1497 assert source.getparent().attrib.get('href') is None
1495 1498
1496 1499 target = response.assert_response().get_element('.pr-target-info')
1497 1500 assert target.text.strip() == 'tag:target'
1498 1501 assert target.getparent().attrib.get('href') is None
1499 1502
1500 1503 @pytest.mark.parametrize('mergeable', [True, False])
1501 1504 def test_shadow_repository_link(
1502 1505 self, mergeable, pr_util, http_host_only_stub):
1503 1506 """
1504 1507 Check that the pull request summary page displays a link to the shadow
1505 1508 repository if the pull request is mergeable. If it is not mergeable
1506 1509 the link should not be displayed.
1507 1510 """
1508 1511 pull_request = pr_util.create_pull_request(
1509 1512 mergeable=mergeable, enable_notifications=False)
1510 1513 target_repo = pull_request.target_repo.scm_instance()
1511 1514 pr_id = pull_request.pull_request_id
1512 1515 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1513 1516 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1514 1517
1515 1518 response = self.app.get(route_path(
1516 1519 'pullrequest_show',
1517 1520 repo_name=target_repo.name,
1518 1521 pull_request_id=pr_id))
1519 1522
1520 1523 if mergeable:
1521 1524 response.assert_response().element_value_contains(
1522 1525 'input.pr-mergeinfo', shadow_url)
1523 1526 response.assert_response().element_value_contains(
1524 1527 'input.pr-mergeinfo ', 'pr-merge')
1525 1528 else:
1526 1529 response.assert_response().no_element_exists('.pr-mergeinfo')
1527 1530
1528 1531
1529 1532 @pytest.mark.usefixtures('app')
1530 1533 @pytest.mark.backends("git", "hg")
1531 1534 class TestPullrequestsControllerDelete(object):
1532 1535 def test_pull_request_delete_button_permissions_admin(
1533 1536 self, autologin_user, user_admin, pr_util):
1534 1537 pull_request = pr_util.create_pull_request(
1535 1538 author=user_admin.username, enable_notifications=False)
1536 1539
1537 1540 response = self.app.get(route_path(
1538 1541 'pullrequest_show',
1539 1542 repo_name=pull_request.target_repo.scm_instance().name,
1540 1543 pull_request_id=pull_request.pull_request_id))
1541 1544
1542 1545 response.mustcontain('id="delete_pullrequest"')
1543 1546 response.mustcontain('Confirm to delete this pull request')
1544 1547
1545 1548 def test_pull_request_delete_button_permissions_owner(
1546 1549 self, autologin_regular_user, user_regular, pr_util):
1547 1550 pull_request = pr_util.create_pull_request(
1548 1551 author=user_regular.username, enable_notifications=False)
1549 1552
1550 1553 response = self.app.get(route_path(
1551 1554 'pullrequest_show',
1552 1555 repo_name=pull_request.target_repo.scm_instance().name,
1553 1556 pull_request_id=pull_request.pull_request_id))
1554 1557
1555 1558 response.mustcontain('id="delete_pullrequest"')
1556 1559 response.mustcontain('Confirm to delete this pull request')
1557 1560
1558 1561 def test_pull_request_delete_button_permissions_forbidden(
1559 1562 self, autologin_regular_user, user_regular, user_admin, pr_util):
1560 1563 pull_request = pr_util.create_pull_request(
1561 1564 author=user_admin.username, enable_notifications=False)
1562 1565
1563 1566 response = self.app.get(route_path(
1564 1567 'pullrequest_show',
1565 1568 repo_name=pull_request.target_repo.scm_instance().name,
1566 1569 pull_request_id=pull_request.pull_request_id))
1567 1570 response.mustcontain(no=['id="delete_pullrequest"'])
1568 1571 response.mustcontain(no=['Confirm to delete this pull request'])
1569 1572
1570 1573 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1571 1574 self, autologin_regular_user, user_regular, user_admin, pr_util,
1572 1575 user_util):
1573 1576
1574 1577 pull_request = pr_util.create_pull_request(
1575 1578 author=user_admin.username, enable_notifications=False)
1576 1579
1577 1580 user_util.grant_user_permission_to_repo(
1578 1581 pull_request.target_repo, user_regular,
1579 1582 'repository.write')
1580 1583
1581 1584 response = self.app.get(route_path(
1582 1585 'pullrequest_show',
1583 1586 repo_name=pull_request.target_repo.scm_instance().name,
1584 1587 pull_request_id=pull_request.pull_request_id))
1585 1588
1586 1589 response.mustcontain('id="open_edit_pullrequest"')
1587 1590 response.mustcontain('id="delete_pullrequest"')
1588 1591 response.mustcontain(no=['Confirm to delete this pull request'])
1589 1592
1590 1593 def test_delete_comment_returns_404_if_comment_does_not_exist(
1591 1594 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1592 1595
1593 1596 pull_request = pr_util.create_pull_request(
1594 1597 author=user_admin.username, enable_notifications=False)
1595 1598
1596 1599 self.app.post(
1597 1600 route_path(
1598 1601 'pullrequest_comment_delete',
1599 1602 repo_name=pull_request.target_repo.scm_instance().name,
1600 1603 pull_request_id=pull_request.pull_request_id,
1601 1604 comment_id=1024404),
1602 1605 extra_environ=xhr_header,
1603 1606 params={'csrf_token': csrf_token},
1604 1607 status=404
1605 1608 )
1606 1609
1607 1610 def test_delete_comment(
1608 1611 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1609 1612
1610 1613 pull_request = pr_util.create_pull_request(
1611 1614 author=user_admin.username, enable_notifications=False)
1612 1615 comment = pr_util.create_comment()
1613 1616 comment_id = comment.comment_id
1614 1617
1615 1618 response = self.app.post(
1616 1619 route_path(
1617 1620 'pullrequest_comment_delete',
1618 1621 repo_name=pull_request.target_repo.scm_instance().name,
1619 1622 pull_request_id=pull_request.pull_request_id,
1620 1623 comment_id=comment_id),
1621 1624 extra_environ=xhr_header,
1622 1625 params={'csrf_token': csrf_token},
1623 1626 status=200
1624 1627 )
1625 1628 assert response.body == 'true'
1626 1629
1627 1630 @pytest.mark.parametrize('url_type', [
1628 1631 'pullrequest_new',
1629 1632 'pullrequest_create',
1630 1633 'pullrequest_update',
1631 1634 'pullrequest_merge',
1632 1635 ])
1633 1636 def test_pull_request_is_forbidden_on_archived_repo(
1634 1637 self, autologin_user, backend, xhr_header, user_util, url_type):
1635 1638
1636 1639 # create a temporary repo
1637 1640 source = user_util.create_repo(repo_type=backend.alias)
1638 1641 repo_name = source.repo_name
1639 1642 repo = Repository.get_by_repo_name(repo_name)
1640 1643 repo.archived = True
1641 1644 Session().commit()
1642 1645
1643 1646 response = self.app.get(
1644 1647 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1645 1648
1646 1649 msg = 'Action not supported for archived repository.'
1647 1650 assert_session_flash(response, msg)
1648 1651
1649 1652
1650 1653 def assert_pull_request_status(pull_request, expected_status):
1651 1654 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1652 1655 assert status == expected_status
1653 1656
1654 1657
1655 1658 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1656 1659 @pytest.mark.usefixtures("autologin_user")
1657 1660 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1658 1661 app.get(route_path(route, repo_name=backend_svn.repo_name), status=404)
@@ -1,791 +1,791 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 from pyramid.httpexceptions import (
25 25 HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden, HTTPConflict)
26 26 from pyramid.view import view_config
27 27 from pyramid.renderers import render
28 28 from pyramid.response import Response
29 29
30 30 from rhodecode.apps._base import RepoAppView
31 31 from rhodecode.apps.file_store import utils as store_utils
32 32 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException
33 33
34 34 from rhodecode.lib import diffs, codeblocks, channelstream
35 35 from rhodecode.lib.auth import (
36 36 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
37 37 from rhodecode.lib.ext_json import json
38 38 from rhodecode.lib.compat import OrderedDict
39 39 from rhodecode.lib.diffs import (
40 40 cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
41 41 get_diff_whitespace_flag)
42 42 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError, CommentVersionMismatch
43 43 import rhodecode.lib.helpers as h
44 44 from rhodecode.lib.utils2 import safe_unicode, str2bool, StrictAttributeDict
45 45 from rhodecode.lib.vcs.backends.base import EmptyCommit
46 46 from rhodecode.lib.vcs.exceptions import (
47 47 RepositoryError, CommitDoesNotExistError)
48 48 from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \
49 49 ChangesetCommentHistory
50 50 from rhodecode.model.changeset_status import ChangesetStatusModel
51 51 from rhodecode.model.comment import CommentsModel
52 52 from rhodecode.model.meta import Session
53 53 from rhodecode.model.settings import VcsSettingsModel
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 def _update_with_GET(params, request):
59 59 for k in ['diff1', 'diff2', 'diff']:
60 60 params[k] += request.GET.getall(k)
61 61
62 62
63 63 class RepoCommitsView(RepoAppView):
64 64 def load_default_context(self):
65 65 c = self._get_local_tmpl_context(include_app_defaults=True)
66 66 c.rhodecode_repo = self.rhodecode_vcs_repo
67 67
68 68 return c
69 69
70 70 def _is_diff_cache_enabled(self, target_repo):
71 71 caching_enabled = self._get_general_setting(
72 72 target_repo, 'rhodecode_diff_cache')
73 73 log.debug('Diff caching enabled: %s', caching_enabled)
74 74 return caching_enabled
75 75
76 76 def _commit(self, commit_id_range, method):
77 77 _ = self.request.translate
78 78 c = self.load_default_context()
79 79 c.fulldiff = self.request.GET.get('fulldiff')
80 80
81 81 # fetch global flags of ignore ws or context lines
82 82 diff_context = get_diff_context(self.request)
83 83 hide_whitespace_changes = get_diff_whitespace_flag(self.request)
84 84
85 85 # diff_limit will cut off the whole diff if the limit is applied
86 86 # otherwise it will just hide the big files from the front-end
87 87 diff_limit = c.visual.cut_off_limit_diff
88 88 file_limit = c.visual.cut_off_limit_file
89 89
90 90 # get ranges of commit ids if preset
91 91 commit_range = commit_id_range.split('...')[:2]
92 92
93 93 try:
94 94 pre_load = ['affected_files', 'author', 'branch', 'date',
95 95 'message', 'parents']
96 96 if self.rhodecode_vcs_repo.alias == 'hg':
97 97 pre_load += ['hidden', 'obsolete', 'phase']
98 98
99 99 if len(commit_range) == 2:
100 100 commits = self.rhodecode_vcs_repo.get_commits(
101 101 start_id=commit_range[0], end_id=commit_range[1],
102 102 pre_load=pre_load, translate_tags=False)
103 103 commits = list(commits)
104 104 else:
105 105 commits = [self.rhodecode_vcs_repo.get_commit(
106 106 commit_id=commit_id_range, pre_load=pre_load)]
107 107
108 108 c.commit_ranges = commits
109 109 if not c.commit_ranges:
110 110 raise RepositoryError('The commit range returned an empty result')
111 111 except CommitDoesNotExistError as e:
112 112 msg = _('No such commit exists. Org exception: `{}`').format(e)
113 113 h.flash(msg, category='error')
114 114 raise HTTPNotFound()
115 115 except Exception:
116 116 log.exception("General failure")
117 117 raise HTTPNotFound()
118 118 single_commit = len(c.commit_ranges) == 1
119 119
120 120 c.changes = OrderedDict()
121 121 c.lines_added = 0
122 122 c.lines_deleted = 0
123 123
124 124 # auto collapse if we have more than limit
125 125 collapse_limit = diffs.DiffProcessor._collapse_commits_over
126 126 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
127 127
128 128 c.commit_statuses = ChangesetStatus.STATUSES
129 129 c.inline_comments = []
130 130 c.files = []
131 131
132 132 c.comments = []
133 133 c.unresolved_comments = []
134 134 c.resolved_comments = []
135 135
136 136 # Single commit
137 137 if single_commit:
138 138 commit = c.commit_ranges[0]
139 139 c.comments = CommentsModel().get_comments(
140 140 self.db_repo.repo_id,
141 141 revision=commit.raw_id)
142 142
143 143 # comments from PR
144 144 statuses = ChangesetStatusModel().get_statuses(
145 145 self.db_repo.repo_id, commit.raw_id,
146 146 with_revisions=True)
147 147
148 148 prs = set()
149 149 reviewers = list()
150 150 reviewers_duplicates = set() # to not have duplicates from multiple votes
151 151 for c_status in statuses:
152 152
153 153 # extract associated pull-requests from votes
154 154 if c_status.pull_request:
155 155 prs.add(c_status.pull_request)
156 156
157 157 # extract reviewers
158 158 _user_id = c_status.author.user_id
159 159 if _user_id not in reviewers_duplicates:
160 160 reviewers.append(
161 161 StrictAttributeDict({
162 162 'user': c_status.author,
163 163
164 164 # fake attributed for commit, page that we don't have
165 165 # but we share the display with PR page
166 166 'mandatory': False,
167 167 'reasons': [],
168 168 'rule_user_group_data': lambda: None
169 169 })
170 170 )
171 171 reviewers_duplicates.add(_user_id)
172 172
173 173 c.reviewers_count = len(reviewers)
174 174 c.observers_count = 0
175 175
176 176 # from associated statuses, check the pull requests, and
177 177 # show comments from them
178 178 for pr in prs:
179 179 c.comments.extend(pr.comments)
180 180
181 181 c.unresolved_comments = CommentsModel()\
182 182 .get_commit_unresolved_todos(commit.raw_id)
183 183 c.resolved_comments = CommentsModel()\
184 184 .get_commit_resolved_todos(commit.raw_id)
185 185
186 186 c.inline_comments_flat = CommentsModel()\
187 187 .get_commit_inline_comments(commit.raw_id)
188 188
189 189 review_statuses = ChangesetStatusModel().aggregate_votes_by_user(
190 190 statuses, reviewers)
191 191
192 192 c.commit_review_status = ChangesetStatus.STATUS_NOT_REVIEWED
193 193
194 194 c.commit_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
195 195
196 196 for review_obj, member, reasons, mandatory, status in review_statuses:
197 197 member_reviewer = h.reviewer_as_json(
198 198 member, reasons=reasons, mandatory=mandatory, role=None,
199 199 user_group=None
200 200 )
201 201
202 202 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
203 203 member_reviewer['review_status'] = current_review_status
204 204 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
205 205 member_reviewer['allowed_to_update'] = False
206 206 c.commit_set_reviewers_data_json['reviewers'].append(member_reviewer)
207 207
208 208 c.commit_set_reviewers_data_json = json.dumps(c.commit_set_reviewers_data_json)
209 209
210 210 # NOTE(marcink): this uses the same voting logic as in pull-requests
211 211 c.commit_review_status = ChangesetStatusModel().calculate_status(review_statuses)
212 212 c.commit_broadcast_channel = channelstream.comment_channel(c.repo_name, commit_obj=commit)
213 213
214 214 diff = None
215 215 # Iterate over ranges (default commit view is always one commit)
216 216 for commit in c.commit_ranges:
217 217 c.changes[commit.raw_id] = []
218 218
219 219 commit2 = commit
220 220 commit1 = commit.first_parent
221 221
222 222 if method == 'show':
223 223 inline_comments = CommentsModel().get_inline_comments(
224 224 self.db_repo.repo_id, revision=commit.raw_id)
225 225 c.inline_cnt = len(CommentsModel().get_inline_comments_as_list(
226 226 inline_comments))
227 227 c.inline_comments = inline_comments
228 228
229 229 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
230 230 self.db_repo)
231 231 cache_file_path = diff_cache_exist(
232 232 cache_path, 'diff', commit.raw_id,
233 233 hide_whitespace_changes, diff_context, c.fulldiff)
234 234
235 235 caching_enabled = self._is_diff_cache_enabled(self.db_repo)
236 236 force_recache = str2bool(self.request.GET.get('force_recache'))
237 237
238 238 cached_diff = None
239 239 if caching_enabled:
240 240 cached_diff = load_cached_diff(cache_file_path)
241 241
242 242 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
243 243 if not force_recache and has_proper_diff_cache:
244 244 diffset = cached_diff['diff']
245 245 else:
246 246 vcs_diff = self.rhodecode_vcs_repo.get_diff(
247 247 commit1, commit2,
248 248 ignore_whitespace=hide_whitespace_changes,
249 249 context=diff_context)
250 250
251 251 diff_processor = diffs.DiffProcessor(
252 252 vcs_diff, format='newdiff', diff_limit=diff_limit,
253 253 file_limit=file_limit, show_full_diff=c.fulldiff)
254 254
255 255 _parsed = diff_processor.prepare()
256 256
257 257 diffset = codeblocks.DiffSet(
258 258 repo_name=self.db_repo_name,
259 259 source_node_getter=codeblocks.diffset_node_getter(commit1),
260 260 target_node_getter=codeblocks.diffset_node_getter(commit2))
261 261
262 262 diffset = self.path_filter.render_patchset_filtered(
263 263 diffset, _parsed, commit1.raw_id, commit2.raw_id)
264 264
265 265 # save cached diff
266 266 if caching_enabled:
267 267 cache_diff(cache_file_path, diffset, None)
268 268
269 269 c.limited_diff = diffset.limited_diff
270 270 c.changes[commit.raw_id] = diffset
271 271 else:
272 272 # TODO(marcink): no cache usage here...
273 273 _diff = self.rhodecode_vcs_repo.get_diff(
274 274 commit1, commit2,
275 275 ignore_whitespace=hide_whitespace_changes, context=diff_context)
276 276 diff_processor = diffs.DiffProcessor(
277 277 _diff, format='newdiff', diff_limit=diff_limit,
278 278 file_limit=file_limit, show_full_diff=c.fulldiff)
279 279 # downloads/raw we only need RAW diff nothing else
280 280 diff = self.path_filter.get_raw_patch(diff_processor)
281 281 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
282 282
283 283 # sort comments by how they were generated
284 284 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
285 285 c.at_version_num = None
286 286
287 287 if len(c.commit_ranges) == 1:
288 288 c.commit = c.commit_ranges[0]
289 289 c.parent_tmpl = ''.join(
290 290 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
291 291
292 292 if method == 'download':
293 293 response = Response(diff)
294 294 response.content_type = 'text/plain'
295 295 response.content_disposition = (
296 296 'attachment; filename=%s.diff' % commit_id_range[:12])
297 297 return response
298 298 elif method == 'patch':
299 299 c.diff = safe_unicode(diff)
300 300 patch = render(
301 301 'rhodecode:templates/changeset/patch_changeset.mako',
302 302 self._get_template_context(c), self.request)
303 303 response = Response(patch)
304 304 response.content_type = 'text/plain'
305 305 return response
306 306 elif method == 'raw':
307 307 response = Response(diff)
308 308 response.content_type = 'text/plain'
309 309 return response
310 310 elif method == 'show':
311 311 if len(c.commit_ranges) == 1:
312 312 html = render(
313 313 'rhodecode:templates/changeset/changeset.mako',
314 314 self._get_template_context(c), self.request)
315 315 return Response(html)
316 316 else:
317 317 c.ancestor = None
318 318 c.target_repo = self.db_repo
319 319 html = render(
320 320 'rhodecode:templates/changeset/changeset_range.mako',
321 321 self._get_template_context(c), self.request)
322 322 return Response(html)
323 323
324 324 raise HTTPBadRequest()
325 325
326 326 @LoginRequired()
327 327 @HasRepoPermissionAnyDecorator(
328 328 'repository.read', 'repository.write', 'repository.admin')
329 329 @view_config(
330 330 route_name='repo_commit', request_method='GET',
331 331 renderer=None)
332 332 def repo_commit_show(self):
333 333 commit_id = self.request.matchdict['commit_id']
334 334 return self._commit(commit_id, method='show')
335 335
336 336 @LoginRequired()
337 337 @HasRepoPermissionAnyDecorator(
338 338 'repository.read', 'repository.write', 'repository.admin')
339 339 @view_config(
340 340 route_name='repo_commit_raw', request_method='GET',
341 341 renderer=None)
342 342 @view_config(
343 343 route_name='repo_commit_raw_deprecated', request_method='GET',
344 344 renderer=None)
345 345 def repo_commit_raw(self):
346 346 commit_id = self.request.matchdict['commit_id']
347 347 return self._commit(commit_id, method='raw')
348 348
349 349 @LoginRequired()
350 350 @HasRepoPermissionAnyDecorator(
351 351 'repository.read', 'repository.write', 'repository.admin')
352 352 @view_config(
353 353 route_name='repo_commit_patch', request_method='GET',
354 354 renderer=None)
355 355 def repo_commit_patch(self):
356 356 commit_id = self.request.matchdict['commit_id']
357 357 return self._commit(commit_id, method='patch')
358 358
359 359 @LoginRequired()
360 360 @HasRepoPermissionAnyDecorator(
361 361 'repository.read', 'repository.write', 'repository.admin')
362 362 @view_config(
363 363 route_name='repo_commit_download', request_method='GET',
364 364 renderer=None)
365 365 def repo_commit_download(self):
366 366 commit_id = self.request.matchdict['commit_id']
367 367 return self._commit(commit_id, method='download')
368 368
369 369 @LoginRequired()
370 370 @NotAnonymous()
371 371 @HasRepoPermissionAnyDecorator(
372 372 'repository.read', 'repository.write', 'repository.admin')
373 373 @CSRFRequired()
374 374 @view_config(
375 375 route_name='repo_commit_comment_create', request_method='POST',
376 376 renderer='json_ext')
377 377 def repo_commit_comment_create(self):
378 378 _ = self.request.translate
379 379 commit_id = self.request.matchdict['commit_id']
380 380
381 381 c = self.load_default_context()
382 382 status = self.request.POST.get('changeset_status', None)
383 383 text = self.request.POST.get('text')
384 384 comment_type = self.request.POST.get('comment_type')
385 385 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
386 386
387 387 if status:
388 388 text = text or (_('Status change %(transition_icon)s %(status)s')
389 389 % {'transition_icon': '>',
390 390 'status': ChangesetStatus.get_status_lbl(status)})
391 391
392 392 multi_commit_ids = []
393 393 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
394 394 if _commit_id not in ['', None, EmptyCommit.raw_id]:
395 395 if _commit_id not in multi_commit_ids:
396 396 multi_commit_ids.append(_commit_id)
397 397
398 398 commit_ids = multi_commit_ids or [commit_id]
399 399
400 400 comment = None
401 401 for current_id in filter(None, commit_ids):
402 402 comment = CommentsModel().create(
403 403 text=text,
404 404 repo=self.db_repo.repo_id,
405 405 user=self._rhodecode_db_user.user_id,
406 406 commit_id=current_id,
407 407 f_path=self.request.POST.get('f_path'),
408 408 line_no=self.request.POST.get('line'),
409 409 status_change=(ChangesetStatus.get_status_lbl(status)
410 410 if status else None),
411 411 status_change_type=status,
412 412 comment_type=comment_type,
413 413 resolves_comment_id=resolves_comment_id,
414 414 auth_user=self._rhodecode_user
415 415 )
416 is_inline = bool(comment.f_path and comment.line_no)
416 is_inline = comment.is_inline
417 417
418 418 # get status if set !
419 419 if status:
420 420 # if latest status was from pull request and it's closed
421 421 # disallow changing status !
422 422 # dont_allow_on_closed_pull_request = True !
423 423
424 424 try:
425 425 ChangesetStatusModel().set_status(
426 426 self.db_repo.repo_id,
427 427 status,
428 428 self._rhodecode_db_user.user_id,
429 429 comment,
430 430 revision=current_id,
431 431 dont_allow_on_closed_pull_request=True
432 432 )
433 433 except StatusChangeOnClosedPullRequestError:
434 434 msg = _('Changing the status of a commit associated with '
435 435 'a closed pull request is not allowed')
436 436 log.exception(msg)
437 437 h.flash(msg, category='warning')
438 438 raise HTTPFound(h.route_path(
439 439 'repo_commit', repo_name=self.db_repo_name,
440 440 commit_id=current_id))
441 441
442 442 commit = self.db_repo.get_commit(current_id)
443 443 CommentsModel().trigger_commit_comment_hook(
444 444 self.db_repo, self._rhodecode_user, 'create',
445 445 data={'comment': comment, 'commit': commit})
446 446
447 447 # finalize, commit and redirect
448 448 Session().commit()
449 449
450 450 data = {
451 451 'target_id': h.safeid(h.safe_unicode(
452 452 self.request.POST.get('f_path'))),
453 453 }
454 454 if comment:
455 455 c.co = comment
456 456 c.at_version_num = 0
457 457 rendered_comment = render(
458 458 'rhodecode:templates/changeset/changeset_comment_block.mako',
459 459 self._get_template_context(c), self.request)
460 460
461 461 data.update(comment.get_dict())
462 462 data.update({'rendered_text': rendered_comment})
463 463
464 464 comment_broadcast_channel = channelstream.comment_channel(
465 465 self.db_repo_name, commit_obj=commit)
466 466
467 467 comment_data = data
468 468 comment_type = 'inline' if is_inline else 'general'
469 469 channelstream.comment_channelstream_push(
470 470 self.request, comment_broadcast_channel, self._rhodecode_user,
471 471 _('posted a new {} comment').format(comment_type),
472 472 comment_data=comment_data)
473 473
474 474 return data
475 475
476 476 @LoginRequired()
477 477 @NotAnonymous()
478 478 @HasRepoPermissionAnyDecorator(
479 479 'repository.read', 'repository.write', 'repository.admin')
480 480 @CSRFRequired()
481 481 @view_config(
482 482 route_name='repo_commit_comment_preview', request_method='POST',
483 483 renderer='string', xhr=True)
484 484 def repo_commit_comment_preview(self):
485 485 # Technically a CSRF token is not needed as no state changes with this
486 486 # call. However, as this is a POST is better to have it, so automated
487 487 # tools don't flag it as potential CSRF.
488 488 # Post is required because the payload could be bigger than the maximum
489 489 # allowed by GET.
490 490
491 491 text = self.request.POST.get('text')
492 492 renderer = self.request.POST.get('renderer') or 'rst'
493 493 if text:
494 494 return h.render(text, renderer=renderer, mentions=True,
495 495 repo_name=self.db_repo_name)
496 496 return ''
497 497
498 498 @LoginRequired()
499 499 @HasRepoPermissionAnyDecorator(
500 500 'repository.read', 'repository.write', 'repository.admin')
501 501 @CSRFRequired()
502 502 @view_config(
503 503 route_name='repo_commit_comment_history_view', request_method='POST',
504 504 renderer='string', xhr=True)
505 505 def repo_commit_comment_history_view(self):
506 506 c = self.load_default_context()
507 507
508 508 comment_history_id = self.request.matchdict['comment_history_id']
509 509 comment_history = ChangesetCommentHistory.get_or_404(comment_history_id)
510 510 is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id
511 511
512 512 if is_repo_comment:
513 513 c.comment_history = comment_history
514 514
515 515 rendered_comment = render(
516 516 'rhodecode:templates/changeset/comment_history.mako',
517 517 self._get_template_context(c)
518 518 , self.request)
519 519 return rendered_comment
520 520 else:
521 521 log.warning('No permissions for user %s to show comment_history_id: %s',
522 522 self._rhodecode_db_user, comment_history_id)
523 523 raise HTTPNotFound()
524 524
525 525 @LoginRequired()
526 526 @NotAnonymous()
527 527 @HasRepoPermissionAnyDecorator(
528 528 'repository.read', 'repository.write', 'repository.admin')
529 529 @CSRFRequired()
530 530 @view_config(
531 531 route_name='repo_commit_comment_attachment_upload', request_method='POST',
532 532 renderer='json_ext', xhr=True)
533 533 def repo_commit_comment_attachment_upload(self):
534 534 c = self.load_default_context()
535 535 upload_key = 'attachment'
536 536
537 537 file_obj = self.request.POST.get(upload_key)
538 538
539 539 if file_obj is None:
540 540 self.request.response.status = 400
541 541 return {'store_fid': None,
542 542 'access_path': None,
543 543 'error': '{} data field is missing'.format(upload_key)}
544 544
545 545 if not hasattr(file_obj, 'filename'):
546 546 self.request.response.status = 400
547 547 return {'store_fid': None,
548 548 'access_path': None,
549 549 'error': 'filename cannot be read from the data field'}
550 550
551 551 filename = file_obj.filename
552 552 file_display_name = filename
553 553
554 554 metadata = {
555 555 'user_uploaded': {'username': self._rhodecode_user.username,
556 556 'user_id': self._rhodecode_user.user_id,
557 557 'ip': self._rhodecode_user.ip_addr}}
558 558
559 559 # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size
560 560 allowed_extensions = [
561 561 'gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf',
562 562 '.pptx', '.txt', '.xlsx', '.zip']
563 563 max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js
564 564
565 565 try:
566 566 storage = store_utils.get_file_storage(self.request.registry.settings)
567 567 store_uid, metadata = storage.save_file(
568 568 file_obj.file, filename, extra_metadata=metadata,
569 569 extensions=allowed_extensions, max_filesize=max_file_size)
570 570 except FileNotAllowedException:
571 571 self.request.response.status = 400
572 572 permitted_extensions = ', '.join(allowed_extensions)
573 573 error_msg = 'File `{}` is not allowed. ' \
574 574 'Only following extensions are permitted: {}'.format(
575 575 filename, permitted_extensions)
576 576 return {'store_fid': None,
577 577 'access_path': None,
578 578 'error': error_msg}
579 579 except FileOverSizeException:
580 580 self.request.response.status = 400
581 581 limit_mb = h.format_byte_size_binary(max_file_size)
582 582 return {'store_fid': None,
583 583 'access_path': None,
584 584 'error': 'File {} is exceeding allowed limit of {}.'.format(
585 585 filename, limit_mb)}
586 586
587 587 try:
588 588 entry = FileStore.create(
589 589 file_uid=store_uid, filename=metadata["filename"],
590 590 file_hash=metadata["sha256"], file_size=metadata["size"],
591 591 file_display_name=file_display_name,
592 592 file_description=u'comment attachment `{}`'.format(safe_unicode(filename)),
593 593 hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id,
594 594 scope_repo_id=self.db_repo.repo_id
595 595 )
596 596 Session().add(entry)
597 597 Session().commit()
598 598 log.debug('Stored upload in DB as %s', entry)
599 599 except Exception:
600 600 log.exception('Failed to store file %s', filename)
601 601 self.request.response.status = 400
602 602 return {'store_fid': None,
603 603 'access_path': None,
604 604 'error': 'File {} failed to store in DB.'.format(filename)}
605 605
606 606 Session().commit()
607 607
608 608 return {
609 609 'store_fid': store_uid,
610 610 'access_path': h.route_path(
611 611 'download_file', fid=store_uid),
612 612 'fqn_access_path': h.route_url(
613 613 'download_file', fid=store_uid),
614 614 'repo_access_path': h.route_path(
615 615 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
616 616 'repo_fqn_access_path': h.route_url(
617 617 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
618 618 }
619 619
620 620 @LoginRequired()
621 621 @NotAnonymous()
622 622 @HasRepoPermissionAnyDecorator(
623 623 'repository.read', 'repository.write', 'repository.admin')
624 624 @CSRFRequired()
625 625 @view_config(
626 626 route_name='repo_commit_comment_delete', request_method='POST',
627 627 renderer='json_ext')
628 628 def repo_commit_comment_delete(self):
629 629 commit_id = self.request.matchdict['commit_id']
630 630 comment_id = self.request.matchdict['comment_id']
631 631
632 632 comment = ChangesetComment.get_or_404(comment_id)
633 633 if not comment:
634 634 log.debug('Comment with id:%s not found, skipping', comment_id)
635 635 # comment already deleted in another call probably
636 636 return True
637 637
638 638 if comment.immutable:
639 639 # don't allow deleting comments that are immutable
640 640 raise HTTPForbidden()
641 641
642 642 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
643 643 super_admin = h.HasPermissionAny('hg.admin')()
644 644 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
645 645 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
646 646 comment_repo_admin = is_repo_admin and is_repo_comment
647 647
648 648 if super_admin or comment_owner or comment_repo_admin:
649 649 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
650 650 Session().commit()
651 651 return True
652 652 else:
653 653 log.warning('No permissions for user %s to delete comment_id: %s',
654 654 self._rhodecode_db_user, comment_id)
655 655 raise HTTPNotFound()
656 656
657 657 @LoginRequired()
658 658 @NotAnonymous()
659 659 @HasRepoPermissionAnyDecorator(
660 660 'repository.read', 'repository.write', 'repository.admin')
661 661 @CSRFRequired()
662 662 @view_config(
663 663 route_name='repo_commit_comment_edit', request_method='POST',
664 664 renderer='json_ext')
665 665 def repo_commit_comment_edit(self):
666 666 self.load_default_context()
667 667
668 668 comment_id = self.request.matchdict['comment_id']
669 669 comment = ChangesetComment.get_or_404(comment_id)
670 670
671 671 if comment.immutable:
672 672 # don't allow deleting comments that are immutable
673 673 raise HTTPForbidden()
674 674
675 675 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
676 676 super_admin = h.HasPermissionAny('hg.admin')()
677 677 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
678 678 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
679 679 comment_repo_admin = is_repo_admin and is_repo_comment
680 680
681 681 if super_admin or comment_owner or comment_repo_admin:
682 682 text = self.request.POST.get('text')
683 683 version = self.request.POST.get('version')
684 684 if text == comment.text:
685 685 log.warning(
686 686 'Comment(repo): '
687 687 'Trying to create new version '
688 688 'with the same comment body {}'.format(
689 689 comment_id,
690 690 )
691 691 )
692 692 raise HTTPNotFound()
693 693
694 694 if version.isdigit():
695 695 version = int(version)
696 696 else:
697 697 log.warning(
698 698 'Comment(repo): Wrong version type {} {} '
699 699 'for comment {}'.format(
700 700 version,
701 701 type(version),
702 702 comment_id,
703 703 )
704 704 )
705 705 raise HTTPNotFound()
706 706
707 707 try:
708 708 comment_history = CommentsModel().edit(
709 709 comment_id=comment_id,
710 710 text=text,
711 711 auth_user=self._rhodecode_user,
712 712 version=version,
713 713 )
714 714 except CommentVersionMismatch:
715 715 raise HTTPConflict()
716 716
717 717 if not comment_history:
718 718 raise HTTPNotFound()
719 719
720 720 commit_id = self.request.matchdict['commit_id']
721 721 commit = self.db_repo.get_commit(commit_id)
722 722 CommentsModel().trigger_commit_comment_hook(
723 723 self.db_repo, self._rhodecode_user, 'edit',
724 724 data={'comment': comment, 'commit': commit})
725 725
726 726 Session().commit()
727 727 return {
728 728 'comment_history_id': comment_history.comment_history_id,
729 729 'comment_id': comment.comment_id,
730 730 'comment_version': comment_history.version,
731 731 'comment_author_username': comment_history.author.username,
732 732 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
733 733 'comment_created_on': h.age_component(comment_history.created_on,
734 734 time_is_local=True),
735 735 }
736 736 else:
737 737 log.warning('No permissions for user %s to edit comment_id: %s',
738 738 self._rhodecode_db_user, comment_id)
739 739 raise HTTPNotFound()
740 740
741 741 @LoginRequired()
742 742 @HasRepoPermissionAnyDecorator(
743 743 'repository.read', 'repository.write', 'repository.admin')
744 744 @view_config(
745 745 route_name='repo_commit_data', request_method='GET',
746 746 renderer='json_ext', xhr=True)
747 747 def repo_commit_data(self):
748 748 commit_id = self.request.matchdict['commit_id']
749 749 self.load_default_context()
750 750
751 751 try:
752 752 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
753 753 except CommitDoesNotExistError as e:
754 754 return EmptyCommit(message=str(e))
755 755
756 756 @LoginRequired()
757 757 @HasRepoPermissionAnyDecorator(
758 758 'repository.read', 'repository.write', 'repository.admin')
759 759 @view_config(
760 760 route_name='repo_commit_children', request_method='GET',
761 761 renderer='json_ext', xhr=True)
762 762 def repo_commit_children(self):
763 763 commit_id = self.request.matchdict['commit_id']
764 764 self.load_default_context()
765 765
766 766 try:
767 767 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
768 768 children = commit.children
769 769 except CommitDoesNotExistError:
770 770 children = []
771 771
772 772 result = {"results": children}
773 773 return result
774 774
775 775 @LoginRequired()
776 776 @HasRepoPermissionAnyDecorator(
777 777 'repository.read', 'repository.write', 'repository.admin')
778 778 @view_config(
779 779 route_name='repo_commit_parents', request_method='GET',
780 780 renderer='json_ext')
781 781 def repo_commit_parents(self):
782 782 commit_id = self.request.matchdict['commit_id']
783 783 self.load_default_context()
784 784
785 785 try:
786 786 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
787 787 parents = commit.parents
788 788 except CommitDoesNotExistError:
789 789 parents = []
790 790 result = {"results": parents}
791 791 return result
@@ -1,1813 +1,1816 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 import formencode
25 25 import formencode.htmlfill
26 26 import peppercorn
27 27 from pyramid.httpexceptions import (
28 28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest, HTTPConflict)
29 29 from pyramid.view import view_config
30 30 from pyramid.renderers import render
31 31
32 32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 33
34 34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 35 from rhodecode.lib.base import vcs_operation_context
36 36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 37 from rhodecode.lib.exceptions import CommentVersionMismatch
38 38 from rhodecode.lib.ext_json import json
39 39 from rhodecode.lib.auth import (
40 40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
41 41 NotAnonymous, CSRFRequired)
42 42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode, safe_int, aslist
43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason, Reference
43 from rhodecode.lib.vcs.backends.base import (
44 EmptyCommit, UpdateFailureReason, unicode_to_reference)
44 45 from rhodecode.lib.vcs.exceptions import (
45 46 CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError)
46 47 from rhodecode.model.changeset_status import ChangesetStatusModel
47 48 from rhodecode.model.comment import CommentsModel
48 49 from rhodecode.model.db import (
49 50 func, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository,
50 51 PullRequestReviewers)
51 52 from rhodecode.model.forms import PullRequestForm
52 53 from rhodecode.model.meta import Session
53 54 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
54 55 from rhodecode.model.scm import ScmModel
55 56
56 57 log = logging.getLogger(__name__)
57 58
58 59
59 60 class RepoPullRequestsView(RepoAppView, DataGridAppView):
60 61
61 62 def load_default_context(self):
62 63 c = self._get_local_tmpl_context(include_app_defaults=True)
63 64 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
64 65 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
65 66 # backward compat., we use for OLD PRs a plain renderer
66 67 c.renderer = 'plain'
67 68 return c
68 69
69 70 def _get_pull_requests_list(
70 71 self, repo_name, source, filter_type, opened_by, statuses):
71 72
72 73 draw, start, limit = self._extract_chunk(self.request)
73 74 search_q, order_by, order_dir = self._extract_ordering(self.request)
74 75 _render = self.request.get_partial_renderer(
75 76 'rhodecode:templates/data_table/_dt_elements.mako')
76 77
77 78 # pagination
78 79
79 80 if filter_type == 'awaiting_review':
80 81 pull_requests = PullRequestModel().get_awaiting_review(
81 82 repo_name, search_q=search_q, source=source, opened_by=opened_by,
82 83 statuses=statuses, offset=start, length=limit,
83 84 order_by=order_by, order_dir=order_dir)
84 85 pull_requests_total_count = PullRequestModel().count_awaiting_review(
85 86 repo_name, search_q=search_q, source=source, statuses=statuses,
86 87 opened_by=opened_by)
87 88 elif filter_type == 'awaiting_my_review':
88 89 pull_requests = PullRequestModel().get_awaiting_my_review(
89 90 repo_name, search_q=search_q, source=source, opened_by=opened_by,
90 91 user_id=self._rhodecode_user.user_id, statuses=statuses,
91 92 offset=start, length=limit, order_by=order_by,
92 93 order_dir=order_dir)
93 94 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
94 95 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
95 96 statuses=statuses, opened_by=opened_by)
96 97 else:
97 98 pull_requests = PullRequestModel().get_all(
98 99 repo_name, search_q=search_q, source=source, opened_by=opened_by,
99 100 statuses=statuses, offset=start, length=limit,
100 101 order_by=order_by, order_dir=order_dir)
101 102 pull_requests_total_count = PullRequestModel().count_all(
102 103 repo_name, search_q=search_q, source=source, statuses=statuses,
103 104 opened_by=opened_by)
104 105
105 106 data = []
106 107 comments_model = CommentsModel()
107 108 for pr in pull_requests:
108 109 comments_count = comments_model.get_all_comments(
109 110 self.db_repo.repo_id, pull_request=pr, count_only=True)
110 111
111 112 data.append({
112 113 'name': _render('pullrequest_name',
113 114 pr.pull_request_id, pr.pull_request_state,
114 115 pr.work_in_progress, pr.target_repo.repo_name,
115 116 short=True),
116 117 'name_raw': pr.pull_request_id,
117 118 'status': _render('pullrequest_status',
118 119 pr.calculated_review_status()),
119 120 'title': _render('pullrequest_title', pr.title, pr.description),
120 121 'description': h.escape(pr.description),
121 122 'updated_on': _render('pullrequest_updated_on',
122 123 h.datetime_to_time(pr.updated_on)),
123 124 'updated_on_raw': h.datetime_to_time(pr.updated_on),
124 125 'created_on': _render('pullrequest_updated_on',
125 126 h.datetime_to_time(pr.created_on)),
126 127 'created_on_raw': h.datetime_to_time(pr.created_on),
127 128 'state': pr.pull_request_state,
128 129 'author': _render('pullrequest_author',
129 130 pr.author.full_contact, ),
130 131 'author_raw': pr.author.full_name,
131 132 'comments': _render('pullrequest_comments', comments_count),
132 133 'comments_raw': comments_count,
133 134 'closed': pr.is_closed(),
134 135 })
135 136
136 137 data = ({
137 138 'draw': draw,
138 139 'data': data,
139 140 'recordsTotal': pull_requests_total_count,
140 141 'recordsFiltered': pull_requests_total_count,
141 142 })
142 143 return data
143 144
144 145 @LoginRequired()
145 146 @HasRepoPermissionAnyDecorator(
146 147 'repository.read', 'repository.write', 'repository.admin')
147 148 @view_config(
148 149 route_name='pullrequest_show_all', request_method='GET',
149 150 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
150 151 def pull_request_list(self):
151 152 c = self.load_default_context()
152 153
153 154 req_get = self.request.GET
154 155 c.source = str2bool(req_get.get('source'))
155 156 c.closed = str2bool(req_get.get('closed'))
156 157 c.my = str2bool(req_get.get('my'))
157 158 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
158 159 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
159 160
160 161 c.active = 'open'
161 162 if c.my:
162 163 c.active = 'my'
163 164 if c.closed:
164 165 c.active = 'closed'
165 166 if c.awaiting_review and not c.source:
166 167 c.active = 'awaiting'
167 168 if c.source and not c.awaiting_review:
168 169 c.active = 'source'
169 170 if c.awaiting_my_review:
170 171 c.active = 'awaiting_my'
171 172
172 173 return self._get_template_context(c)
173 174
174 175 @LoginRequired()
175 176 @HasRepoPermissionAnyDecorator(
176 177 'repository.read', 'repository.write', 'repository.admin')
177 178 @view_config(
178 179 route_name='pullrequest_show_all_data', request_method='GET',
179 180 renderer='json_ext', xhr=True)
180 181 def pull_request_list_data(self):
181 182 self.load_default_context()
182 183
183 184 # additional filters
184 185 req_get = self.request.GET
185 186 source = str2bool(req_get.get('source'))
186 187 closed = str2bool(req_get.get('closed'))
187 188 my = str2bool(req_get.get('my'))
188 189 awaiting_review = str2bool(req_get.get('awaiting_review'))
189 190 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
190 191
191 192 filter_type = 'awaiting_review' if awaiting_review \
192 193 else 'awaiting_my_review' if awaiting_my_review \
193 194 else None
194 195
195 196 opened_by = None
196 197 if my:
197 198 opened_by = [self._rhodecode_user.user_id]
198 199
199 200 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
200 201 if closed:
201 202 statuses = [PullRequest.STATUS_CLOSED]
202 203
203 204 data = self._get_pull_requests_list(
204 205 repo_name=self.db_repo_name, source=source,
205 206 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
206 207
207 208 return data
208 209
209 210 def _is_diff_cache_enabled(self, target_repo):
210 211 caching_enabled = self._get_general_setting(
211 212 target_repo, 'rhodecode_diff_cache')
212 213 log.debug('Diff caching enabled: %s', caching_enabled)
213 214 return caching_enabled
214 215
215 216 def _get_diffset(self, source_repo_name, source_repo,
216 217 ancestor_commit,
217 218 source_ref_id, target_ref_id,
218 219 target_commit, source_commit, diff_limit, file_limit,
219 220 fulldiff, hide_whitespace_changes, diff_context, use_ancestor=True):
220 221
221 222 if use_ancestor:
222 223 # we might want to not use it for versions
223 224 target_ref_id = ancestor_commit.raw_id
224 225
225 226 vcs_diff = PullRequestModel().get_diff(
226 227 source_repo, source_ref_id, target_ref_id,
227 228 hide_whitespace_changes, diff_context)
228 229
229 230 diff_processor = diffs.DiffProcessor(
230 231 vcs_diff, format='newdiff', diff_limit=diff_limit,
231 232 file_limit=file_limit, show_full_diff=fulldiff)
232 233
233 234 _parsed = diff_processor.prepare()
234 235
235 236 diffset = codeblocks.DiffSet(
236 237 repo_name=self.db_repo_name,
237 238 source_repo_name=source_repo_name,
238 239 source_node_getter=codeblocks.diffset_node_getter(target_commit),
239 240 target_node_getter=codeblocks.diffset_node_getter(source_commit),
240 241 )
241 242 diffset = self.path_filter.render_patchset_filtered(
242 243 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
243 244
244 245 return diffset
245 246
246 247 def _get_range_diffset(self, source_scm, source_repo,
247 248 commit1, commit2, diff_limit, file_limit,
248 249 fulldiff, hide_whitespace_changes, diff_context):
249 250 vcs_diff = source_scm.get_diff(
250 251 commit1, commit2,
251 252 ignore_whitespace=hide_whitespace_changes,
252 253 context=diff_context)
253 254
254 255 diff_processor = diffs.DiffProcessor(
255 256 vcs_diff, format='newdiff', diff_limit=diff_limit,
256 257 file_limit=file_limit, show_full_diff=fulldiff)
257 258
258 259 _parsed = diff_processor.prepare()
259 260
260 261 diffset = codeblocks.DiffSet(
261 262 repo_name=source_repo.repo_name,
262 263 source_node_getter=codeblocks.diffset_node_getter(commit1),
263 264 target_node_getter=codeblocks.diffset_node_getter(commit2))
264 265
265 266 diffset = self.path_filter.render_patchset_filtered(
266 267 diffset, _parsed, commit1.raw_id, commit2.raw_id)
267 268
268 269 return diffset
269 270
270 271 def register_comments_vars(self, c, pull_request, versions):
271 272 comments_model = CommentsModel()
272 273
273 274 # GENERAL COMMENTS with versions #
274 275 q = comments_model._all_general_comments_of_pull_request(pull_request)
275 276 q = q.order_by(ChangesetComment.comment_id.asc())
276 277 general_comments = q
277 278
278 279 # pick comments we want to render at current version
279 280 c.comment_versions = comments_model.aggregate_comments(
280 281 general_comments, versions, c.at_version_num)
281 282
282 283 # INLINE COMMENTS with versions #
283 284 q = comments_model._all_inline_comments_of_pull_request(pull_request)
284 285 q = q.order_by(ChangesetComment.comment_id.asc())
285 286 inline_comments = q
286 287
287 288 c.inline_versions = comments_model.aggregate_comments(
288 289 inline_comments, versions, c.at_version_num, inline=True)
289 290
290 291 # Comments inline+general
291 292 if c.at_version:
292 293 c.inline_comments_flat = c.inline_versions[c.at_version_num]['display']
293 294 c.comments = c.comment_versions[c.at_version_num]['display']
294 295 else:
295 296 c.inline_comments_flat = c.inline_versions[c.at_version_num]['until']
296 297 c.comments = c.comment_versions[c.at_version_num]['until']
297 298
298 299 return general_comments, inline_comments
299 300
300 301 @LoginRequired()
301 302 @HasRepoPermissionAnyDecorator(
302 303 'repository.read', 'repository.write', 'repository.admin')
303 304 @view_config(
304 305 route_name='pullrequest_show', request_method='GET',
305 306 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
306 307 def pull_request_show(self):
307 308 _ = self.request.translate
308 309 c = self.load_default_context()
309 310
310 311 pull_request = PullRequest.get_or_404(
311 312 self.request.matchdict['pull_request_id'])
312 313 pull_request_id = pull_request.pull_request_id
313 314
314 315 c.state_progressing = pull_request.is_state_changing()
315 316 c.pr_broadcast_channel = channelstream.pr_channel(pull_request)
316 317
317 318 _new_state = {
318 319 'created': PullRequest.STATE_CREATED,
319 320 }.get(self.request.GET.get('force_state'))
320 321
321 322 if c.is_super_admin and _new_state:
322 323 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
323 324 h.flash(
324 325 _('Pull Request state was force changed to `{}`').format(_new_state),
325 326 category='success')
326 327 Session().commit()
327 328
328 329 raise HTTPFound(h.route_path(
329 330 'pullrequest_show', repo_name=self.db_repo_name,
330 331 pull_request_id=pull_request_id))
331 332
332 333 version = self.request.GET.get('version')
333 334 from_version = self.request.GET.get('from_version') or version
334 335 merge_checks = self.request.GET.get('merge_checks')
335 336 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
336 337 force_refresh = str2bool(self.request.GET.get('force_refresh'))
337 338 c.range_diff_on = self.request.GET.get('range-diff') == "1"
338 339
339 340 # fetch global flags of ignore ws or context lines
340 341 diff_context = diffs.get_diff_context(self.request)
341 342 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
342 343
343 344 (pull_request_latest,
344 345 pull_request_at_ver,
345 346 pull_request_display_obj,
346 347 at_version) = PullRequestModel().get_pr_version(
347 348 pull_request_id, version=version)
348 349
349 350 pr_closed = pull_request_latest.is_closed()
350 351
351 352 if pr_closed and (version or from_version):
352 353 # not allow to browse versions for closed PR
353 354 raise HTTPFound(h.route_path(
354 355 'pullrequest_show', repo_name=self.db_repo_name,
355 356 pull_request_id=pull_request_id))
356 357
357 358 versions = pull_request_display_obj.versions()
358 359 # used to store per-commit range diffs
359 360 c.changes = collections.OrderedDict()
360 361
361 362 c.at_version = at_version
362 363 c.at_version_num = (at_version
363 364 if at_version and at_version != PullRequest.LATEST_VER
364 365 else None)
365 366
366 367 c.at_version_index = ChangesetComment.get_index_from_version(
367 368 c.at_version_num, versions)
368 369
369 370 (prev_pull_request_latest,
370 371 prev_pull_request_at_ver,
371 372 prev_pull_request_display_obj,
372 373 prev_at_version) = PullRequestModel().get_pr_version(
373 374 pull_request_id, version=from_version)
374 375
375 376 c.from_version = prev_at_version
376 377 c.from_version_num = (prev_at_version
377 378 if prev_at_version and prev_at_version != PullRequest.LATEST_VER
378 379 else None)
379 380 c.from_version_index = ChangesetComment.get_index_from_version(
380 381 c.from_version_num, versions)
381 382
382 383 # define if we're in COMPARE mode or VIEW at version mode
383 384 compare = at_version != prev_at_version
384 385
385 386 # pull_requests repo_name we opened it against
386 387 # ie. target_repo must match
387 388 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
388 389 log.warning('Mismatch between the current repo: %s, and target %s',
389 390 self.db_repo_name, pull_request_at_ver.target_repo.repo_name)
390 391 raise HTTPNotFound()
391 392
392 393 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(pull_request_at_ver)
393 394
394 395 c.pull_request = pull_request_display_obj
395 396 c.renderer = pull_request_at_ver.description_renderer or c.renderer
396 397 c.pull_request_latest = pull_request_latest
397 398
398 399 # inject latest version
399 400 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
400 401 c.versions = versions + [latest_ver]
401 402
402 403 if compare or (at_version and not at_version == PullRequest.LATEST_VER):
403 404 c.allowed_to_change_status = False
404 405 c.allowed_to_update = False
405 406 c.allowed_to_merge = False
406 407 c.allowed_to_delete = False
407 408 c.allowed_to_comment = False
408 409 c.allowed_to_close = False
409 410 else:
410 411 can_change_status = PullRequestModel().check_user_change_status(
411 412 pull_request_at_ver, self._rhodecode_user)
412 413 c.allowed_to_change_status = can_change_status and not pr_closed
413 414
414 415 c.allowed_to_update = PullRequestModel().check_user_update(
415 416 pull_request_latest, self._rhodecode_user) and not pr_closed
416 417 c.allowed_to_merge = PullRequestModel().check_user_merge(
417 418 pull_request_latest, self._rhodecode_user) and not pr_closed
418 419 c.allowed_to_delete = PullRequestModel().check_user_delete(
419 420 pull_request_latest, self._rhodecode_user) and not pr_closed
420 421 c.allowed_to_comment = not pr_closed
421 422 c.allowed_to_close = c.allowed_to_merge and not pr_closed
422 423
423 424 c.forbid_adding_reviewers = False
424 425 c.forbid_author_to_review = False
425 426 c.forbid_commit_author_to_review = False
426 427
427 428 if pull_request_latest.reviewer_data and \
428 429 'rules' in pull_request_latest.reviewer_data:
429 430 rules = pull_request_latest.reviewer_data['rules'] or {}
430 431 try:
431 432 c.forbid_adding_reviewers = rules.get('forbid_adding_reviewers')
432 433 c.forbid_author_to_review = rules.get('forbid_author_to_review')
433 434 c.forbid_commit_author_to_review = rules.get('forbid_commit_author_to_review')
434 435 except Exception:
435 436 pass
436 437
437 438 # check merge capabilities
438 439 _merge_check = MergeCheck.validate(
439 440 pull_request_latest, auth_user=self._rhodecode_user,
440 441 translator=self.request.translate,
441 442 force_shadow_repo_refresh=force_refresh)
442 443
443 444 c.pr_merge_errors = _merge_check.error_details
444 445 c.pr_merge_possible = not _merge_check.failed
445 446 c.pr_merge_message = _merge_check.merge_msg
446 447 c.pr_merge_source_commit = _merge_check.source_commit
447 448 c.pr_merge_target_commit = _merge_check.target_commit
448 449
449 450 c.pr_merge_info = MergeCheck.get_merge_conditions(
450 451 pull_request_latest, translator=self.request.translate)
451 452
452 453 c.pull_request_review_status = _merge_check.review_status
453 454 if merge_checks:
454 455 self.request.override_renderer = \
455 456 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
456 457 return self._get_template_context(c)
457 458
458 459 c.reviewers_count = pull_request.reviewers_count
459 460 c.observers_count = pull_request.observers_count
460 461
461 462 # reviewers and statuses
462 463 c.pull_request_default_reviewers_data_json = json.dumps(pull_request.reviewer_data)
463 464 c.pull_request_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
464 465 c.pull_request_set_observers_data_json = collections.OrderedDict({'observers': []})
465 466
466 467 for review_obj, member, reasons, mandatory, status in pull_request_at_ver.reviewers_statuses():
467 468 member_reviewer = h.reviewer_as_json(
468 469 member, reasons=reasons, mandatory=mandatory,
469 470 role=review_obj.role,
470 471 user_group=review_obj.rule_user_group_data()
471 472 )
472 473
473 474 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
474 475 member_reviewer['review_status'] = current_review_status
475 476 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
476 477 member_reviewer['allowed_to_update'] = c.allowed_to_update
477 478 c.pull_request_set_reviewers_data_json['reviewers'].append(member_reviewer)
478 479
479 480 c.pull_request_set_reviewers_data_json = json.dumps(c.pull_request_set_reviewers_data_json)
480 481
481 482 for observer_obj, member in pull_request_at_ver.observers():
482 483 member_observer = h.reviewer_as_json(
483 484 member, reasons=[], mandatory=False,
484 485 role=observer_obj.role,
485 486 user_group=observer_obj.rule_user_group_data()
486 487 )
487 488 member_observer['allowed_to_update'] = c.allowed_to_update
488 489 c.pull_request_set_observers_data_json['observers'].append(member_observer)
489 490
490 491 c.pull_request_set_observers_data_json = json.dumps(c.pull_request_set_observers_data_json)
491 492
492 493 general_comments, inline_comments = \
493 494 self.register_comments_vars(c, pull_request_latest, versions)
494 495
495 496 # TODOs
496 497 c.unresolved_comments = CommentsModel() \
497 498 .get_pull_request_unresolved_todos(pull_request_latest)
498 499 c.resolved_comments = CommentsModel() \
499 500 .get_pull_request_resolved_todos(pull_request_latest)
500 501
501 502 # if we use version, then do not show later comments
502 503 # than current version
503 504 display_inline_comments = collections.defaultdict(
504 505 lambda: collections.defaultdict(list))
505 506 for co in inline_comments:
506 507 if c.at_version_num:
507 508 # pick comments that are at least UPTO given version, so we
508 509 # don't render comments for higher version
509 510 should_render = co.pull_request_version_id and \
510 511 co.pull_request_version_id <= c.at_version_num
511 512 else:
512 513 # showing all, for 'latest'
513 514 should_render = True
514 515
515 516 if should_render:
516 517 display_inline_comments[co.f_path][co.line_no].append(co)
517 518
518 519 # load diff data into template context, if we use compare mode then
519 520 # diff is calculated based on changes between versions of PR
520 521
521 522 source_repo = pull_request_at_ver.source_repo
522 523 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
523 524
524 525 target_repo = pull_request_at_ver.target_repo
525 526 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
526 527
527 528 if compare:
528 529 # in compare switch the diff base to latest commit from prev version
529 530 target_ref_id = prev_pull_request_display_obj.revisions[0]
530 531
531 532 # despite opening commits for bookmarks/branches/tags, we always
532 533 # convert this to rev to prevent changes after bookmark or branch change
533 534 c.source_ref_type = 'rev'
534 535 c.source_ref = source_ref_id
535 536
536 537 c.target_ref_type = 'rev'
537 538 c.target_ref = target_ref_id
538 539
539 540 c.source_repo = source_repo
540 541 c.target_repo = target_repo
541 542
542 543 c.commit_ranges = []
543 544 source_commit = EmptyCommit()
544 545 target_commit = EmptyCommit()
545 546 c.missing_requirements = False
546 547
547 548 source_scm = source_repo.scm_instance()
548 549 target_scm = target_repo.scm_instance()
549 550
550 551 shadow_scm = None
551 552 try:
552 553 shadow_scm = pull_request_latest.get_shadow_repo()
553 554 except Exception:
554 555 log.debug('Failed to get shadow repo', exc_info=True)
555 556 # try first the existing source_repo, and then shadow
556 557 # repo if we can obtain one
557 558 commits_source_repo = source_scm
558 559 if shadow_scm:
559 560 commits_source_repo = shadow_scm
560 561
561 562 c.commits_source_repo = commits_source_repo
562 563 c.ancestor = None # set it to None, to hide it from PR view
563 564
564 565 # empty version means latest, so we keep this to prevent
565 566 # double caching
566 567 version_normalized = version or PullRequest.LATEST_VER
567 568 from_version_normalized = from_version or PullRequest.LATEST_VER
568 569
569 570 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
570 571 cache_file_path = diff_cache_exist(
571 572 cache_path, 'pull_request', pull_request_id, version_normalized,
572 573 from_version_normalized, source_ref_id, target_ref_id,
573 574 hide_whitespace_changes, diff_context, c.fulldiff)
574 575
575 576 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
576 577 force_recache = self.get_recache_flag()
577 578
578 579 cached_diff = None
579 580 if caching_enabled:
580 581 cached_diff = load_cached_diff(cache_file_path)
581 582
582 583 has_proper_commit_cache = (
583 584 cached_diff and cached_diff.get('commits')
584 585 and len(cached_diff.get('commits', [])) == 5
585 586 and cached_diff.get('commits')[0]
586 587 and cached_diff.get('commits')[3])
587 588
588 589 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
589 590 diff_commit_cache = \
590 591 (ancestor_commit, commit_cache, missing_requirements,
591 592 source_commit, target_commit) = cached_diff['commits']
592 593 else:
593 594 # NOTE(marcink): we reach potentially unreachable errors when a PR has
594 595 # merge errors resulting in potentially hidden commits in the shadow repo.
595 596 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
596 597 and _merge_check.merge_response
597 598 maybe_unreachable = maybe_unreachable \
598 599 and _merge_check.merge_response.metadata.get('unresolved_files')
599 600 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
600 601 diff_commit_cache = \
601 602 (ancestor_commit, commit_cache, missing_requirements,
602 603 source_commit, target_commit) = self.get_commits(
603 604 commits_source_repo,
604 605 pull_request_at_ver,
605 606 source_commit,
606 607 source_ref_id,
607 608 source_scm,
608 609 target_commit,
609 610 target_ref_id,
610 611 target_scm,
611 612 maybe_unreachable=maybe_unreachable)
612 613
613 614 # register our commit range
614 615 for comm in commit_cache.values():
615 616 c.commit_ranges.append(comm)
616 617
617 618 c.missing_requirements = missing_requirements
618 619 c.ancestor_commit = ancestor_commit
619 620 c.statuses = source_repo.statuses(
620 621 [x.raw_id for x in c.commit_ranges])
621 622
622 623 # auto collapse if we have more than limit
623 624 collapse_limit = diffs.DiffProcessor._collapse_commits_over
624 625 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
625 626 c.compare_mode = compare
626 627
627 628 # diff_limit is the old behavior, will cut off the whole diff
628 629 # if the limit is applied otherwise will just hide the
629 630 # big files from the front-end
630 631 diff_limit = c.visual.cut_off_limit_diff
631 632 file_limit = c.visual.cut_off_limit_file
632 633
633 634 c.missing_commits = False
634 635 if (c.missing_requirements
635 636 or isinstance(source_commit, EmptyCommit)
636 637 or source_commit == target_commit):
637 638
638 639 c.missing_commits = True
639 640 else:
640 641 c.inline_comments = display_inline_comments
641 642
642 643 use_ancestor = True
643 644 if from_version_normalized != version_normalized:
644 645 use_ancestor = False
645 646
646 647 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
647 648 if not force_recache and has_proper_diff_cache:
648 649 c.diffset = cached_diff['diff']
649 650 else:
650 651 try:
651 652 c.diffset = self._get_diffset(
652 653 c.source_repo.repo_name, commits_source_repo,
653 654 c.ancestor_commit,
654 655 source_ref_id, target_ref_id,
655 656 target_commit, source_commit,
656 657 diff_limit, file_limit, c.fulldiff,
657 658 hide_whitespace_changes, diff_context,
658 659 use_ancestor=use_ancestor
659 660 )
660 661
661 662 # save cached diff
662 663 if caching_enabled:
663 664 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
664 665 except CommitDoesNotExistError:
665 666 log.exception('Failed to generate diffset')
666 667 c.missing_commits = True
667 668
668 669 if not c.missing_commits:
669 670
670 671 c.limited_diff = c.diffset.limited_diff
671 672
672 673 # calculate removed files that are bound to comments
673 674 comment_deleted_files = [
674 675 fname for fname in display_inline_comments
675 676 if fname not in c.diffset.file_stats]
676 677
677 678 c.deleted_files_comments = collections.defaultdict(dict)
678 679 for fname, per_line_comments in display_inline_comments.items():
679 680 if fname in comment_deleted_files:
680 681 c.deleted_files_comments[fname]['stats'] = 0
681 682 c.deleted_files_comments[fname]['comments'] = list()
682 683 for lno, comments in per_line_comments.items():
683 684 c.deleted_files_comments[fname]['comments'].extend(comments)
684 685
685 686 # maybe calculate the range diff
686 687 if c.range_diff_on:
687 688 # TODO(marcink): set whitespace/context
688 689 context_lcl = 3
689 690 ign_whitespace_lcl = False
690 691
691 692 for commit in c.commit_ranges:
692 693 commit2 = commit
693 694 commit1 = commit.first_parent
694 695
695 696 range_diff_cache_file_path = diff_cache_exist(
696 697 cache_path, 'diff', commit.raw_id,
697 698 ign_whitespace_lcl, context_lcl, c.fulldiff)
698 699
699 700 cached_diff = None
700 701 if caching_enabled:
701 702 cached_diff = load_cached_diff(range_diff_cache_file_path)
702 703
703 704 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
704 705 if not force_recache and has_proper_diff_cache:
705 706 diffset = cached_diff['diff']
706 707 else:
707 708 diffset = self._get_range_diffset(
708 709 commits_source_repo, source_repo,
709 710 commit1, commit2, diff_limit, file_limit,
710 711 c.fulldiff, ign_whitespace_lcl, context_lcl
711 712 )
712 713
713 714 # save cached diff
714 715 if caching_enabled:
715 716 cache_diff(range_diff_cache_file_path, diffset, None)
716 717
717 718 c.changes[commit.raw_id] = diffset
718 719
719 720 # this is a hack to properly display links, when creating PR, the
720 721 # compare view and others uses different notation, and
721 722 # compare_commits.mako renders links based on the target_repo.
722 723 # We need to swap that here to generate it properly on the html side
723 724 c.target_repo = c.source_repo
724 725
725 726 c.commit_statuses = ChangesetStatus.STATUSES
726 727
727 728 c.show_version_changes = not pr_closed
728 729 if c.show_version_changes:
729 730 cur_obj = pull_request_at_ver
730 731 prev_obj = prev_pull_request_at_ver
731 732
732 733 old_commit_ids = prev_obj.revisions
733 734 new_commit_ids = cur_obj.revisions
734 735 commit_changes = PullRequestModel()._calculate_commit_id_changes(
735 736 old_commit_ids, new_commit_ids)
736 737 c.commit_changes_summary = commit_changes
737 738
738 739 # calculate the diff for commits between versions
739 740 c.commit_changes = []
740 741
741 742 def mark(cs, fw):
742 743 return list(h.itertools.izip_longest([], cs, fillvalue=fw))
743 744
744 745 for c_type, raw_id in mark(commit_changes.added, 'a') \
745 746 + mark(commit_changes.removed, 'r') \
746 747 + mark(commit_changes.common, 'c'):
747 748
748 749 if raw_id in commit_cache:
749 750 commit = commit_cache[raw_id]
750 751 else:
751 752 try:
752 753 commit = commits_source_repo.get_commit(raw_id)
753 754 except CommitDoesNotExistError:
754 755 # in case we fail extracting still use "dummy" commit
755 756 # for display in commit diff
756 757 commit = h.AttributeDict(
757 758 {'raw_id': raw_id,
758 759 'message': 'EMPTY or MISSING COMMIT'})
759 760 c.commit_changes.append([c_type, commit])
760 761
761 762 # current user review statuses for each version
762 763 c.review_versions = {}
763 764 is_reviewer = PullRequestModel().is_user_reviewer(
764 765 pull_request, self._rhodecode_user)
765 766 if is_reviewer:
766 767 for co in general_comments:
767 768 if co.author.user_id == self._rhodecode_user.user_id:
768 769 status = co.status_change
769 770 if status:
770 771 _ver_pr = status[0].comment.pull_request_version_id
771 772 c.review_versions[_ver_pr] = status[0]
772 773
773 774 return self._get_template_context(c)
774 775
775 776 def get_commits(
776 777 self, commits_source_repo, pull_request_at_ver, source_commit,
777 778 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
778 779 maybe_unreachable=False):
779 780
780 781 commit_cache = collections.OrderedDict()
781 782 missing_requirements = False
782 783
783 784 try:
784 785 pre_load = ["author", "date", "message", "branch", "parents"]
785 786
786 787 pull_request_commits = pull_request_at_ver.revisions
787 788 log.debug('Loading %s commits from %s',
788 789 len(pull_request_commits), commits_source_repo)
789 790
790 791 for rev in pull_request_commits:
791 792 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
792 793 maybe_unreachable=maybe_unreachable)
793 794 commit_cache[comm.raw_id] = comm
794 795
795 796 # Order here matters, we first need to get target, and then
796 797 # the source
797 798 target_commit = commits_source_repo.get_commit(
798 799 commit_id=safe_str(target_ref_id))
799 800
800 801 source_commit = commits_source_repo.get_commit(
801 802 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
802 803 except CommitDoesNotExistError:
803 804 log.warning('Failed to get commit from `{}` repo'.format(
804 805 commits_source_repo), exc_info=True)
805 806 except RepositoryRequirementError:
806 807 log.warning('Failed to get all required data from repo', exc_info=True)
807 808 missing_requirements = True
808 809
809 810 pr_ancestor_id = pull_request_at_ver.common_ancestor_id
810 811
811 812 try:
812 813 ancestor_commit = source_scm.get_commit(pr_ancestor_id)
813 814 except Exception:
814 815 ancestor_commit = None
815 816
816 817 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
817 818
818 819 def assure_not_empty_repo(self):
819 820 _ = self.request.translate
820 821
821 822 try:
822 823 self.db_repo.scm_instance().get_commit()
823 824 except EmptyRepositoryError:
824 825 h.flash(h.literal(_('There are no commits yet')),
825 826 category='warning')
826 827 raise HTTPFound(
827 828 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
828 829
829 830 @LoginRequired()
830 831 @NotAnonymous()
831 832 @HasRepoPermissionAnyDecorator(
832 833 'repository.read', 'repository.write', 'repository.admin')
833 834 @view_config(
834 835 route_name='pullrequest_new', request_method='GET',
835 836 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
836 837 def pull_request_new(self):
837 838 _ = self.request.translate
838 839 c = self.load_default_context()
839 840
840 841 self.assure_not_empty_repo()
841 842 source_repo = self.db_repo
842 843
843 844 commit_id = self.request.GET.get('commit')
844 845 branch_ref = self.request.GET.get('branch')
845 846 bookmark_ref = self.request.GET.get('bookmark')
846 847
847 848 try:
848 849 source_repo_data = PullRequestModel().generate_repo_data(
849 850 source_repo, commit_id=commit_id,
850 851 branch=branch_ref, bookmark=bookmark_ref,
851 852 translator=self.request.translate)
852 853 except CommitDoesNotExistError as e:
853 854 log.exception(e)
854 855 h.flash(_('Commit does not exist'), 'error')
855 856 raise HTTPFound(
856 857 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
857 858
858 859 default_target_repo = source_repo
859 860
860 861 if source_repo.parent and c.has_origin_repo_read_perm:
861 862 parent_vcs_obj = source_repo.parent.scm_instance()
862 863 if parent_vcs_obj and not parent_vcs_obj.is_empty():
863 864 # change default if we have a parent repo
864 865 default_target_repo = source_repo.parent
865 866
866 867 target_repo_data = PullRequestModel().generate_repo_data(
867 868 default_target_repo, translator=self.request.translate)
868 869
869 870 selected_source_ref = source_repo_data['refs']['selected_ref']
870 871 title_source_ref = ''
871 872 if selected_source_ref:
872 873 title_source_ref = selected_source_ref.split(':', 2)[1]
873 874 c.default_title = PullRequestModel().generate_pullrequest_title(
874 875 source=source_repo.repo_name,
875 876 source_ref=title_source_ref,
876 877 target=default_target_repo.repo_name
877 878 )
878 879
879 880 c.default_repo_data = {
880 881 'source_repo_name': source_repo.repo_name,
881 882 'source_refs_json': json.dumps(source_repo_data),
882 883 'target_repo_name': default_target_repo.repo_name,
883 884 'target_refs_json': json.dumps(target_repo_data),
884 885 }
885 886 c.default_source_ref = selected_source_ref
886 887
887 888 return self._get_template_context(c)
888 889
889 890 @LoginRequired()
890 891 @NotAnonymous()
891 892 @HasRepoPermissionAnyDecorator(
892 893 'repository.read', 'repository.write', 'repository.admin')
893 894 @view_config(
894 895 route_name='pullrequest_repo_refs', request_method='GET',
895 896 renderer='json_ext', xhr=True)
896 897 def pull_request_repo_refs(self):
897 898 self.load_default_context()
898 899 target_repo_name = self.request.matchdict['target_repo_name']
899 900 repo = Repository.get_by_repo_name(target_repo_name)
900 901 if not repo:
901 902 raise HTTPNotFound()
902 903
903 904 target_perm = HasRepoPermissionAny(
904 905 'repository.read', 'repository.write', 'repository.admin')(
905 906 target_repo_name)
906 907 if not target_perm:
907 908 raise HTTPNotFound()
908 909
909 910 return PullRequestModel().generate_repo_data(
910 911 repo, translator=self.request.translate)
911 912
912 913 @LoginRequired()
913 914 @NotAnonymous()
914 915 @HasRepoPermissionAnyDecorator(
915 916 'repository.read', 'repository.write', 'repository.admin')
916 917 @view_config(
917 918 route_name='pullrequest_repo_targets', request_method='GET',
918 919 renderer='json_ext', xhr=True)
919 920 def pullrequest_repo_targets(self):
920 921 _ = self.request.translate
921 922 filter_query = self.request.GET.get('query')
922 923
923 924 # get the parents
924 925 parent_target_repos = []
925 926 if self.db_repo.parent:
926 927 parents_query = Repository.query() \
927 928 .order_by(func.length(Repository.repo_name)) \
928 929 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
929 930
930 931 if filter_query:
931 932 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
932 933 parents_query = parents_query.filter(
933 934 Repository.repo_name.ilike(ilike_expression))
934 935 parents = parents_query.limit(20).all()
935 936
936 937 for parent in parents:
937 938 parent_vcs_obj = parent.scm_instance()
938 939 if parent_vcs_obj and not parent_vcs_obj.is_empty():
939 940 parent_target_repos.append(parent)
940 941
941 942 # get other forks, and repo itself
942 943 query = Repository.query() \
943 944 .order_by(func.length(Repository.repo_name)) \
944 945 .filter(
945 946 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
946 947 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
947 948 ) \
948 949 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
949 950
950 951 if filter_query:
951 952 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
952 953 query = query.filter(Repository.repo_name.ilike(ilike_expression))
953 954
954 955 limit = max(20 - len(parent_target_repos), 5) # not less then 5
955 956 target_repos = query.limit(limit).all()
956 957
957 958 all_target_repos = target_repos + parent_target_repos
958 959
959 960 repos = []
960 961 # This checks permissions to the repositories
961 962 for obj in ScmModel().get_repos(all_target_repos):
962 963 repos.append({
963 964 'id': obj['name'],
964 965 'text': obj['name'],
965 966 'type': 'repo',
966 967 'repo_id': obj['dbrepo']['repo_id'],
967 968 'repo_type': obj['dbrepo']['repo_type'],
968 969 'private': obj['dbrepo']['private'],
969 970
970 971 })
971 972
972 973 data = {
973 974 'more': False,
974 975 'results': [{
975 976 'text': _('Repositories'),
976 977 'children': repos
977 978 }] if repos else []
978 979 }
979 980 return data
980 981
981 982 def _get_existing_ids(self, post_data):
982 983 return filter(lambda e: e, map(safe_int, aslist(post_data.get('comments'), ',')))
983 984
984 985 @LoginRequired()
985 986 @NotAnonymous()
986 987 @HasRepoPermissionAnyDecorator(
987 988 'repository.read', 'repository.write', 'repository.admin')
988 989 @view_config(
989 990 route_name='pullrequest_comments', request_method='POST',
990 991 renderer='string_html', xhr=True)
991 992 def pullrequest_comments(self):
992 993 self.load_default_context()
993 994
994 995 pull_request = PullRequest.get_or_404(
995 996 self.request.matchdict['pull_request_id'])
996 997 pull_request_id = pull_request.pull_request_id
997 998 version = self.request.GET.get('version')
998 999
999 1000 _render = self.request.get_partial_renderer(
1000 1001 'rhodecode:templates/base/sidebar.mako')
1001 1002 c = _render.get_call_context()
1002 1003
1003 1004 (pull_request_latest,
1004 1005 pull_request_at_ver,
1005 1006 pull_request_display_obj,
1006 1007 at_version) = PullRequestModel().get_pr_version(
1007 1008 pull_request_id, version=version)
1008 1009 versions = pull_request_display_obj.versions()
1009 1010 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1010 1011 c.versions = versions + [latest_ver]
1011 1012
1012 1013 c.at_version = at_version
1013 1014 c.at_version_num = (at_version
1014 1015 if at_version and at_version != PullRequest.LATEST_VER
1015 1016 else None)
1016 1017
1017 1018 self.register_comments_vars(c, pull_request_latest, versions)
1018 1019 all_comments = c.inline_comments_flat + c.comments
1019 1020
1020 1021 existing_ids = self._get_existing_ids(self.request.POST)
1021 1022 return _render('comments_table', all_comments, len(all_comments),
1022 1023 existing_ids=existing_ids)
1023 1024
1024 1025 @LoginRequired()
1025 1026 @NotAnonymous()
1026 1027 @HasRepoPermissionAnyDecorator(
1027 1028 'repository.read', 'repository.write', 'repository.admin')
1028 1029 @view_config(
1029 1030 route_name='pullrequest_todos', request_method='POST',
1030 1031 renderer='string_html', xhr=True)
1031 1032 def pullrequest_todos(self):
1032 1033 self.load_default_context()
1033 1034
1034 1035 pull_request = PullRequest.get_or_404(
1035 1036 self.request.matchdict['pull_request_id'])
1036 1037 pull_request_id = pull_request.pull_request_id
1037 1038 version = self.request.GET.get('version')
1038 1039
1039 1040 _render = self.request.get_partial_renderer(
1040 1041 'rhodecode:templates/base/sidebar.mako')
1041 1042 c = _render.get_call_context()
1042 1043 (pull_request_latest,
1043 1044 pull_request_at_ver,
1044 1045 pull_request_display_obj,
1045 1046 at_version) = PullRequestModel().get_pr_version(
1046 1047 pull_request_id, version=version)
1047 1048 versions = pull_request_display_obj.versions()
1048 1049 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1049 1050 c.versions = versions + [latest_ver]
1050 1051
1051 1052 c.at_version = at_version
1052 1053 c.at_version_num = (at_version
1053 1054 if at_version and at_version != PullRequest.LATEST_VER
1054 1055 else None)
1055 1056
1056 1057 c.unresolved_comments = CommentsModel() \
1057 1058 .get_pull_request_unresolved_todos(pull_request)
1058 1059 c.resolved_comments = CommentsModel() \
1059 1060 .get_pull_request_resolved_todos(pull_request)
1060 1061
1061 1062 all_comments = c.unresolved_comments + c.resolved_comments
1062 1063 existing_ids = self._get_existing_ids(self.request.POST)
1063 1064 return _render('comments_table', all_comments, len(c.unresolved_comments),
1064 1065 todo_comments=True, existing_ids=existing_ids)
1065 1066
1066 1067 @LoginRequired()
1067 1068 @NotAnonymous()
1068 1069 @HasRepoPermissionAnyDecorator(
1069 1070 'repository.read', 'repository.write', 'repository.admin')
1070 1071 @CSRFRequired()
1071 1072 @view_config(
1072 1073 route_name='pullrequest_create', request_method='POST',
1073 1074 renderer=None)
1074 1075 def pull_request_create(self):
1075 1076 _ = self.request.translate
1076 1077 self.assure_not_empty_repo()
1077 1078 self.load_default_context()
1078 1079
1079 1080 controls = peppercorn.parse(self.request.POST.items())
1080 1081
1081 1082 try:
1082 1083 form = PullRequestForm(
1083 1084 self.request.translate, self.db_repo.repo_id)()
1084 1085 _form = form.to_python(controls)
1085 1086 except formencode.Invalid as errors:
1086 1087 if errors.error_dict.get('revisions'):
1087 1088 msg = 'Revisions: %s' % errors.error_dict['revisions']
1088 1089 elif errors.error_dict.get('pullrequest_title'):
1089 1090 msg = errors.error_dict.get('pullrequest_title')
1090 1091 else:
1091 1092 msg = _('Error creating pull request: {}').format(errors)
1092 1093 log.exception(msg)
1093 1094 h.flash(msg, 'error')
1094 1095
1095 1096 # would rather just go back to form ...
1096 1097 raise HTTPFound(
1097 1098 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1098 1099
1099 1100 source_repo = _form['source_repo']
1100 1101 source_ref = _form['source_ref']
1101 1102 target_repo = _form['target_repo']
1102 1103 target_ref = _form['target_ref']
1103 1104 commit_ids = _form['revisions'][::-1]
1104 1105 common_ancestor_id = _form['common_ancestor']
1105 1106
1106 1107 # find the ancestor for this pr
1107 1108 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
1108 1109 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
1109 1110
1110 1111 if not (source_db_repo or target_db_repo):
1111 1112 h.flash(_('source_repo or target repo not found'), category='error')
1112 1113 raise HTTPFound(
1113 1114 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1114 1115
1115 1116 # re-check permissions again here
1116 1117 # source_repo we must have read permissions
1117 1118
1118 1119 source_perm = HasRepoPermissionAny(
1119 1120 'repository.read', 'repository.write', 'repository.admin')(
1120 1121 source_db_repo.repo_name)
1121 1122 if not source_perm:
1122 1123 msg = _('Not Enough permissions to source repo `{}`.'.format(
1123 1124 source_db_repo.repo_name))
1124 1125 h.flash(msg, category='error')
1125 1126 # copy the args back to redirect
1126 1127 org_query = self.request.GET.mixed()
1127 1128 raise HTTPFound(
1128 1129 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1129 1130 _query=org_query))
1130 1131
1131 1132 # target repo we must have read permissions, and also later on
1132 1133 # we want to check branch permissions here
1133 1134 target_perm = HasRepoPermissionAny(
1134 1135 'repository.read', 'repository.write', 'repository.admin')(
1135 1136 target_db_repo.repo_name)
1136 1137 if not target_perm:
1137 1138 msg = _('Not Enough permissions to target repo `{}`.'.format(
1138 1139 target_db_repo.repo_name))
1139 1140 h.flash(msg, category='error')
1140 1141 # copy the args back to redirect
1141 1142 org_query = self.request.GET.mixed()
1142 1143 raise HTTPFound(
1143 1144 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1144 1145 _query=org_query))
1145 1146
1146 1147 source_scm = source_db_repo.scm_instance()
1147 1148 target_scm = target_db_repo.scm_instance()
1148 1149
1149 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
1150 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
1150 source_ref_obj = unicode_to_reference(source_ref)
1151 target_ref_obj = unicode_to_reference(target_ref)
1152
1153 source_commit = source_scm.get_commit(source_ref_obj.commit_id)
1154 target_commit = target_scm.get_commit(target_ref_obj.commit_id)
1151 1155
1152 1156 ancestor = source_scm.get_common_ancestor(
1153 1157 source_commit.raw_id, target_commit.raw_id, target_scm)
1154 1158
1155 source_ref_type, source_ref_name, source_commit_id = _form['target_ref'].split(':')
1156 target_ref_type, target_ref_name, target_commit_id = _form['source_ref'].split(':')
1157 1159 # recalculate target ref based on ancestor
1158 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
1160 target_ref = ':'.join((target_ref_obj.type, target_ref_obj.name, ancestor))
1159 1161
1160 1162 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
1161 1163 PullRequestModel().get_reviewer_functions()
1162 1164
1163 1165 # recalculate reviewers logic, to make sure we can validate this
1164 1166 reviewer_rules = get_default_reviewers_data(
1165 1167 self._rhodecode_db_user,
1166 1168 source_db_repo,
1167 Reference(source_ref_type, source_ref_name, source_commit_id),
1169 source_ref_obj,
1168 1170 target_db_repo,
1169 Reference(target_ref_type, target_ref_name, target_commit_id),
1171 target_ref_obj,
1170 1172 include_diff_info=False)
1171 1173
1172 1174 reviewers = validate_default_reviewers(_form['review_members'], reviewer_rules)
1173 1175 observers = validate_observers(_form['observer_members'], reviewer_rules)
1174 1176
1175 1177 pullrequest_title = _form['pullrequest_title']
1176 title_source_ref = source_ref.split(':', 2)[1]
1178 title_source_ref = source_ref_obj.name
1177 1179 if not pullrequest_title:
1178 1180 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1179 1181 source=source_repo,
1180 1182 source_ref=title_source_ref,
1181 1183 target=target_repo
1182 1184 )
1183 1185
1184 1186 description = _form['pullrequest_desc']
1185 1187 description_renderer = _form['description_renderer']
1186 1188
1187 1189 try:
1188 1190 pull_request = PullRequestModel().create(
1189 1191 created_by=self._rhodecode_user.user_id,
1190 1192 source_repo=source_repo,
1191 1193 source_ref=source_ref,
1192 1194 target_repo=target_repo,
1193 1195 target_ref=target_ref,
1194 1196 revisions=commit_ids,
1195 1197 common_ancestor_id=common_ancestor_id,
1196 1198 reviewers=reviewers,
1197 1199 observers=observers,
1198 1200 title=pullrequest_title,
1199 1201 description=description,
1200 1202 description_renderer=description_renderer,
1201 1203 reviewer_data=reviewer_rules,
1202 1204 auth_user=self._rhodecode_user
1203 1205 )
1204 1206 Session().commit()
1205 1207
1206 1208 h.flash(_('Successfully opened new pull request'),
1207 1209 category='success')
1208 1210 except Exception:
1209 1211 msg = _('Error occurred during creation of this pull request.')
1210 1212 log.exception(msg)
1211 1213 h.flash(msg, category='error')
1212 1214
1213 1215 # copy the args back to redirect
1214 1216 org_query = self.request.GET.mixed()
1215 1217 raise HTTPFound(
1216 1218 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1217 1219 _query=org_query))
1218 1220
1219 1221 raise HTTPFound(
1220 1222 h.route_path('pullrequest_show', repo_name=target_repo,
1221 1223 pull_request_id=pull_request.pull_request_id))
1222 1224
1223 1225 @LoginRequired()
1224 1226 @NotAnonymous()
1225 1227 @HasRepoPermissionAnyDecorator(
1226 1228 'repository.read', 'repository.write', 'repository.admin')
1227 1229 @CSRFRequired()
1228 1230 @view_config(
1229 1231 route_name='pullrequest_update', request_method='POST',
1230 1232 renderer='json_ext')
1231 1233 def pull_request_update(self):
1232 1234 pull_request = PullRequest.get_or_404(
1233 1235 self.request.matchdict['pull_request_id'])
1234 1236 _ = self.request.translate
1235 1237
1236 1238 c = self.load_default_context()
1237 1239 redirect_url = None
1238 1240
1239 1241 if pull_request.is_closed():
1240 1242 log.debug('update: forbidden because pull request is closed')
1241 1243 msg = _(u'Cannot update closed pull requests.')
1242 1244 h.flash(msg, category='error')
1243 1245 return {'response': True,
1244 1246 'redirect_url': redirect_url}
1245 1247
1246 1248 is_state_changing = pull_request.is_state_changing()
1247 1249 c.pr_broadcast_channel = channelstream.pr_channel(pull_request)
1248 1250
1249 1251 # only owner or admin can update it
1250 1252 allowed_to_update = PullRequestModel().check_user_update(
1251 1253 pull_request, self._rhodecode_user)
1252 1254
1253 1255 if allowed_to_update:
1254 1256 controls = peppercorn.parse(self.request.POST.items())
1255 1257 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1256 1258
1257 1259 if 'review_members' in controls:
1258 1260 self._update_reviewers(
1259 1261 c,
1260 1262 pull_request, controls['review_members'],
1261 1263 pull_request.reviewer_data,
1262 1264 PullRequestReviewers.ROLE_REVIEWER)
1263 1265 elif 'observer_members' in controls:
1264 1266 self._update_reviewers(
1265 1267 c,
1266 1268 pull_request, controls['observer_members'],
1267 1269 pull_request.reviewer_data,
1268 1270 PullRequestReviewers.ROLE_OBSERVER)
1269 1271 elif str2bool(self.request.POST.get('update_commits', 'false')):
1270 1272 if is_state_changing:
1271 1273 log.debug('commits update: forbidden because pull request is in state %s',
1272 1274 pull_request.pull_request_state)
1273 1275 msg = _(u'Cannot update pull requests commits in state other than `{}`. '
1274 1276 u'Current state is: `{}`').format(
1275 1277 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1276 1278 h.flash(msg, category='error')
1277 1279 return {'response': True,
1278 1280 'redirect_url': redirect_url}
1279 1281
1280 1282 self._update_commits(c, pull_request)
1281 1283 if force_refresh:
1282 1284 redirect_url = h.route_path(
1283 1285 'pullrequest_show', repo_name=self.db_repo_name,
1284 1286 pull_request_id=pull_request.pull_request_id,
1285 1287 _query={"force_refresh": 1})
1286 1288 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1287 1289 self._edit_pull_request(pull_request)
1288 1290 else:
1289 1291 log.error('Unhandled update data.')
1290 1292 raise HTTPBadRequest()
1291 1293
1292 1294 return {'response': True,
1293 1295 'redirect_url': redirect_url}
1294 1296 raise HTTPForbidden()
1295 1297
1296 1298 def _edit_pull_request(self, pull_request):
1297 1299 """
1298 1300 Edit title and description
1299 1301 """
1300 1302 _ = self.request.translate
1301 1303
1302 1304 try:
1303 1305 PullRequestModel().edit(
1304 1306 pull_request,
1305 1307 self.request.POST.get('title'),
1306 1308 self.request.POST.get('description'),
1307 1309 self.request.POST.get('description_renderer'),
1308 1310 self._rhodecode_user)
1309 1311 except ValueError:
1310 1312 msg = _(u'Cannot update closed pull requests.')
1311 1313 h.flash(msg, category='error')
1312 1314 return
1313 1315 else:
1314 1316 Session().commit()
1315 1317
1316 1318 msg = _(u'Pull request title & description updated.')
1317 1319 h.flash(msg, category='success')
1318 1320 return
1319 1321
1320 1322 def _update_commits(self, c, pull_request):
1321 1323 _ = self.request.translate
1322 1324
1323 1325 with pull_request.set_state(PullRequest.STATE_UPDATING):
1324 1326 resp = PullRequestModel().update_commits(
1325 1327 pull_request, self._rhodecode_db_user)
1326 1328
1327 1329 if resp.executed:
1328 1330
1329 1331 if resp.target_changed and resp.source_changed:
1330 1332 changed = 'target and source repositories'
1331 1333 elif resp.target_changed and not resp.source_changed:
1332 1334 changed = 'target repository'
1333 1335 elif not resp.target_changed and resp.source_changed:
1334 1336 changed = 'source repository'
1335 1337 else:
1336 1338 changed = 'nothing'
1337 1339
1338 1340 msg = _(u'Pull request updated to "{source_commit_id}" with '
1339 1341 u'{count_added} added, {count_removed} removed commits. '
1340 1342 u'Source of changes: {change_source}.')
1341 1343 msg = msg.format(
1342 1344 source_commit_id=pull_request.source_ref_parts.commit_id,
1343 1345 count_added=len(resp.changes.added),
1344 1346 count_removed=len(resp.changes.removed),
1345 1347 change_source=changed)
1346 1348 h.flash(msg, category='success')
1347 1349 channelstream.pr_update_channelstream_push(
1348 1350 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1349 1351 else:
1350 1352 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1351 1353 warning_reasons = [
1352 1354 UpdateFailureReason.NO_CHANGE,
1353 1355 UpdateFailureReason.WRONG_REF_TYPE,
1354 1356 ]
1355 1357 category = 'warning' if resp.reason in warning_reasons else 'error'
1356 1358 h.flash(msg, category=category)
1357 1359
1358 1360 def _update_reviewers(self, c, pull_request, review_members, reviewer_rules, role):
1359 1361 _ = self.request.translate
1360 1362
1361 1363 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
1362 1364 PullRequestModel().get_reviewer_functions()
1363 1365
1364 1366 if role == PullRequestReviewers.ROLE_REVIEWER:
1365 1367 try:
1366 1368 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1367 1369 except ValueError as e:
1368 1370 log.error('Reviewers Validation: {}'.format(e))
1369 1371 h.flash(e, category='error')
1370 1372 return
1371 1373
1372 1374 old_calculated_status = pull_request.calculated_review_status()
1373 1375 PullRequestModel().update_reviewers(
1374 pull_request, reviewers, self._rhodecode_user)
1376 pull_request, reviewers, self._rhodecode_db_user)
1375 1377
1376 1378 Session().commit()
1377 1379
1378 1380 msg = _('Pull request reviewers updated.')
1379 1381 h.flash(msg, category='success')
1380 1382 channelstream.pr_update_channelstream_push(
1381 1383 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1382 1384
1383 1385 # trigger status changed if change in reviewers changes the status
1384 1386 calculated_status = pull_request.calculated_review_status()
1385 1387 if old_calculated_status != calculated_status:
1386 1388 PullRequestModel().trigger_pull_request_hook(
1387 1389 pull_request, self._rhodecode_user, 'review_status_change',
1388 1390 data={'status': calculated_status})
1389 1391
1390 1392 elif role == PullRequestReviewers.ROLE_OBSERVER:
1391 1393 try:
1392 1394 observers = validate_observers(review_members, reviewer_rules)
1393 1395 except ValueError as e:
1394 1396 log.error('Observers Validation: {}'.format(e))
1395 1397 h.flash(e, category='error')
1396 1398 return
1397 1399
1398 1400 PullRequestModel().update_observers(
1399 pull_request, observers, self._rhodecode_user)
1401 pull_request, observers, self._rhodecode_db_user)
1400 1402
1401 1403 Session().commit()
1402 1404 msg = _('Pull request observers updated.')
1403 1405 h.flash(msg, category='success')
1404 1406 channelstream.pr_update_channelstream_push(
1405 1407 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1406 1408
1407 1409 @LoginRequired()
1408 1410 @NotAnonymous()
1409 1411 @HasRepoPermissionAnyDecorator(
1410 1412 'repository.read', 'repository.write', 'repository.admin')
1411 1413 @CSRFRequired()
1412 1414 @view_config(
1413 1415 route_name='pullrequest_merge', request_method='POST',
1414 1416 renderer='json_ext')
1415 1417 def pull_request_merge(self):
1416 1418 """
1417 1419 Merge will perform a server-side merge of the specified
1418 1420 pull request, if the pull request is approved and mergeable.
1419 1421 After successful merging, the pull request is automatically
1420 1422 closed, with a relevant comment.
1421 1423 """
1422 1424 pull_request = PullRequest.get_or_404(
1423 1425 self.request.matchdict['pull_request_id'])
1424 1426 _ = self.request.translate
1425 1427
1426 1428 if pull_request.is_state_changing():
1427 1429 log.debug('show: forbidden because pull request is in state %s',
1428 1430 pull_request.pull_request_state)
1429 1431 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1430 1432 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1431 1433 pull_request.pull_request_state)
1432 1434 h.flash(msg, category='error')
1433 1435 raise HTTPFound(
1434 1436 h.route_path('pullrequest_show',
1435 1437 repo_name=pull_request.target_repo.repo_name,
1436 1438 pull_request_id=pull_request.pull_request_id))
1437 1439
1438 1440 self.load_default_context()
1439 1441
1440 1442 with pull_request.set_state(PullRequest.STATE_UPDATING):
1441 1443 check = MergeCheck.validate(
1442 1444 pull_request, auth_user=self._rhodecode_user,
1443 1445 translator=self.request.translate)
1444 1446 merge_possible = not check.failed
1445 1447
1446 1448 for err_type, error_msg in check.errors:
1447 1449 h.flash(error_msg, category=err_type)
1448 1450
1449 1451 if merge_possible:
1450 1452 log.debug("Pre-conditions checked, trying to merge.")
1451 1453 extras = vcs_operation_context(
1452 1454 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1453 1455 username=self._rhodecode_db_user.username, action='push',
1454 1456 scm=pull_request.target_repo.repo_type)
1455 1457 with pull_request.set_state(PullRequest.STATE_UPDATING):
1456 1458 self._merge_pull_request(
1457 1459 pull_request, self._rhodecode_db_user, extras)
1458 1460 else:
1459 1461 log.debug("Pre-conditions failed, NOT merging.")
1460 1462
1461 1463 raise HTTPFound(
1462 1464 h.route_path('pullrequest_show',
1463 1465 repo_name=pull_request.target_repo.repo_name,
1464 1466 pull_request_id=pull_request.pull_request_id))
1465 1467
1466 1468 def _merge_pull_request(self, pull_request, user, extras):
1467 1469 _ = self.request.translate
1468 1470 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1469 1471
1470 1472 if merge_resp.executed:
1471 1473 log.debug("The merge was successful, closing the pull request.")
1472 1474 PullRequestModel().close_pull_request(
1473 1475 pull_request.pull_request_id, user)
1474 1476 Session().commit()
1475 1477 msg = _('Pull request was successfully merged and closed.')
1476 1478 h.flash(msg, category='success')
1477 1479 else:
1478 1480 log.debug(
1479 1481 "The merge was not successful. Merge response: %s", merge_resp)
1480 1482 msg = merge_resp.merge_status_message
1481 1483 h.flash(msg, category='error')
1482 1484
1483 1485 @LoginRequired()
1484 1486 @NotAnonymous()
1485 1487 @HasRepoPermissionAnyDecorator(
1486 1488 'repository.read', 'repository.write', 'repository.admin')
1487 1489 @CSRFRequired()
1488 1490 @view_config(
1489 1491 route_name='pullrequest_delete', request_method='POST',
1490 1492 renderer='json_ext')
1491 1493 def pull_request_delete(self):
1492 1494 _ = self.request.translate
1493 1495
1494 1496 pull_request = PullRequest.get_or_404(
1495 1497 self.request.matchdict['pull_request_id'])
1496 1498 self.load_default_context()
1497 1499
1498 1500 pr_closed = pull_request.is_closed()
1499 1501 allowed_to_delete = PullRequestModel().check_user_delete(
1500 1502 pull_request, self._rhodecode_user) and not pr_closed
1501 1503
1502 1504 # only owner can delete it !
1503 1505 if allowed_to_delete:
1504 1506 PullRequestModel().delete(pull_request, self._rhodecode_user)
1505 1507 Session().commit()
1506 1508 h.flash(_('Successfully deleted pull request'),
1507 1509 category='success')
1508 1510 raise HTTPFound(h.route_path('pullrequest_show_all',
1509 1511 repo_name=self.db_repo_name))
1510 1512
1511 1513 log.warning('user %s tried to delete pull request without access',
1512 1514 self._rhodecode_user)
1513 1515 raise HTTPNotFound()
1514 1516
1515 1517 @LoginRequired()
1516 1518 @NotAnonymous()
1517 1519 @HasRepoPermissionAnyDecorator(
1518 1520 'repository.read', 'repository.write', 'repository.admin')
1519 1521 @CSRFRequired()
1520 1522 @view_config(
1521 1523 route_name='pullrequest_comment_create', request_method='POST',
1522 1524 renderer='json_ext')
1523 1525 def pull_request_comment_create(self):
1524 1526 _ = self.request.translate
1525 1527
1526 1528 pull_request = PullRequest.get_or_404(
1527 1529 self.request.matchdict['pull_request_id'])
1528 1530 pull_request_id = pull_request.pull_request_id
1529 1531
1530 1532 if pull_request.is_closed():
1531 1533 log.debug('comment: forbidden because pull request is closed')
1532 1534 raise HTTPForbidden()
1533 1535
1534 1536 allowed_to_comment = PullRequestModel().check_user_comment(
1535 1537 pull_request, self._rhodecode_user)
1536 1538 if not allowed_to_comment:
1537 1539 log.debug('comment: forbidden because pull request is from forbidden repo')
1538 1540 raise HTTPForbidden()
1539 1541
1540 1542 c = self.load_default_context()
1541 1543
1542 1544 status = self.request.POST.get('changeset_status', None)
1543 1545 text = self.request.POST.get('text')
1544 1546 comment_type = self.request.POST.get('comment_type')
1545 1547 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1546 1548 close_pull_request = self.request.POST.get('close_pull_request')
1547 1549
1548 1550 # the logic here should work like following, if we submit close
1549 1551 # pr comment, use `close_pull_request_with_comment` function
1550 1552 # else handle regular comment logic
1551 1553
1552 1554 if close_pull_request:
1553 1555 # only owner or admin or person with write permissions
1554 1556 allowed_to_close = PullRequestModel().check_user_update(
1555 1557 pull_request, self._rhodecode_user)
1556 1558 if not allowed_to_close:
1557 1559 log.debug('comment: forbidden because not allowed to close '
1558 1560 'pull request %s', pull_request_id)
1559 1561 raise HTTPForbidden()
1560 1562
1561 1563 # This also triggers `review_status_change`
1562 1564 comment, status = PullRequestModel().close_pull_request_with_comment(
1563 1565 pull_request, self._rhodecode_user, self.db_repo, message=text,
1564 1566 auth_user=self._rhodecode_user)
1565 1567 Session().flush()
1568 is_inline = comment.is_inline
1566 1569
1567 1570 PullRequestModel().trigger_pull_request_hook(
1568 1571 pull_request, self._rhodecode_user, 'comment',
1569 1572 data={'comment': comment})
1570 1573
1571 1574 else:
1572 1575 # regular comment case, could be inline, or one with status.
1573 1576 # for that one we check also permissions
1574 1577
1575 1578 allowed_to_change_status = PullRequestModel().check_user_change_status(
1576 1579 pull_request, self._rhodecode_user)
1577 1580
1578 1581 if status and allowed_to_change_status:
1579 1582 message = (_('Status change %(transition_icon)s %(status)s')
1580 1583 % {'transition_icon': '>',
1581 1584 'status': ChangesetStatus.get_status_lbl(status)})
1582 1585 text = text or message
1583 1586
1584 1587 comment = CommentsModel().create(
1585 1588 text=text,
1586 1589 repo=self.db_repo.repo_id,
1587 1590 user=self._rhodecode_user.user_id,
1588 1591 pull_request=pull_request,
1589 1592 f_path=self.request.POST.get('f_path'),
1590 1593 line_no=self.request.POST.get('line'),
1591 1594 status_change=(ChangesetStatus.get_status_lbl(status)
1592 1595 if status and allowed_to_change_status else None),
1593 1596 status_change_type=(status
1594 1597 if status and allowed_to_change_status else None),
1595 1598 comment_type=comment_type,
1596 1599 resolves_comment_id=resolves_comment_id,
1597 1600 auth_user=self._rhodecode_user
1598 1601 )
1599 is_inline = bool(comment.f_path and comment.line_no)
1602 is_inline = comment.is_inline
1600 1603
1601 1604 if allowed_to_change_status:
1602 1605 # calculate old status before we change it
1603 1606 old_calculated_status = pull_request.calculated_review_status()
1604 1607
1605 1608 # get status if set !
1606 1609 if status:
1607 1610 ChangesetStatusModel().set_status(
1608 1611 self.db_repo.repo_id,
1609 1612 status,
1610 1613 self._rhodecode_user.user_id,
1611 1614 comment,
1612 1615 pull_request=pull_request
1613 1616 )
1614 1617
1615 1618 Session().flush()
1616 1619 # this is somehow required to get access to some relationship
1617 1620 # loaded on comment
1618 1621 Session().refresh(comment)
1619 1622
1620 1623 PullRequestModel().trigger_pull_request_hook(
1621 1624 pull_request, self._rhodecode_user, 'comment',
1622 1625 data={'comment': comment})
1623 1626
1624 1627 # we now calculate the status of pull request, and based on that
1625 1628 # calculation we set the commits status
1626 1629 calculated_status = pull_request.calculated_review_status()
1627 1630 if old_calculated_status != calculated_status:
1628 1631 PullRequestModel().trigger_pull_request_hook(
1629 1632 pull_request, self._rhodecode_user, 'review_status_change',
1630 1633 data={'status': calculated_status})
1631 1634
1632 1635 Session().commit()
1633 1636
1634 1637 data = {
1635 1638 'target_id': h.safeid(h.safe_unicode(
1636 1639 self.request.POST.get('f_path'))),
1637 1640 }
1638 1641 if comment:
1639 1642 c.co = comment
1640 1643 c.at_version_num = None
1641 1644 rendered_comment = render(
1642 1645 'rhodecode:templates/changeset/changeset_comment_block.mako',
1643 1646 self._get_template_context(c), self.request)
1644 1647
1645 1648 data.update(comment.get_dict())
1646 1649 data.update({'rendered_text': rendered_comment})
1647 1650
1648 1651 comment_broadcast_channel = channelstream.comment_channel(
1649 1652 self.db_repo_name, pull_request_obj=pull_request)
1650 1653
1651 1654 comment_data = data
1652 1655 comment_type = 'inline' if is_inline else 'general'
1653 1656 channelstream.comment_channelstream_push(
1654 1657 self.request, comment_broadcast_channel, self._rhodecode_user,
1655 1658 _('posted a new {} comment').format(comment_type),
1656 1659 comment_data=comment_data)
1657 1660
1658 1661 return data
1659 1662
1660 1663 @LoginRequired()
1661 1664 @NotAnonymous()
1662 1665 @HasRepoPermissionAnyDecorator(
1663 1666 'repository.read', 'repository.write', 'repository.admin')
1664 1667 @CSRFRequired()
1665 1668 @view_config(
1666 1669 route_name='pullrequest_comment_delete', request_method='POST',
1667 1670 renderer='json_ext')
1668 1671 def pull_request_comment_delete(self):
1669 1672 pull_request = PullRequest.get_or_404(
1670 1673 self.request.matchdict['pull_request_id'])
1671 1674
1672 1675 comment = ChangesetComment.get_or_404(
1673 1676 self.request.matchdict['comment_id'])
1674 1677 comment_id = comment.comment_id
1675 1678
1676 1679 if comment.immutable:
1677 1680 # don't allow deleting comments that are immutable
1678 1681 raise HTTPForbidden()
1679 1682
1680 1683 if pull_request.is_closed():
1681 1684 log.debug('comment: forbidden because pull request is closed')
1682 1685 raise HTTPForbidden()
1683 1686
1684 1687 if not comment:
1685 1688 log.debug('Comment with id:%s not found, skipping', comment_id)
1686 1689 # comment already deleted in another call probably
1687 1690 return True
1688 1691
1689 1692 if comment.pull_request.is_closed():
1690 1693 # don't allow deleting comments on closed pull request
1691 1694 raise HTTPForbidden()
1692 1695
1693 1696 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1694 1697 super_admin = h.HasPermissionAny('hg.admin')()
1695 1698 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1696 1699 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1697 1700 comment_repo_admin = is_repo_admin and is_repo_comment
1698 1701
1699 1702 if super_admin or comment_owner or comment_repo_admin:
1700 1703 old_calculated_status = comment.pull_request.calculated_review_status()
1701 1704 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1702 1705 Session().commit()
1703 1706 calculated_status = comment.pull_request.calculated_review_status()
1704 1707 if old_calculated_status != calculated_status:
1705 1708 PullRequestModel().trigger_pull_request_hook(
1706 1709 comment.pull_request, self._rhodecode_user, 'review_status_change',
1707 1710 data={'status': calculated_status})
1708 1711 return True
1709 1712 else:
1710 1713 log.warning('No permissions for user %s to delete comment_id: %s',
1711 1714 self._rhodecode_db_user, comment_id)
1712 1715 raise HTTPNotFound()
1713 1716
1714 1717 @LoginRequired()
1715 1718 @NotAnonymous()
1716 1719 @HasRepoPermissionAnyDecorator(
1717 1720 'repository.read', 'repository.write', 'repository.admin')
1718 1721 @CSRFRequired()
1719 1722 @view_config(
1720 1723 route_name='pullrequest_comment_edit', request_method='POST',
1721 1724 renderer='json_ext')
1722 1725 def pull_request_comment_edit(self):
1723 1726 self.load_default_context()
1724 1727
1725 1728 pull_request = PullRequest.get_or_404(
1726 1729 self.request.matchdict['pull_request_id']
1727 1730 )
1728 1731 comment = ChangesetComment.get_or_404(
1729 1732 self.request.matchdict['comment_id']
1730 1733 )
1731 1734 comment_id = comment.comment_id
1732 1735
1733 1736 if comment.immutable:
1734 1737 # don't allow deleting comments that are immutable
1735 1738 raise HTTPForbidden()
1736 1739
1737 1740 if pull_request.is_closed():
1738 1741 log.debug('comment: forbidden because pull request is closed')
1739 1742 raise HTTPForbidden()
1740 1743
1741 1744 if not comment:
1742 1745 log.debug('Comment with id:%s not found, skipping', comment_id)
1743 1746 # comment already deleted in another call probably
1744 1747 return True
1745 1748
1746 1749 if comment.pull_request.is_closed():
1747 1750 # don't allow deleting comments on closed pull request
1748 1751 raise HTTPForbidden()
1749 1752
1750 1753 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1751 1754 super_admin = h.HasPermissionAny('hg.admin')()
1752 1755 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1753 1756 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1754 1757 comment_repo_admin = is_repo_admin and is_repo_comment
1755 1758
1756 1759 if super_admin or comment_owner or comment_repo_admin:
1757 1760 text = self.request.POST.get('text')
1758 1761 version = self.request.POST.get('version')
1759 1762 if text == comment.text:
1760 1763 log.warning(
1761 1764 'Comment(PR): '
1762 1765 'Trying to create new version '
1763 1766 'with the same comment body {}'.format(
1764 1767 comment_id,
1765 1768 )
1766 1769 )
1767 1770 raise HTTPNotFound()
1768 1771
1769 1772 if version.isdigit():
1770 1773 version = int(version)
1771 1774 else:
1772 1775 log.warning(
1773 1776 'Comment(PR): Wrong version type {} {} '
1774 1777 'for comment {}'.format(
1775 1778 version,
1776 1779 type(version),
1777 1780 comment_id,
1778 1781 )
1779 1782 )
1780 1783 raise HTTPNotFound()
1781 1784
1782 1785 try:
1783 1786 comment_history = CommentsModel().edit(
1784 1787 comment_id=comment_id,
1785 1788 text=text,
1786 1789 auth_user=self._rhodecode_user,
1787 1790 version=version,
1788 1791 )
1789 1792 except CommentVersionMismatch:
1790 1793 raise HTTPConflict()
1791 1794
1792 1795 if not comment_history:
1793 1796 raise HTTPNotFound()
1794 1797
1795 1798 Session().commit()
1796 1799
1797 1800 PullRequestModel().trigger_pull_request_hook(
1798 1801 pull_request, self._rhodecode_user, 'comment_edit',
1799 1802 data={'comment': comment})
1800 1803
1801 1804 return {
1802 1805 'comment_history_id': comment_history.comment_history_id,
1803 1806 'comment_id': comment.comment_id,
1804 1807 'comment_version': comment_history.version,
1805 1808 'comment_author_username': comment_history.author.username,
1806 1809 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
1807 1810 'comment_created_on': h.age_component(comment_history.created_on,
1808 1811 time_is_local=True),
1809 1812 }
1810 1813 else:
1811 1814 log.warning('No permissions for user %s to edit comment_id: %s',
1812 1815 self._rhodecode_db_user, comment_id)
1813 1816 raise HTTPNotFound()
@@ -1,1925 +1,1948 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24 import os
25 25 import re
26 26 import time
27 27 import shutil
28 28 import datetime
29 29 import fnmatch
30 30 import itertools
31 31 import logging
32 32 import collections
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from pyramid import compat
38 38
39 39 import rhodecode
40 40 from rhodecode.translation import lazy_ugettext
41 41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 42 from rhodecode.lib.vcs import connection
43 43 from rhodecode.lib.vcs.utils import author_name, author_email
44 44 from rhodecode.lib.vcs.conf import settings
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 50 RepositoryError)
51 51
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 FILEMODE_DEFAULT = 0o100644
57 57 FILEMODE_EXECUTABLE = 0o100755
58 58 EMPTY_COMMIT_ID = '0' * 40
59 59
60 60 _Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61 61
62 62
63 63 class Reference(_Reference):
64 64
65 65 @property
66 66 def branch(self):
67 67 if self.type == 'branch':
68 68 return self.name
69 69
70 70 @property
71 71 def bookmark(self):
72 72 if self.type == 'book':
73 73 return self.name
74 74
75 75
76 def unicode_to_reference(raw):
77 """
78 Convert a unicode (or string) to a reference object.
79 If unicode evaluates to False it returns None.
80 """
81 if raw:
82 refs = raw.split(':')
83 return Reference(*refs)
84 else:
85 return None
86
87
88 def reference_to_unicode(ref):
89 """
90 Convert a reference object to unicode.
91 If reference is None it returns None.
92 """
93 if ref:
94 return u':'.join(ref)
95 else:
96 return None
97
98
76 99 class MergeFailureReason(object):
77 100 """
78 101 Enumeration with all the reasons why the server side merge could fail.
79 102
80 103 DO NOT change the number of the reasons, as they may be stored in the
81 104 database.
82 105
83 106 Changing the name of a reason is acceptable and encouraged to deprecate old
84 107 reasons.
85 108 """
86 109
87 110 # Everything went well.
88 111 NONE = 0
89 112
90 113 # An unexpected exception was raised. Check the logs for more details.
91 114 UNKNOWN = 1
92 115
93 116 # The merge was not successful, there are conflicts.
94 117 MERGE_FAILED = 2
95 118
96 119 # The merge succeeded but we could not push it to the target repository.
97 120 PUSH_FAILED = 3
98 121
99 122 # The specified target is not a head in the target repository.
100 123 TARGET_IS_NOT_HEAD = 4
101 124
102 125 # The source repository contains more branches than the target. Pushing
103 126 # the merge will create additional branches in the target.
104 127 HG_SOURCE_HAS_MORE_BRANCHES = 5
105 128
106 129 # The target reference has multiple heads. That does not allow to correctly
107 130 # identify the target location. This could only happen for mercurial
108 131 # branches.
109 132 HG_TARGET_HAS_MULTIPLE_HEADS = 6
110 133
111 134 # The target repository is locked
112 135 TARGET_IS_LOCKED = 7
113 136
114 137 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
115 138 # A involved commit could not be found.
116 139 _DEPRECATED_MISSING_COMMIT = 8
117 140
118 141 # The target repo reference is missing.
119 142 MISSING_TARGET_REF = 9
120 143
121 144 # The source repo reference is missing.
122 145 MISSING_SOURCE_REF = 10
123 146
124 147 # The merge was not successful, there are conflicts related to sub
125 148 # repositories.
126 149 SUBREPO_MERGE_FAILED = 11
127 150
128 151
129 152 class UpdateFailureReason(object):
130 153 """
131 154 Enumeration with all the reasons why the pull request update could fail.
132 155
133 156 DO NOT change the number of the reasons, as they may be stored in the
134 157 database.
135 158
136 159 Changing the name of a reason is acceptable and encouraged to deprecate old
137 160 reasons.
138 161 """
139 162
140 163 # Everything went well.
141 164 NONE = 0
142 165
143 166 # An unexpected exception was raised. Check the logs for more details.
144 167 UNKNOWN = 1
145 168
146 169 # The pull request is up to date.
147 170 NO_CHANGE = 2
148 171
149 172 # The pull request has a reference type that is not supported for update.
150 173 WRONG_REF_TYPE = 3
151 174
152 175 # Update failed because the target reference is missing.
153 176 MISSING_TARGET_REF = 4
154 177
155 178 # Update failed because the source reference is missing.
156 179 MISSING_SOURCE_REF = 5
157 180
158 181
class MergeResponse(object):
    """
    Outcome of a merge (or merge simulation).

    Carries whether the merge is possible / was executed, the resulting merge
    reference, a ``MergeFailureReason`` code, and free-form ``metadata`` used
    to interpolate the user-facing status message.
    """

    # uses .format(**metadata) for variables
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            u'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            u'This pull request cannot be merged because of an unhandled exception. '
            u'{exception}'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            u'This pull request could not be merged because push to '
            u'target:`{target}@{merge_commit}` failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'`{target_ref.name}` is not a head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            u'This pull request cannot be merged because the source contains '
            u'more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            u'This pull request cannot be merged because the target `{target_ref.name}` '
            u'has multiple heads: `{heads}`.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            u'This pull request cannot be merged because the target repository is '
            u'locked by {locked_by}.'),

        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'reference `{target_ref.name}` is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            u'This pull request cannot be merged because the source '
            u'reference `{source_ref.name}` is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of conflicts related '
            u'to sub repositories.'),

        # Deprecations
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            u'This pull request cannot be merged because the target or the '
            u'source reference is missing.'),

    }

    def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
        self.possible = possible
        self.executed = executed
        self.merge_ref = merge_ref
        self.failure_reason = failure_reason
        self.metadata = metadata or {}

    def __repr__(self):
        return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)

    def __eq__(self, other):
        # responses compare by outcome, not by merge_ref or metadata
        same_instance = isinstance(other, self.__class__)
        return same_instance \
            and self.possible == other.possible \
            and self.executed == other.executed \
            and self.failure_reason == other.failure_reason

    def __ne__(self, other):
        # Python 2 does not derive `!=` from `__eq__`; define it explicitly,
        # consistent with BaseRepository.__ne__ in this module.
        return not self.__eq__(other)

    @property
    def label(self):
        # reverse-map the numeric failure reason back to its constant name,
        # skipping private/deprecated entries (leading underscore)
        label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
                          not k.startswith('_'))
        return label_dict.get(self.failure_reason)

    @property
    def merge_status_message(self):
        """
        Return a human friendly error message for the given merge status code.
        """
        msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])

        try:
            return msg.format(**self.metadata)
        except Exception:
            # a missing metadata key must never break message rendering
            log.exception('Failed to format %s message', self)
            return msg

    def asdict(self):
        """Serialize the response to a plain dict (e.g. for API output)."""
        data = {}
        for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
                  'merge_status_message']:
            data[k] = getattr(self, k)
        return data
245 268
246 269
class TargetRefMissing(ValueError):
    """Signals that a target reference could not be found (``ValueError``)."""
249 272
250 273
class SourceRefMissing(ValueError):
    """Signals that a source reference could not be found (``ValueError``)."""
253 276
254 277
class BaseRepository(object):
    """
    Base Repository for final backends

    .. attribute:: DEFAULT_BRANCH_NAME

        name of default branch (i.e. "trunk" for svn, "master" for git etc.)

    .. attribute:: commit_ids

        list of all available commit ids, in ascending order

    .. attribute:: path

        absolute path to the repository

    .. attribute:: bookmarks

        Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
        there are no bookmarks or the backend implementation does not support
        bookmarks.

    .. attribute:: tags

        Mapping from name to :term:`Commit ID` of the tag.

    """

    DEFAULT_BRANCH_NAME = None
    DEFAULT_CONTACT = u"Unknown"
    DEFAULT_DESCRIPTION = u"unknown"
    # all-zero 40-char id used to represent the "null"/empty commit
    EMPTY_COMMIT_ID = '0' * 40

    path = None

    # lazily computed emptiness flag, see is_empty()
    _is_empty = None
    # NOTE(review): class-level mutable default; presumably replaced with a
    # per-instance mapping by concrete backends -- confirm before relying on it
    _commit_ids = {}

    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be found at the given ``repo_path`` or if a directory at
        ``repo_path`` exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
          would be cloned; requires ``create`` parameter to be set to True -
          raises RepositoryError if src_url is set and create evaluates to
          False
        """
        raise NotImplementedError

    def __repr__(self):
        return '<%s at %s>' % (self.__class__.__name__, self.path)

    def __len__(self):
        # number of commits in this repository
        return self.count()

    def __eq__(self, other):
        # repositories are equal when they are the same backend at the same path
        same_instance = isinstance(other, self.__class__)
        return same_instance and other.path == self.path

    def __ne__(self, other):
        return not self.__eq__(other)

    def get_create_shadow_cache_pr_path(self, db_repo):
        # ensure (and return) the cached-diffs directory of the db repo
        path = db_repo.cached_diffs_dir
        if not os.path.exists(path):
            os.makedirs(path, 0o755)
        return path

    @classmethod
    def get_default_config(cls, default=None):
        """
        Returns a ``Config`` object, optionally pre-populated from `default`,
        a list of ``(section, key, value)`` tuples.
        """
        config = Config()
        if default and isinstance(default, list):
            for section, key, val in default:
                config.set(section, key, val)
        return config

    @LazyProperty
    def _remote(self):
        # remote (vcsserver-side) counterpart; concrete backends must provide it
        raise NotImplementedError

    def _heads(self, branch=None):
        return []

    @LazyProperty
    def EMPTY_COMMIT(self):
        # sentinel commit representing "no commit" / empty repository state
        return EmptyCommit(self.EMPTY_COMMIT_ID)

    @LazyProperty
    def alias(self):
        # backend alias resolved by matching our class name in settings.BACKENDS
        for k, v in settings.BACKENDS.items():
            if v.split('.')[-1] == str(self.__class__.__name__):
                return k

    @LazyProperty
    def name(self):
        # repository name taken from the last path segment
        return safe_unicode(os.path.basename(self.path))

    @LazyProperty
    def description(self):
        raise NotImplementedError

    def refs(self):
        """
        returns a `dict` with branches, bookmarks, tags, and closed_branches
        for this repository
        """
        return dict(
            branches=self.branches,
            branches_closed=self.branches_closed,
            tags=self.tags,
            bookmarks=self.bookmarks
        )

    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def branches_closed(self):
        """
        A `dict` which maps closed branch names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps bookmark names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def tags(self):
        """
        A `dict` which maps tag names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def size(self):
        """
        Returns combined size in bytes for all repository files
        """
        tip = self.get_commit()
        return tip.size

    def size_at_commit(self, commit_id):
        # combined size in bytes of all files at the given commit
        commit = self.get_commit(commit_id)
        return commit.size

    def _check_for_empty(self):
        no_commits = len(self._commit_ids) == 0
        if no_commits:
            # check on remote to be sure
            return self._remote.is_empty()
        else:
            return False

    def is_empty(self):
        if rhodecode.is_test:
            return self._check_for_empty()

        if self._is_empty is None:
            # cache empty for production, but not tests
            self._is_empty = self._check_for_empty()

        return self._is_empty

    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link.
        """
        raise NotImplementedError

    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend
        """
        raise NotImplementedError

    # ==========================================================================
    # COMMITS
    # ==========================================================================

    @CachedProperty
    def commit_ids(self):
        # list of all commit ids in ascending order; cached via CachedProperty
        raise NotImplementedError

    def append_commit_id(self, commit_id):
        # register a freshly created commit id and refresh dependent caches
        if commit_id not in self.commit_ids:
            self._rebuild_cache(self.commit_ids + [commit_id])

        # clear cache
        self._invalidate_prop_cache('commit_ids')
        self._is_empty = False

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError

    def __iter__(self):
        for commit_id in self.commit_ids:
            yield self.get_commit(commit_id=commit_id)

    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date:
        :param end_date:
        :param branch_name:
        :param show_hidden:
        :param pre_load:
        :param translate_tags:
        """
        raise NotImplementedError

    def __getitem__(self, key):
        """
        Allows index based access to the commit objects of this repository.
        """
        pre_load = ["author", "branch", "date", "message", "parents"]
        if isinstance(key, slice):
            return self._get_range(key, pre_load)
        return self.get_commit(commit_idx=key, pre_load=pre_load)

    def _get_range(self, slice_obj, pre_load):
        # lazily yield the commits selected by the given slice
        for commit_id in self.commit_ids.__getitem__(slice_obj):
            yield self.get_commit(commit_id=commit_id, pre_load=pre_load)

    def count(self):
        return len(self.commit_ids)

    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        raise NotImplementedError

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        raise NotImplementedError

    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
          file. If `path1` is also set, this value is only associated to
          `commit2`.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
          parameter works only for backends which support diff generation for
          different paths. Other backends will raise a `ValueError` if `path1`
          is set and has a different value than `path`.
        """
        raise NotImplementedError

    def strip(self, commit_id, branch=None):
        """
        Strip given commit_id from the repository
        """
        raise NotImplementedError

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return a latest common ancestor commit if one exists for this repo
        `commit_id1` vs `commit_id2` from `repo2`.

        :param commit_id1: Commit id from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        """
        raise NotImplementedError

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. Doing a normal compare (``merge=False``), ``None``
        will be returned as ancestor.

        :param commit_id1: Commit id from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
          returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
              user_name='', user_email='', message='', dry_run=False,
              use_rebase=False, close_branch=False):
        """
        Merge the revisions specified in `source_ref` from `source_repo`
        onto the `target_ref` of this repository.

        `source_ref` and `target_ref` are named tuples with the following
        fields `type`, `name` and `commit_id`.

        Returns a MergeResponse named tuple with the following fields
        'possible', 'executed', 'source_commit', 'target_commit',
        'merge_commit'.

        :param repo_id: `repo_id` target repo id.
        :param workspace_id: `workspace_id` unique identifier.
        :param target_ref: `target_ref` points to the commit on top of which
          the `source_ref` should be merged.
        :param source_repo: The repository that contains the commits to be
          merged.
        :param source_ref: `source_ref` points to the topmost commit from
          the `source_repo` which should be merged.
        :param user_name: Merge commit `user_name`.
        :param user_email: Merge commit `user_email`.
        :param message: Merge commit `message`.
        :param dry_run: If `True` the merge will not take place.
        :param use_rebase: If `True` commits from the source will be rebased
          on top of the target instead of being merged.
        :param close_branch: If `True` branch will be close before merging it
        """
        if dry_run:
            # dry-runs may omit identity/message; substitute safe defaults
            message = message or settings.MERGE_DRY_RUN_MESSAGE
            user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
            user_name = user_name or settings.MERGE_DRY_RUN_USER
        else:
            # real merges require full committer identity and a message
            if not user_name:
                raise ValueError('user_name cannot be empty')
            if not user_email:
                raise ValueError('user_email cannot be empty')
            if not message:
                raise ValueError('message cannot be empty')

        try:
            return self._merge_repo(
                repo_id, workspace_id, target_ref, source_repo,
                source_ref, message, user_name, user_email, dry_run=dry_run,
                use_rebase=use_rebase, close_branch=close_branch)
        except RepositoryError as exc:
            # never propagate backend failures; report them as a failed merge
            log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
            return MergeResponse(
                False, False, None, MergeFailureReason.UNKNOWN,
                metadata={'exception': str(exc)})

    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """Internal implementation of merge."""
        raise NotImplementedError

    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        """
        Create the merge workspace.

        :param workspace_id: `workspace_id` unique identifier.
        """
        raise NotImplementedError

    @classmethod
    def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
        """
        Legacy version that was used before. We still need it for
        backward compat
        """
        return os.path.join(
            os.path.dirname(repo_path),
            '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))

    @classmethod
    def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
        # The name of the shadow repository must start with '.', so it is
        # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
        # Prefer a pre-existing legacy-named shadow repo for backward compat.
        legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
        if os.path.exists(legacy_repository_path):
            return legacy_repository_path
        else:
            return os.path.join(
                os.path.dirname(repo_path),
                '.__shadow_repo_%s_%s' % (repo_id, workspace_id))

    def cleanup_merge_workspace(self, repo_id, workspace_id):
        """
        Remove merge workspace.

        This function MUST not fail in case there is no workspace associated to
        the given `workspace_id`.

        :param workspace_id: `workspace_id` unique identifier.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        # unique (timestamped) name for the doomed copy
        shadow_repository_path_del = '{}.{}.delete'.format(
            shadow_repository_path, time.time())

        # move the shadow repo, so it never conflicts with the one used.
        # we use this method because shutil.rmtree had some edge case problems
        # removing symlinked repositories
        if not os.path.isdir(shadow_repository_path):
            return

        shutil.move(shadow_repository_path, shadow_repository_path_del)
        try:
            shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
        except Exception:
            # best effort: retry with errors suppressed, workspace is gone anyway
            log.exception('Failed to gracefully remove shadow repo under %s',
                          shadow_repository_path_del)
            shutil.rmtree(shadow_repository_path_del, ignore_errors=True)

    # ========== #
    # COMMIT API #
    # ========== #

    @LazyProperty
    def in_memory_commit(self):
        """
        Returns :class:`InMemoryCommit` object for this repository.
        """
        raise NotImplementedError

    # ======================== #
    # UTILITIES FOR SUBCLASSES #
    # ======================== #

    def _validate_diff_commits(self, commit1, commit2):
        """
        Validates that the given commits are related to this repository.

        Intended as a utility for sub classes to have a consistent validation
        of input parameters in methods like :meth:`get_diff`.
        """
        self._validate_commit(commit1)
        self._validate_commit(commit2)
        if (isinstance(commit1, EmptyCommit) and
                isinstance(commit2, EmptyCommit)):
            raise ValueError("Cannot compare two empty commits")

    def _validate_commit(self, commit):
        if not isinstance(commit, BaseCommit):
            raise TypeError(
                "%s is not of type BaseCommit" % repr(commit))
        if commit.repository != self and not isinstance(commit, EmptyCommit):
            raise ValueError(
                "Commit %s must be a valid commit from this repository %s, "
                "related to this repository instead %s." %
                (commit, self, commit.repository))

    def _validate_commit_id(self, commit_id):
        if not isinstance(commit_id, compat.string_types):
            raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id)))

    def _validate_commit_idx(self, commit_idx):
        # `long` only exists on Python 2; this module uses Py2 idioms throughout
        if not isinstance(commit_idx, (int, long)):
            raise TypeError("commit_idx must be a numeric value")

    def _validate_branch_name(self, branch_name):
        if branch_name and branch_name not in self.branches_all:
            msg = ("Branch %s not found in %s" % (branch_name, self))
            raise BranchDoesNotExistError(msg)

    #
    # Supporting deprecated API parts
    # TODO: johbo: consider to move this into a mixin
    #

    @property
    def EMPTY_CHANGESET(self):
        warnings.warn(
            "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
        return self.EMPTY_COMMIT_ID

    @property
    def revisions(self):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        return self.commit_ids

    @revisions.setter
    def revisions(self, value):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        self.commit_ids = value

    def get_changeset(self, revision=None, pre_load=None):
        # deprecated: `revision` may be a commit id (string) or an index (int)
        warnings.warn("Use get_commit instead", DeprecationWarning)
        commit_id = None
        commit_idx = None
        if isinstance(revision, compat.string_types):
            commit_id = revision
        else:
            commit_idx = revision
        return self.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)

    def get_changesets(
            self, start=None, end=None, start_date=None, end_date=None,
            branch_name=None, pre_load=None):
        # deprecated: translates revisions to commit ids, then delegates
        warnings.warn("Use get_commits instead", DeprecationWarning)
        start_id = self._revision_to_commit(start)
        end_id = self._revision_to_commit(end)
        return self.get_commits(
            start_id=start_id, end_id=end_id, start_date=start_date,
            end_date=end_date, branch_name=branch_name, pre_load=pre_load)

    def _revision_to_commit(self, revision):
        """
        Translates a revision to a commit_id

        Helps to support the old changeset based API which allows to use
        commit ids and commit indices interchangeable.
        """
        if revision is None:
            return revision

        if isinstance(revision, compat.string_types):
            commit_id = revision
        else:
            commit_id = self.commit_ids[revision]
        return commit_id

    @property
    def in_memory_changeset(self):
        warnings.warn("Use in_memory_commit instead", DeprecationWarning)
        return self.in_memory_commit

    def get_path_permissions(self, username):
        """
        Returns a path permission checker or None if not supported

        :param username: session user name
        :return: an instance of BasePathPermissionChecker or None
        """
        return None

    def install_hooks(self, force=False):
        return self._remote.install_hooks(force)

    def get_hooks_info(self):
        return self._remote.get_hooks_info()
849 872
850 873
851 874 class BaseCommit(object):
852 875 """
853 876 Each backend should implement it's commit representation.
854 877
855 878 **Attributes**
856 879
857 880 ``repository``
858 881 repository object within which commit exists
859 882
860 883 ``id``
861 884 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
862 885 just ``tip``.
863 886
864 887 ``raw_id``
865 888 raw commit representation (i.e. full 40 length sha for git
866 889 backend)
867 890
868 891 ``short_id``
869 892 shortened (if apply) version of ``raw_id``; it would be simple
870 893 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
871 894 as ``raw_id`` for subversion
872 895
873 896 ``idx``
874 897 commit index
875 898
876 899 ``files``
877 900 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
878 901
879 902 ``dirs``
880 903 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
881 904
882 905 ``nodes``
883 906 combined list of ``Node`` objects
884 907
885 908 ``author``
886 909 author of the commit, as unicode
887 910
888 911 ``message``
889 912 message of the commit, as unicode
890 913
891 914 ``parents``
892 915 list of parent commits
893 916
894 917 """
895 918
896 919 branch = None
897 920 """
898 921 Depending on the backend this should be set to the branch name of the
899 922 commit. Backends not supporting branches on commits should leave this
900 923 value as ``None``.
901 924 """
902 925
903 926 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
904 927 """
905 928 This template is used to generate a default prefix for repository archives
906 929 if no prefix has been specified.
907 930 """
908 931
909 932 def __str__(self):
910 933 return '<%s at %s:%s>' % (
911 934 self.__class__.__name__, self.idx, self.short_id)
912 935
    def __repr__(self):
        # same text as __str__, reused for the repr as well
        return self.__str__()
915 938
916 939 def __unicode__(self):
917 940 return u'%s:%s' % (self.idx, self.short_id)
918 941
919 942 def __eq__(self, other):
920 943 same_instance = isinstance(other, self.__class__)
921 944 return same_instance and self.raw_id == other.raw_id
922 945
923 946 def __json__(self):
924 947 parents = []
925 948 try:
926 949 for parent in self.parents:
927 950 parents.append({'raw_id': parent.raw_id})
928 951 except NotImplementedError:
929 952 # empty commit doesn't have parents implemented
930 953 pass
931 954
932 955 return {
933 956 'short_id': self.short_id,
934 957 'raw_id': self.raw_id,
935 958 'revision': self.idx,
936 959 'message': self.message,
937 960 'date': self.date,
938 961 'author': self.author,
939 962 'parents': parents,
940 963 'branch': self.branch
941 964 }
942 965
943 966 def __getstate__(self):
944 967 d = self.__dict__.copy()
945 968 d.pop('_remote', None)
946 969 d.pop('repository', None)
947 970 return d
948 971
    def serialize(self):
        """Public alias for :meth:`__json__`."""
        return self.__json__()
951 974
952 975 def _get_refs(self):
953 976 return {
954 977 'branches': [self.branch] if self.branch else [],
955 978 'bookmarks': getattr(self, 'bookmarks', []),
956 979 'tags': self.tags
957 980 }
958 981
    @LazyProperty
    def last(self):
        """
        ``True`` if this is last commit in repository, ``False``
        otherwise; trying to access this attribute while there is no
        commits would raise `EmptyRepositoryError`
        """
        if self.repository is None:
            raise CommitError("Cannot check if it's most recent commit")
        # commit_ids is ascending, so the newest commit is the last entry
        return self.raw_id == self.repository.commit_ids[-1]
969 992
    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        Abstract; concrete backends must implement it.
        """
        raise NotImplementedError
976 999
    @LazyProperty
    def first_parent(self):
        """
        Returns the first parent commit, or an ``EmptyCommit``
        when this commit has no parents.
        """
        return self.parents[0] if self.parents else EmptyCommit()
983 1006
    @property
    def merge(self):
        """
        Returns boolean if commit is a merge.
        """
        # a merge commit is any commit with more than one parent
        return len(self.parents) > 1
990 1013
    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        Abstract; concrete backends must implement it.
        """
        raise NotImplementedError
997 1020
    @LazyProperty
    def id(self):
        """
        Returns string identifying this commit.
        Abstract; concrete backends must implement it.
        """
        raise NotImplementedError
1004 1027
    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit.
        Abstract; concrete backends must implement it.
        """
        raise NotImplementedError
1011 1034
    @LazyProperty
    def short_id(self):
        """
        Returns shortened version of ``raw_id`` attribute, as string,
        identifying this commit, useful for presentation to users.
        Abstract; concrete backends must implement it.
        """
        raise NotImplementedError
1019 1042
    @LazyProperty
    def idx(self):
        """
        Returns integer identifying this commit.
        Abstract; concrete backends must implement it.
        """
        raise NotImplementedError
1026 1049
    @LazyProperty
    def committer(self):
        """
        Returns committer for this commit
        Abstract; concrete backends must implement it.
        """
        raise NotImplementedError
1033 1056
    @LazyProperty
    def committer_name(self):
        """
        Returns committer name for this commit
        """

        # derived from ``committer`` via the module's author_name helper
        return author_name(self.committer)
1041 1064
    @LazyProperty
    def committer_email(self):
        """
        Returns committer email address for this commit
        """

        # derived from ``committer`` via the module's author_email helper
        return author_email(self.committer)
1049 1072
    @LazyProperty
    def author(self):
        """
        Returns author for this commit
        Abstract; concrete backends must implement it.
        """

        raise NotImplementedError
1057 1080
    @LazyProperty
    def author_name(self):
        """
        Returns author name for this commit
        """

        # derived from ``author`` via the module's author_name helper
        return author_name(self.author)
1065 1088
    @LazyProperty
    def author_email(self):
        """
        Returns author email address for this commit
        """

        # derived from ``author`` via the module's author_email helper
        return author_email(self.author)
1073 1096
    def get_file_mode(self, path):
        """
        Returns stat mode of the file at `path`.
        Abstract; concrete backends must implement it.
        """
        raise NotImplementedError
1079 1102
    def is_link(self, path):
        """
        Returns ``True`` if given `path` is a symlink
        Abstract; concrete backends must implement it.
        """
        raise NotImplementedError
1085 1108
    def is_node_binary(self, path):
        """
        Returns ``True`` if the given `path` is a binary file.
        Abstract; concrete backends must implement it.
        """
        raise NotImplementedError
1091 1114
1092 1115 def get_file_content(self, path):
1093 1116 """
1094 1117 Returns content of the file at the given `path`.
1095 1118 """
1096 1119 raise NotImplementedError
1097 1120
1098 1121 def get_file_content_streamed(self, path):
1099 1122 """
1100 1123 returns a streaming response from vcsserver with file content
1101 1124 """
1102 1125 raise NotImplementedError
1103 1126
1104 1127 def get_file_size(self, path):
1105 1128 """
1106 1129 Returns size of the file at the given `path`.
1107 1130 """
1108 1131 raise NotImplementedError
1109 1132
1110 1133 def get_path_commit(self, path, pre_load=None):
1111 1134 """
1112 1135 Returns last commit of the file at the given `path`.
1113 1136
1114 1137 :param pre_load: Optional. List of commit attributes to load.
1115 1138 """
1116 1139 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1117 1140 if not commits:
1118 1141 raise RepositoryError(
1119 1142 'Failed to fetch history for path {}. '
1120 1143 'Please check if such path exists in your repository'.format(
1121 1144 path))
1122 1145 return commits[0]
1123 1146
    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of :class:`BaseCommit`
        objects for which file at given `path` has been modified.

        :param limit: Optional. Allows to limit the size of the returned
           history. This is intended as a hint to the underlying backend, so
           that it can apply optimizations depending on the limit.
        :param pre_load: Optional. List of commit attributes to load.
        """
        # abstract: concrete backends must override
        raise NotImplementedError

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, sha, commit lazy loader and line

        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``.

        :raises ``CommitError``: if node at the given ``path`` is not
           instance of ``DirNode``
        """
        raise NotImplementedError

    def get_node(self, path):
        """
        Returns ``Node`` object from the given ``path``.

        :raises ``NodeDoesNotExistError``: if there is no node at the given
           ``path``
        """
        raise NotImplementedError

    def get_largefile_node(self, path):
        """
        Returns the path to largefile from Mercurial/Git-lfs storage.
        or None if it's not a largefile node
        """
        # base implementation: no largefile support
        return None
1170 1193
1171 1194 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1172 1195 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1173 1196 """
1174 1197 Creates an archive containing the contents of the repository.
1175 1198
1176 1199 :param archive_dest_path: path to the file which to create the archive.
1177 1200 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1178 1201 :param prefix: name of root directory in archive.
1179 1202 Default is repository name and commit's short_id joined with dash:
1180 1203 ``"{repo_name}-{short_id}"``.
1181 1204 :param write_metadata: write a metadata file into archive.
1182 1205 :param mtime: custom modification time for archive creation, defaults
1183 1206 to time.time() if not given.
1184 1207 :param archive_at_path: pack files at this path (default '/')
1185 1208
1186 1209 :raise VCSError: If prefix has a problem.
1187 1210 """
1188 1211 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1189 1212 if kind not in allowed_kinds:
1190 1213 raise ImproperArchiveTypeError(
1191 1214 'Archive kind (%s) not supported use one of %s' %
1192 1215 (kind, allowed_kinds))
1193 1216
1194 1217 prefix = self._validate_archive_prefix(prefix)
1195 1218
1196 1219 mtime = mtime is not None or time.mktime(self.date.timetuple())
1197 1220
1198 1221 file_info = []
1199 1222 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1200 1223 for _r, _d, files in cur_rev.walk(archive_at_path):
1201 1224 for f in files:
1202 1225 f_path = os.path.join(prefix, f.path)
1203 1226 file_info.append(
1204 1227 (f_path, f.mode, f.is_link(), f.raw_bytes))
1205 1228
1206 1229 if write_metadata:
1207 1230 metadata = [
1208 1231 ('repo_name', self.repository.name),
1209 1232 ('commit_id', self.raw_id),
1210 1233 ('mtime', mtime),
1211 1234 ('branch', self.branch),
1212 1235 ('tags', ','.join(self.tags)),
1213 1236 ]
1214 1237 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1215 1238 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1216 1239
1217 1240 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1218 1241
1219 1242 def _validate_archive_prefix(self, prefix):
1220 1243 if prefix is None:
1221 1244 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1222 1245 repo_name=safe_str(self.repository.name),
1223 1246 short_id=self.short_id)
1224 1247 elif not isinstance(prefix, str):
1225 1248 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1226 1249 elif prefix.startswith('/'):
1227 1250 raise VCSError("Prefix cannot start with leading slash")
1228 1251 elif prefix.strip() == '':
1229 1252 raise VCSError("Prefix cannot be empty")
1230 1253 return prefix
1231 1254
    @LazyProperty
    def root(self):
        """
        Returns ``RootNode`` object for this commit.
        """
        # the empty path resolves to the repository root node
        return self.get_node('')
1238 1261
    def next(self, branch=None):
        """
        Returns the next commit after this one; if ``branch`` is given, the
        next commit belonging to that branch.

        :param branch: show commits within the given named branch
        """
        # scan forward from the commit right after this one
        indexes = xrange(self.idx + 1, self.repository.count())
        return self._find_next(indexes, branch)

    def prev(self, branch=None):
        """
        Returns the previous commit before this one; if ``branch`` is given,
        the previous commit belonging to that branch.

        :param branch: show commit within the given named branch
        """
        # scan backwards from the commit right before this one
        indexes = xrange(self.idx - 1, -1, -1)
        return self._find_next(indexes, branch)
1258 1281
1259 1282 def _find_next(self, indexes, branch=None):
1260 1283 if branch and self.branch != branch:
1261 1284 raise VCSError('Branch option used on commit not belonging '
1262 1285 'to that branch')
1263 1286
1264 1287 for next_idx in indexes:
1265 1288 commit = self.repository.get_commit(commit_idx=next_idx)
1266 1289 if branch and branch != commit.branch:
1267 1290 continue
1268 1291 return commit
1269 1292 raise CommitDoesNotExistError
1270 1293
    def diff(self, ignore_whitespace=True, context=3):
        """
        Returns a `Diff` object representing the change made by this commit
        relative to its first parent.

        :param ignore_whitespace: skip whitespace-only changes
        :param context: number of context lines around each hunk
        """
        parent = self.first_parent
        diff = self.repository.get_diff(
            parent, self,
            ignore_whitespace=ignore_whitespace,
            context=context)
        return diff
1281 1304
    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        # abstract: concrete backends must override
        raise NotImplementedError

    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        raise NotImplementedError

    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        raise NotImplementedError
1302 1325
    @LazyProperty
    def size(self):
        """
        Returns total number of bytes from contents of all filenodes.
        """
        return sum((node.size for node in self.get_filenodes_generator()))
1309 1332
    def walk(self, topurl=''):
        """
        Similar to os.walk method. Instead of filesystem it walks through
        commit starting at given ``topurl``. Returns generator of tuples
        (topnode, dirnodes, filenodes).
        """
        topnode = self.get_node(topurl)
        # nothing to yield when the starting node is not a directory
        if not topnode.is_dir():
            return
        yield (topnode, topnode.dirs, topnode.files)
        for dirnode in topnode.dirs:
            # recurse depth-first into each sub-directory
            for tup in self.walk(dirnode.path):
                yield tup
1323 1346
    def get_filenodes_generator(self):
        """
        Returns generator that yields *all* file nodes.
        """
        # flatten the (topnode, dirs, files) triples produced by walk()
        for topnode, dirs, files in self.walk():
            for node in files:
                yield node
1331 1354
1332 1355 #
1333 1356 # Utilities for sub classes to support consistent behavior
1334 1357 #
1335 1358
    def no_node_at_path(self, path):
        # helper building a ready-to-raise exception with a uniform message
        return NodeDoesNotExistError(
            u"There is no file nor directory at the given path: "
            u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1340 1363
1341 1364 def _fix_path(self, path):
1342 1365 """
1343 1366 Paths are stored without trailing slash so we need to get rid off it if
1344 1367 needed.
1345 1368 """
1346 1369 return path.rstrip('/')
1347 1370
1348 1371 #
1349 1372 # Deprecated API based on changesets
1350 1373 #
1351 1374
    @property
    def revision(self):
        # deprecated alias of ``idx``
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value

    def get_file_changeset(self, path):
        # deprecated alias of ``get_path_commit``
        warnings.warn("Use get_path_commit instead", DeprecationWarning)
        return self.get_path_commit(path)
1365 1388
1366 1389
class BaseChangesetClass(type):
    """
    Metaclass making ``isinstance(obj, BaseChangeset)`` succeed for any
    :class:`BaseCommit` instance (backwards compatibility shim).
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1371 1394
1372 1395
class BaseChangeset(BaseCommit):
    """
    Deprecated alias of :class:`BaseCommit`; instantiating it emits a
    DeprecationWarning.
    """

    __metaclass__ = BaseChangesetClass  # Python 2 style metaclass hook

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1381 1404
1382 1405
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """

    def __init__(self, repository):
        self.repository = repository
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def add(self, *filenodes):
        """
        Marks given ``FileNode`` objects as *to be committed*.

        :raises ``NodeAlreadyExistsError``: if node with same path exists at
          latest commit
        :raises ``NodeAlreadyAddedError``: if node with same path is already
          marked as *added*
        """
        # Validate *all* nodes first so a failure leaves ``self.added`` intact
        for node in filenodes:
            if node.path in (n.path for n in self.added):
                raise NodeAlreadyAddedError(
                    "Such FileNode %s is already marked for addition"
                    % node.path)
        for node in filenodes:
            self.added.append(node)

    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyExistsError``: if node with same path is already
          marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
          marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
          commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
        try:
            self.repository.get_commit()
        except EmptyRepositoryError:
            raise EmptyRepositoryError(
                "Nothing to change - try to *add* new nodes rather than "
                "changing them")
        for node in filenodes:
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)

    def remove(self, *filenodes):
        """
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
        *removed* in next commit.

        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
          be *removed*
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
          be *changed*
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node is already marked to for removal at %s" % node.path)
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node is already marked to be changed at %s" % node.path)
            # We only mark node as *removed* - real removal is done by
            # commit method
            self.removed.append(node)

    def reset(self):
        """
        Resets this instance to initial state (cleans ``added``, ``changed``
        and ``removed`` lists).
        """
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def get_ipaths(self):
        """
        Returns generator of paths from nodes marked as added, changed or
        removed.
        """
        for node in itertools.chain(self.added, self.changed, self.removed):
            yield node.path

    def get_paths(self):
        """
        Returns list of paths from nodes marked as added, changed or removed.
        """
        return list(self.get_ipaths())

    def check_integrity(self, parents=None):
        """
        Checks in-memory commit's integrity. Also, sets parents if not
        already set.

        :raises CommitError: if any error occurs (i.e.
          ``NodeDoesNotExistError``).
        """
        if not self.parents:
            parents = parents or []
            if len(parents) == 0:
                try:
                    parents = [self.repository.get_commit(), None]
                except EmptyRepositoryError:
                    parents = [None, None]
            elif len(parents) == 1:
                parents += [None]
            self.parents = parents

        # Local parents, only if not None
        parents = [p for p in self.parents if p]

        # Check nodes marked as added
        for p in parents:
            for node in self.added:
                try:
                    p.get_node(node.path)
                except NodeDoesNotExistError:
                    pass
                else:
                    raise NodeAlreadyExistsError(
                        "Node `%s` already exists at %s" % (node.path, p))

        # Check nodes marked as changed
        missing = set(self.changed)
        not_changed = set(self.changed)
        if self.changed and not parents:
            raise NodeDoesNotExistError(str(self.changed[0].path))
        for p in parents:
            for node in self.changed:
                try:
                    old = p.get_node(node.path)
                    missing.remove(node)
                    # if content actually changed, remove node from not_changed
                    if old.content != node.content:
                        not_changed.remove(node)
                except NodeDoesNotExistError:
                    pass
        if self.changed and missing:
            # BUG FIX: previously this reported the stale loop variable
            # ``node`` (last node iterated above) instead of a node that is
            # actually missing from the parents.
            raise NodeDoesNotExistError(
                "Node `%s` marked as modified but missing in parents: %s"
                % (next(iter(missing)).path, parents))

        if self.changed and not_changed:
            raise NodeNotChangedError(
                "Node `%s` wasn't actually changed (parents: %s)"
                % (not_changed.pop().path, parents))

        # Check nodes marked as removed
        if self.removed and not parents:
            raise NodeDoesNotExistError(
                "Cannot remove node at %s as there "
                "were no parents specified" % self.removed[0].path)
        really_removed = set()
        for p in parents:
            for node in self.removed:
                try:
                    p.get_node(node.path)
                    really_removed.add(node)
                except CommitError:
                    pass
        not_removed = set(self.removed) - really_removed
        if not_removed:
            # TODO: johbo: This code branch does not seem to be covered
            raise NodeDoesNotExistError(
                "Cannot remove node at %s from "
                "following parents: %s" % (not_removed, parents))

    def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created :class:`BaseCommit`. Updates repository's
        attribute `commits`.

        .. note::

            While overriding this method each backend's should call
            ``self.check_integrity(parents)`` in the first place.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
          would be derived
        :param date: ``datetime.datetime`` instance. Defaults to
          ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
          branch would be used.

        :raises ``CommitError``: if any error occurs while committing
        """
        raise NotImplementedError
1609 1632
1610 1633
class BaseInMemoryChangesetClass(type):
    """
    Metaclass making ``isinstance(obj, BaseInMemoryChangeset)`` succeed for
    any :class:`BaseInMemoryCommit` instance (backwards compatibility shim).
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseInMemoryCommit)
1615 1638
1616 1639
class BaseInMemoryChangeset(BaseInMemoryCommit):
    """
    Deprecated alias of :class:`BaseInMemoryCommit`; instantiating it emits a
    DeprecationWarning.
    """

    __metaclass__ = BaseInMemoryChangesetClass  # Python 2 style metaclass hook

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1625 1648
1626 1649
class EmptyCommit(BaseCommit):
    """
    A dummy empty commit. It's possible to pass a hash when creating
    an EmptyCommit; ``EMPTY_COMMIT_ID`` is used by default.
    """

    def __init__(
            self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        # epoch serves as a deterministic fallback date
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit, useful for web
        representation.
        """

        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # backend's default branch name, or None when no alias is set
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        return self.raw_id

    def get_path_commit(self, path):
        # an empty commit is its own "last commit" for any path
        return self

    def get_file_content(self, path):
        return u''

    def get_file_content_streamed(self, path):
        yield self.get_file_content()

    def get_file_size(self, path):
        return 0
1680 1703
1681 1704
class EmptyChangesetClass(type):
    """
    Metaclass making ``isinstance(obj, EmptyChangeset)`` succeed for any
    :class:`EmptyCommit` instance (backwards compatibility shim).
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, EmptyCommit)
1686 1709
1687 1710
class EmptyChangeset(EmptyCommit):
    """
    Deprecated alias of :class:`EmptyCommit`; instantiating it emits a
    DeprecationWarning. ``revision`` maps onto ``idx``.
    """

    __metaclass__ = EmptyChangesetClass  # Python 2 style metaclass hook

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # BUG FIX: was `super(EmptyCommit, cls)`, which skips the EmptyCommit
        # level of the MRO; name the defining class instead, consistent with
        # BaseChangeset/BaseInMemoryChangeset.
        return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        # deprecated alias of ``idx``
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1716 1739
1717 1740
class EmptyRepository(BaseRepository):
    """Null-object repository: skips backend setup and yields empty diffs."""

    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        # intentionally a no-op: there is no on-disk repository to initialize
        pass

    def get_diff(self, *args, **kwargs):
        # local import to avoid a module-level dependency on the git backend
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        return GitDiff('')
1725 1748
1726 1749
class CollectionGenerator(object):
    """
    Lazy, sliceable collection of commits identified by ``commit_ids``;
    commit objects are created on iteration via the repository.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # TODO: (oliver) this isn't currently hooked up
        # NOTE(review): the ``collection_size`` argument is deliberately
        # ignored here per the TODO above — confirm this is still intended.
        self.collection_size = None
        self.pre_load = pre_load
        self.translate_tag = translate_tag

    def __len__(self):
        if self.collection_size is not None:
            return self.collection_size
        return self.commit_ids.__len__()

    def __iter__(self):
        for commit_id in self.commit_ids:
            # TODO: johbo: Mercurial passes in commit indices or commit ids
            yield self._commit_factory(commit_id)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __getslice__(self, i, j):
        """
        Returns an iterator of sliced repository

        NOTE: ``__getslice__`` is a Python 2 only protocol method.
        """
        commit_ids = self.commit_ids[i:j]
        return self.__class__(
            self.repo, commit_ids, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (self.__len__())
1766 1789
1767 1790
class Config(object):
    """
    Represents the configuration for a repository.

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        # mapping of section name -> {option name -> value}
        self._values = {}

    def copy(self):
        """Return an independent copy; each section dict is duplicated."""
        duplicate = Config()
        for section_name, section_options in self._values.items():
            duplicate._values[section_name] = section_options.copy()
        return duplicate

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        """Iterate over (option, value) pairs of ``section``."""
        return self._values.get(section, {}).iteritems()

    def get(self, section, option):
        """Return the value of ``option`` in ``section``, or None."""
        section_options = self._values.get(section, {})
        return section_options.get(option)

    def set(self, section, option, value):
        """Set ``option`` in ``section``, creating the section if needed."""
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        """Drop all options of ``section`` (the section itself remains)."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        serialized = []
        for section_name, section_options in self._values.items():
            for option_name, option_value in section_options.items():
                serialized.append(
                    (safe_str(section_name), safe_str(option_name),
                     safe_str(option_value)))
        return serialized
1813 1836
1814 1837
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    _meta_re = None
    _header_re = None

    def __init__(self, raw_diff):
        # raw textual diff as produced by the backend
        self.raw = raw_diff

    def chunks(self):
        """
        split the diff in chunks of separate --git a/file b/file chunks
        to make diffs consistent we must prepend with \n, and make sure
        we can detect last chunk as this was also has special rule
        """
        # CLEANUP: the previous version matched ``_meta_re`` against the
        # pre-"diff --git" header and discarded the result (dead code).

        diff_parts = ('\n' + self.raw).split('\ndiff --git')

        chunks = diff_parts[1:]
        total_chunks = len(chunks)

        # the last chunk gets a flag so DiffChunk can skip re-adding '\n'
        return (
            DiffChunk(chunk, self, cur_chunk == total_chunks)
            for cur_chunk, chunk in enumerate(chunks, start=1))
1847 1870
1848 1871
class DiffChunk(object):
    """
    One per-file chunk of a :class:`Diff`, split on ``diff --git`` markers.
    """

    def __init__(self, chunk, diff, last_chunk):
        self._diff = diff

        # since we split by \ndiff --git that part is lost from original diff
        # we need to re-apply it at the end, EXCEPT ! if it's last chunk
        if not last_chunk:
            chunk += '\n'

        # parse the per-file header via the backend specific regex
        match = self._diff._header_re.match(chunk)
        self.header = match.groupdict()
        self.diff = chunk[match.end():]
        self.raw = chunk
1863 1886
1864 1887
class BasePathPermissionChecker(object):
    """
    Base class for path-level permission checks; use
    :meth:`create_from_patterns` to obtain a concrete checker.
    """

    @staticmethod
    def create_from_patterns(includes, excludes):
        """Pick the cheapest checker matching the given glob patterns."""
        if includes and '*' in includes and not excludes:
            return AllPathPermissionChecker()
        elif excludes and '*' in excludes:
            return NonePathPermissionChecker()
        else:
            return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        # BUG FIX: was `raise NotImplemented()` — ``NotImplemented`` is a
        # constant, not an exception; calling it raised a confusing TypeError.
        raise NotImplementedError()

    def has_access(self, path):
        raise NotImplementedError()
1882 1905
1883 1906
class AllPathPermissionChecker(BasePathPermissionChecker):
    """Checker granting access to every path."""

    @property
    def has_full_access(self):
        return True

    def has_access(self, path):
        return True
1892 1915
1893 1916
class NonePathPermissionChecker(BasePathPermissionChecker):
    """Checker denying access to every path."""

    @property
    def has_full_access(self):
        return False

    def has_access(self, path):
        return False
1902 1925
1903 1926
class PatternPathPermissionChecker(BasePathPermissionChecker):
    """
    Checker matching paths against glob include/exclude patterns; excludes
    take precedence and unmatched paths are denied.
    """

    def __init__(self, includes, excludes):
        # either argument may be None (see create_from_patterns)
        self.includes = includes
        self.excludes = excludes
        self.includes_re = [] if not includes else [
            re.compile(fnmatch.translate(pattern)) for pattern in includes]
        self.excludes_re = [] if not excludes else [
            re.compile(fnmatch.translate(pattern)) for pattern in excludes]

    @property
    def has_full_access(self):
        # BUG FIX: guard against includes being None — the old
        # `'*' in self.includes` raised TypeError in that case.
        return bool(
            self.includes and '*' in self.includes and not self.excludes)

    def has_access(self, path):
        # excludes win over includes
        for regex in self.excludes_re:
            if regex.match(path):
                return False
        for regex in self.includes_re:
            if regex.match(path):
                return True
        return False
@@ -1,5794 +1,5783 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import string
29 29 import hashlib
30 30 import logging
31 31 import datetime
32 32 import uuid
33 33 import warnings
34 34 import ipaddress
35 35 import functools
36 36 import traceback
37 37 import collections
38 38
39 39 from sqlalchemy import (
40 40 or_, and_, not_, func, cast, TypeDecorator, event,
41 41 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
42 42 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
43 43 Text, Float, PickleType, BigInteger)
44 44 from sqlalchemy.sql.expression import true, false, case
45 45 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
46 46 from sqlalchemy.orm import (
47 47 relationship, joinedload, class_mapper, validates, aliased)
48 48 from sqlalchemy.ext.declarative import declared_attr
49 49 from sqlalchemy.ext.hybrid import hybrid_property
50 50 from sqlalchemy.exc import IntegrityError # pragma: no cover
51 51 from sqlalchemy.dialects.mysql import LONGTEXT
52 52 from zope.cachedescriptors.property import Lazy as LazyProperty
53 53 from pyramid import compat
54 54 from pyramid.threadlocal import get_current_request
55 55 from webhelpers2.text import remove_formatting
56 56
57 57 from rhodecode.translation import _
58 58 from rhodecode.lib.vcs import get_vcs_instance, VCSError
59 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
59 from rhodecode.lib.vcs.backends.base import (
60 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
60 61 from rhodecode.lib.utils2 import (
61 62 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
62 63 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
63 64 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
64 65 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
65 66 JsonRaw
66 67 from rhodecode.lib.ext_json import json
67 68 from rhodecode.lib.caching_query import FromCache
68 69 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
69 70 from rhodecode.lib.encrypt2 import Encryptor
70 71 from rhodecode.lib.exceptions import (
71 72 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
72 73 from rhodecode.model.meta import Base, Session
73 74
74 75 URL_SEP = '/'
75 76 log = logging.getLogger(__name__)
76 77
77 78 # =============================================================================
78 79 # BASE CLASSES
79 80 # =============================================================================
80 81
81 82 # this is propagated from .ini file rhodecode.encrypted_values.secret or
82 83 # beaker.session.secret if first is not set.
83 84 # and initialized at environment.py
84 85 ENCRYPTION_KEY = None
85 86
86 87 # used to sort permissions by types, '#' used here is not allowed to be in
87 88 # usernames, and it's very early in sorted string.printable table.
88 89 PERMISSION_TYPE_SORT = {
89 90 'admin': '####',
90 91 'write': '###',
91 92 'read': '##',
92 93 'none': '#',
93 94 }
94 95
95 96
def display_user_sort(obj):
    """
    Sort key for entries returned by .permissions() on Repository,
    RepoGroup and UserGroup. The default user is placed in front of all
    other resources; remaining entries are grouped by permission level.
    """
    if obj.username == User.DEFAULT_USER:
        return '#####'

    level = obj.permission.split('.')[-1]
    prefix = PERMISSION_TYPE_SORT.get(level, '')

    # NOTE(dan): inactive duplicates goes last
    suffix = '9' if getattr(obj, 'duplicate_perm', None) else '1'
    return prefix + suffix + obj.username
112 113
113 114
def display_user_group_sort(obj):
    """
    Sort key for user-group permission entries returned by .permissions()
    on Repository, RepoGroup and UserGroup; groups by permission level.
    """
    level = obj.permission.split('.')[-1]
    return PERMISSION_TYPE_SORT.get(level, '') + obj.users_group_name
123 124
124 125
def _hash_key(k):
    # sha1-based digest used to build stable cache keys from raw strings
    return sha1_safe(k)
127 128
128 129
def in_filter_generator(qry, items, limit=500):
    """
    Splits IN() into multiple with OR
    e.g.::
        cnt = Repository.query().filter(
            or_(
                *in_filter_generator(Repository.repo_id, range(100000))
            )).count()
    """
    if not items:
        # empty list will cause empty query which might cause security issues
        # this can lead to hidden unpleasant results
        items = [-1]

    parts = []
    # chunk the values so that no single IN() clause holds more than
    # `limit` entries (databases cap bound parameters per statement)
    for chunk in xrange(0, len(items), limit):
        parts.append(
            qry.in_(items[chunk: chunk + limit])
        )

    return parts
150 151
151 152
# common __table_args__ shared by every model table definition
base_table_args = {
    'extend_existing': True,
    'mysql_engine': 'InnoDB',
    'mysql_charset': 'utf8',
    # use AUTOINCREMENT primary keys on SQLite
    'sqlite_autoincrement': True
}
158 159
159 160
class EncryptedTextValue(TypeDecorator):
    """
    Special column for encrypted long text data, use like::

        value = Column("encrypted_value", EncryptedValue(), nullable=False)

    This column is intelligent so if value is in unencrypted form it return
    unencrypted form, but on save it always encrypts
    """
    impl = Text

    def process_bind_param(self, value, dialect):
        """
        Setter for storing value
        """
        import rhodecode
        if not value:
            return value

        # protect against double encrypting if values is already encrypted
        if value.startswith('enc$aes$') \
                or value.startswith('enc$aes_hmac$') \
                or value.startswith('enc2$'):
            raise ValueError('value needs to be in unencrypted format, '
                             'ie. not starting with enc$ or enc2$')

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        if algo == 'aes':
            return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).encrypt(value)
        else:
            # BUGFIX: the ValueError was previously constructed but never
            # raised, silently storing None for unknown algorithms
            raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))

    def process_result_value(self, value, dialect):
        """
        Getter for retrieving value
        """
        import rhodecode
        if not value:
            return value

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
        if algo == 'aes':
            decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).decrypt(value)
        else:
            # BUGFIX: raise instead of discarding the constructed ValueError
            raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
        return decrypted_data
212 213
213 214
class BaseModel(object):
    """
    Base Model for all classes
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """

        lst = []
        for k in self._get_keys():
            lst.append((k, getattr(self, k),))
        return lst

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        # base query bound to the thread-local session
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        # returns None for falsy ids without touching the database
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch by primary key or abort the request with HTTP 404."""
        from pyramid.httpexceptions import HTTPNotFound

        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            # non-numeric id can never match a primary key
            raise HTTPNotFound()

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        # NOTE: does not commit; the caller controls the transaction
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        """Look up an instance of `cls` already present in the session's
        identity map by attribute value, avoiding a database round-trip.

        Returns the instance only when exactly one match exists; multiple
        matches are logged and None is returned.
        """
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            try:
                return safe_str(self.__unicode__())
            except UnicodeDecodeError:
                pass
        return '<DB:%s>' % (self.__class__.__name__)
318 319
319 320
class RhodeCodeSetting(Base, BaseModel):
    """Global application setting stored as a typed key/value row."""
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        base_table_args
    )

    # converters applied when reading a value, keyed by base type name;
    # a stored type may carry modifiers after a dot, e.g. 'unicode.encrypted'
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # values must already be unicode here; the setter below enforces it
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        # return the stored value converted back to its declared type
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_unicode(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_unicode(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # the base type (before any '.modifier') must be a known converter
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    @classmethod
    def get_by_prefix(cls, prefix):
        # all settings whose name starts with `prefix`
        return RhodeCodeSetting.query()\
            .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
            .all()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
404 405
405 406
class RhodeCodeUi(Base, BaseModel):
    """Global VCS 'ui' configuration entry (hooks, SVN patterns, etc.)."""
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        base_table_args
    )

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    # hooks installed and managed by RhodeCode itself
    HOOKS_BUILTIN = [
        HOOK_PRE_PULL,
        HOOK_PULL,
        HOOK_PRE_PUSH,
        HOOK_PRETX_PUSH,
        HOOK_PUSH,
        HOOK_PUSH_KEY,
    ]

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
                                    self.ui_key, self.ui_value)
453 454
454 455
class RepoRhodeCodeSetting(Base, BaseModel):
    """Per-repository override of a RhodeCodeSetting key/value pair."""
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository')

    def __init__(self, repository_id, key='', val='', type='unicode'):
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # values must already be unicode here; the setter below enforces it
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        # return the stored value converted back to its declared type;
        # NOTE: unlike RhodeCodeSetting, no '.encrypted' handling here
        v = self._app_settings_value
        type_ = self.app_settings_type
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # only plain converter names are accepted (no '.modifier' suffixes)
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s:%s[%s]')>" % (
            self.__class__.__name__, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
528 529
529 530
class RepoRhodeCodeUi(Base, BaseModel):
    """Per-repository override of a RhodeCodeUi configuration entry."""
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
560 561
561 562
562 563 class User(Base, BaseModel):
563 564 __tablename__ = 'users'
564 565 __table_args__ = (
565 566 UniqueConstraint('username'), UniqueConstraint('email'),
566 567 Index('u_username_idx', 'username'),
567 568 Index('u_email_idx', 'email'),
568 569 base_table_args
569 570 )
570 571
571 572 DEFAULT_USER = 'default'
572 573 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
573 574 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
574 575
575 576 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
576 577 username = Column("username", String(255), nullable=True, unique=None, default=None)
577 578 password = Column("password", String(255), nullable=True, unique=None, default=None)
578 579 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
579 580 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
580 581 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
581 582 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
582 583 _email = Column("email", String(255), nullable=True, unique=None, default=None)
583 584 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
584 585 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
585 586 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
586 587
587 588 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
588 589 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
589 590 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
590 591 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
591 592 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
592 593 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
593 594
594 595 user_log = relationship('UserLog')
595 596 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')
596 597
597 598 repositories = relationship('Repository')
598 599 repository_groups = relationship('RepoGroup')
599 600 user_groups = relationship('UserGroup')
600 601
601 602 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
602 603 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
603 604
604 605 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
605 606 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
606 607 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
607 608
608 609 group_member = relationship('UserGroupMember', cascade='all')
609 610
610 611 notifications = relationship('UserNotification', cascade='all')
611 612 # notifications assigned to this user
612 613 user_created_notifications = relationship('Notification', cascade='all')
613 614 # comments created by this user
614 615 user_comments = relationship('ChangesetComment', cascade='all')
615 616 # user profile extra info
616 617 user_emails = relationship('UserEmailMap', cascade='all')
617 618 user_ip_map = relationship('UserIpMap', cascade='all')
618 619 user_auth_tokens = relationship('UserApiKeys', cascade='all')
619 620 user_ssh_keys = relationship('UserSshKeys', cascade='all')
620 621
621 622 # gists
622 623 user_gists = relationship('Gist', cascade='all')
623 624 # user pull requests
624 625 user_pull_requests = relationship('PullRequest', cascade='all')
625 626
626 627 # external identities
627 628 external_identities = relationship(
628 629 'ExternalIdentity',
629 630 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
630 631 cascade='all')
631 632 # review rules
632 633 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
633 634
634 635 # artifacts owned
635 636 artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id')
636 637
637 638 # no cascade, set NULL
638 639 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id')
639 640
640 641 def __unicode__(self):
641 642 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
642 643 self.user_id, self.username)
643 644
    @hybrid_property
    def email(self):
        # primary email; additional addresses live in UserEmailMap
        return self._email

    @email.setter
    def email(self, val):
        # emails are stored lower-cased; empty values are normalized to None
        self._email = val.lower() if val else None
651 652
    @hybrid_property
    def first_name(self):
        # HTML-escaped so templates can render the value safely
        from rhodecode.lib import helpers as h
        if self.name:
            return h.escape(self.name)
        return self.name
658 659
    @hybrid_property
    def last_name(self):
        # HTML-escaped so templates can render the value safely
        from rhodecode.lib import helpers as h
        if self.lastname:
            return h.escape(self.lastname)
        return self.lastname
665 666
    @hybrid_property
    def api_key(self):
        """
        Fetch if exist an auth-token with role ALL connected to this user
        """
        # only non-expired tokens count (expires == -1 means "never expires")
        user_auth_token = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
        if user_auth_token:
            user_auth_token = user_auth_token.api_key

        return user_auth_token

    @api_key.setter
    def api_key(self, val):
        # don't allow to set API key this is deprecated for now
        self._api_key = None
685 686
    @property
    def reviewer_pull_requests(self):
        """All reviewer entries pointing at this user, with the related
        pull request eagerly loaded to avoid per-row queries."""
        return PullRequestReviewers.query() \
            .options(joinedload(PullRequestReviewers.pull_request)) \
            .filter(PullRequestReviewers.user_id == self.user_id) \
            .all()
692 693
    @property
    def firstname(self):
        # alias for future; mirrors the ``name`` column
        return self.name
697 698
    @property
    def emails(self):
        """Primary email followed by all extra emails, oldest first."""
        other = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc()) \
            .all()
        return [self.email] + [x.email for x in other]
705 706
    def emails_cached(self):
        # same result as `emails` but backed by the short-lived SQL cache
        emails = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc())

        emails = emails.options(
            FromCache("sql_cache_short", "get_user_{}_emails".format(self.user_id))
        )

        return [self.email] + [x.email for x in emails]
716 717
717 718 @property
718 719 def auth_tokens(self):
719 720 auth_tokens = self.get_auth_tokens()
720 721 return [x.api_key for x in auth_tokens]
721 722
    def get_auth_tokens(self):
        """Return all UserApiKeys rows of this user, oldest first."""
        return UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .order_by(UserApiKeys.user_api_key_id.asc())\
            .all()
727 728
    @LazyProperty
    def feed_token(self):
        # computed once per instance thanks to LazyProperty
        return self.get_feed_token()
731 732
    def get_feed_token(self, cache=True):
        """Return the first FEED-role token, or a placeholder string.

        :param cache: use the short-lived SQL cache region for the lookup
        """
        feed_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
        if cache:
            feed_tokens = feed_tokens.options(
                FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))

        feed_tokens = feed_tokens.all()
        if feed_tokens:
            return feed_tokens[0].api_key
        return 'NO_FEED_TOKEN_AVAILABLE'
744 745
    @LazyProperty
    def artifact_token(self):
        # computed once per instance thanks to LazyProperty
        return self.get_artifact_token()
748 749
    def get_artifact_token(self, cache=True):
        """Return the first ARTIFACT_DOWNLOAD-role token, or a placeholder.

        :param cache: use the short-lived SQL cache region for the lookup
        """
        artifacts_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
        if cache:
            artifacts_tokens = artifacts_tokens.options(
                FromCache("sql_cache_short", "get_user_artifact_token_%s" % self.user_id))

        artifacts_tokens = artifacts_tokens.all()
        if artifacts_tokens:
            return artifacts_tokens[0].api_key
        return 'NO_ARTIFACT_TOKEN_AVAILABLE'
761 762
    @classmethod
    def get(cls, user_id, cache=False):
        """Fetch a user by primary key; optionally via the SQL cache."""
        if not user_id:
            return

        user = cls.query()
        if cache:
            user = user.options(
                FromCache("sql_cache_short", "get_users_%s" % user_id))
        return user.get(user_id)
772 773
    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        """Non-expired tokens of `user`, optionally limited to `role`;
        tokens with ROLE_ALL always qualify."""
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()
782 783
    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """Check a plain `auth_token` against this user's valid tokens.

        :param auth_token: plain token string supplied by the caller
        :param roles: optional list of acceptable token roles; ROLE_ALL is
            always accepted in addition
        :param scope_repo_id: when the matched token is repo-scoped, it must
            match this repository id
        :return: True when the token authenticates this user
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        # only consider non-expired tokens (expires == -1 means "never")
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        # split stored tokens into plaintext and hashed buckets; hashed
        # ones require an expensive crypto comparison each
        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted tokens to check for authentication for this user',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                return True

        return False
839 840
840 841 @property
841 842 def ip_addresses(self):
842 843 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
843 844 return [x.ip_addr for x in ret]
844 845
    @property
    def username_and_name(self):
        # e.g. "jdoe (John Doe)"
        return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
848 849
849 850 @property
850 851 def username_or_name_or_email(self):
851 852 full_name = self.full_name if self.full_name is not ' ' else None
852 853 return self.username or full_name or self.email
853 854
    @property
    def full_name(self):
        # NOTE: %s formatting renders missing name parts as 'None'
        return '%s %s' % (self.first_name, self.last_name)
857 858
    @property
    def full_name_or_username(self):
        # full name only when both parts are set, otherwise the username
        return ('%s %s' % (self.first_name, self.last_name)
                if (self.first_name and self.last_name) else self.username)
862 863
863 864 @property
864 865 def full_contact(self):
865 866 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
866 867
867 868 @property
868 869 def short_contact(self):
869 870 return '%s %s' % (self.first_name, self.last_name)
870 871
    @property
    def is_admin(self):
        """Whether this account is a super-admin (value of the ``admin`` column)."""
        return self.admin
874 875
875 876 @property
876 877 def language(self):
877 878 return self.user_data.get('language')
878 879
879 880 def AuthUser(self, **kwargs):
880 881 """
881 882 Returns instance of AuthUser for this user
882 883 """
883 884 from rhodecode.lib.auth import AuthUser
884 885 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
885 886
886 887 @hybrid_property
887 888 def user_data(self):
888 889 if not self._user_data:
889 890 return {}
890 891
891 892 try:
892 893 return json.loads(self._user_data)
893 894 except TypeError:
894 895 return {}
895 896
896 897 @user_data.setter
897 898 def user_data(self, val):
898 899 if not isinstance(val, dict):
899 900 raise Exception('user_data must be dict, got %s' % type(val))
900 901 try:
901 902 self._user_data = json.dumps(val)
902 903 except Exception:
903 904 log.error(traceback.format_exc())
904 905
905 906 @classmethod
906 907 def get_by_username(cls, username, case_insensitive=False,
907 908 cache=False, identity_cache=False):
908 909 session = Session()
909 910
910 911 if case_insensitive:
911 912 q = cls.query().filter(
912 913 func.lower(cls.username) == func.lower(username))
913 914 else:
914 915 q = cls.query().filter(cls.username == username)
915 916
916 917 if cache:
917 918 if identity_cache:
918 919 val = cls.identity_cache(session, 'username', username)
919 920 if val:
920 921 return val
921 922 else:
922 923 cache_key = "get_user_by_name_%s" % _hash_key(username)
923 924 q = q.options(
924 925 FromCache("sql_cache_short", cache_key))
925 926
926 927 return q.scalar()
927 928
928 929 @classmethod
929 930 def get_by_auth_token(cls, auth_token, cache=False):
930 931 q = UserApiKeys.query()\
931 932 .filter(UserApiKeys.api_key == auth_token)\
932 933 .filter(or_(UserApiKeys.expires == -1,
933 934 UserApiKeys.expires >= time.time()))
934 935 if cache:
935 936 q = q.options(
936 937 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
937 938
938 939 match = q.first()
939 940 if match:
940 941 return match.user
941 942
942 943 @classmethod
943 944 def get_by_email(cls, email, case_insensitive=False, cache=False):
944 945
945 946 if case_insensitive:
946 947 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
947 948
948 949 else:
949 950 q = cls.query().filter(cls.email == email)
950 951
951 952 email_key = _hash_key(email)
952 953 if cache:
953 954 q = q.options(
954 955 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
955 956
956 957 ret = q.scalar()
957 958 if ret is None:
958 959 q = UserEmailMap.query()
959 960 # try fetching in alternate email map
960 961 if case_insensitive:
961 962 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
962 963 else:
963 964 q = q.filter(UserEmailMap.email == email)
964 965 q = q.options(joinedload(UserEmailMap.user))
965 966 if cache:
966 967 q = q.options(
967 968 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
968 969 ret = getattr(q.scalar(), 'user', None)
969 970
970 971 return ret
971 972
972 973 @classmethod
973 974 def get_from_cs_author(cls, author):
974 975 """
975 976 Tries to get User objects out of commit author string
976 977
977 978 :param author:
978 979 """
979 980 from rhodecode.lib.helpers import email, author_name
980 981 # Valid email in the attribute passed, see if they're in the system
981 982 _email = email(author)
982 983 if _email:
983 984 user = cls.get_by_email(_email, case_insensitive=True)
984 985 if user:
985 986 return user
986 987 # Maybe we can match by username?
987 988 _author = author_name(author)
988 989 user = cls.get_by_username(_author, case_insensitive=True)
989 990 if user:
990 991 return user
991 992
992 993 def update_userdata(self, **kwargs):
993 994 usr = self
994 995 old = usr.user_data
995 996 old.update(**kwargs)
996 997 usr.user_data = old
997 998 Session().add(usr)
998 999 log.debug('updated userdata with %s', kwargs)
999 1000
    def update_lastlogin(self):
        """Update user lastlogin"""
        # local server time; the object is added to the session but the
        # commit is left to the caller
        self.last_login = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user %s lastlogin', self.username)
1005 1006
    def update_password(self, new_password):
        """Hash ``new_password`` and store it; commit is left to the caller."""
        from rhodecode.lib.auth import get_crypt_password

        self.password = get_crypt_password(new_password)
        Session().add(self)
1011 1012
1012 1013 @classmethod
1013 1014 def get_first_super_admin(cls):
1014 1015 user = User.query()\
1015 1016 .filter(User.admin == true()) \
1016 1017 .order_by(User.user_id.asc()) \
1017 1018 .first()
1018 1019
1019 1020 if user is None:
1020 1021 raise Exception('FATAL: Missing administrative account!')
1021 1022 return user
1022 1023
1023 1024 @classmethod
1024 1025 def get_all_super_admins(cls, only_active=False):
1025 1026 """
1026 1027 Returns all admin accounts sorted by username
1027 1028 """
1028 1029 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1029 1030 if only_active:
1030 1031 qry = qry.filter(User.active == true())
1031 1032 return qry.all()
1032 1033
1033 1034 @classmethod
1034 1035 def get_all_user_ids(cls, only_active=True):
1035 1036 """
1036 1037 Returns all users IDs
1037 1038 """
1038 1039 qry = Session().query(User.user_id)
1039 1040
1040 1041 if only_active:
1041 1042 qry = qry.filter(User.active == true())
1042 1043 return [x.user_id for x in qry]
1043 1044
1044 1045 @classmethod
1045 1046 def get_default_user(cls, cache=False, refresh=False):
1046 1047 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1047 1048 if user is None:
1048 1049 raise Exception('FATAL: Missing default account!')
1049 1050 if refresh:
1050 1051 # The default user might be based on outdated state which
1051 1052 # has been loaded from the cache.
1052 1053 # A call to refresh() ensures that the
1053 1054 # latest state from the database is used.
1054 1055 Session().refresh(user)
1055 1056 return user
1056 1057
    @classmethod
    def get_default_user_id(cls):
        """Return the id of the default user, as cached in the app config."""
        import rhodecode
        return rhodecode.CONFIG['default_user_id']
1061 1062
    def _get_default_perms(self, user, suffix=''):
        # delegate to PermissionModel to extract the default permissions
        # from the given user's permission rows
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user.user_perms, suffix)
1065 1066
    def get_default_perms(self, suffix=''):
        """Default permissions of this user (see ``_get_default_perms``)."""
        return self._get_default_perms(self, suffix)
1068 1069
1069 1070 def get_api_data(self, include_secrets=False, details='full'):
1070 1071 """
1071 1072 Common function for generating user related data for API
1072 1073
1073 1074 :param include_secrets: By default secrets in the API data will be replaced
1074 1075 by a placeholder value to prevent exposing this data by accident. In case
1075 1076 this data shall be exposed, set this flag to ``True``.
1076 1077
1077 1078 :param details: details can be 'basic|full' basic gives only a subset of
1078 1079 the available user information that includes user_id, name and emails.
1079 1080 """
1080 1081 user = self
1081 1082 user_data = self.user_data
1082 1083 data = {
1083 1084 'user_id': user.user_id,
1084 1085 'username': user.username,
1085 1086 'firstname': user.name,
1086 1087 'lastname': user.lastname,
1087 1088 'description': user.description,
1088 1089 'email': user.email,
1089 1090 'emails': user.emails,
1090 1091 }
1091 1092 if details == 'basic':
1092 1093 return data
1093 1094
1094 1095 auth_token_length = 40
1095 1096 auth_token_replacement = '*' * auth_token_length
1096 1097
1097 1098 extras = {
1098 1099 'auth_tokens': [auth_token_replacement],
1099 1100 'active': user.active,
1100 1101 'admin': user.admin,
1101 1102 'extern_type': user.extern_type,
1102 1103 'extern_name': user.extern_name,
1103 1104 'last_login': user.last_login,
1104 1105 'last_activity': user.last_activity,
1105 1106 'ip_addresses': user.ip_addresses,
1106 1107 'language': user_data.get('language')
1107 1108 }
1108 1109 data.update(extras)
1109 1110
1110 1111 if include_secrets:
1111 1112 data['auth_tokens'] = user.auth_tokens
1112 1113 return data
1113 1114
    def __json__(self):
        """Serializable representation: display helpers plus full API data."""
        data = {
            'full_name': self.full_name,
            'full_name_or_username': self.full_name_or_username,
            'short_contact': self.short_contact,
            'full_contact': self.full_contact,
        }
        data.update(self.get_api_data())
        return data
1123 1124
1124 1125
class UserApiKeys(Base, BaseModel):
    """
    Authentication tokens ("API keys") owned by users.

    A token may carry a role restricting what it can be used for, an
    expiry timestamp and an optional scope (single repository or
    repository group).
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key'),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        base_table_args
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_HTTP = 'token_role_http'
    ROLE_FEED = 'token_role_feed'
    ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
    # The last one is ignored in the list as we only
    # use it for one action, and cannot be created by users
    ROLE_PASSWORD_RESET = 'token_password_reset'

    ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    # -1 means the token never expires; otherwise a unix timestamp
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s')>" % (self.__class__.__name__, self.role)

    def __json__(self):
        """Serializable representation; note the real token is included."""
        data = {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }
        return data

    def get_api_data(self, include_secrets=False):
        """API payload; obfuscates the token unless ``include_secrets``."""
        data = self.__json__()
        if include_secrets:
            return data
        else:
            data['auth_token'] = self.token_obfuscated
            return data

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe to render in templates
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        """``True`` once the expiry timestamp is in the past (-1 = never)."""
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        # short translated label for a role; unknown roles pass through
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
        }.get(role, role)

    @classmethod
    def _get_role_description(cls, role):
        # longer translated help text for a role; unknown roles pass through
        return {
            cls.ROLE_ALL: _('Token for all actions.'),
            cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
                             'login using `api_access_controllers_whitelist` functionality.'),
            cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
                            'Requires auth_token authentication plugin to be active. <br/>'
                            'Such Token should be used then instead of a password to '
                            'interact with a repository, and additionally can be '
                            'limited to single repository using repo scope.'),
            cls.ROLE_API: _('Token limited to api calls.'),
            cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
        }.get(role, role)

    @property
    def role_humanized(self):
        """Translated label of this token's role."""
        return self._get_role_name(self.role)

    def _get_scope(self):
        # repo scope wins over repo-group scope; no scope means global
        if self.repo:
            return 'Repository: {}'.format(self.repo.repo_name)
        if self.repo_group:
            return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
        return 'Global'

    @property
    def scope_humanized(self):
        """Human readable description of this token's scope."""
        return self._get_scope()

    @property
    def token_obfuscated(self):
        # first 4 chars followed by a mask; ``None`` if no token is set
        if self.api_key:
            return self.api_key[:4] + "****"
1245 1246
1246 1247
class UserEmailMap(Base, BaseModel):
    """Extra (alternate) email addresses attached to a user account."""
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        base_table_args
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        """Reject emails that are already registered as a primary address."""
        # check if this email is not main one
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            # NOTE: fixed grammar of the message ("is present is" -> "in")
            raise AttributeError('email %s is present in user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # emails are stored lower-cased for case-insensitive matching
        self._email = val.lower() if val else None
1276 1277
1277 1278
class UserIpMap(Base, BaseModel):
    """IP addresses / networks mapped to a user (per-user IP restrictions)."""
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    # may hold a single address or a CIDR network, see ``_get_ip_range``
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe to render in templates
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        """Return ``[first, last]`` addresses of the network in ``ip_addr``."""
        # strict=False allows host bits to be set in the network notation
        net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)
1312 1313
1313 1314
class UserSshKeys(Base, BaseModel):
    """Public SSH keys registered for a user."""
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )
    __mapper_args__ = {}

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # full public key material
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    # unique fingerprint used for lookups on connection
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # last time the key was used to connect; ``None`` if never used
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined')

    def __json__(self):
        data = {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }
        return data

    def get_api_data(self):
        """API payload; the key material itself is never exposed."""
        data = self.__json__()
        return data
1348 1349
1349 1350
class UserLog(Base, BaseModel):
    """Audit-log (journal) entries recording user actions."""
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    # schema versions of the stored action data
    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # user reference is nulled (not deleted) when the user is removed
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
    # denormalized copies survive deletion of the referenced rows
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.repository_name, self.action)

    def __json__(self):
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        # alias of the primary key used by audit-log consumers
        return self.user_log_id

    @property
    def action_as_day(self):
        """The entry's action date truncated to a ``datetime.date``."""
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
1398 1399
1399 1400
class UserGroup(Base, BaseModel):
    """A named group of users, usable as a permission target."""
    __tablename__ = 'users_groups'
    __table_args__ = (
        base_table_args,
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
    # NOTE: removed a stray trailing space from the relationship target name
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')

    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")

    @classmethod
    def _load_group_data(cls, column):
        """Deserialize the ``group_data`` JSON column; ``{}`` on empty/bad data."""
        if not column:
            return {}

        try:
            return json.loads(column) or {}
        except TypeError:
            return {}

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe to render in templates
        from rhodecode.lib import helpers as h
        return h.escape(self.user_group_description)

    @hybrid_property
    def group_data(self):
        return self._load_group_data(self._group_data)

    @group_data.expression
    def group_data(self, **kwargs):
        return self._group_data

    @group_data.setter
    def group_data(self, val):
        try:
            self._group_data = json.dumps(val)
        except Exception:
            # best-effort: keep previous value if serialization fails
            log.error(traceback.format_exc())

    @classmethod
    def _load_sync(cls, group_data):
        # external sync source (e.g. auth plugin type), if configured
        if group_data:
            return group_data.get('extern_type')

    @property
    def sync(self):
        """External sync source of this group, or ``None``."""
        return self._load_sync(self.group_data)

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.users_group_id,
                                      self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """Look a user group up by name."""
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))

        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        """Fetch a user group by id; ``None`` for falsy ids."""
        if not user_group_id:
            return

        user_group = cls.query()
        if cache:
            user_group = user_group.options(
                FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for user groups

        :param with_admins: include super-admin rows
        :param with_owner: include the owner row
        :param expand_from_user_groups: also include members of user groups
            that hold permissions on this group
        """
        _admin_perm = 'usergroup.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                # NOTE: guard on owner_row -- it is empty when with_owner=False
                if owner_row and usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            # NOTE: guard on owner_row -- it is empty when with_owner=False
            if (owner_row and usr.user_id == owner_row[0].user_id) \
                    or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

    def permission_user_groups(self, with_members=False):
        """User groups holding permissions on this group."""
        q = UserGroupUserGroupToPerm.query()\
            .filter(UserGroupUserGroupToPerm.target_user_group == self)
        q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
                      joinedload(UserGroupUserGroupToPerm.target_user_group),
                      joinedload(UserGroupUserGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.user_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.user_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def _get_default_perms(self, user_group, suffix=''):
        # delegate to PermissionModel to extract the default permissions
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)

    def get_default_perms(self, suffix=''):
        """Default permissions of this user group."""
        return self._get_default_perms(self, suffix)

    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        :param include_secrets: See :meth:`User.get_api_data`, this parameter is
            basically forwarded.
        :param with_group_members: include API data of all member users
        """
        user_group = self
        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
            'sync': user_group.sync,
            'owner_email': user_group.user.email,
        }

        if with_group_members:
            users = []
            for user in user_group.members:
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data
1605 1606
1606 1607
class UserGroupMember(Base, BaseModel):
    """Association row linking a user to a user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        # NOTE(review): defaults are empty strings rather than None --
        # kept as-is for backward compatibility with existing callers
        self.users_group_id = gr_id
        self.user_id = u_id
1623 1624
1624 1625
class RepositoryField(Base, BaseModel):
    """Arbitrary extra key/value fields attached to a repository."""
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        """Field key with the form prefix applied."""
        # use PREFIX instead of a hard-coded 'ex_' so this stays in sync
        # with un_prefix_key()
        return '%s%s' % (self.PREFIX, self.field_key)

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form prefix from ``key`` if present."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row for ``key`` on ``repo``, or ``None``."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1661 1662
1662 1663
class Repository(Base, BaseModel):
    """
    Main repository model: database row plus helpers bridging to the VCS
    backend (scm instance, commit cache, locking, permissions, clone URLs).
    """
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )
    # templates used to render clone URLs; placeholders filled by get_clone_url
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # repo_state values (see set_state)
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # lock_reason values stored in the _locked column (see lock/unlock)
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    NAME_SEP = URL_SEP

    # -- columns -----------------------------------------------------------
    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    # exposed via the repo_name hybrid property
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    # sha1 of repo_name, kept in sync by the repo_name setter
    repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    archived = Column(
        "archived", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    # exposed via the landing_rev hybrid property, stored as '<type>:<rev>'
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    # exposed via the locked hybrid property, stored as 'user_id:time:reason'
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    # exposed via the changeset_cache hybrid property
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True) # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    # -- relationships -----------------------------------------------------
    user = relationship('User', lazy='joined')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship(
        'UserRepoToPerm', cascade='all',
        order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
        cascade='all')
    extra_fields = relationship(
        'RepositoryField', cascade="all, delete-orphan")
    logs = relationship('UserLog')
    comments = relationship(
        'ChangesetComment', cascade="all, delete-orphan")
    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete-orphan")
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete-orphan")
    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration', cascade="all, delete-orphan")

    scoped_tokens = relationship('UserApiKeys', cascade="all")

    # no cascade, set NULL
    artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id')
1772 1773
1773 1774 def __unicode__(self):
1774 1775 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1775 1776 safe_unicode(self.repo_name))
1776 1777
1777 1778 @hybrid_property
1778 1779 def description_safe(self):
1779 1780 from rhodecode.lib import helpers as h
1780 1781 return h.escape(self.description)
1781 1782
1782 1783 @hybrid_property
1783 1784 def landing_rev(self):
1784 1785 # always should return [rev_type, rev], e.g ['branch', 'master']
1785 1786 if self._landing_revision:
1786 1787 _rev_info = self._landing_revision.split(':')
1787 1788 if len(_rev_info) < 2:
1788 1789 _rev_info.insert(0, 'rev')
1789 1790 return [_rev_info[0], _rev_info[1]]
1790 1791 return [None, None]
1791 1792
1792 1793 @property
1793 1794 def landing_ref_type(self):
1794 1795 return self.landing_rev[0]
1795 1796
1796 1797 @property
1797 1798 def landing_ref_name(self):
1798 1799 return self.landing_rev[1]
1799 1800
1800 1801 @landing_rev.setter
1801 1802 def landing_rev(self, val):
1802 1803 if ':' not in val:
1803 1804 raise ValueError('value must be delimited with `:` and consist '
1804 1805 'of <rev_type>:<rev>, got %s instead' % val)
1805 1806 self._landing_revision = val
1806 1807
1807 1808 @hybrid_property
1808 1809 def locked(self):
1809 1810 if self._locked:
1810 1811 user_id, timelocked, reason = self._locked.split(':')
1811 1812 lock_values = int(user_id), timelocked, reason
1812 1813 else:
1813 1814 lock_values = [None, None, None]
1814 1815 return lock_values
1815 1816
1816 1817 @locked.setter
1817 1818 def locked(self, val):
1818 1819 if val and isinstance(val, (list, tuple)):
1819 1820 self._locked = ':'.join(map(str, val))
1820 1821 else:
1821 1822 self._locked = None
1822 1823
    @classmethod
    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
        """
        Deserialize the raw JSON commit cache for ``repo_id``.

        Falls back to an ``EmptyCommit`` payload when the raw value is
        missing or cannot be parsed.
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not changeset_cache_raw:
            dummy['source_repo_id'] = repo_id
            # round-trip through JSON so the result matches the parsed shape
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(changeset_cache_raw)
        except TypeError:
            # raw value not a string/bytes; quietly fall back
            return dummy
        except Exception:
            # malformed JSON: log and fall back
            log.error(traceback.format_exc())
            return dummy
1838 1839
1839 1840 @hybrid_property
1840 1841 def changeset_cache(self):
1841 1842 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
1842 1843
1843 1844 @changeset_cache.setter
1844 1845 def changeset_cache(self, val):
1845 1846 try:
1846 1847 self._changeset_cache = json.dumps(val)
1847 1848 except Exception:
1848 1849 log.error(traceback.format_exc())
1849 1850
    @hybrid_property
    def repo_name(self):
        # full repository name, including any parent group path
        return self._repo_name
1853 1854
1854 1855 @repo_name.setter
1855 1856 def repo_name(self, value):
1856 1857 self._repo_name = value
1857 1858 self.repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1858 1859
1859 1860 @classmethod
1860 1861 def normalize_repo_name(cls, repo_name):
1861 1862 """
1862 1863 Normalizes os specific repo_name to the format internally stored inside
1863 1864 database using URL_SEP
1864 1865
1865 1866 :param cls:
1866 1867 :param repo_name:
1867 1868 """
1868 1869 return cls.NAME_SEP.join(repo_name.split(os.sep))
1869 1870
1870 1871 @classmethod
1871 1872 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1872 1873 session = Session()
1873 1874 q = session.query(cls).filter(cls.repo_name == repo_name)
1874 1875
1875 1876 if cache:
1876 1877 if identity_cache:
1877 1878 val = cls.identity_cache(session, 'repo_name', repo_name)
1878 1879 if val:
1879 1880 return val
1880 1881 else:
1881 1882 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1882 1883 q = q.options(
1883 1884 FromCache("sql_cache_short", cache_key))
1884 1885
1885 1886 return q.scalar()
1886 1887
1887 1888 @classmethod
1888 1889 def get_by_id_or_repo_name(cls, repoid):
1889 1890 if isinstance(repoid, (int, long)):
1890 1891 try:
1891 1892 repo = cls.get(repoid)
1892 1893 except ValueError:
1893 1894 repo = None
1894 1895 else:
1895 1896 repo = cls.get_by_repo_name(repoid)
1896 1897 return repo
1897 1898
1898 1899 @classmethod
1899 1900 def get_by_full_path(cls, repo_full_path):
1900 1901 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1901 1902 repo_name = cls.normalize_repo_name(repo_name)
1902 1903 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1903 1904
1904 1905 @classmethod
1905 1906 def get_repo_forks(cls, repo_id):
1906 1907 return cls.query().filter(Repository.fork_id == repo_id)
1907 1908
1908 1909 @classmethod
1909 1910 def base_path(cls):
1910 1911 """
1911 1912 Returns base path when all repos are stored
1912 1913
1913 1914 :param cls:
1914 1915 """
1915 1916 q = Session().query(RhodeCodeUi)\
1916 1917 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1917 1918 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1918 1919 return q.one().ui_value
1919 1920
1920 1921 @classmethod
1921 1922 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1922 1923 case_insensitive=True, archived=False):
1923 1924 q = Repository.query()
1924 1925
1925 1926 if not archived:
1926 1927 q = q.filter(Repository.archived.isnot(true()))
1927 1928
1928 1929 if not isinstance(user_id, Optional):
1929 1930 q = q.filter(Repository.user_id == user_id)
1930 1931
1931 1932 if not isinstance(group_id, Optional):
1932 1933 q = q.filter(Repository.group_id == group_id)
1933 1934
1934 1935 if case_insensitive:
1935 1936 q = q.order_by(func.lower(Repository.repo_name))
1936 1937 else:
1937 1938 q = q.order_by(Repository.repo_name)
1938 1939
1939 1940 return q.all()
1940 1941
1941 1942 @property
1942 1943 def repo_uid(self):
1943 1944 return '_{}'.format(self.repo_id)
1944 1945
1945 1946 @property
1946 1947 def forks(self):
1947 1948 """
1948 1949 Return forks of this repo
1949 1950 """
1950 1951 return Repository.get_repo_forks(self.repo_id)
1951 1952
    @property
    def parent(self):
        """
        Returns fork parent
        """
        # alias of the `fork` relationship; None when this repo is not a fork
        return self.fork
1958 1959
1959 1960 @property
1960 1961 def just_name(self):
1961 1962 return self.repo_name.split(self.NAME_SEP)[-1]
1962 1963
1963 1964 @property
1964 1965 def groups_with_parents(self):
1965 1966 groups = []
1966 1967 if self.group is None:
1967 1968 return groups
1968 1969
1969 1970 cur_gr = self.group
1970 1971 groups.insert(0, cur_gr)
1971 1972 while 1:
1972 1973 gr = getattr(cur_gr, 'parent_group', None)
1973 1974 cur_gr = cur_gr.parent_group
1974 1975 if gr is None:
1975 1976 break
1976 1977 groups.insert(0, gr)
1977 1978
1978 1979 return groups
1979 1980
1980 1981 @property
1981 1982 def groups_and_repo(self):
1982 1983 return self.groups_with_parents, self
1983 1984
1984 1985 @LazyProperty
1985 1986 def repo_path(self):
1986 1987 """
1987 1988 Returns base full path for that repository means where it actually
1988 1989 exists on a filesystem
1989 1990 """
1990 1991 q = Session().query(RhodeCodeUi).filter(
1991 1992 RhodeCodeUi.ui_key == self.NAME_SEP)
1992 1993 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1993 1994 return q.one().ui_value
1994 1995
1995 1996 @property
1996 1997 def repo_full_path(self):
1997 1998 p = [self.repo_path]
1998 1999 # we need to split the name by / since this is how we store the
1999 2000 # names in the database, but that eventually needs to be converted
2000 2001 # into a valid system path
2001 2002 p += self.repo_name.split(self.NAME_SEP)
2002 2003 return os.path.join(*map(safe_unicode, p))
2003 2004
2004 2005 @property
2005 2006 def cache_keys(self):
2006 2007 """
2007 2008 Returns associated cache keys for that repo
2008 2009 """
2009 2010 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2010 2011 repo_id=self.repo_id)
2011 2012 return CacheKey.query()\
2012 2013 .filter(CacheKey.cache_args == invalidation_namespace)\
2013 2014 .order_by(CacheKey.cache_key)\
2014 2015 .all()
2015 2016
2016 2017 @property
2017 2018 def cached_diffs_relative_dir(self):
2018 2019 """
2019 2020 Return a relative to the repository store path of cached diffs
2020 2021 used for safe display for users, who shouldn't know the absolute store
2021 2022 path
2022 2023 """
2023 2024 return os.path.join(
2024 2025 os.path.dirname(self.repo_name),
2025 2026 self.cached_diffs_dir.split(os.path.sep)[-1])
2026 2027
2027 2028 @property
2028 2029 def cached_diffs_dir(self):
2029 2030 path = self.repo_full_path
2030 2031 return os.path.join(
2031 2032 os.path.dirname(path),
2032 2033 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
2033 2034
2034 2035 def cached_diffs(self):
2035 2036 diff_cache_dir = self.cached_diffs_dir
2036 2037 if os.path.isdir(diff_cache_dir):
2037 2038 return os.listdir(diff_cache_dir)
2038 2039 return []
2039 2040
2040 2041 def shadow_repos(self):
2041 2042 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
2042 2043 return [
2043 2044 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2044 2045 if x.startswith(shadow_repos_pattern)]
2045 2046
2046 2047 def get_new_name(self, repo_name):
2047 2048 """
2048 2049 returns new full repository name based on assigned group and new new
2049 2050
2050 2051 :param group_name:
2051 2052 """
2052 2053 path_prefix = self.group.full_path_splitted if self.group else []
2053 2054 return self.NAME_SEP.join(path_prefix + [repo_name])
2054 2055
2055 2056 @property
2056 2057 def _config(self):
2057 2058 """
2058 2059 Returns db based config object.
2059 2060 """
2060 2061 from rhodecode.lib.utils import make_db_config
2061 2062 return make_db_config(clear_session=False, repo=self)
2062 2063
    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repositories

        :param with_admins: include super-admin rows
        :param with_owner: include the repository owner row
        :param expand_from_user_groups: append individual members of user
            groups that have permissions on this repo
        :return: list of AttributeDict user rows, ordered admins, owner,
            then per-user permissions, then expanded group members

        NOTE(review): when ``with_owner`` is False but ``with_admins`` is
        True, ``owner_row[0]`` below raises IndexError — callers appear to
        always pass with_owner=True; confirm before relying on other combos.
        """
        _admin_perm = 'repository.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            usr.permission_id = None
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    usr.permission_id = None
                    super_admin_rows.append(usr)

        q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
        q = q.options(joinedload(UserRepoToPerm.repository),
                      joinedload(UserRepoToPerm.user),
                      joinedload(UserRepoToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            # also check if this permission is maybe used by branch_permissions
            if _usr.branch_perm_entry:
                usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]

            usr.permission = _usr.permission.permission_name
            usr.permission_id = _usr.repo_to_perm_id
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows
2129 2130
2130 2131 def permission_user_groups(self, with_members=True):
2131 2132 q = UserGroupRepoToPerm.query()\
2132 2133 .filter(UserGroupRepoToPerm.repository == self)
2133 2134 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2134 2135 joinedload(UserGroupRepoToPerm.users_group),
2135 2136 joinedload(UserGroupRepoToPerm.permission),)
2136 2137
2137 2138 perm_rows = []
2138 2139 for _user_group in q.all():
2139 2140 entry = AttributeDict(_user_group.users_group.get_dict())
2140 2141 entry.permission = _user_group.permission.permission_name
2141 2142 if with_members:
2142 2143 entry.members = [x.user.get_dict()
2143 2144 for x in _user_group.users_group.members]
2144 2145 perm_rows.append(entry)
2145 2146
2146 2147 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2147 2148 return perm_rows
2148 2149
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        :return: dict of repo attributes plus any enabled extra fields
            (keyed by their ``ex_`` prefixed names)
        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        # unpack lock state: (user_id, lock_time, reason) or [None]*3
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            # expose custom extra fields under their prefixed keys
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
2197 2198
2198 2199 @classmethod
2199 2200 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2200 2201 if not lock_time:
2201 2202 lock_time = time.time()
2202 2203 if not lock_reason:
2203 2204 lock_reason = cls.LOCK_AUTOMATIC
2204 2205 repo.locked = [user_id, lock_time, lock_reason]
2205 2206 Session().add(repo)
2206 2207 Session().commit()
2207 2208
    @classmethod
    def unlock(cls, repo):
        # clear the lock state and persist immediately
        repo.locked = None
        Session().add(repo)
        Session().commit()
2213 2214
2214 2215 @classmethod
2215 2216 def getlock(cls, repo):
2216 2217 return repo.locked
2217 2218
2218 2219 def is_user_lock(self, user_id):
2219 2220 if self.lock[0]:
2220 2221 lock_user_id = safe_int(self.lock[0])
2221 2222 user_id = safe_int(user_id)
2222 2223 # both are ints, and they are equal
2223 2224 return all([lock_user_id, user_id]) and lock_user_id == user_id
2224 2225
2225 2226 return False
2226 2227
    def get_locking_state(self, action, user_id, only_when_enabled=True):
        """
        Checks locking on this repository, if locking is enabled and lock is
        present returns a tuple of make_lock, locked, locked_by.
        make_lock can have 3 states None (do nothing) True, make lock
        False release lock, This value is later propagated to hooks, which
        do the locking. Think about this as signals passed to hooks what to do.

        :param action: either 'push' or 'pull'
        :param user_id: id of the user performing the action
        :param only_when_enabled: when False, evaluate the lock state even
            if locking is disabled on this repository
        :raises ValueError: on any other ``action`` value
        """
        # TODO: johbo: This is part of the business logic and should be moved
        # into the RepositoryModel.

        if action not in ('push', 'pull'):
            raise ValueError("Invalid action value: %s" % repr(action))

        # defines if locked error should be thrown to user
        currently_locked = False
        # defines if new lock should be made, tri-state
        make_lock = None
        repo = self
        user = User.get(user_id)

        lock_info = repo.locked

        if repo and (repo.enable_locking or not only_when_enabled):
            if action == 'push':
                # check if it's already locked !, if it is compare users
                locked_by_user_id = lock_info[0]
                if user.user_id == locked_by_user_id:
                    log.debug(
                        'Got `push` action from user %s, now unlocking', user)
                    # unlock if we have push from user who locked
                    make_lock = False
                else:
                    # we're not the same user who locked, ban with
                    # code defined in settings (default is 423 HTTP Locked) !
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
            elif action == 'pull':
                # [0] user [1] date
                if lock_info[0] and lock_info[1]:
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
                else:
                    log.debug('Setting lock on repo %s by %s', repo, user)
                    make_lock = True

        else:
            log.debug('Repository %s do not have locking enabled', repo)

        log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
                  make_lock, currently_locked, lock_info)

        from rhodecode.lib.auth import HasRepoPermissionAny
        perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
        if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
            # if we don't have at least write permission we cannot make a lock
            # NOTE(review): the debug message below says "read" but the check
            # above requires write/admin — message looks stale; confirm.
            log.debug('lock state reset back to FALSE due to lack '
                      'of at least read permission')
            make_lock = False

        return make_lock, currently_locked, lock_info
2289 2290
2290 2291 @property
2291 2292 def last_commit_cache_update_diff(self):
2292 2293 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2293 2294
2294 2295 @classmethod
2295 2296 def _load_commit_change(cls, last_commit_cache):
2296 2297 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2297 2298 empty_date = datetime.datetime.fromtimestamp(0)
2298 2299 date_latest = last_commit_cache.get('date', empty_date)
2299 2300 try:
2300 2301 return parse_datetime(date_latest)
2301 2302 except Exception:
2302 2303 return empty_date
2303 2304
2304 2305 @property
2305 2306 def last_commit_change(self):
2306 2307 return self._load_commit_change(self.changeset_cache)
2307 2308
    @property
    def last_db_change(self):
        # datetime of the last database update of this row
        return self.updated_on
2311 2312
2312 2313 @property
2313 2314 def clone_uri_hidden(self):
2314 2315 clone_uri = self.clone_uri
2315 2316 if clone_uri:
2316 2317 import urlobject
2317 2318 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2318 2319 if url_obj.password:
2319 2320 clone_uri = url_obj.with_password('*****')
2320 2321 return clone_uri
2321 2322
2322 2323 @property
2323 2324 def push_uri_hidden(self):
2324 2325 push_uri = self.push_uri
2325 2326 if push_uri:
2326 2327 import urlobject
2327 2328 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2328 2329 if url_obj.password:
2329 2330 push_uri = url_obj.with_password('*****')
2330 2331 return push_uri
2331 2332
    def clone_url(self, **override):
        """
        Build the clone URL for this repository.

        Recognized ``override`` keys (consumed here, not forwarded):
        ``with_id`` selects the id-based URL template, ``uri_tmpl`` supplies
        an explicit template, ``ssh`` selects the SSH template. Any other
        keys are passed through to ``get_clone_url``.
        """
        from rhodecode.model.settings import SettingsModel

        uri_tmpl = None
        if 'with_id' in override:
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
            del override['with_id']

        if 'uri_tmpl' in override:
            uri_tmpl = override['uri_tmpl']
            del override['uri_tmpl']

        ssh = False
        if 'ssh' in override:
            ssh = True
            del override['ssh']

        # we didn't override our tmpl from **overrides
        request = get_current_request()
        if not uri_tmpl:
            # prefer per-request cached config when available
            if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
                rc_config = request.call_context.rc_config
            else:
                rc_config = SettingsModel().get_all_settings(cache=True)

            if ssh:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH

            else:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI

        return get_clone_url(request=request,
                             uri_tmpl=uri_tmpl,
                             repo_name=self.repo_name,
                             repo_id=self.repo_id,
                             repo_type=self.repo_type,
                             **override)
2371 2372
2372 2373 def set_state(self, state):
2373 2374 self.repo_state = state
2374 2375 Session().add(self)
2375 2376 #==========================================================================
2376 2377 # SCM PROPERTIES
2377 2378 #==========================================================================
2378 2379
2379 2380 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False):
2380 2381 return get_commit_safe(
2381 2382 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2382 2383 maybe_unreachable=maybe_unreachable)
2383 2384
2384 2385 def get_changeset(self, rev=None, pre_load=None):
2385 2386 warnings.warn("Use get_commit", DeprecationWarning)
2386 2387 commit_id = None
2387 2388 commit_idx = None
2388 2389 if isinstance(rev, compat.string_types):
2389 2390 commit_id = rev
2390 2391 else:
2391 2392 commit_idx = rev
2392 2393 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2393 2394 pre_load=pre_load)
2394 2395
2395 2396 def get_landing_commit(self):
2396 2397 """
2397 2398 Returns landing commit, or if that doesn't exist returns the tip
2398 2399 """
2399 2400 _rev_type, _rev = self.landing_rev
2400 2401 commit = self.get_commit(_rev)
2401 2402 if isinstance(commit, EmptyCommit):
2402 2403 return self.get_commit()
2403 2404 return commit
2404 2405
2405 2406 def flush_commit_cache(self):
2406 2407 self.update_commit_cache(cs_cache={'raw_id':'0'})
2407 2408 self.update_commit_cache()
2408 2409
    def update_commit_cache(self, cs_cache=None, config=None):
        """
        Update cache of last commit for repository
        cache_keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author
            updated_on

        :param cs_cache: pre-built commit cache dict/changeset; when None the
            latest commit is loaded from a no-cache scm instance
        :param config: optional config passed to ``scm_instance``
        """
        from rhodecode.lib.vcs.backends.base import BaseChangeset
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)

        if cs_cache is None:
            # use no-cache version here
            try:
                scm_repo = self.scm_instance(cache=False, config=config)
            except VCSError:
                scm_repo = None
            empty = scm_repo is None or scm_repo.is_empty()

            if not empty:
                cs_cache = scm_repo.get_commit(
                    pre_load=["author", "date", "message", "parents", "branch"])
            else:
                cs_cache = EmptyCommit()

        if isinstance(cs_cache, BaseChangeset):
            cs_cache = cs_cache.__json__()

        def is_outdated(new_cs_cache):
            # cache is stale when either the commit id or revision moved
            if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
                new_cs_cache['revision'] != self.changeset_cache['revision']):
                return True
            return False

        # check if we have maybe already latest cached revision
        if is_outdated(cs_cache) or not self.changeset_cache:
            _current_datetime = datetime.datetime.utcnow()
            last_change = cs_cache.get('date') or _current_datetime
            # we check if last update is newer than the new value
            # if yes, we use the current timestamp instead. Imagine you get
            # old commit pushed 1y ago, we'd set last update 1y to ago.
            last_change_timestamp = datetime_to_time(last_change)
            # NOTE(review): both timestamps below derive from ``last_change``,
            # so this comparison is always False — per the comment above this
            # probably meant datetime_to_time(_current_datetime); confirm.
            current_timestamp = datetime_to_time(last_change)
            if last_change_timestamp > current_timestamp and not empty:
                cs_cache['date'] = _current_datetime

            _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
            cs_cache['updated_on'] = time.time()
            self.changeset_cache = cs_cache
            self.updated_on = last_change
            Session().add(self)
            Session().commit()

        else:
            if empty:
                cs_cache = EmptyCommit().__json__()
            else:
                cs_cache = self.changeset_cache

            _date_latest = parse_datetime(cs_cache.get('date') or empty_date)

            cs_cache['updated_on'] = time.time()
            self.changeset_cache = cs_cache
            self.updated_on = _date_latest
            Session().add(self)
            Session().commit()

        log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
                  self.repo_name, cs_cache, _date_latest)
2487 2488
2488 2489 @property
2489 2490 def tip(self):
2490 2491 return self.get_commit('tip')
2491 2492
2492 2493 @property
2493 2494 def author(self):
2494 2495 return self.tip.author
2495 2496
2496 2497 @property
2497 2498 def last_change(self):
2498 2499 return self.scm_instance().last_change
2499 2500
2500 2501 def get_comments(self, revisions=None):
2501 2502 """
2502 2503 Returns comments for this repository grouped by revisions
2503 2504
2504 2505 :param revisions: filter query by revisions only
2505 2506 """
2506 2507 cmts = ChangesetComment.query()\
2507 2508 .filter(ChangesetComment.repo == self)
2508 2509 if revisions:
2509 2510 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2510 2511 grouped = collections.defaultdict(list)
2511 2512 for cmt in cmts.all():
2512 2513 grouped[cmt.revision].append(cmt)
2513 2514 return grouped
2514 2515
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository.

        :param revisions: list of revisions to get statuses for
        :return: dict of revision -> ``[status, status_label, pr_id, pr_repo]``
            where ``pr_id``/``pr_repo`` are None when the status is not tied
            to a pull request
        """
        # only version==0 rows: the latest status for each revision
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            # (e.g. SQL IN-clause size limits on some databases)
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        # seed every revision of open PRs with a synthetic "under review" entry
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # real statuses from the DB override the synthetic PR entries above
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
2556 2557
2557 2558 # ==========================================================================
2558 2559 # SCM CACHE INSTANCE
2559 2560 # ==========================================================================
2560 2561
    def scm_instance(self, **kwargs):
        """
        Return a vcs backend instance for this repository.

        Recognized kwargs:
          - config: custom config object; passing one bypasses the cache
            (currently only used by repo2dbmapper)
          - cache: explicit cache flag forwarded to the vcs server
          - vcs_full_cache: override of the global ``vcs_full_cache`` setting
        """
        import rhodecode

        # Passing a config will not hit the cache currently only used
        # for repo2dbmapper
        config = kwargs.pop('config', None)
        cache = kwargs.pop('cache', None)
        vcs_full_cache = kwargs.pop('vcs_full_cache', None)
        if vcs_full_cache is not None:
            # allows overriding the global config per-call
            full_cache = vcs_full_cache
        else:
            full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
        # if cache is NOT defined use default global, else we have a full
        # control over cache behaviour
        if cache is None and full_cache and not config:
            log.debug('Initializing pure cached instance for %s', self.repo_path)
            return self._get_instance_cached()

        # cache here is sent to the "vcs server"
        return self._get_instance(cache=bool(cache), config=config)
2582 2583
    def _get_instance_cached(self):
        """
        Return a vcs instance through the long-term repo cache region,
        re-computing it when the invalidation context signals a change.
        """
        from rhodecode.lib import rc_cache

        cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def get_instance_cached(repo_id, context_id, _cache_state_uid):
            # only _cache_state_uid is used; repo_id/context_id are part of
            # the cache key so each state/thread gets its own entry
            return self._get_instance(repo_state_uid=_cache_state_uid)

        # we must use a thread-scoped cache here, because each gevent thread
        # needs its own, not-shared connection and cache.
        # we also alter `args` so the cache key is individual for every green thread.
        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
            thread_scoped=True)
        with inv_context_manager as invalidation_context:
            cache_state_uid = invalidation_context.cache_data['cache_state_uid']
            args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)

            # re-compute and store cache if we get an invalidate signal
            if invalidation_context.should_invalidate():
                instance = get_instance_cached.refresh(*args)
            else:
                instance = get_instance_cached(*args)

            log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
            return instance
2613 2614
2614 2615 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2615 2616 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2616 2617 self.repo_type, self.repo_path, cache)
2617 2618 config = config or self._config
2618 2619 custom_wire = {
2619 2620 'cache': cache, # controls the vcs.remote cache
2620 2621 'repo_state_uid': repo_state_uid
2621 2622 }
2622 2623 repo = get_vcs_instance(
2623 2624 repo_path=safe_str(self.repo_full_path),
2624 2625 config=config,
2625 2626 with_wire=custom_wire,
2626 2627 create=False,
2627 2628 _vcs_alias=self.repo_type)
2628 2629 if repo is not None:
2629 2630 repo.count() # cache rebuild
2630 2631 return repo
2631 2632
2632 2633 def get_shadow_repository_path(self, workspace_id):
2633 2634 from rhodecode.lib.vcs.backends.base import BaseRepository
2634 2635 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2635 2636 self.repo_full_path, self.repo_id, workspace_id)
2636 2637 return shadow_repo_path
2637 2638
2638 2639 def __json__(self):
2639 2640 return {'landing_rev': self.landing_rev}
2640 2641
2641 2642 def get_dict(self):
2642 2643
2643 2644 # Since we transformed `repo_name` to a hybrid property, we need to
2644 2645 # keep compatibility with the code which uses `repo_name` field.
2645 2646
2646 2647 result = super(Repository, self).get_dict()
2647 2648 result['repo_name'] = result.pop('_repo_name', None)
2648 2649 return result
2649 2650
2650 2651
class RepoGroup(Base, BaseModel):
    """
    A (possibly nested) group of repositories. The full nested name is
    stored in ``group_name``; nesting is modelled via ``group_parent_id``.
    """
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        base_table_args,
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # backing column for the `group_name` hybrid property
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    # normalized lookup hash, kept in sync by the `group_name` setter
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    # self-referential FK; NULL means this is a top-level group
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    # owner of the group
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    # marks a user's personal group
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)  # JSON data

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration', cascade="all, delete-orphan")

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id')

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.group_id, self.group_name)
2689 2690
2690 2691 @hybrid_property
2691 2692 def group_name(self):
2692 2693 return self._group_name
2693 2694
2694 2695 @group_name.setter
2695 2696 def group_name(self, value):
2696 2697 self._group_name = value
2697 2698 self.group_name_hash = self.hash_repo_group_name(value)
2698 2699
2699 2700 @classmethod
2700 2701 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
2701 2702 from rhodecode.lib.vcs.backends.base import EmptyCommit
2702 2703 dummy = EmptyCommit().__json__()
2703 2704 if not changeset_cache_raw:
2704 2705 dummy['source_repo_id'] = repo_id
2705 2706 return json.loads(json.dumps(dummy))
2706 2707
2707 2708 try:
2708 2709 return json.loads(changeset_cache_raw)
2709 2710 except TypeError:
2710 2711 return dummy
2711 2712 except Exception:
2712 2713 log.error(traceback.format_exc())
2713 2714 return dummy
2714 2715
    @hybrid_property
    def changeset_cache(self):
        # cached info about the newest commit in this group, stored as JSON;
        # empty repo_id ('') since a group has no single source repository
        return self._load_changeset_cache('', self._changeset_cache)

    @changeset_cache.setter
    def changeset_cache(self, val):
        # serialize to JSON; errors are logged and swallowed so a broken
        # cache value never breaks the surrounding transaction
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())
2725 2726
    @validates('group_parent_id')
    def validate_group_parent_id(self, key, val):
        """
        Check cycle references for a parent group to self
        """
        # NOTE(review): `assert` is stripped when Python runs with -O; if this
        # guard must always hold, raising an explicit exception would be safer.
        if self.group_id and val:
            assert val != self.group_id

        return val
2735 2736
    @hybrid_property
    def description_safe(self):
        # group description with HTML escaped for safe template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.group_description)
2740 2741
2741 2742 @classmethod
2742 2743 def hash_repo_group_name(cls, repo_group_name):
2743 2744 val = remove_formatting(repo_group_name)
2744 2745 val = safe_str(val).lower()
2745 2746 chars = []
2746 2747 for c in val:
2747 2748 if c not in string.ascii_letters:
2748 2749 c = str(ord(c))
2749 2750 chars.append(c)
2750 2751
2751 2752 return ''.join(chars)
2752 2753
2753 2754 @classmethod
2754 2755 def _generate_choice(cls, repo_group):
2755 2756 from webhelpers2.html import literal as _literal
2756 2757 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2757 2758 return repo_group.group_id, _name(repo_group.full_path_splitted)
2758 2759
2759 2760 @classmethod
2760 2761 def groups_choices(cls, groups=None, show_empty_group=True):
2761 2762 if not groups:
2762 2763 groups = cls.query().all()
2763 2764
2764 2765 repo_groups = []
2765 2766 if show_empty_group:
2766 2767 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2767 2768
2768 2769 repo_groups.extend([cls._generate_choice(x) for x in groups])
2769 2770
2770 2771 repo_groups = sorted(
2771 2772 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2772 2773 return repo_groups
2773 2774
    @classmethod
    def url_sep(cls):
        # separator used in repository-group URL paths
        return URL_SEP
2777 2778
2778 2779 @classmethod
2779 2780 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2780 2781 if case_insensitive:
2781 2782 gr = cls.query().filter(func.lower(cls.group_name)
2782 2783 == func.lower(group_name))
2783 2784 else:
2784 2785 gr = cls.query().filter(cls.group_name == group_name)
2785 2786 if cache:
2786 2787 name_key = _hash_key(group_name)
2787 2788 gr = gr.options(
2788 2789 FromCache("sql_cache_short", "get_group_%s" % name_key))
2789 2790 return gr.scalar()
2790 2791
2791 2792 @classmethod
2792 2793 def get_user_personal_repo_group(cls, user_id):
2793 2794 user = User.get(user_id)
2794 2795 if user.username == User.DEFAULT_USER:
2795 2796 return None
2796 2797
2797 2798 return cls.query()\
2798 2799 .filter(cls.personal == true()) \
2799 2800 .filter(cls.user == user) \
2800 2801 .order_by(cls.group_id.asc()) \
2801 2802 .first()
2802 2803
2803 2804 @classmethod
2804 2805 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2805 2806 case_insensitive=True):
2806 2807 q = RepoGroup.query()
2807 2808
2808 2809 if not isinstance(user_id, Optional):
2809 2810 q = q.filter(RepoGroup.user_id == user_id)
2810 2811
2811 2812 if not isinstance(group_id, Optional):
2812 2813 q = q.filter(RepoGroup.group_parent_id == group_id)
2813 2814
2814 2815 if case_insensitive:
2815 2816 q = q.order_by(func.lower(RepoGroup.group_name))
2816 2817 else:
2817 2818 q = q.order_by(RepoGroup.group_name)
2818 2819 return q.all()
2819 2820
    @property
    def parents(self, parents_recursion_limit=10):
        """
        List of ancestor groups, outermost first (root ancestor at index 0).

        NOTE(review): a property cannot receive arguments, so
        ``parents_recursion_limit`` is always its default of 10.
        """
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            # peek at the next ancestor before advancing
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error('more than %s parents found for group %s, stopping '
                          'recursive parent fetching', parents_recursion_limit, self)
                break

            groups.insert(0, gr)
        return groups
2842 2843
2843 2844 @property
2844 2845 def last_commit_cache_update_diff(self):
2845 2846 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2846 2847
2847 2848 @classmethod
2848 2849 def _load_commit_change(cls, last_commit_cache):
2849 2850 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2850 2851 empty_date = datetime.datetime.fromtimestamp(0)
2851 2852 date_latest = last_commit_cache.get('date', empty_date)
2852 2853 try:
2853 2854 return parse_datetime(date_latest)
2854 2855 except Exception:
2855 2856 return empty_date
2856 2857
2857 2858 @property
2858 2859 def last_commit_change(self):
2859 2860 return self._load_commit_change(self.changeset_cache)
2860 2861
    @property
    def last_db_change(self):
        # last time this database row itself was updated
        return self.updated_on
2864 2865
    @property
    def children(self):
        # query of the direct (one level) child groups of this group
        return RepoGroup.query().filter(RepoGroup.parent_group == self)
2868 2869
2869 2870 @property
2870 2871 def name(self):
2871 2872 return self.group_name.split(RepoGroup.url_sep())[-1]
2872 2873
    @property
    def full_path(self):
        # the stored group_name is already the full nested path
        return self.group_name
2876 2877
    @property
    def full_path_splitted(self):
        # full nested path as a list of segments
        return self.group_name.split(RepoGroup.url_sep())
2880 2881
2881 2882 @property
2882 2883 def repositories(self):
2883 2884 return Repository.query()\
2884 2885 .filter(Repository.group == self)\
2885 2886 .order_by(Repository.repo_name)
2886 2887
2887 2888 @property
2888 2889 def repositories_recursive_count(self):
2889 2890 cnt = self.repositories.count()
2890 2891
2891 2892 def children_count(group):
2892 2893 cnt = 0
2893 2894 for child in group.children:
2894 2895 cnt += child.repositories.count()
2895 2896 cnt += children_count(child)
2896 2897 return cnt
2897 2898
2898 2899 return cnt + children_count(self)
2899 2900
2900 2901 def _recursive_objects(self, include_repos=True, include_groups=True):
2901 2902 all_ = []
2902 2903
2903 2904 def _get_members(root_gr):
2904 2905 if include_repos:
2905 2906 for r in root_gr.repositories:
2906 2907 all_.append(r)
2907 2908 childs = root_gr.children.all()
2908 2909 if childs:
2909 2910 for gr in childs:
2910 2911 if include_groups:
2911 2912 all_.append(gr)
2912 2913 _get_members(gr)
2913 2914
2914 2915 root_group = []
2915 2916 if include_groups:
2916 2917 root_group = [self]
2917 2918
2918 2919 _get_members(self)
2919 2920 return root_group + all_
2920 2921
2921 2922 def recursive_groups_and_repos(self):
2922 2923 """
2923 2924 Recursive return all groups, with repositories in those groups
2924 2925 """
2925 2926 return self._recursive_objects()
2926 2927
2927 2928 def recursive_groups(self):
2928 2929 """
2929 2930 Returns all children groups for this group including children of children
2930 2931 """
2931 2932 return self._recursive_objects(include_repos=False)
2932 2933
2933 2934 def recursive_repos(self):
2934 2935 """
2935 2936 Returns all children repositories for this group
2936 2937 """
2937 2938 return self._recursive_objects(include_groups=False)
2938 2939
2939 2940 def get_new_name(self, group_name):
2940 2941 """
2941 2942 returns new full group name based on parent and new name
2942 2943
2943 2944 :param group_name:
2944 2945 """
2945 2946 path_prefix = (self.parent_group.full_path_splitted if
2946 2947 self.parent_group else [])
2947 2948 return RepoGroup.url_sep().join(path_prefix + [group_name])
2948 2949
    def update_commit_cache(self, config=None):
        """
        Update cache of last commit for newest repository inside this repository group.
        cache_keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author

        """
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)

        def repo_groups_and_repos(root_gr):
            # direct repositories first, then direct child groups (whose own
            # caches already aggregate their subtrees)
            for _repo in root_gr.repositories:
                yield _repo
            for child_group in root_gr.children.all():
                yield child_group

        # pick the cache entry with the newest 'date' among all members
        latest_repo_cs_cache = {}
        for obj in repo_groups_and_repos(self):
            repo_cs_cache = obj.changeset_cache
            date_latest = latest_repo_cs_cache.get('date', empty_date)
            date_current = repo_cs_cache.get('date', empty_date)
            current_timestamp = datetime_to_time(parse_datetime(date_latest))
            if current_timestamp < datetime_to_time(parse_datetime(date_current)):
                latest_repo_cs_cache = repo_cs_cache
                if hasattr(obj, 'repo_id'):
                    # a repository: record it as the source of the newest commit
                    latest_repo_cs_cache['source_repo_id'] = obj.repo_id
                else:
                    # a child group: propagate its recorded source repo id
                    latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')

        _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)

        latest_repo_cs_cache['updated_on'] = time.time()
        self.changeset_cache = latest_repo_cs_cache
        self.updated_on = _date_latest
        Session().add(self)
        Session().commit()

        log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
                  self.group_name, latest_repo_cs_cache, _date_latest)
2996 2997
2997 2998 def permissions(self, with_admins=True, with_owner=True,
2998 2999 expand_from_user_groups=False):
2999 3000 """
3000 3001 Permissions for repository groups
3001 3002 """
3002 3003 _admin_perm = 'group.admin'
3003 3004
3004 3005 owner_row = []
3005 3006 if with_owner:
3006 3007 usr = AttributeDict(self.user.get_dict())
3007 3008 usr.owner_row = True
3008 3009 usr.permission = _admin_perm
3009 3010 owner_row.append(usr)
3010 3011
3011 3012 super_admin_ids = []
3012 3013 super_admin_rows = []
3013 3014 if with_admins:
3014 3015 for usr in User.get_all_super_admins():
3015 3016 super_admin_ids.append(usr.user_id)
3016 3017 # if this admin is also owner, don't double the record
3017 3018 if usr.user_id == owner_row[0].user_id:
3018 3019 owner_row[0].admin_row = True
3019 3020 else:
3020 3021 usr = AttributeDict(usr.get_dict())
3021 3022 usr.admin_row = True
3022 3023 usr.permission = _admin_perm
3023 3024 super_admin_rows.append(usr)
3024 3025
3025 3026 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
3026 3027 q = q.options(joinedload(UserRepoGroupToPerm.group),
3027 3028 joinedload(UserRepoGroupToPerm.user),
3028 3029 joinedload(UserRepoGroupToPerm.permission),)
3029 3030
3030 3031 # get owners and admins and permissions. We do a trick of re-writing
3031 3032 # objects from sqlalchemy to named-tuples due to sqlalchemy session
3032 3033 # has a global reference and changing one object propagates to all
3033 3034 # others. This means if admin is also an owner admin_row that change
3034 3035 # would propagate to both objects
3035 3036 perm_rows = []
3036 3037 for _usr in q.all():
3037 3038 usr = AttributeDict(_usr.user.get_dict())
3038 3039 # if this user is also owner/admin, mark as duplicate record
3039 3040 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
3040 3041 usr.duplicate_perm = True
3041 3042 usr.permission = _usr.permission.permission_name
3042 3043 perm_rows.append(usr)
3043 3044
3044 3045 # filter the perm rows by 'default' first and then sort them by
3045 3046 # admin,write,read,none permissions sorted again alphabetically in
3046 3047 # each group
3047 3048 perm_rows = sorted(perm_rows, key=display_user_sort)
3048 3049
3049 3050 user_groups_rows = []
3050 3051 if expand_from_user_groups:
3051 3052 for ug in self.permission_user_groups(with_members=True):
3052 3053 for user_data in ug.members:
3053 3054 user_groups_rows.append(user_data)
3054 3055
3055 3056 return super_admin_rows + owner_row + perm_rows + user_groups_rows
3056 3057
3057 3058 def permission_user_groups(self, with_members=False):
3058 3059 q = UserGroupRepoGroupToPerm.query()\
3059 3060 .filter(UserGroupRepoGroupToPerm.group == self)
3060 3061 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
3061 3062 joinedload(UserGroupRepoGroupToPerm.users_group),
3062 3063 joinedload(UserGroupRepoGroupToPerm.permission),)
3063 3064
3064 3065 perm_rows = []
3065 3066 for _user_group in q.all():
3066 3067 entry = AttributeDict(_user_group.users_group.get_dict())
3067 3068 entry.permission = _user_group.permission.permission_name
3068 3069 if with_members:
3069 3070 entry.members = [x.user.get_dict()
3070 3071 for x in _user_group.users_group.members]
3071 3072 perm_rows.append(entry)
3072 3073
3073 3074 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3074 3075 return perm_rows
3075 3076
3076 3077 def get_api_data(self):
3077 3078 """
3078 3079 Common function for generating api data
3079 3080
3080 3081 """
3081 3082 group = self
3082 3083 data = {
3083 3084 'group_id': group.group_id,
3084 3085 'group_name': group.group_name,
3085 3086 'group_description': group.description_safe,
3086 3087 'parent_group': group.parent_group.group_name if group.parent_group else None,
3087 3088 'repositories': [x.repo_name for x in group.repositories],
3088 3089 'owner': group.user.username,
3089 3090 }
3090 3091 return data
3091 3092
3092 3093 def get_dict(self):
3093 3094 # Since we transformed `group_name` to a hybrid property, we need to
3094 3095 # keep compatibility with the code which uses `group_name` field.
3095 3096 result = super(RepoGroup, self).get_dict()
3096 3097 result['group_name'] = result.pop('_group_name', None)
3097 3098 return result
3098 3099
3099 3100
3100 3101 class Permission(Base, BaseModel):
3101 3102 __tablename__ = 'permissions'
3102 3103 __table_args__ = (
3103 3104 Index('p_perm_name_idx', 'permission_name'),
3104 3105 base_table_args,
3105 3106 )
3106 3107
3107 3108 PERMS = [
3108 3109 ('hg.admin', _('RhodeCode Super Administrator')),
3109 3110
3110 3111 ('repository.none', _('Repository no access')),
3111 3112 ('repository.read', _('Repository read access')),
3112 3113 ('repository.write', _('Repository write access')),
3113 3114 ('repository.admin', _('Repository admin access')),
3114 3115
3115 3116 ('group.none', _('Repository group no access')),
3116 3117 ('group.read', _('Repository group read access')),
3117 3118 ('group.write', _('Repository group write access')),
3118 3119 ('group.admin', _('Repository group admin access')),
3119 3120
3120 3121 ('usergroup.none', _('User group no access')),
3121 3122 ('usergroup.read', _('User group read access')),
3122 3123 ('usergroup.write', _('User group write access')),
3123 3124 ('usergroup.admin', _('User group admin access')),
3124 3125
3125 3126 ('branch.none', _('Branch no permissions')),
3126 3127 ('branch.merge', _('Branch access by web merge')),
3127 3128 ('branch.push', _('Branch access by push')),
3128 3129 ('branch.push_force', _('Branch access by push with force')),
3129 3130
3130 3131 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
3131 3132 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
3132 3133
3133 3134 ('hg.usergroup.create.false', _('User Group creation disabled')),
3134 3135 ('hg.usergroup.create.true', _('User Group creation enabled')),
3135 3136
3136 3137 ('hg.create.none', _('Repository creation disabled')),
3137 3138 ('hg.create.repository', _('Repository creation enabled')),
3138 3139 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
3139 3140 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
3140 3141
3141 3142 ('hg.fork.none', _('Repository forking disabled')),
3142 3143 ('hg.fork.repository', _('Repository forking enabled')),
3143 3144
3144 3145 ('hg.register.none', _('Registration disabled')),
3145 3146 ('hg.register.manual_activate', _('User Registration with manual account activation')),
3146 3147 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
3147 3148
3148 3149 ('hg.password_reset.enabled', _('Password reset enabled')),
3149 3150 ('hg.password_reset.hidden', _('Password reset hidden')),
3150 3151 ('hg.password_reset.disabled', _('Password reset disabled')),
3151 3152
3152 3153 ('hg.extern_activate.manual', _('Manual activation of external account')),
3153 3154 ('hg.extern_activate.auto', _('Automatic activation of external account')),
3154 3155
3155 3156 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
3156 3157 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
3157 3158 ]
3158 3159
3159 3160 # definition of system default permissions for DEFAULT user, created on
3160 3161 # system setup
3161 3162 DEFAULT_USER_PERMISSIONS = [
3162 3163 # object perms
3163 3164 'repository.read',
3164 3165 'group.read',
3165 3166 'usergroup.read',
3166 3167 # branch, for backward compat we need same value as before so forced pushed
3167 3168 'branch.push_force',
3168 3169 # global
3169 3170 'hg.create.repository',
3170 3171 'hg.repogroup.create.false',
3171 3172 'hg.usergroup.create.false',
3172 3173 'hg.create.write_on_repogroup.true',
3173 3174 'hg.fork.repository',
3174 3175 'hg.register.manual_activate',
3175 3176 'hg.password_reset.enabled',
3176 3177 'hg.extern_activate.auto',
3177 3178 'hg.inherit_default_perms.true',
3178 3179 ]
3179 3180
3180 3181 # defines which permissions are more important higher the more important
3181 3182 # Weight defines which permissions are more important.
3182 3183 # The higher number the more important.
3183 3184 PERM_WEIGHTS = {
3184 3185 'repository.none': 0,
3185 3186 'repository.read': 1,
3186 3187 'repository.write': 3,
3187 3188 'repository.admin': 4,
3188 3189
3189 3190 'group.none': 0,
3190 3191 'group.read': 1,
3191 3192 'group.write': 3,
3192 3193 'group.admin': 4,
3193 3194
3194 3195 'usergroup.none': 0,
3195 3196 'usergroup.read': 1,
3196 3197 'usergroup.write': 3,
3197 3198 'usergroup.admin': 4,
3198 3199
3199 3200 'branch.none': 0,
3200 3201 'branch.merge': 1,
3201 3202 'branch.push': 3,
3202 3203 'branch.push_force': 4,
3203 3204
3204 3205 'hg.repogroup.create.false': 0,
3205 3206 'hg.repogroup.create.true': 1,
3206 3207
3207 3208 'hg.usergroup.create.false': 0,
3208 3209 'hg.usergroup.create.true': 1,
3209 3210
3210 3211 'hg.fork.none': 0,
3211 3212 'hg.fork.repository': 1,
3212 3213 'hg.create.none': 0,
3213 3214 'hg.create.repository': 1
3214 3215 }
3215 3216
3216 3217 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3217 3218 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
3218 3219 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3219 3220
3220 3221 def __unicode__(self):
3221 3222 return u"<%s('%s:%s')>" % (
3222 3223 self.__class__.__name__, self.permission_id, self.permission_name
3223 3224 )
3224 3225
3225 3226 @classmethod
3226 3227 def get_by_key(cls, key):
3227 3228 return cls.query().filter(cls.permission_name == key).scalar()
3228 3229
    @classmethod
    def get_default_repo_perms(cls, user_id, repo_id=None):
        """
        Direct per-user repository permissions for ``user_id``.

        :param repo_id: optionally restrict to a single repository
        :return: list of (UserRepoToPerm, Repository, Permission) tuples
        """
        q = Session().query(UserRepoToPerm, Repository, Permission)\
            .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .filter(UserRepoToPerm.user_id == user_id)
        if repo_id:
            q = q.filter(UserRepoToPerm.repository_id == repo_id)
        return q.all()
3238 3239
    @classmethod
    def get_default_repo_branch_perms(cls, user_id, repo_id=None):
        """
        Direct per-user branch permission rules for ``user_id``, ordered by
        rule order.

        :param repo_id: optionally restrict to a single repository
        :return: list of (UserToRepoBranchPermission, UserRepoToPerm,
            Permission) tuples
        """
        q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
            .join(
                Permission,
                UserToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserRepoToPerm,
                UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
            .filter(UserRepoToPerm.user_id == user_id)

        if repo_id:
            q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserToRepoBranchPermission.rule_order).all()
3253 3254
    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Repository permissions ``user_id`` receives via membership in active
        user groups.

        :param repo_id: optionally restrict to a single repository
        :return: list of (UserGroupRepoToPerm, Repository, Permission) tuples
        """
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()
3277 3278
    @classmethod
    def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Branch permission rules ``user_id`` receives via membership in active
        user groups, ordered by rule order.

        :param repo_id: optionally restrict to a single repository
        :return: list of (UserGroupToRepoBranchPermission, UserGroupRepoToPerm,
            Permission) tuples
        """
        q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
            .join(
                Permission,
                UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserGroupRepoToPerm,
                UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())

        if repo_id:
            q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3300 3301
3301 3302 @classmethod
3302 3303 def get_default_group_perms(cls, user_id, repo_group_id=None):
3303 3304 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3304 3305 .join(
3305 3306 Permission,
3306 3307 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3307 3308 .join(
3308 3309 RepoGroup,
3309 3310 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3310 3311 .filter(UserRepoGroupToPerm.user_id == user_id)
3311 3312 if repo_group_id:
3312 3313 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3313 3314 return q.all()
3314 3315
3315 3316 @classmethod
3316 3317 def get_default_group_perms_from_user_group(
3317 3318 cls, user_id, repo_group_id=None):
3318 3319 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3319 3320 .join(
3320 3321 Permission,
3321 3322 UserGroupRepoGroupToPerm.permission_id ==
3322 3323 Permission.permission_id)\
3323 3324 .join(
3324 3325 RepoGroup,
3325 3326 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3326 3327 .join(
3327 3328 UserGroup,
3328 3329 UserGroupRepoGroupToPerm.users_group_id ==
3329 3330 UserGroup.users_group_id)\
3330 3331 .join(
3331 3332 UserGroupMember,
3332 3333 UserGroupRepoGroupToPerm.users_group_id ==
3333 3334 UserGroupMember.users_group_id)\
3334 3335 .filter(
3335 3336 UserGroupMember.user_id == user_id,
3336 3337 UserGroup.users_group_active == true())
3337 3338 if repo_group_id:
3338 3339 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3339 3340 return q.all()
3340 3341
3341 3342 @classmethod
3342 3343 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3343 3344 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3344 3345 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3345 3346 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3346 3347 .filter(UserUserGroupToPerm.user_id == user_id)
3347 3348 if user_group_id:
3348 3349 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3349 3350 return q.all()
3350 3351
3351 3352 @classmethod
3352 3353 def get_default_user_group_perms_from_user_group(
3353 3354 cls, user_id, user_group_id=None):
3354 3355 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3355 3356 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3356 3357 .join(
3357 3358 Permission,
3358 3359 UserGroupUserGroupToPerm.permission_id ==
3359 3360 Permission.permission_id)\
3360 3361 .join(
3361 3362 TargetUserGroup,
3362 3363 UserGroupUserGroupToPerm.target_user_group_id ==
3363 3364 TargetUserGroup.users_group_id)\
3364 3365 .join(
3365 3366 UserGroup,
3366 3367 UserGroupUserGroupToPerm.user_group_id ==
3367 3368 UserGroup.users_group_id)\
3368 3369 .join(
3369 3370 UserGroupMember,
3370 3371 UserGroupUserGroupToPerm.user_group_id ==
3371 3372 UserGroupMember.users_group_id)\
3372 3373 .filter(
3373 3374 UserGroupMember.user_id == user_id,
3374 3375 UserGroup.users_group_active == true())
3375 3376 if user_group_id:
3376 3377 q = q.filter(
3377 3378 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3378 3379
3379 3380 return q.all()
3380 3381
3381 3382
class UserRepoToPerm(Base, BaseModel):
    """
    Permission granted directly to a single user on a single repository
    (row in the `repo_to_perm` table).
    """
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    # branch rules attached to this entry; deleted together with it
    # (delete-orphan) and eagerly loaded
    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined')

    @classmethod
    def create(cls, user, repository, permission):
        """Add a new user->repo permission row to the session (not committed)."""
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.repository)
3411 3412
3412 3413
class UserUserGroupToPerm(Base, BaseModel):
    """
    Permission granted directly to a single user on a user group
    (row in the `user_user_group_to_perm` table).
    """
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Add a new user->user-group permission row to the session (not committed)."""
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)
3440 3441
3441 3442
class UserToPerm(Base, BaseModel):
    """
    Global (non-repository-scoped) permission attached to a single user
    (row in the `user_to_perm` table).
    """
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)
3458 3459
3459 3460
class UserGroupRepoToPerm(Base, BaseModel):
    """
    Permission granted to a user group on a single repository
    (row in the `users_group_repo_to_perm` table).
    """
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')
    # branch rules attached to this group/repo entry; cascaded on delete
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Add a new user-group->repo permission row to the session (not committed)."""
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3488 3489
3489 3490
class UserGroupUserGroupToPerm(Base, BaseModel):
    """
    Permission granted to one user group (the granter, ``user_group``) on
    another user group (the ``target_user_group``). The check constraint
    forbids a group granting permission on itself.
    """
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # both relationships point at UserGroup, so explicit primaryjoins are needed
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Add a new group->group permission row to the session (not committed)."""
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3518 3519
3519 3520
class UserGroupToPerm(Base, BaseModel):
    """
    Global (non-repository-scoped) permission attached to a user group
    (row in the `users_group_to_perm` table).
    """
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
3533 3534
3534 3535
class UserRepoGroupToPerm(Base, BaseModel):
    """
    Permission granted directly to a single user on a repository group
    (row in the `user_repo_group_to_perm` table).
    """
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """Add a new user->repo-group permission row to the session (not committed)."""
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n
3559 3560
3560 3561
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """
    Permission granted to a user group on a repository group
    (row in the `users_group_repo_group_to_perm` table).
    """
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        # NOTE(review): unlike the sibling *ToPerm tables, this unique
        # constraint omits 'permission_id' -- confirm this is intentional
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """Add a new user-group->repo-group permission row to the session (not committed)."""
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3588 3589
3589 3590
class Statistics(Base, BaseModel):
    """
    Pre-computed commit statistics for a repository (one row per repo,
    enforced by the unique ``repository_id``).
    """
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True)
3604 3605
3605 3606
class UserFollowing(Base, BaseModel):
    """
    Records a user following either a repository or another user; exactly
    one of ``follows_repo_id`` / ``follows_user_id`` is expected to be set
    (both columns are nullable).
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # two FKs point at users, so explicit primaryjoins disambiguate
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query (not a list) of followings for the given repo id."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
3628 3629
3629 3630
class CacheKey(Base, BaseModel):
    """
    Cache invalidation registry. Each row represents one cache key grouped
    under a namespace (``cache_args``); invalidation flips ``cache_active``
    and rotates ``cache_state_uid`` so all workers see a new state.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'

    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
    SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args='', cache_state_uid=None):
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False
        # first key should be same for all entries, since all workers should share it
        self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # split cache_key around cache_args; with default str.partition
        # semantics an empty/missing separator leaves everything in `prefix`
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def generate_new_state_uid(cls, based_on=None):
        """
        Return a new state uid: deterministic (uuid5) when `based_on` is
        given, random (uuid4) otherwise.
        """
        if based_on:
            return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
        else:
            return str(uuid.uuid4())

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                # deactivate and rotate the shared state uid so readers refresh
                qry.update({"cache_active": False,
                            "cache_state_uid": cls.generate_new_state_uid()})
                log.debug('cache objects marked as invalid for cache args %s',
                          safe_str(cache_uid))

            Session().commit()
        except Exception:
            # best-effort: log and roll back rather than propagate
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the CacheKey row for `cache_key`, or None if absent."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def get_namespace_map(cls, namespace):
        """Return {cache_key: CacheKey} for all rows in the given namespace."""
        return {
            x.cache_key: x
            for x in cls.query().filter(cls.cache_args == namespace)}
3737 3738
3738 3739
class ChangesetComment(Base, BaseModel):
    """
    A comment made either on a commit (``revision``) or on a pull request
    (``pull_request_id``); inline comments additionally carry
    ``f_path``/``line_no``.
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    # display_state value marking inline comments that no longer match the diff
    COMMENT_OUTDATED = u'comment_outdated'
    COMMENT_TYPE_NOTE = u'note'
    COMMENT_TYPE_TODO = u'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    # immutable_state values: immutable comments cannot be changed anymore
    OP_IMMUTABLE = u'immutable'
    OP_CHANGEABLE = u'changeable'

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)
    immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    # self-referential link: a TODO comment can be resolved by other comments
    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')

    author = relationship('User', lazy='select')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select')
    pull_request = relationship('PullRequest', lazy='select')
    pull_request_version = relationship('PullRequestVersion', lazy='select')
    # edit history, ordered so the newest version comes last
    history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision:
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions):
        """
        Return the 1-based index of `pr_version` within `versions`,
        or None when not found.
        """
        num_versions = [x.pull_request_version_id for x in versions]
        try:
            # list.index raises ValueError only; IndexError kept as a
            # defensive extra
            return num_versions.index(pr_version) + 1
        except (IndexError, ValueError):
            return

    @property
    def outdated(self):
        # True for inline comments flagged as no longer matching the diff
        return self.display_state == self.COMMENT_OUTDATED

    @property
    def outdated_js(self):
        # JSON-encoded boolean for templates
        return json.dumps(self.display_state == self.COMMENT_OUTDATED)

    @property
    def immutable(self):
        return self.immutable_state == self.OP_IMMUTABLE

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        def version_check():
            return self.pull_request_version_id and self.pull_request_version_id != version

        if self.is_inline:
            return self.outdated and version_check()
        else:
            # general comments don't have .outdated set, also latest don't have a version
            return version_check()

    def outdated_at_version_js(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return json.dumps(self.outdated_at_version(version))

    def older_than_version(self, version):
        """
        Checks if comment is made from previous version than given
        """
        if version is None:
            return self.pull_request_version != version

        # NOTE(review): this compares the `pull_request_version` relationship
        # object (not `pull_request_version_id`) against an int; only valid
        # under Python 2's permissive ordering -- confirm intended operand
        return self.pull_request_version < version

    def older_than_version_js(self, version):
        """
        Checks if comment is made from previous version than given
        """
        return json.dumps(self.older_than_version(version))

    @property
    def commit_id(self):
        """New style naming to stop using .revision"""
        return self.revision

    @property
    def resolved(self):
        # first resolving comment, or None if the TODO is unresolved
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        # inline == attached to a concrete file and line
        if self.line_no and self.f_path:
            return True
        return False

    @property
    def last_version(self):
        """Highest stored edit-history version, 0 when never edited."""
        version = 0
        if self.history:
            # history is ordered by version ascending (see relationship)
            version = self.history[-1].version
        return version

    def get_index_version(self, versions):
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    @property
    def review_status(self):
        if self.status_change:
            return self.status_change[0].status

    @property
    def review_status_lbl(self):
        if self.status_change:
            return self.status_change[0].status_lbl

    def __repr__(self):
        if self.comment_id:
            return '<DB:Comment #%s>' % self.comment_id
        else:
            return '<DB:Comment at %#x>' % id(self)

    def get_api_data(self):
        """Return a dict representation used by the API layer."""
        comment = self

        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on,
            'comment_resolved_by': self.resolved,
            'comment_commit_id': comment.revision,
            'comment_pull_request_id': comment.pull_request_id,
            'comment_last_version': self.last_version
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3923 3924
3924 3925
class ChangesetCommentHistory(Base, BaseModel):
    """
    Stores previous revisions of an edited ChangesetComment,
    one row per saved version.
    """
    __tablename__ = 'changeset_comments_history'
    __table_args__ = (
        Index('cch_comment_id_idx', 'comment_id'),
        base_table_args,
    )

    comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
    comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
    version = Column("version", Integer(), nullable=False, default=0)
    created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    deleted = Column('deleted', Boolean(), default=False)

    author = relationship('User', lazy='joined')
    comment = relationship('ChangesetComment', cascade="all, delete")

    @classmethod
    def get_version(cls, comment_id):
        """
        Return the next history version number for `comment_id`.

        :return: 1 when the comment has no history yet, otherwise
            ``max(number of entries, latest stored version) + 1`` so a
            version number is never reused even if rows were removed.
        """
        q = Session().query(ChangesetCommentHistory).filter(
            ChangesetCommentHistory.comment_id == comment_id).order_by(
            ChangesetCommentHistory.version.desc())
        # fetch once instead of issuing separate COUNT queries per branch
        entries = q.all()
        if not entries:
            return 1
        latest_version = entries[0].version
        return max(len(entries), latest_version) + 1
3953 3954
3954 3955
class ChangesetStatus(Base, BaseModel):
    """
    Review status (approved/rejected/...) attached to a commit or pull
    request; versioned per (repo, revision) via the ``version`` column.
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # (value, translated label) pairs for display
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='select')
    repo = relationship('Repository', lazy='select')
    comment = relationship('ChangesetComment', lazy='select')
    pull_request = relationship('PullRequest', lazy='select')

    def __unicode__(self):
        return u"<%s('%s[v%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        """Return the translated label for a status value, or None."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Return a dict representation used by the API layer."""
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
4017 4018
4018 4019
class _SetState(object):
    """
    Context processor allowing changing state for sensitive operation such as
    pull request update or merge.

    On enter the pull request is switched to ``pr_state``; on clean exit it is
    restored to its original state (or an explicit ``back_state``). If the
    body raised, the state is intentionally NOT restored — the error is logged
    and the exception propagates.
    """

    def __init__(self, pull_request, pr_state, back_state=None):
        self._pr = pull_request
        # State restored on exit; defaults to the PR's current state.
        self._org_state = back_state or pull_request.pull_request_state
        self._pr_state = pr_state
        self._current_state = None

    def __enter__(self):
        log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
                  self._pr, self._pr_state)
        self.set_pr_state(self._pr_state)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_val is not None:
            # BUGFIX: traceback.format_exc() takes a *limit* argument, not a
            # traceback object — passing exc_tb was incorrect. Inside
            # __exit__ the exception is still current, so format_exc()
            # renders the active exception.
            log.error(traceback.format_exc())
            # returning None lets the exception propagate to the caller
            return None

        self.set_pr_state(self._org_state)
        log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
                  self._pr, self._org_state)

    @property
    def state(self):
        """Last state successfully persisted by this context, or None."""
        return self._current_state

    def set_pr_state(self, pr_state):
        """Persist *pr_state* on the pull request; re-raises on DB failure."""
        try:
            self._pr.pull_request_state = pr_state
            Session().add(self._pr)
            Session().commit()
            self._current_state = pr_state
        except Exception:
            log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
            raise
4060 4061
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.

    Shared (via SQLAlchemy declarative mixin) by :class:`PullRequest` and
    :class:`PullRequestVersion`. Physical column names still use the legacy
    ``org_*`` / ``other_*`` naming (see the TODOs below).
    """

    # .status values
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    # available states
    STATE_CREATING = u'creating'
    STATE_UPDATING = u'updating'
    STATE_MERGING = u'merging'
    STATE_CREATED = u'created'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    # renderer used for the description markup (e.g. rst/markdown) — confirm
    description_renderer = Column('description_renderer', Unicode(64), nullable=True)

    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    # lifecycle state (creating/updating/merging/created), see STATE_* above
    pull_request_state = Column("pull_request_state", String(255), nullable=True)

    @declared_attr
    def user_id(cls):
        # author of the pull request
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    _source_ref = Column('org_ref', Unicode(255), nullable=False)

    @hybrid_property
    def source_ref(self):
        return self._source_ref

    @source_ref.setter
    def source_ref(self, val):
        # refs are stored as 'type:name:commit_id' triplets
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._source_ref = safe_unicode(val)

    _target_ref = Column('other_ref', Unicode(255), nullable=False)

    @hybrid_property
    def target_ref(self):
        return self._target_ref

    @target_ref.setter
    def target_ref(self, val):
        # same 'type:name:commit_id' format as source_ref
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._target_ref = safe_unicode(val)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    last_merge_metadata = Column(
        'last_merge_metadata', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    merge_rev = Column('merge_rev', String(40), nullable=True)

    reviewer_data = Column(
        'reviewer_data_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    @property
    def reviewer_data_json(self):
        """Reviewer rule data serialized as a JSON string."""
        return json.dumps(self.reviewer_data)

    @property
    def last_merge_metadata_parsed(self):
        """
        Return last_merge_metadata with stored refs de-coerced back into
        :class:`Reference` tuples; {} when no metadata was recorded.
        """
        metadata = {}
        if not self.last_merge_metadata:
            return metadata

        if hasattr(self.last_merge_metadata, 'de_coerce'):
            for k, v in self.last_merge_metadata.de_coerce().items():
                if k in ['target_ref', 'source_ref']:
                    metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
                else:
                    if hasattr(v, 'de_coerce'):
                        metadata[k] = v.de_coerce()
                    else:
                        metadata[k] = v
        return metadata

    @property
    def work_in_progress(self):
        """checks if pull request is work in progress by checking the title"""
        # matches leading '[WIP]', 'WIP:' or 'WIP ' (case-insensitive via upper())
        title = self.title.upper()
        if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
            return True
        return False

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def revisions(self):
        # stored as a ':'-joined string; exposed as a list of commit ids
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = u':'.join(val)

    @hybrid_property
    def last_merge_status(self):
        return safe_int(self._last_merge_status)

    @last_merge_status.setter
    def last_merge_status(self, val):
        self._last_merge_status = val

    @declared_attr
    def author(cls):
        return relationship('User', lazy='joined')

    @declared_attr
    def source_repo(cls):
        # primaryjoin is built per subclass name since the FK column is shared
        return relationship(
            'Repository',
            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def source_ref_parts(self):
        """source_ref parsed into a Reference(type, name, commit_id)."""
        return self.unicode_to_reference(self.source_ref)

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def target_ref_parts(self):
        """target_ref parsed into a Reference(type, name, commit_id)."""
        return self.unicode_to_reference(self.target_ref)

    @property
    def shadow_merge_ref(self):
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        self._shadow_merge_ref = self.reference_to_unicode(ref)

    @staticmethod
    def unicode_to_reference(raw):
        # thin wrapper kept for API compatibility; delegates to the
        # module-level helper (defined elsewhere in this file)
        return unicode_to_reference(raw)

    @staticmethod
    def reference_to_unicode(ref):
        # thin wrapper kept for API compatibility; delegates to the
        # module-level helper (defined elsewhere in this file)
        return reference_to_unicode(ref)

    def get_api_data(self, with_merge_state=True):
        """
        Build the API dict for this pull request.

        :param with_merge_state: when True, computes live merge status via
            PullRequestModel (potentially expensive); otherwise a
            'not_available' placeholder is returned.
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            merge_response, merge_status, msg = \
                PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status,
                'message': safe_unicode(msg),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref._asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'state': pull_request.pull_request_state,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data

    def set_state(self, pull_request_state, final_state=None):
        """
        # goes from initial state to updating to initial state.
        # initial state can be changed by specifying back_state=
        with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
            pull_request.merge()

        :param pull_request_state: temporary state set while the context runs
        :param final_state: optional state to restore on exit instead of the
            current one

        """

        return _SetState(self, pull_request_state, back_state=final_state)
4355 4341
class PullRequest(Base, _PullRequestBase):
    """
    Live (current) pull request entry; historical snapshots live in
    :class:`PullRequestVersion`.
    """
    __tablename__ = 'pull_requests'
    __table_args__ = (
        base_table_args,
    )
    # sentinel used where a "version" is expected but the latest is meant
    LATEST_VER = 'latest'

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            return '<DB:PullRequest at %#x>' % id(self)

    reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan")
    statuses = relationship('ChangesetStatus', cascade="all, delete-orphan")
    comments = relationship('ChangesetComment', cascade="all, delete-orphan")
    versions = relationship('PullRequestVersion', cascade="all, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        # builds a read-only display wrapper around a PR (or a PR version),
        # pulling live data from pull_request_obj and version-independent
        # bits (shadow ref, reviewer data) from org_pull_request_obj

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                # NOTE(review): if 'internal' itself were ever missing this
                # would recurse; relies on __init__ always setting it first
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                # delegates to the wrapped PR, ordered oldest-first
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            def is_state_changing(self):
                return pull_request_obj.is_state_changing()

            @property
            def pull_request_version_id(self):
                # None when wrapping a live PR rather than a version
                return getattr(pull_request_obj, 'pull_request_version_id', None)

            @property
            def pull_request_last_version(self):
                return pull_request_obj.pull_request_last_version

        # merge state skipped: too expensive for a display-only object
        attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions
        attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
        # these come from the *original* PR regardless of version shown
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def is_state_changing(self):
        # True while the PR is being created/updated/merged
        return self.pull_request_state != PullRequest.STATE_CREATED

    def __json__(self):
        return {
            'revisions': self.revisions,
            'versions': self.versions_count
        }

    def calculated_review_status(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    def get_pull_request_reviewers(self, role=None):
        """Return reviewer entries, optionally filtered by role."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
        if role:
            qry = qry.filter(PullRequestReviewers.role == role)

        return qry.all()

    @property
    def reviewers_count(self):
        # number of entries with the 'reviewer' role only
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
        return qry.count()

    @property
    def observers_count(self):
        # number of entries with the 'observer' role only
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
        return qry.count()

    def observers(self):
        """Yield (reviewer_entry, user) pairs for all observers."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
            .all()

        for entry in qry:
            yield entry, entry.user

    @property
    def workspace_id(self):
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        """Return the shadow repo VCS instance, or None if not materialized."""
        workspace_id = self.workspace_id
        shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
        if os.path.isdir(shadow_repository_path):
            vcs_obj = self.target_repo.scm_instance()
            return vcs_obj.get_shadow_instance(shadow_repository_path)

    @property
    def versions_count(self):
        """
        return number of versions this PR have, e.g a PR that once been
        updated will have 2 versions
        """
        return self.versions.count() + 1

    @property
    def pull_request_last_version(self):
        return self.versions_count
4523 4509
class PullRequestVersion(Base, _PullRequestBase):
    """
    Immutable snapshot of a pull request taken at each update; most
    accessors simply delegate to the live parent :class:`PullRequest`.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            return '<DB:PullRequestVersion at %#x>' % id(self)

    # FIX: the previous revision carried two identical back-to-back
    # `reviewers` property definitions (the second silently shadowed the
    # first) — deduplicated to a single definition.
    @property
    def reviewers(self):
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def is_state_changing(self):
        return self.pull_request.pull_request_state != PullRequest.STATE_CREATED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()

    def observers(self):
        return self.pull_request.observers()
4567 4556
class PullRequestReviewers(Base, BaseModel):
    """
    Association of a user to a pull request with a role (reviewer/observer),
    optional mandatory flag, reasons, and the review-rule data that created
    the entry.
    """
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )
    ROLE_REVIEWER = u'reviewer'
    ROLE_OBSERVER = u'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    @hybrid_property
    def reasons(self):
        # normalize NULL column value to an empty list
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        if any(not isinstance(x, compat.string_types) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # stored as JSON list in the legacy 'reason' column
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)

    user = relationship('User')
    pull_request = relationship('PullRequest')

    # raw data of the review rule that produced this entry
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer
        """
        # returns None implicitly when there is no vote_rule recorded

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    @classmethod
    def get_pull_request_reviewers(cls, pull_request_id, role=None):
        """Return all reviewer entries of a PR, optionally filtered by role."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == pull_request_id)
        if role:
            qry = qry.filter(PullRequestReviewers.role == role)

        return qry.all()

    def __unicode__(self):
        return u"<%s('id:%s')>" % (self.__class__.__name__,
                                   self.pull_requests_reviewers_id)
4639 4628
4640 4629
class Notification(Base, BaseModel):
    """
    A single notification, fanned out to recipients through the
    :class:`UserNotification` association table.
    """
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
    TYPE_PULL_REQUEST_UPDATE = u'pull_request_update'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete-orphan")

    @property
    def recipients(self):
        """Users this notification was delivered to, ordered by user id."""
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and link it to each recipient. The creator's
        own copy (if they are a recipient) is pre-marked as read. Objects are
        added to the session; caller is responsible for commit.
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for u in recipients:
            assoc = UserNotification()
            assoc.user_id = u.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)

        return notification
4700 4689
4701 4690
class UserNotification(Base, BaseModel):
    """
    Per-user delivery record of a :class:`Notification` (read flag,
    sent timestamp). Composite PK (user_id, notification_id).
    """
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        """Flag this delivery as read; added to session, caller commits."""
        self.read = True
        Session().add(self)
4721 4710
4722 4711
class UserNotice(Base, BaseModel):
    """
    A dismissible per-user notice (info/warning/error banner).
    """
    __tablename__ = 'user_notices'
    __table_args__ = (
        base_table_args
    )

    NOTIFICATION_TYPE_MESSAGE = 'message'
    NOTIFICATION_TYPE_NOTICE = 'notice'

    NOTIFICATION_LEVEL_INFO = 'info'
    NOTIFICATION_LEVEL_WARNING = 'warning'
    NOTIFICATION_LEVEL_ERROR = 'error'

    # NOTE(review): physical column name 'gist_id' looks like a copy-paste
    # from the Gist model; renaming needs a DB migration, so left as-is.
    user_notice_id = Column('gist_id', Integer(), primary_key=True)

    notice_subject = Column('notice_subject', Unicode(512), nullable=True)
    notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)

    notice_read = Column('notice_read', Boolean, default=False)

    notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
    notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)

    notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
    notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
    user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')

    @classmethod
    def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
        """
        Create and commit a notice for *user*. Silently returns (no notice)
        on an unknown level or, unless allow_duplicate, when an unread notice
        with the same body already exists.
        """

        if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
                                cls.NOTIFICATION_LEVEL_WARNING,
                                cls.NOTIFICATION_LEVEL_INFO]:
            return

        from rhodecode.model.user import UserModel
        user = UserModel().get_user(user)

        new_notice = UserNotice()
        if not allow_duplicate:
            existing_msg = UserNotice().query() \
                .filter(UserNotice.user == user) \
                .filter(UserNotice.notice_body == body) \
                .filter(UserNotice.notice_read == false()) \
                .scalar()
            if existing_msg:
                log.warning('Ignoring duplicate notice for user %s', user)
                return

        new_notice.user = user
        new_notice.notice_subject = subject
        new_notice.notice_body = body
        new_notice.notification_level = notice_level
        Session().add(new_notice)
        Session().commit()
4780 4769
4781 4770
4782 4771 class Gist(Base, BaseModel):
4783 4772 __tablename__ = 'gists'
4784 4773 __table_args__ = (
4785 4774 Index('g_gist_access_id_idx', 'gist_access_id'),
4786 4775 Index('g_created_on_idx', 'created_on'),
4787 4776 base_table_args
4788 4777 )
4789 4778
4790 4779 GIST_PUBLIC = u'public'
4791 4780 GIST_PRIVATE = u'private'
4792 4781 DEFAULT_FILENAME = u'gistfile1.txt'
4793 4782
4794 4783 ACL_LEVEL_PUBLIC = u'acl_public'
4795 4784 ACL_LEVEL_PRIVATE = u'acl_private'
4796 4785
4797 4786 gist_id = Column('gist_id', Integer(), primary_key=True)
4798 4787 gist_access_id = Column('gist_access_id', Unicode(250))
4799 4788 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4800 4789 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4801 4790 gist_expires = Column('gist_expires', Float(53), nullable=False)
4802 4791 gist_type = Column('gist_type', Unicode(128), nullable=False)
4803 4792 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4804 4793 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4805 4794 acl_level = Column('acl_level', Unicode(128), nullable=True)
4806 4795
4807 4796 owner = relationship('User')
4808 4797
4809 4798 def __repr__(self):
4810 4799 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
4811 4800
4812 4801 @hybrid_property
4813 4802 def description_safe(self):
4814 4803 from rhodecode.lib import helpers as h
4815 4804 return h.escape(self.gist_description)
4816 4805
4817 4806 @classmethod
4818 4807 def get_or_404(cls, id_):
4819 4808 from pyramid.httpexceptions import HTTPNotFound
4820 4809
4821 4810 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4822 4811 if not res:
4823 4812 raise HTTPNotFound()
4824 4813 return res
4825 4814
4826 4815 @classmethod
4827 4816 def get_by_access_id(cls, gist_access_id):
4828 4817 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4829 4818
4830 4819 def gist_url(self):
4831 4820 from rhodecode.model.gist import GistModel
4832 4821 return GistModel().get_url(self)
4833 4822
4834 4823 @classmethod
4835 4824 def base_path(cls):
4836 4825 """
4837 4826 Returns base path when all gists are stored
4838 4827
4839 4828 :param cls:
4840 4829 """
4841 4830 from rhodecode.model.gist import GIST_STORE_LOC
4842 4831 q = Session().query(RhodeCodeUi)\
4843 4832 .filter(RhodeCodeUi.ui_key == URL_SEP)
4844 4833 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4845 4834 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4846 4835
4847 4836 def get_api_data(self):
4848 4837 """
4849 4838 Common function for generating gist related data for API
4850 4839 """
4851 4840 gist = self
4852 4841 data = {
4853 4842 'gist_id': gist.gist_id,
4854 4843 'type': gist.gist_type,
4855 4844 'access_id': gist.gist_access_id,
4856 4845 'description': gist.gist_description,
4857 4846 'url': gist.gist_url(),
4858 4847 'expires': gist.gist_expires,
4859 4848 'created_on': gist.created_on,
4860 4849 'modified_at': gist.modified_at,
4861 4850 'content': None,
4862 4851 'acl_level': gist.acl_level,
4863 4852 }
4864 4853 return data
4865 4854
4866 4855 def __json__(self):
4867 4856 data = dict(
4868 4857 )
4869 4858 data.update(self.get_api_data())
4870 4859 return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """
        Return a VCS repository instance for this gist's backing repository.

        :param kwargs: accepted for interface compatibility; not used here.
        """
        from rhodecode.model.gist import GistModel
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False,
            _vcs_alias=GistModel.vcs_backend)
4884 4873
4885 4874
class ExternalIdentity(Base, BaseModel):
    """
    Link between a local user and an identity at an external auth provider,
    together with the provider-issued tokens.
    """
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
        """
        Return the ``ExternalIdentity`` matching external id and provider,
        optionally narrowed to one local user.

        :param external_id: identity id assigned by the external provider
        :param provider_name: name of the auth provider
        :param local_user_id: optional local ``User`` id to filter by
        :return: ``ExternalIdentity`` or ``None``
        """
        query = cls.query() \
            .filter(cls.external_id == external_id) \
            .filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Return the local ``User`` linked to the given external id/provider.

        :param external_id: identity id assigned by the external provider
        :param provider_name: name of the auth provider
        :return: ``User`` or ``None``
        """
        query = User.query() \
            .filter(cls.external_id == external_id) \
            .filter(cls.provider_name == provider_name) \
            .filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Return a query yielding all external identities of the given user.

        :param local_user_id: local ``User`` id
        :return: query of ``ExternalIdentity`` rows
        """
        return cls.query().filter(cls.local_user_id == local_user_id)

    @classmethod
    def load_provider_plugin(cls, plugin_id):
        """Load and return the EE authentication plugin for ``plugin_id``."""
        from rhodecode.authentication.base import loadplugin
        return loadplugin('egg:rhodecode-enterprise-ee#{}'.format(plugin_id))
4951 4940
4952 4941
class Integration(Base, BaseModel):
    """
    An integration instance, scoped either to a single repository, to a
    repository group, or globally (neither repo nor group set).
    """
    __tablename__ = 'integrations'
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    # when scoped to a repo group: apply to direct child repos only,
    # not recursively; when neither repo nor group is set it restricts
    # a global integration to top-level repos
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
                              default=False)

    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @property
    def scope(self):
        """Human-readable description of what this integration applies to."""
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4994 4983
4995 4984
class RepoReviewRuleUser(Base, BaseModel):
    """A single user entry attached to a repository review rule."""
    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )
    # roles a matched user can take on a pull request
    ROLE_REVIEWER = u'reviewer'
    ROLE_OBSERVER = u'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
    user = relationship('User')

    def rule_data(self):
        """Return this entry's rule attributes as a plain dict."""
        return {
            'mandatory': self.mandatory,
            'role': self.role,
        }
5017 5006
5018 5007
class RepoReviewRuleUserGroup(Base, BaseModel):
    """A user-group entry attached to a repository review rule."""
    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    # sentinel meaning "every member of the group must vote"
    VOTE_RULE_ALL = -1
    ROLE_REVIEWER = u'reviewer'
    ROLE_OBSERVER = u'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        """Return this entry's rule attributes as a plain dict."""
        return {
            'mandatory': self.mandatory,
            'role': self.role,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        """Human-readable label for the voting rule of this group entry."""
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)
5051 5040
5052 5041
class RepoReviewRule(Base, BaseModel):
    """
    Review rule for a repository: selects reviewers/observers for pull
    requests whose source branch, target branch and changed files match the
    configured patterns. Patterns are globs by default; a ``re:`` prefix
    switches a pattern to a raw regular expression.
    """
    __tablename__ = 'repo_review_rules'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_id = Column(
        'repo_review_rule_id', Integer(), primary_key=True)
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
    repo = relationship('Repository', backref='review_rules')

    review_rule_name = Column('review_rule_name', String(255))
    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob

    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
    forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
    forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)

    rule_users = relationship('RepoReviewRuleUser')
    rule_user_groups = relationship('RepoReviewRuleUserGroup')

    def _validate_pattern(self, value):
        # compile the translated glob to fail early (re.error) on bad input
        re.compile('^' + glob2re(value) + '$')

    @hybrid_property
    def source_branch_pattern(self):
        """Glob/``re:`` pattern matched against the source branch; default '*'."""
        return self._branch_pattern or '*'

    @source_branch_pattern.setter
    def source_branch_pattern(self, value):
        self._validate_pattern(value)
        self._branch_pattern = value or '*'

    @hybrid_property
    def target_branch_pattern(self):
        """Glob/``re:`` pattern matched against the target branch; default '*'."""
        return self._target_branch_pattern or '*'

    @target_branch_pattern.setter
    def target_branch_pattern(self, value):
        self._validate_pattern(value)
        self._target_branch_pattern = value or '*'

    @hybrid_property
    def file_pattern(self):
        """Glob/``re:`` pattern matched against changed file names; default '*'."""
        return self._file_pattern or '*'

    @file_pattern.setter
    def file_pattern(self, value):
        self._validate_pattern(value)
        self._file_pattern = value or '*'

    def matches(self, source_branch, target_branch, files_changed):
        """
        Check if this review rule matches a branch/files in a pull request.

        Branch matching is only performed when at least one branch name is
        given; a ``'*'`` pattern always matches. A ``re:`` prefix on any
        pattern makes it a raw regex instead of a glob.

        :param source_branch: source branch name for the commit
        :param target_branch: target branch name for the commit
        :param files_changed: list of file paths changed in the pull request
        """

        source_branch = source_branch or ''
        target_branch = target_branch or ''
        files_changed = files_changed or []

        branch_matches = True
        if source_branch or target_branch:
            if self.source_branch_pattern == '*':
                source_branch_match = True
            else:
                if self.source_branch_pattern.startswith('re:'):
                    source_pattern = self.source_branch_pattern[3:]
                else:
                    source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
                source_branch_regex = re.compile(source_pattern)
                source_branch_match = bool(source_branch_regex.search(source_branch))
            if self.target_branch_pattern == '*':
                target_branch_match = True
            else:
                if self.target_branch_pattern.startswith('re:'):
                    target_pattern = self.target_branch_pattern[3:]
                else:
                    target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
                target_branch_regex = re.compile(target_pattern)
                target_branch_match = bool(target_branch_regex.search(target_branch))

            branch_matches = source_branch_match and target_branch_match

        files_matches = True
        if self.file_pattern != '*':
            files_matches = False
            if self.file_pattern.startswith('re:'):
                file_pattern = self.file_pattern[3:]
            else:
                file_pattern = glob2re(self.file_pattern)
            file_regex = re.compile(file_pattern)
            for file_data in files_changed:
                filename = file_data.get('filename')

                # a single matching changed file is sufficient
                if file_regex.search(filename):
                    files_matches = True
                    break

        return branch_matches and files_matches

    @property
    def review_users(self):
        """ Returns the users which this rule applies to """

        users = collections.OrderedDict()

        for rule_user in self.rule_users:
            if rule_user.user.active:
                if rule_user.user not in users:
                    users[rule_user.user.username] = {
                        'user': rule_user.user,
                        'source': 'user',
                        'source_data': {},
                        'data': rule_user.rule_data()
                    }

        for rule_user_group in self.rule_user_groups:
            source_data = {
                'user_group_id': rule_user_group.users_group.users_group_id,
                'name': rule_user_group.users_group.users_group_name,
                'members': len(rule_user_group.users_group.members)
            }
            for member in rule_user_group.users_group.members:
                if member.user.active:
                    key = member.user.username
                    if key in users:
                        # skip this member as we have him already
                        # this prevents from override the "first" matched
                        # users with duplicates in multiple groups
                        continue

                    users[key] = {
                        'user': member.user,
                        'source': 'user_group',
                        'source_data': source_data,
                        'data': rule_user_group.rule_data()
                    }

        return users

    def user_group_vote_rule(self, user_id):
        """Return the user-group rule entries whose group contains ``user_id``."""

        rules = []
        if not self.rule_user_groups:
            return rules

        for user_group in self.rule_user_groups:
            user_group_members = [x.user_id for x in user_group.users_group.members]
            if user_id in user_group_members:
                rules.append(user_group)
        return rules

    def __repr__(self):
        return '<RepoReviewerRule(id=%r, repo=%r)>' % (
            self.repo_review_rule_id, self.repo)
5216 5205
5217 5206
class ScheduleEntry(Base, BaseModel):
    """
    A single scheduled-task entry: which task to run (dot notation plus
    args/kwargs) and on what schedule (crontab / timedelta / integer).
    The concrete schedule object is built via ``rhodecode.lib.celerylib``.
    """
    __tablename__ = 'schedule_entries'
    __table_args__ = (
        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
        UniqueConstraint('task_uid', name='s_task_uid_idx'),
        base_table_args,
    )

    # allowed values for the `schedule_type` hybrid property
    schedule_types = ['crontab', 'timedelta', 'integer']
    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)

    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)

    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))

    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)

    # task
    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)

    @hybrid_property
    def schedule_type(self):
        """Schedule type, one of :attr:`schedule_types`."""
        return self._schedule_type

    @schedule_type.setter
    def schedule_type(self, val):
        """Set the schedule type; raise ``ValueError`` for unknown values."""
        if val not in self.schedule_types:
            # BUGFIX: the format arguments were swapped before — the message
            # showed the rejected value as the allowed set and the *old*
            # stored type as what was "got". Report allowed types + new value.
            raise ValueError('Value must be one of `{}` and got `{}`'.format(
                self.schedule_types, val))

        self._schedule_type = val

    @classmethod
    def get_uid(cls, obj):
        """
        Compute a stable sha1 uid for a schedule entry, derived from the
        task dot notation and its (JSON-decoded) args/kwargs.
        """
        args = obj.task_args
        kwargs = obj.task_kwargs
        if isinstance(args, JsonRaw):
            try:
                args = json.loads(args)
            except ValueError:
                args = tuple()

        if isinstance(kwargs, JsonRaw):
            try:
                kwargs = json.loads(kwargs)
            except ValueError:
                kwargs = dict()

        dot_notation = obj.task_dot_notation
        # NOTE(review): sorted() over the dot-notation string sorts its
        # characters; kept as-is because stored task_uid values depend on it.
        val = '.'.join(map(safe_str, [
            sorted(dot_notation), args, sorted(kwargs.items())]))
        return hashlib.sha1(val).hexdigest()

    @classmethod
    def get_by_schedule_name(cls, schedule_name):
        """Return the entry with the given unique name, or ``None``."""
        return cls.query().filter(cls.schedule_name == schedule_name).scalar()

    @classmethod
    def get_by_schedule_id(cls, schedule_id):
        """Return the entry with the given primary key, or ``None``."""
        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()

    @property
    def task(self):
        """Dot-notation path of the task to execute."""
        return self.task_dot_notation

    @property
    def schedule(self):
        """Schedule object built from the stored raw definition and type."""
        from rhodecode.lib.celerylib.utils import raw_2_schedule
        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
        return schedule

    @property
    def args(self):
        """Task positional args as a list; empty list on decode problems."""
        try:
            return list(self.task_args or [])
        except ValueError:
            return list()

    @property
    def kwargs(self):
        """Task keyword args as a dict; empty dict on decode problems."""
        try:
            return dict(self.task_kwargs or {})
        except ValueError:
            return dict()

    def _as_raw(self, val):
        # de-coerce mutation-tracking wrappers back to plain values, then
        # serialize truthy values to a JSON string
        if hasattr(val, 'de_coerce'):
            val = val.de_coerce()
        if val:
            val = json.dumps(val)

        return val

    @property
    def schedule_definition_raw(self):
        return self._as_raw(self.schedule_definition)

    @property
    def args_raw(self):
        return self._as_raw(self.task_args)

    @property
    def kwargs_raw(self):
        return self._as_raw(self.task_kwargs)

    def __repr__(self):
        return '<DB:ScheduleEntry({}:{})>'.format(
            self.schedule_entry_id, self.schedule_name)
5336 5325
5337 5326
@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    """Recompute ``task_uid`` before an existing ScheduleEntry is updated."""
    target.task_uid = ScheduleEntry.get_uid(target)
5341 5330
5342 5331
@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    """Compute ``task_uid`` before a new ScheduleEntry is inserted."""
    target.task_uid = ScheduleEntry.get_uid(target)
5346 5335
5347 5336
class _BaseBranchPerms(BaseModel):
    """Shared helpers for branch-permission rules: a glob pattern plus the
    hash kept in sync with it."""

    @classmethod
    def compute_hash(cls, value):
        """Return the sha1 hash of the given pattern value."""
        return sha1_safe(value)

    @hybrid_property
    def branch_pattern(self):
        """Glob pattern matched against branch names; defaults to ``'*'``."""
        return self._branch_pattern or '*'

    @hybrid_property
    def branch_hash(self):
        """Hash of the stored pattern, maintained by the pattern setter."""
        return self._branch_hash

    def _validate_glob(self, value):
        # fail early (re.error) if the glob translates to an invalid regex
        re.compile('^' + glob2re(value) + '$')

    @branch_pattern.setter
    def branch_pattern(self, value):
        self._validate_glob(value)
        self._branch_pattern = value or '*'
        # keep the stored hash in sync with the pattern
        self._branch_hash = self.compute_hash(self._branch_pattern)

    def matches(self, branch):
        """
        Check whether the given branch name matches this entry's pattern.

        An empty or ``None`` branch always matches.

        :param branch: branch name for the commit
        """
        if not branch:
            return True
        pattern = re.compile('^' + glob2re(self.branch_pattern) + '$')
        return bool(pattern.search(branch))
5386 5375
5387 5376
class UserToRepoBranchPermission(Base, _BaseBranchPerms):
    """Per-user branch permission rule attached to a repository permission."""
    __tablename__ = 'user_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
    user_repo_to_perm = relationship('UserRepoToPerm')

    # rules are evaluated in this order
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        return u'<UserBranchPermission(%s => %r)>' % (
            self.user_repo_to_perm, self.branch_pattern)
5412 5401
5413 5402
class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
    """Per-user-group branch permission rule attached to a repository
    permission."""
    __tablename__ = 'user_group_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_group_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
    user_group_repo_to_perm = relationship('UserGroupRepoToPerm')

    # rules are evaluated in this order
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        # BUGFIX: this debug label was copy-pasted from the user-level class
        # and read `UserBranchPermission`, making the two indistinguishable
        return u'<UserGroupBranchPermission(%s => %r)>' % (
            self.user_group_repo_to_perm, self.branch_pattern)
5438 5427
5439 5428
class UserBookmark(Base, BaseModel):
    """
    A per-user bookmark shown in the UI, pointing at a repository, a
    repository group, or an arbitrary redirect URL. Positions are unique
    per user.
    """
    __tablename__ = 'user_bookmarks'
    __table_args__ = (
        UniqueConstraint('user_id', 'bookmark_repo_id'),
        UniqueConstraint('user_id', 'bookmark_repo_group_id'),
        UniqueConstraint('user_id', 'bookmark_position'),
        base_table_args
    )

    user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    position = Column("bookmark_position", Integer(), nullable=False)
    title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
    redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
    created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
    bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)

    user = relationship("User")

    repository = relationship("Repository")
    repository_group = relationship("RepoGroup")

    @classmethod
    def get_by_position_for_user(cls, position, user_id):
        """Return the user's bookmark at the given position, or ``None``."""
        return cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .filter(UserBookmark.position == position).scalar()

    @classmethod
    def get_bookmarks_for_user(cls, user_id, cache=True):
        """
        Return all bookmarks of a user ordered by position, with the target
        repo/repo-group eagerly loaded; optionally served from the
        short-lived SQL cache region.
        """
        bookmarks = cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .options(joinedload(UserBookmark.repository)) \
            .options(joinedload(UserBookmark.repository_group)) \
            .order_by(UserBookmark.position.asc())

        if cache:
            bookmarks = bookmarks.options(
                FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
            )

        return bookmarks.all()

    def __unicode__(self):
        return u'<UserBookmark(%s @ %r)>' % (self.position, self.redirect_url)
5487 5476
5488 5477
class FileStore(Base, BaseModel):
    """
    An uploaded artifact: file metadata, access counters, and optional
    scope references (user / user group / repo / repo group) used for
    permission checks.
    """
    __tablename__ = 'file_store'
    __table_args__ = (
        base_table_args
    )

    file_store_id = Column('file_store_id', Integer(), primary_key=True)
    file_uid = Column('file_uid', String(1024), nullable=False)
    file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
    file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
    file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)

    # sha256 hash
    file_hash = Column('file_hash', String(512), nullable=False)
    file_size = Column('file_size', BigInteger(), nullable=False)

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
    accessed_count = Column('accessed_count', Integer(), default=0)

    enabled = Column('enabled', Boolean(), nullable=False, default=True)

    # if repo/repo_group reference is set, check for permissions
    check_acl = Column('check_acl', Boolean(), nullable=False, default=True)

    # hidden defines an attachment that should be hidden from showing in artifact listing
    hidden = Column('hidden', Boolean(), nullable=False, default=False)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')

    file_metadata = relationship('FileStoreMetadata', lazy='joined')

    # scope limited to user, which requester have access to
    scope_user_id = Column(
        'scope_user_id', Integer(), ForeignKey('users.user_id'),
        nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')

    # scope limited to user group, which requester have access to
    scope_user_group_id = Column(
        'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
        nullable=True, unique=None, default=None)
    user_group = relationship('UserGroup', lazy='joined')

    # scope limited to repo, which requester have access to
    scope_repo_id = Column(
        'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    # scope limited to repo group, which requester have access to
    scope_repo_group_id = Column(
        'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @classmethod
    def get_by_store_uid(cls, file_store_uid, safe=False):
        """
        Return the entry with the given file uid.

        With ``safe=True`` the first match is returned even if several rows
        share the uid; otherwise ``.scalar()`` is used, which raises when
        the uid is not unique.
        """
        if safe:
            return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
        else:
            return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()

    @classmethod
    def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
               file_description='', enabled=True, hidden=False, check_acl=True,
               user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
        """
        Build (but do not persist) a new FileStore entry from the given
        attributes; the caller is responsible for adding it to the session.
        """
        store_entry = FileStore()
        store_entry.file_uid = file_uid
        store_entry.file_display_name = file_display_name
        store_entry.file_org_name = filename
        store_entry.file_size = file_size
        store_entry.file_hash = file_hash
        store_entry.file_description = file_description

        store_entry.check_acl = check_acl
        store_entry.enabled = enabled
        store_entry.hidden = hidden

        store_entry.user_id = user_id
        store_entry.scope_user_id = scope_user_id
        store_entry.scope_repo_id = scope_repo_id
        store_entry.scope_repo_group_id = scope_repo_group_id

        return store_entry

    @classmethod
    def store_metadata(cls, file_store_id, args, commit=True):
        """
        Attach metadata entries to a stored file.

        :param file_store_id: id of the FileStore row; silently returns when
            the row does not exist
        :param args: iterable of ``(section, key, value, value_type)`` tuples
        :param commit: commit the session after adding all entries
        :raises ArtifactMetadataDuplicate: when a section/key pair already
            exists for this file (checked up-front and again via the DB
            unique constraint on commit)
        """
        file_store = FileStore.get(file_store_id)
        if file_store is None:
            return

        for section, key, value, value_type in args:
            has_key = FileStoreMetadata().query() \
                .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
                .filter(FileStoreMetadata.file_store_meta_section == section) \
                .filter(FileStoreMetadata.file_store_meta_key == key) \
                .scalar()
            if has_key:
                msg = 'key `{}` already defined under section `{}` for this file.'\
                    .format(key, section)
                raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)

            # NOTE(marcink): raises ArtifactMetadataBadValueType
            FileStoreMetadata.valid_value_type(value_type)

            meta_entry = FileStoreMetadata()
            meta_entry.file_store = file_store
            meta_entry.file_store_meta_section = section
            meta_entry.file_store_meta_key = key
            meta_entry.file_store_meta_value_type = value_type
            meta_entry.file_store_meta_value = value

            Session().add(meta_entry)

        try:
            if commit:
                Session().commit()
        except IntegrityError:
            Session().rollback()
            raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')

    @classmethod
    def bump_access_counter(cls, file_uid, commit=True):
        """Increment the access counter and stamp the access time for a file."""
        FileStore().query()\
            .filter(FileStore.file_uid == file_uid)\
            .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
                     FileStore.accessed_on: datetime.datetime.now()})
        if commit:
            Session().commit()

    def __json__(self):
        """JSON serialization hook describing the artifact."""
        data = {
            'filename': self.file_display_name,
            'filename_org': self.file_org_name,
            'file_uid': self.file_uid,
            'description': self.file_description,
            'hidden': self.hidden,
            'size': self.file_size,
            'created_on': self.created_on,
            'uploaded_by': self.upload_user.get_api_data(details='basic'),
            'downloaded_times': self.accessed_count,
            'sha256': self.file_hash,
            'metadata': self.file_metadata,
        }

        return data

    def __repr__(self):
        return '<FileStore({})>'.format(self.file_store_id)
5641 5630
5642 5631
class FileStoreMetadata(Base, BaseModel):
    """
    Key/value metadata attached to a ``FileStore`` (artifact) entry.

    Entries are grouped by ``section`` and addressed by ``key``; the raw
    value is stored as text together with a ``value_type`` (see
    SETTINGS_TYPES) that drives conversion back to a python value on read.
    Uniqueness of (file, section, key) is enforced through the hash columns,
    since the full-text section/key columns may exceed index size limits.
    """
    __tablename__ = 'file_store_metadata'
    __table_args__ = (
        UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
        Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
        Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
        base_table_args
    )
    # maps a base value_type name to the converter applied when reading values
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }

    file_store_meta_id = Column(
        "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    # section/key are stored twice: full text, plus a hash used by the
    # unique constraint above (kept in sync by the hybrid property setters)
    _file_store_meta_section = Column(
        "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_section_hash = Column(
        "file_store_meta_section_hash", String(255),
        nullable=True, unique=None, default=None)
    _file_store_meta_key = Column(
        "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_key_hash = Column(
        "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
    # value stored as text; interpretation controlled by the value_type column
    _file_store_meta_value = Column(
        "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_value_type = Column(
        "file_store_meta_value_type", String(255), nullable=True, unique=None,
        default='unicode')

    file_store_id = Column(
        'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
        nullable=True, unique=None, default=None)

    file_store = relationship('FileStore', lazy='joined')

    @classmethod
    def valid_value_type(cls, value):
        """Raise ArtifactMetadataBadValueType unless the base type of *value* is known."""
        # value may carry a suffix, e.g. `unicode.encrypted`; only the base is checked
        if value.split('.')[0] not in cls.SETTINGS_TYPES:
            raise ArtifactMetadataBadValueType(
                'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))

    @hybrid_property
    def file_store_meta_section(self):
        return self._file_store_meta_section

    @file_store_meta_section.setter
    def file_store_meta_section(self, value):
        # keep the hash column in sync for the uniqueness constraint
        self._file_store_meta_section = value
        self._file_store_meta_section_hash = _hash_key(value)

    @hybrid_property
    def file_store_meta_key(self):
        return self._file_store_meta_key

    @file_store_meta_key.setter
    def file_store_meta_key(self, value):
        # keep the hash column in sync for the uniqueness constraint
        self._file_store_meta_key = value
        self._file_store_meta_key_hash = _hash_key(value)

    @hybrid_property
    def file_store_meta_value(self):
        # returns the stored text value converted according to value_type,
        # decrypting first when the type carries the `.encrypted` suffix
        val = self._file_store_meta_value

        if self._file_store_meta_value_type:
            # e.g unicode.encrypted == unicode
            _type = self._file_store_meta_value_type.split('.')[0]
            # decode the encrypted value if it's encrypted field type
            if '.encrypted' in self._file_store_meta_value_type:
                cipher = EncryptedTextValue()
                val = safe_unicode(cipher.process_result_value(val, None))
            # do final type conversion
            converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
            val = converter(val)

        return val

    @file_store_meta_value.setter
    def file_store_meta_value(self, val):
        val = safe_unicode(val)
        # encode the encrypted value
        if '.encrypted' in self.file_store_meta_value_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._file_store_meta_value = val

    @hybrid_property
    def file_store_meta_value_type(self):
        return self._file_store_meta_value_type

    @file_store_meta_value_type.setter
    def file_store_meta_value_type(self, val):
        # e.g unicode.encrypted; validated before assignment
        self.valid_value_type(val)
        self._file_store_meta_value_type = val

    def __json__(self):
        data = {
            'artifact': self.file_store.file_uid,
            'section': self.file_store_meta_section,
            'key': self.file_store_meta_key,
            'value': self.file_store_meta_value,
        }

        return data

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.file_store_meta_section,
                                    self.file_store_meta_key, self.file_store_meta_value)
5759 5748
5760 5749
class DbMigrateVersion(Base, BaseModel):
    """
    Bookkeeping table recording the currently applied database schema
    migration version for a migration repository.
    """
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        base_table_args,
    )

    # migration repository identifier, its on-disk path, and current version
    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)

    @classmethod
    def set_version(cls, version):
        """
        Helper for forcing a different version, usually for debugging purposes via ishell.
        """
        # NOTE(review): assumes a version row already exists — first() would
        # be None on a fresh/empty table; acceptable for a debug helper
        ver = DbMigrateVersion.query().first()
        ver.version = version
        Session().commit()
5779 5768
5780 5769
class DbSession(Base, BaseModel):
    """
    Database-backed HTTP session row.

    NOTE(review): presumably backing a database session storage backend
    (pickled payload in ``data``) — confirm against the session config.
    """
    __tablename__ = 'db_session'
    __table_args__ = (
        base_table_args,
    )

    def __repr__(self):
        return '<DB:DbSession({})>'.format(self.id)

    id = Column('id', Integer())
    namespace = Column('namespace', String(255), primary_key=True)
    # access/creation timestamps and the serialized session payload
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    data = Column('data', PickleType, nullable=False)
@@ -1,2237 +1,2237 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.translation import lazy_ugettext
40 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 41 from rhodecode.lib import audit_logger
42 42 from rhodecode.lib.compat import OrderedDict
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.markup_renderer import (
45 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 46 from rhodecode.lib.utils2 import (
47 47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 48 get_current_rhodecode_user)
49 49 from rhodecode.lib.vcs.backends.base import (
50 50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 51 TargetRefMissing, SourceRefMissing)
52 52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 CommitDoesNotExistError, EmptyRepositoryError)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 57 from rhodecode.model.comment import CommentsModel
58 58 from rhodecode.model.db import (
59 59 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 61 from rhodecode.model.meta import Session
62 62 from rhodecode.model.notification import NotificationModel, \
63 63 EmailNotificationModel
64 64 from rhodecode.model.scm import ScmModel
65 65 from rhodecode.model.settings import VcsSettingsModel
66 66
67 67
68 68 log = logging.getLogger(__name__)
69 69
70 70
# Data structure to hold the response data when updating commits during a pull
# request update.
class UpdateResponse(object):
    """
    Value object describing the outcome of updating the commits of a
    pull request.
    """

    def __init__(self, executed, reason, new, old, common_ancestor_id,
                 commit_changes, source_changed, target_changed):
        # whether the update was actually carried out, and why/why not
        self.executed = executed
        self.reason = reason
        # refs before and after the update
        self.new, self.old = new, old
        self.common_ancestor_id = common_ancestor_id
        # per-commit change summary; note the attribute is named `changes`
        self.changes = commit_changes
        # flags telling which side of the pull request moved
        self.source_changed = source_changed
        self.target_changed = target_changed
86 86
87 87
def get_diff_info(
        source_repo, source_ref, target_repo, target_ref, get_authors=False,
        get_commit_authors=True):
    """
    Calculates detailed diff information for usage in preview of creation of a pull-request.
    This is also used for default reviewers logic

    :param source_repo: repo object the changes come from
    :param source_ref: head ref (commit id) on the source side
    :param target_repo: repo object the changes would be merged into
    :param target_ref: ref on the target side
    :param get_authors: also annotate changed lines to count original authors
        (expensive — runs per-file annotation)
    :param get_commit_authors: collect the authors of the incoming commits
    :returns: dict with keys `commits`, `files`, `stats`, `ancestor`,
        `original_authors`, `commit_authors`
    :raises ValueError: when the two refs share no common ancestor
    """

    source_scm = source_repo.scm_instance()
    target_scm = target_repo.scm_instance()

    ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
    if not ancestor_id:
        raise ValueError(
            'cannot calculate diff info without a common ancestor. '
            'Make sure both repositories are related, and have a common forking commit.')

    # case here is that want a simple diff without incoming commits,
    # previewing what will be merged based only on commits in the source.
    log.debug('Using ancestor %s as source_ref instead of %s',
              ancestor_id, source_ref)

    # source of changes now is the common ancestor
    source_commit = source_scm.get_commit(commit_id=ancestor_id)
    # target commit becomes the source ref as it is the last commit
    # for diff generation this logic gives proper diff
    target_commit = source_scm.get_commit(commit_id=source_ref)

    vcs_diff = \
        source_scm.get_diff(commit1=source_commit, commit2=target_commit,
                            ignore_whitespace=False, context=3)

    diff_processor = diffs.DiffProcessor(
        vcs_diff, format='newdiff', diff_limit=None,
        file_limit=None, show_full_diff=True)

    _parsed = diff_processor.prepare()

    # collect per-file stats and the old line numbers touched in each file;
    # stats is a running [added, deleted] total over all files
    all_files = []
    all_files_changes = []
    changed_lines = {}
    stats = [0, 0]
    for f in _parsed:
        all_files.append(f['filename'])
        all_files_changes.append({
            'filename': f['filename'],
            'stats': f['stats']
        })
        stats[0] += f['stats']['added']
        stats[1] += f['stats']['deleted']

        changed_lines[f['filename']] = []
        if len(f['chunks']) < 2:
            continue
        # first line is "context" information
        for chunks in f['chunks'][1:]:
            for chunk in chunks['lines']:
                # only deleted/modified lines have a meaningful old lineno
                if chunk['action'] not in ('del', 'mod'):
                    continue
                changed_lines[f['filename']].append(chunk['old_lineno'])

    commit_authors = []
    user_counts = {}
    email_counts = {}
    author_counts = {}
    # cache of commit_id -> commit to avoid re-fetching during annotation
    _commit_cache = {}

    commits = []
    if get_commit_authors:
        log.debug('Obtaining commit authors from set of commits')
        _compare_data = target_scm.compare(
            target_ref, source_ref, source_scm, merge=True,
            pre_load=["author", "date", "message"]
        )

        for commit in _compare_data:
            # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
            # at this function which is later called via JSON serialization
            serialized_commit = dict(
                author=commit.author,
                date=commit.date,
                message=commit.message,
                commit_id=commit.raw_id,
                raw_id=commit.raw_id
            )
            commits.append(serialized_commit)
            user = User.get_from_cs_author(serialized_commit['author'])
            if user and user not in commit_authors:
                commit_authors.append(user)

    # lines
    if get_authors:
        log.debug('Calculating authors of changed files')
        target_commit = source_repo.get_commit(ancestor_id)

        for fname, lines in changed_lines.items():

            try:
                node = target_commit.get_node(fname, pre_load=["is_binary"])
            except Exception:
                # file may not exist at the ancestor (e.g. newly added) —
                # best-effort: log and skip
                log.exception("Failed to load node with path %s", fname)
                continue

            if not isinstance(node, FileNode):
                continue

            # NOTE(marcink): for binary node we don't do annotation, just use last author
            if node.is_binary:
                author = node.last_commit.author
                email = node.last_commit.author_email

                user = User.get_from_cs_author(author)
                if user:
                    user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                    author_counts[author] = author_counts.get(author, 0) + 1
                    email_counts[email] = email_counts.get(email, 0) + 1

                continue

            # count the authors of every changed line, via blame annotation
            for annotation in node.annotate:
                line_no, commit_id, get_commit_func, line_text = annotation
                if line_no in lines:
                    if commit_id not in _commit_cache:
                        _commit_cache[commit_id] = get_commit_func()
                    commit = _commit_cache[commit_id]
                    author = commit.author
                    email = commit.author_email
                    user = User.get_from_cs_author(author)
                    if user:
                        user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                        author_counts[author] = author_counts.get(author, 0) + 1
                        email_counts[email] = email_counts.get(email, 0) + 1

    log.debug('Default reviewers processing finished')

    return {
        'commits': commits,
        'files': all_files_changes,
        'stats': stats,
        'ancestor': ancestor_id,
        # original authors of modified files
        'original_authors': {
            'users': user_counts,
            'authors': author_counts,
            'emails': email_counts,
        },
        'commit_authors': commit_authors
    }
236 236
237 237
class PullRequestModel(BaseModel):
    """
    Business logic for creating, querying, updating and merging pull requests.
    """

    cls = PullRequest

    # default number of context lines used when rendering diffs
    DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

    # translated, user-facing messages for each possible update outcome
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
    # ref types a PR may point at, and the subset that can be updated
    # (tags are immutable, hence excluded from UPDATABLE_REF_TYPES)
    REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
    UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
263 263
264 264 def __get_pull_request(self, pull_request):
265 265 return self._get_instance((
266 266 PullRequest, PullRequestVersion), pull_request)
267 267
268 268 def _check_perms(self, perms, pull_request, user, api=False):
269 269 if not api:
270 270 return h.HasRepoPermissionAny(*perms)(
271 271 user=user, repo_name=pull_request.target_repo.repo_name)
272 272 else:
273 273 return h.HasRepoPermissionAnyApi(*perms)(
274 274 user=user, repo_name=pull_request.target_repo.repo_name)
275 275
276 276 def check_user_read(self, pull_request, user, api=False):
277 277 _perms = ('repository.admin', 'repository.write', 'repository.read',)
278 278 return self._check_perms(_perms, pull_request, user, api)
279 279
280 280 def check_user_merge(self, pull_request, user, api=False):
281 281 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
282 282 return self._check_perms(_perms, pull_request, user, api)
283 283
284 284 def check_user_update(self, pull_request, user, api=False):
285 285 owner = user.user_id == pull_request.user_id
286 286 return self.check_user_merge(pull_request, user, api) or owner
287 287
288 288 def check_user_delete(self, pull_request, user):
289 289 owner = user.user_id == pull_request.user_id
290 290 _perms = ('repository.admin',)
291 291 return self._check_perms(_perms, pull_request, user) or owner
292 292
293 293 def is_user_reviewer(self, pull_request, user):
294 294 return user.user_id in [
295 295 x.user_id for x in
296 296 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
297 297 if x.user
298 298 ]
299 299
300 300 def check_user_change_status(self, pull_request, user, api=False):
301 301 return self.check_user_update(pull_request, user, api) \
302 302 or self.is_user_reviewer(pull_request, user)
303 303
304 304 def check_user_comment(self, pull_request, user):
305 305 owner = user.user_id == pull_request.user_id
306 306 return self.check_user_read(pull_request, user) or owner
307 307
    def get(self, pull_request):
        """Fetch a PullRequest (or PullRequestVersion) by id or instance."""
        return self.__get_pull_request(pull_request)
310 310
311 311 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
312 312 statuses=None, opened_by=None, order_by=None,
313 313 order_dir='desc', only_created=False):
314 314 repo = None
315 315 if repo_name:
316 316 repo = self._get_repo(repo_name)
317 317
318 318 q = PullRequest.query()
319 319
320 320 if search_q:
321 321 like_expression = u'%{}%'.format(safe_unicode(search_q))
322 322 q = q.join(User)
323 323 q = q.filter(or_(
324 324 cast(PullRequest.pull_request_id, String).ilike(like_expression),
325 325 User.username.ilike(like_expression),
326 326 PullRequest.title.ilike(like_expression),
327 327 PullRequest.description.ilike(like_expression),
328 328 ))
329 329
330 330 # source or target
331 331 if repo and source:
332 332 q = q.filter(PullRequest.source_repo == repo)
333 333 elif repo:
334 334 q = q.filter(PullRequest.target_repo == repo)
335 335
336 336 # closed,opened
337 337 if statuses:
338 338 q = q.filter(PullRequest.status.in_(statuses))
339 339
340 340 # opened by filter
341 341 if opened_by:
342 342 q = q.filter(PullRequest.user_id.in_(opened_by))
343 343
344 344 # only get those that are in "created" state
345 345 if only_created:
346 346 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
347 347
348 348 if order_by:
349 349 order_map = {
350 350 'name_raw': PullRequest.pull_request_id,
351 351 'id': PullRequest.pull_request_id,
352 352 'title': PullRequest.title,
353 353 'updated_on_raw': PullRequest.updated_on,
354 354 'target_repo': PullRequest.target_repo_id
355 355 }
356 356 if order_dir == 'asc':
357 357 q = q.order_by(order_map[order_by].asc())
358 358 else:
359 359 q = q.order_by(order_map[order_by].desc())
360 360
361 361 return q
362 362
363 363 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
364 364 opened_by=None):
365 365 """
366 366 Count the number of pull requests for a specific repository.
367 367
368 368 :param repo_name: target or source repo
369 369 :param search_q: filter by text
370 370 :param source: boolean flag to specify if repo_name refers to source
371 371 :param statuses: list of pull request statuses
372 372 :param opened_by: author user of the pull request
373 373 :returns: int number of pull requests
374 374 """
375 375 q = self._prepare_get_all_query(
376 376 repo_name, search_q=search_q, source=source, statuses=statuses,
377 377 opened_by=opened_by)
378 378
379 379 return q.count()
380 380
381 381 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
382 382 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
383 383 """
384 384 Get all pull requests for a specific repository.
385 385
386 386 :param repo_name: target or source repo
387 387 :param search_q: filter by text
388 388 :param source: boolean flag to specify if repo_name refers to source
389 389 :param statuses: list of pull request statuses
390 390 :param opened_by: author user of the pull request
391 391 :param offset: pagination offset
392 392 :param length: length of returned list
393 393 :param order_by: order of the returned list
394 394 :param order_dir: 'asc' or 'desc' ordering direction
395 395 :returns: list of pull requests
396 396 """
397 397 q = self._prepare_get_all_query(
398 398 repo_name, search_q=search_q, source=source, statuses=statuses,
399 399 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
400 400
401 401 if length:
402 402 pull_requests = q.limit(length).offset(offset).all()
403 403 else:
404 404 pull_requests = q.all()
405 405
406 406 return pull_requests
407 407
408 408 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
409 409 opened_by=None):
410 410 """
411 411 Count the number of pull requests for a specific repository that are
412 412 awaiting review.
413 413
414 414 :param repo_name: target or source repo
415 415 :param search_q: filter by text
416 416 :param source: boolean flag to specify if repo_name refers to source
417 417 :param statuses: list of pull request statuses
418 418 :param opened_by: author user of the pull request
419 419 :returns: int number of pull requests
420 420 """
421 421 pull_requests = self.get_awaiting_review(
422 422 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
423 423
424 424 return len(pull_requests)
425 425
426 426 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
427 427 opened_by=None, offset=0, length=None,
428 428 order_by=None, order_dir='desc'):
429 429 """
430 430 Get all pull requests for a specific repository that are awaiting
431 431 review.
432 432
433 433 :param repo_name: target or source repo
434 434 :param search_q: filter by text
435 435 :param source: boolean flag to specify if repo_name refers to source
436 436 :param statuses: list of pull request statuses
437 437 :param opened_by: author user of the pull request
438 438 :param offset: pagination offset
439 439 :param length: length of returned list
440 440 :param order_by: order of the returned list
441 441 :param order_dir: 'asc' or 'desc' ordering direction
442 442 :returns: list of pull requests
443 443 """
444 444 pull_requests = self.get_all(
445 445 repo_name, search_q=search_q, source=source, statuses=statuses,
446 446 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
447 447
448 448 _filtered_pull_requests = []
449 449 for pr in pull_requests:
450 450 status = pr.calculated_review_status()
451 451 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
452 452 ChangesetStatus.STATUS_UNDER_REVIEW]:
453 453 _filtered_pull_requests.append(pr)
454 454 if length:
455 455 return _filtered_pull_requests[offset:offset+length]
456 456 else:
457 457 return _filtered_pull_requests
458 458
459 459 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
460 460 opened_by=None, user_id=None):
461 461 """
462 462 Count the number of pull requests for a specific repository that are
463 463 awaiting review from a specific user.
464 464
465 465 :param repo_name: target or source repo
466 466 :param search_q: filter by text
467 467 :param source: boolean flag to specify if repo_name refers to source
468 468 :param statuses: list of pull request statuses
469 469 :param opened_by: author user of the pull request
470 470 :param user_id: reviewer user of the pull request
471 471 :returns: int number of pull requests
472 472 """
473 473 pull_requests = self.get_awaiting_my_review(
474 474 repo_name, search_q=search_q, source=source, statuses=statuses,
475 475 opened_by=opened_by, user_id=user_id)
476 476
477 477 return len(pull_requests)
478 478
479 479 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
480 480 opened_by=None, user_id=None, offset=0,
481 481 length=None, order_by=None, order_dir='desc'):
482 482 """
483 483 Get all pull requests for a specific repository that are awaiting
484 484 review from a specific user.
485 485
486 486 :param repo_name: target or source repo
487 487 :param search_q: filter by text
488 488 :param source: boolean flag to specify if repo_name refers to source
489 489 :param statuses: list of pull request statuses
490 490 :param opened_by: author user of the pull request
491 491 :param user_id: reviewer user of the pull request
492 492 :param offset: pagination offset
493 493 :param length: length of returned list
494 494 :param order_by: order of the returned list
495 495 :param order_dir: 'asc' or 'desc' ordering direction
496 496 :returns: list of pull requests
497 497 """
498 498 pull_requests = self.get_all(
499 499 repo_name, search_q=search_q, source=source, statuses=statuses,
500 500 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
501 501
502 502 _my = PullRequestModel().get_not_reviewed(user_id)
503 503 my_participation = []
504 504 for pr in pull_requests:
505 505 if pr in _my:
506 506 my_participation.append(pr)
507 507 _filtered_pull_requests = my_participation
508 508 if length:
509 509 return _filtered_pull_requests[offset:offset+length]
510 510 else:
511 511 return _filtered_pull_requests
512 512
513 513 def get_not_reviewed(self, user_id):
514 514 return [
515 515 x.pull_request for x in PullRequestReviewers.query().filter(
516 516 PullRequestReviewers.user_id == user_id).all()
517 517 ]
518 518
519 519 def _prepare_participating_query(self, user_id=None, statuses=None, query='',
520 520 order_by=None, order_dir='desc'):
521 521 q = PullRequest.query()
522 522 if user_id:
523 523 reviewers_subquery = Session().query(
524 524 PullRequestReviewers.pull_request_id).filter(
525 525 PullRequestReviewers.user_id == user_id).subquery()
526 526 user_filter = or_(
527 527 PullRequest.user_id == user_id,
528 528 PullRequest.pull_request_id.in_(reviewers_subquery)
529 529 )
530 530 q = PullRequest.query().filter(user_filter)
531 531
532 532 # closed,opened
533 533 if statuses:
534 534 q = q.filter(PullRequest.status.in_(statuses))
535 535
536 536 if query:
537 537 like_expression = u'%{}%'.format(safe_unicode(query))
538 538 q = q.join(User)
539 539 q = q.filter(or_(
540 540 cast(PullRequest.pull_request_id, String).ilike(like_expression),
541 541 User.username.ilike(like_expression),
542 542 PullRequest.title.ilike(like_expression),
543 543 PullRequest.description.ilike(like_expression),
544 544 ))
545 545 if order_by:
546 546 order_map = {
547 547 'name_raw': PullRequest.pull_request_id,
548 548 'title': PullRequest.title,
549 549 'updated_on_raw': PullRequest.updated_on,
550 550 'target_repo': PullRequest.target_repo_id
551 551 }
552 552 if order_dir == 'asc':
553 553 q = q.order_by(order_map[order_by].asc())
554 554 else:
555 555 q = q.order_by(order_map[order_by].desc())
556 556
557 557 return q
558 558
559 559 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
560 560 q = self._prepare_participating_query(user_id, statuses=statuses, query=query)
561 561 return q.count()
562 562
563 563 def get_im_participating_in(
564 564 self, user_id=None, statuses=None, query='', offset=0,
565 565 length=None, order_by=None, order_dir='desc'):
566 566 """
567 567 Get all Pull requests that i'm participating in, or i have opened
568 568 """
569 569
570 570 q = self._prepare_participating_query(
571 571 user_id, statuses=statuses, query=query, order_by=order_by,
572 572 order_dir=order_dir)
573 573
574 574 if length:
575 575 pull_requests = q.limit(length).offset(offset).all()
576 576 else:
577 577 pull_requests = q.all()
578 578
579 579 return pull_requests
580 580
    def get_versions(self, pull_request):
        """
        Returns the saved versions of *pull_request*, ordered by version id
        ascending (oldest version first).
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
589 589
    def get_pr_version(self, pull_request_id, version=None):
        """
        Resolve a pull request, optionally at a specific saved version.

        :param pull_request_id: id of the live pull request
        :param version: a PullRequestVersion id, the string 'latest', or None
        :returns: 4-tuple of (original pull request, resolved object,
            display object, at_version marker)
        """
        at_version = None

        if version and version == 'latest':
            # 'latest' is an alias for the live (head) pull request object
            pull_request_ver = PullRequest.get(pull_request_id)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_obj
            at_version = 'latest'
        elif version:
            # a concrete stored version; 404 when it does not exist
            pull_request_ver = PullRequestVersion.get_or_404(version)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_ver.pull_request
            at_version = pull_request_ver.pull_request_version_id
        else:
            # no version given: resolve the live PR, 404 when missing
            _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
                pull_request_id)

        pull_request_display_obj = PullRequest.get_pr_display_object(
            pull_request_obj, _org_pull_request_obj)

        return _org_pull_request_obj, pull_request_obj, \
            pull_request_display_obj, at_version
612 612
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, observers, title, description=None,
               common_ancestor_id=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request with its reviewer and observer rows.

        Creation happens in several DB steps (PR row, reviewers, observers,
        commit statuses), followed by an *early* commit and a merge
        simulation that moves the PR from CREATING to CREATED state.

        :param created_by: user (id/username/object) creating the PR
        :param source_repo: source repository (id/name/object)
        :param source_ref: full source ref string ``type:name:commit_id``
        :param target_repo: target repository (id/name/object)
        :param target_ref: full target ref string ``type:name:commit_id``
        :param revisions: list of commit ids included in the PR
        :param reviewers: iterable of
            ``(user_id, reasons, mandatory, role, rules)`` tuples
        :param observers: iterable of the same tuple shape as `reviewers`
        :param title: pull request title
        :param description: optional description text
        :param common_ancestor_id: pre-computed common ancestor commit id
        :param description_renderer: renderer type for the description
        :param reviewer_data: raw reviewer-rules data stored on the PR
        :param translator: translation function; defaults to the current
            request's translator
        :param auth_user: AuthUser performing the action; defaults to the
            AuthUser of `created_by`
        :returns: the created `PullRequest` instance
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        # start in CREATING; flipped to CREATED after the merge simulation below
        pull_request.pull_request_state = pull_request.STATE_CREATING
        pull_request.common_ancestor_id = common_ancestor_id

        Session().add(pull_request)
        # flush so pull_request gets its id, needed for the FK rows below
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, role, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory
            reviewer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        for observer_object in observers:
            user_id, reasons, mandatory, role, rules = observer_object
            user = self._get_user(user_id)

            # skip duplicates from reviewers
            if user.user_id in reviewer_ids:
                continue

            # NOTE(review): observer ids are deliberately NOT added to
            # `reviewer_ids` (the add was disabled), so duplicate entries
            # within `observers` itself are not filtered - confirm intended.

            observer = PullRequestReviewers()
            observer.user = user
            observer.pull_request = pull_request
            observer.reasons = reasons
            observer.mandatory = mandatory
            observer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                # TODO(marcink): do we need this for observers ??
                pass

            Session().add(observer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for merge simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED) as state_obj:
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
        self.trigger_pull_request_hook(pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
754 754
755 755 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
756 756 pull_request = self.__get_pull_request(pull_request)
757 757 target_scm = pull_request.target_repo.scm_instance()
758 758 if action == 'create':
759 759 trigger_hook = hooks_utils.trigger_create_pull_request_hook
760 760 elif action == 'merge':
761 761 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
762 762 elif action == 'close':
763 763 trigger_hook = hooks_utils.trigger_close_pull_request_hook
764 764 elif action == 'review_status_change':
765 765 trigger_hook = hooks_utils.trigger_review_pull_request_hook
766 766 elif action == 'update':
767 767 trigger_hook = hooks_utils.trigger_update_pull_request_hook
768 768 elif action == 'comment':
769 769 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
770 770 elif action == 'comment_edit':
771 771 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
772 772 else:
773 773 return
774 774
775 775 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
776 776 pull_request, action, trigger_hook)
777 777 trigger_hook(
778 778 username=user.username,
779 779 repo_name=pull_request.target_repo.repo_name,
780 780 repo_type=target_scm.alias,
781 781 pull_request=pull_request,
782 782 data=data)
783 783
784 784 def _get_commit_ids(self, pull_request):
785 785 """
786 786 Return the commit ids of the merged pull request.
787 787
788 788 This method is not dealing correctly yet with the lack of autoupdates
789 789 nor with the implicit target updates.
790 790 For example: if a commit in the source repo is already in the target it
791 791 will be reported anyways.
792 792 """
793 793 merge_rev = pull_request.merge_rev
794 794 if merge_rev is None:
795 795 raise ValueError('This pull request was not merged yet')
796 796
797 797 commit_ids = list(pull_request.revisions)
798 798 if merge_rev not in commit_ids:
799 799 commit_ids.append(merge_rev)
800 800
801 801 return commit_ids
802 802
803 803 def merge_repo(self, pull_request, user, extras):
804 804 log.debug("Merging pull request %s", pull_request.pull_request_id)
805 805 extras['user_agent'] = 'internal-merge'
806 806 merge_state = self._merge_pull_request(pull_request, user, extras)
807 807 if merge_state.executed:
808 808 log.debug("Merge was successful, updating the pull request comments.")
809 809 self._comment_and_close_pr(pull_request, user, merge_state)
810 810
811 811 self._log_audit_action(
812 812 'repo.pull_request.merge',
813 813 {'merge_state': merge_state.__dict__},
814 814 user, pull_request)
815 815
816 816 else:
817 817 log.warn("Merge failed, not updating the pull request.")
818 818 return merge_state
819 819
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Run the vcs-level merge of the pull request.

        Builds the merge commit message, spins up the hooks callback daemon,
        injects the hook environment into the target repo config and
        delegates the actual merge to the vcs backend.

        :param pull_request: pull request to merge
        :param user: user performing the merge (name/email go on the commit)
        :param extras: hook-environment dict; updated by the callback daemon
        :param merge_msg: optional message template overriding
            ``MERGE_MESSAGE_TMPL``
        :returns: merge response object from the vcs backend
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        user_name = self._user_name_for_merging(pull_request, user)

        # make sure the locally cached target ref is current before merging
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
860 860
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        Post-merge bookkeeping: record the merge commit on the PR, add a
        closing comment, invalidate target-repo caches and fire the
        'merge' hook.

        :param merge_state: merge response carrying ``merge_ref``
        :param close_msg: optional custom closing comment text
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # closing_pr=True makes this comment also close the pull request
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')
882 882
883 883 def has_valid_update_type(self, pull_request):
884 884 source_ref_type = pull_request.source_ref_parts.type
885 885 return source_ref_type in self.REF_TYPES
886 886
887 887 def get_flow_commits(self, pull_request):
888 888
889 889 # source repo
890 890 source_ref_name = pull_request.source_ref_parts.name
891 891 source_ref_type = pull_request.source_ref_parts.type
892 892 source_ref_id = pull_request.source_ref_parts.commit_id
893 893 source_repo = pull_request.source_repo.scm_instance()
894 894
895 895 try:
896 896 if source_ref_type in self.REF_TYPES:
897 897 source_commit = source_repo.get_commit(source_ref_name)
898 898 else:
899 899 source_commit = source_repo.get_commit(source_ref_id)
900 900 except CommitDoesNotExistError:
901 901 raise SourceRefMissing()
902 902
903 903 # target repo
904 904 target_ref_name = pull_request.target_ref_parts.name
905 905 target_ref_type = pull_request.target_ref_parts.type
906 906 target_ref_id = pull_request.target_ref_parts.commit_id
907 907 target_repo = pull_request.target_repo.scm_instance()
908 908
909 909 try:
910 910 if target_ref_type in self.REF_TYPES:
911 911 target_commit = target_repo.get_commit(target_ref_name)
912 912 else:
913 913 target_commit = target_repo.get_commit(target_ref_id)
914 914 except CommitDoesNotExistError:
915 915 raise TargetRefMissing()
916 916
917 917 return source_commit, target_commit
918 918
    def update_commits(self, pull_request, updating_user):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action

        updating_user is the user_object who triggered the update

        :returns: an ``UpdateResponse``; ``executed`` is False (with a
            failure ``reason``) when no update was performed.
        """
        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id

        if not self.has_valid_update_type(pull_request):
            log.debug("Skipping update of pull request %s due to ref type: %s",
                      pull_request, source_ref_type)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.WRONG_REF_TYPE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        try:
            source_commit, target_commit = self.get_flow_commits(pull_request)
        except SourceRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_SOURCE_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)
        except TargetRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        # detect drift of either side against the refs stored on the PR
        source_changed = source_ref_id != source_commit.raw_id
        target_changed = target_ref_id != target_commit.raw_id

        if not (source_changed or target_changed):
            log.debug("Nothing changed in pull request %s", pull_request)
            # NOTE(review): the two flags below look swapped, but both are
            # guaranteed False on this branch, so the response is unaffected.
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.NO_CHANGE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=target_changed, target_changed=source_changed)

        change_in_found = 'target repo' if target_changed else 'source repo'
        log.debug('Updating pull request because of change in %s detected',
                  change_in_found)

        # Finally there is a need for an update, in case of source change
        # we create a new version, else just an update
        if source_changed:
            pull_request_version = self._create_version_from_snapshot(pull_request)
            self._link_comments_to_version(pull_request_version)
        else:
            # target-only change: re-point to the latest existing version
            try:
                ver = pull_request.versions[-1]
            except IndexError:
                ver = None

            pull_request.pull_request_version_id = \
                ver.pull_request_version_id if ver else None
            pull_request_version = pull_request

        source_repo = pull_request.source_repo.scm_instance()
        target_repo = pull_request.target_repo.scm_instance()

        # re-compute commit ids
        old_commit_ids = pull_request.revisions
        pre_load = ["author", "date", "message", "branch"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        target_ref = target_commit.raw_id
        source_ref = source_commit.raw_id
        ancestor_commit_id = target_repo.get_common_ancestor(
            target_ref, source_ref, source_repo)

        if not ancestor_commit_id:
            raise ValueError(
                'cannot calculate diff info without a common ancestor. '
                'Make sure both repositories are related, and have a common forking commit.')

        pull_request.common_ancestor_id = ancestor_commit_id

        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        # NOTE(review): the target ref is pinned to the common ancestor id,
        # not to target_commit.raw_id - confirm this is intended.
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor_commit_id)

        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = pull_request.revisions

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # calculate commit and file changes
        commit_changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)
        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # set comments as outdated if DIFFS changed
        CommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        valid_commit_changes = (commit_changes.added or commit_changes.removed)
        file_node_changes = (
            file_changes.added or file_changes.modified or file_changes.removed)
        pr_has_changes = valid_commit_changes or file_node_changes

        # Add an automatic comment to the pull request, in case
        # anything has changed
        if pr_has_changes:
            update_comment = CommentsModel().create(
                text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
                repo=pull_request.target_repo,
                user=pull_request.author,
                pull_request=pull_request,
                send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

            # Update status to "Under Review" for added commits
            for commit_id in commit_changes.added:
                ChangesetStatusModel().set_status(
                    repo=pull_request.source_repo,
                    status=ChangesetStatus.STATUS_UNDER_REVIEW,
                    comment=update_comment,
                    user=pull_request.author,
                    pull_request=pull_request,
                    revision=commit_id)

        # send update email to users; best-effort, failures are only logged
        try:
            self.notify_users(pull_request=pull_request, updating_user=updating_user,
                              ancestor_commit_id=ancestor_commit_id,
                              commit_changes=commit_changes,
                              file_changes=file_changes)
        except Exception:
            log.exception('Failed to send email notification to users')

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            commit_changes.added, commit_changes.common, commit_changes.removed)
        log.debug(
            'Updated pull request with the following file changes: %s',
            file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)
        Session().commit()
        self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')

        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version,
            common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
            source_changed=source_changed, target_changed=target_changed)
1093 1093
    def _create_version_from_snapshot(self, pull_request):
        """
        Snapshot the current state of `pull_request` into a new
        `PullRequestVersion` row and return it.

        All user-visible fields plus the cached merge-state fields are
        copied; the row is flushed so its id is available immediately.
        """
        version = PullRequestVersion()
        version.title = pull_request.title
        version.description = pull_request.description
        version.status = pull_request.status
        version.pull_request_state = pull_request.pull_request_state
        version.created_on = datetime.datetime.now()
        version.updated_on = pull_request.updated_on
        version.user_id = pull_request.user_id
        version.source_repo = pull_request.source_repo
        version.source_ref = pull_request.source_ref
        version.target_repo = pull_request.target_repo
        version.target_ref = pull_request.target_ref

        # cached merge-state fields
        version._last_merge_source_rev = pull_request._last_merge_source_rev
        version._last_merge_target_rev = pull_request._last_merge_target_rev
        version.last_merge_status = pull_request.last_merge_status
        version.last_merge_metadata = pull_request.last_merge_metadata
        version.shadow_merge_ref = pull_request.shadow_merge_ref
        version.merge_rev = pull_request.merge_rev
        version.reviewer_data = pull_request.reviewer_data

        version.revisions = pull_request.revisions
        version.common_ancestor_id = pull_request.common_ancestor_id
        version.pull_request = pull_request
        Session().add(version)
        Session().flush()

        return version
1123 1123
1124 1124 def _generate_update_diffs(self, pull_request, pull_request_version):
1125 1125
1126 1126 diff_context = (
1127 1127 self.DIFF_CONTEXT +
1128 1128 CommentsModel.needed_extra_diff_context())
1129 1129 hide_whitespace_changes = False
1130 1130 source_repo = pull_request_version.source_repo
1131 1131 source_ref_id = pull_request_version.source_ref_parts.commit_id
1132 1132 target_ref_id = pull_request_version.target_ref_parts.commit_id
1133 1133 old_diff = self._get_diff_from_pr_or_version(
1134 1134 source_repo, source_ref_id, target_ref_id,
1135 1135 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1136 1136
1137 1137 source_repo = pull_request.source_repo
1138 1138 source_ref_id = pull_request.source_ref_parts.commit_id
1139 1139 target_ref_id = pull_request.target_ref_parts.commit_id
1140 1140
1141 1141 new_diff = self._get_diff_from_pr_or_version(
1142 1142 source_repo, source_ref_id, target_ref_id,
1143 1143 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1144 1144
1145 1145 old_diff_data = diffs.DiffProcessor(old_diff)
1146 1146 old_diff_data.prepare()
1147 1147 new_diff_data = diffs.DiffProcessor(new_diff)
1148 1148 new_diff_data.prepare()
1149 1149
1150 1150 return old_diff_data, new_diff_data
1151 1151
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        Only comments with ``pull_request_version == None`` (i.e. not yet
        bound to any version) are touched.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
1177 1177
1178 1178 def _calculate_commit_id_changes(self, old_ids, new_ids):
1179 1179 added = [x for x in new_ids if x not in old_ids]
1180 1180 common = [x for x in new_ids if x in old_ids]
1181 1181 removed = [x for x in old_ids if x not in new_ids]
1182 1182 total = new_ids
1183 1183 return ChangeTuple(added, common, removed, total)
1184 1184
    def _calculate_file_changes(self, old_diff_data, new_diff_data):
        """
        Compare two prepared diffs and classify files into added / modified /
        removed, returned as a `FileChangeTuple`.

        Files are matched by filename; modification is detected by comparing
        md5 checksums of each file's raw diff text.
        """

        # filename -> md5 of its raw diff, taken from the OLD diff
        old_files = OrderedDict()
        for diff_data in old_diff_data.parsed_diff:
            old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])

        added_files = []
        modified_files = []
        removed_files = []
        for diff_data in new_diff_data.parsed_diff:
            new_filename = diff_data['filename']
            new_hash = md5_safe(diff_data['raw_diff'])

            old_hash = old_files.get(new_filename)
            if not old_hash:
                # file is not present in old diff, we have to figure out from parsed diff
                # operation ADD/REMOVE
                operations_dict = diff_data['stats']['ops']
                if diffs.DEL_FILENODE in operations_dict:
                    removed_files.append(new_filename)
                else:
                    added_files.append(new_filename)
            else:
                if new_hash != old_hash:
                    modified_files.append(new_filename)
                # now remove a file from old, since we have seen it already
                del old_files[new_filename]

        # removed files is when there are present in old, but not in NEW,
        # since we remove old files that are present in new diff, left-overs
        # if any should be the removed files
        removed_files.extend(old_files.keys())

        return FileChangeTuple(added_files, modified_files, removed_files)
1219 1219
1220 1220 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1221 1221 """
1222 1222 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1223 1223 so it's always looking the same disregarding on which default
1224 1224 renderer system is using.
1225 1225
1226 1226 :param ancestor_commit_id: ancestor raw_id
1227 1227 :param changes: changes named tuple
1228 1228 :param file_changes: file changes named tuple
1229 1229
1230 1230 """
1231 1231 new_status = ChangesetStatus.get_status_lbl(
1232 1232 ChangesetStatus.STATUS_UNDER_REVIEW)
1233 1233
1234 1234 changed_files = (
1235 1235 file_changes.added + file_changes.modified + file_changes.removed)
1236 1236
1237 1237 params = {
1238 1238 'under_review_label': new_status,
1239 1239 'added_commits': changes.added,
1240 1240 'removed_commits': changes.removed,
1241 1241 'changed_files': changed_files,
1242 1242 'added_files': file_changes.added,
1243 1243 'modified_files': file_changes.modified,
1244 1244 'removed_files': file_changes.removed,
1245 1245 'ancestor_commit_id': ancestor_commit_id
1246 1246 }
1247 1247 renderer = RstTemplateRenderer()
1248 1248 return renderer.render('pull_request_update.mako', **params)
1249 1249
1250 1250 def edit(self, pull_request, title, description, description_renderer, user):
1251 1251 pull_request = self.__get_pull_request(pull_request)
1252 1252 old_data = pull_request.get_api_data(with_merge_state=False)
1253 1253 if pull_request.is_closed():
1254 1254 raise ValueError('This pull request is closed')
1255 1255 if title:
1256 1256 pull_request.title = title
1257 1257 pull_request.description = description
1258 1258 pull_request.updated_on = datetime.datetime.now()
1259 1259 pull_request.description_renderer = description_renderer
1260 1260 Session().add(pull_request)
1261 1261 self._log_audit_action(
1262 1262 'repo.pull_request.edit', {'old_data': old_data},
1263 1263 user, pull_request)
1264 1264
1265 1265 def update_reviewers(self, pull_request, reviewer_data, user):
1266 1266 """
1267 1267 Update the reviewers in the pull request
1268 1268
1269 1269 :param pull_request: the pr to update
1270 1270 :param reviewer_data: list of tuples
1271 1271 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1272 1272 :param user: current use who triggers this action
1273 1273 """
1274 1274
1275 1275 pull_request = self.__get_pull_request(pull_request)
1276 1276 if pull_request.is_closed():
1277 1277 raise ValueError('This pull request is closed')
1278 1278
1279 1279 reviewers = {}
1280 1280 for user_id, reasons, mandatory, role, rules in reviewer_data:
1281 1281 if isinstance(user_id, (int, compat.string_types)):
1282 1282 user_id = self._get_user(user_id).user_id
1283 1283 reviewers[user_id] = {
1284 1284 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1285 1285
1286 1286 reviewers_ids = set(reviewers.keys())
1287 1287 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1288 1288 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1289 1289
1290 1290 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1291 1291
1292 1292 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1293 1293 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1294 1294
1295 1295 log.debug("Adding %s reviewers", ids_to_add)
1296 1296 log.debug("Removing %s reviewers", ids_to_remove)
1297 1297 changed = False
1298 1298 added_audit_reviewers = []
1299 1299 removed_audit_reviewers = []
1300 1300
1301 1301 for uid in ids_to_add:
1302 1302 changed = True
1303 1303 _usr = self._get_user(uid)
1304 1304 reviewer = PullRequestReviewers()
1305 1305 reviewer.user = _usr
1306 1306 reviewer.pull_request = pull_request
1307 1307 reviewer.reasons = reviewers[uid]['reasons']
1308 1308 # NOTE(marcink): mandatory shouldn't be changed now
1309 1309 # reviewer.mandatory = reviewers[uid]['reasons']
1310 1310 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1311 1311 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1312 1312 Session().add(reviewer)
1313 1313 added_audit_reviewers.append(reviewer.get_dict())
1314 1314
1315 1315 for uid in ids_to_remove:
1316 1316 changed = True
1317 1317 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1318 1318 # This is an edge case that handles previous state of having the same reviewer twice.
1319 1319 # this CAN happen due to the lack of DB checks
1320 1320 reviewers = PullRequestReviewers.query()\
1321 1321 .filter(PullRequestReviewers.user_id == uid,
1322 1322 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1323 1323 PullRequestReviewers.pull_request == pull_request)\
1324 1324 .all()
1325 1325
1326 1326 for obj in reviewers:
1327 1327 added_audit_reviewers.append(obj.get_dict())
1328 1328 Session().delete(obj)
1329 1329
1330 1330 if changed:
1331 1331 Session().expire_all()
1332 1332 pull_request.updated_on = datetime.datetime.now()
1333 1333 Session().add(pull_request)
1334 1334
1335 1335 # finally store audit logs
1336 1336 for user_data in added_audit_reviewers:
1337 1337 self._log_audit_action(
1338 1338 'repo.pull_request.reviewer.add', {'data': user_data},
1339 1339 user, pull_request)
1340 1340 for user_data in removed_audit_reviewers:
1341 1341 self._log_audit_action(
1342 1342 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1343 1343 user, pull_request)
1344 1344
1345 self.notify_reviewers(pull_request, ids_to_add, user.get_instance())
1345 self.notify_reviewers(pull_request, ids_to_add, user)
1346 1346 return ids_to_add, ids_to_remove
1347 1347
1348 1348 def update_observers(self, pull_request, observer_data, user):
1349 1349 """
1350 1350 Update the observers in the pull request
1351 1351
1352 1352 :param pull_request: the pr to update
1353 1353 :param observer_data: list of tuples
1354 1354 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1355 1355 :param user: current use who triggers this action
1356 1356 """
1357 1357 pull_request = self.__get_pull_request(pull_request)
1358 1358 if pull_request.is_closed():
1359 1359 raise ValueError('This pull request is closed')
1360 1360
1361 1361 observers = {}
1362 1362 for user_id, reasons, mandatory, role, rules in observer_data:
1363 1363 if isinstance(user_id, (int, compat.string_types)):
1364 1364 user_id = self._get_user(user_id).user_id
1365 1365 observers[user_id] = {
1366 1366 'reasons': reasons, 'observers': mandatory, 'role': role}
1367 1367
1368 1368 observers_ids = set(observers.keys())
1369 1369 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1370 1370 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1371 1371
1372 1372 current_observers_ids = set([x.user.user_id for x in current_observers])
1373 1373
1374 1374 ids_to_add = observers_ids.difference(current_observers_ids)
1375 1375 ids_to_remove = current_observers_ids.difference(observers_ids)
1376 1376
1377 1377 log.debug("Adding %s observer", ids_to_add)
1378 1378 log.debug("Removing %s observer", ids_to_remove)
1379 1379 changed = False
1380 1380 added_audit_observers = []
1381 1381 removed_audit_observers = []
1382 1382
1383 1383 for uid in ids_to_add:
1384 1384 changed = True
1385 1385 _usr = self._get_user(uid)
1386 1386 observer = PullRequestReviewers()
1387 1387 observer.user = _usr
1388 1388 observer.pull_request = pull_request
1389 1389 observer.reasons = observers[uid]['reasons']
1390 1390 # NOTE(marcink): mandatory shouldn't be changed now
1391 1391 # observer.mandatory = observer[uid]['reasons']
1392 1392
1393 1393 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1394 1394 observer.role = PullRequestReviewers.ROLE_OBSERVER
1395 1395 Session().add(observer)
1396 1396 added_audit_observers.append(observer.get_dict())
1397 1397
1398 1398 for uid in ids_to_remove:
1399 1399 changed = True
1400 1400 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1401 1401 # This is an edge case that handles previous state of having the same reviewer twice.
1402 1402 # this CAN happen due to the lack of DB checks
1403 1403 observers = PullRequestReviewers.query()\
1404 1404 .filter(PullRequestReviewers.user_id == uid,
1405 1405 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1406 1406 PullRequestReviewers.pull_request == pull_request)\
1407 1407 .all()
1408 1408
1409 1409 for obj in observers:
1410 1410 added_audit_observers.append(obj.get_dict())
1411 1411 Session().delete(obj)
1412 1412
1413 1413 if changed:
1414 1414 Session().expire_all()
1415 1415 pull_request.updated_on = datetime.datetime.now()
1416 1416 Session().add(pull_request)
1417 1417
1418 1418 # finally store audit logs
1419 1419 for user_data in added_audit_observers:
1420 1420 self._log_audit_action(
1421 1421 'repo.pull_request.observer.add', {'data': user_data},
1422 1422 user, pull_request)
1423 1423 for user_data in removed_audit_observers:
1424 1424 self._log_audit_action(
1425 1425 'repo.pull_request.observer.delete', {'old_data': user_data},
1426 1426 user, pull_request)
1427 1427
1428 self.notify_observers(pull_request, ids_to_add, user.get_instance())
1428 self.notify_observers(pull_request, ids_to_add, user)
1429 1429 return ids_to_add, ids_to_remove
1430 1430
1431 1431 def get_url(self, pull_request, request=None, permalink=False):
1432 1432 if not request:
1433 1433 request = get_current_request()
1434 1434
1435 1435 if permalink:
1436 1436 return request.route_url(
1437 1437 'pull_requests_global',
1438 1438 pull_request_id=pull_request.pull_request_id,)
1439 1439 else:
1440 1440 return request.route_url('pullrequest_show',
1441 1441 repo_name=safe_str(pull_request.target_repo.repo_name),
1442 1442 pull_request_id=pull_request.pull_request_id,)
1443 1443
1444 1444 def get_shadow_clone_url(self, pull_request, request=None):
1445 1445 """
1446 1446 Returns qualified url pointing to the shadow repository. If this pull
1447 1447 request is closed there is no shadow repository and ``None`` will be
1448 1448 returned.
1449 1449 """
1450 1450 if pull_request.is_closed():
1451 1451 return None
1452 1452 else:
1453 1453 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1454 1454 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1455 1455
    def _notify_reviewers(self, pull_request, user_ids, role, user):
        """
        Create in-app notifications (and emails) about a pull request for
        the given users, rendered for a reviewer/observer role.

        :param pull_request: the pull request the notification is about
        :param user_ids: ids of users to notify; the method is a no-op when
            this is empty
        :param role: PullRequestReviewers.ROLE_* value, passed to the email
            template as ``user_role``
        :param user: the acting user, recorded as creator of the notification
        """
        # notification to reviewers/observers
        if not user_ids:
            return

        log.debug('Notify following %s users about pull-request %s', role, user_ids)

        pull_request_obj = pull_request
        # the users that will receive the notification
        recipients = user_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        current_rhodecode_user = user
        kwargs = {
            'user': current_rhodecode_user,
            'pull_request_author': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
            # group email threads by the pull-request URL
            'thread_ids': [pr_url],
            'user_role': role
        }

        # pre-generate the subject for notification itself
        (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=current_rhodecode_user,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1518 1518
1519 1519 def notify_reviewers(self, pull_request, reviewers_ids, user):
1520 1520 return self._notify_reviewers(pull_request, reviewers_ids,
1521 1521 PullRequestReviewers.ROLE_REVIEWER, user)
1522 1522
1523 1523 def notify_observers(self, pull_request, observers_ids, user):
1524 1524 return self._notify_reviewers(pull_request, observers_ids,
1525 1525 PullRequestReviewers.ROLE_OBSERVER, user)
1526 1526
    def notify_users(self, pull_request, updating_user, ancestor_commit_id,
                     commit_changes, file_changes):
        """
        Notify all reviewers/observers of a pull request — except the person
        who performed the update — that the pull request was updated.

        :param pull_request: the updated pull request
        :param updating_user: user who performed the update; excluded from
            the recipients
        :param ancestor_commit_id: common ancestor commit id after the update
        :param commit_changes: object with ``added``/``removed`` commit lists
        :param file_changes: object with ``added``/``modified``/``removed``
            file lists
        """

        updating_user_id = updating_user.user_id
        reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
        # NOTE(marcink): send notification to all other users except to
        # person who updated the PR
        recipients = reviewers.difference(set([updating_user_id]))

        log.debug('Notify following recipients about pull-request update %s', recipients)

        pull_request_obj = pull_request

        # send email about the update
        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        email_kwargs = {
            'date': datetime.datetime.now(),
            'updating_user': updating_user,

            'pull_request': pull_request_obj,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,

            'ancestor_commit_id': ancestor_commit_id,
            'added_commits': commit_changes.added,
            'removed_commits': commit_changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
            # group email threads by the pull-request URL
            'thread_ids': [pr_url],
        }

        (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
            EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=updating_user,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
            recipients=recipients,
            email_kwargs=email_kwargs,
        )
1594 1594
    def delete(self, pull_request, user=None):
        """
        Delete a pull request, cleaning up its shadow merge workspace and
        writing an audit-log entry before removal.

        :param pull_request: PullRequest instance or pull_request_id
        :param user: acting user for the audit log; defaults to the
            currently logged-in username
        """
        if not user:
            user = getattr(get_current_rhodecode_user(), 'username', None)

        pull_request = self.__get_pull_request(pull_request)
        # capture data before deletion for the audit entry
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)
1606 1606
    def close_pull_request(self, pull_request, user):
        """
        Mark a pull request closed: clean its merge workspace, set the
        CLOSED status, fire the 'close' hook and store an audit-log entry.

        :param pull_request: PullRequest instance or pull_request_id
        :param user: acting user, recorded in the audit log
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')

        pr_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1618 1618
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close a pull request, leaving a closing status-change comment.

        The closing status is STATUS_APPROVED only when the calculated
        review status is approved; otherwise STATUS_REJECTED.

        :param message: custom comment text; a default closing message is
            used when not given
        :param auth_user: authenticated user performing the action, passed
            through to the comment
        :return: tuple of (comment, status)
        """

        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        # make the new status visible before hooks run
        Session().flush()

        self.trigger_pull_request_hook(pull_request, user, 'comment',
                                       data={'comment': comment})

        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
                                           data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(pull_request.pull_request_id, user)

        return comment, status
1676 1676
    def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
        """
        Determine whether the pull request can currently be merged
        server-side.

        :param translator: translation function; defaults to the current
            request's translator
        :param force_shadow_repo_refresh: re-run the dry-run merge even
            when the cached merge state looks valid
        :return: tuple of (merge_response or None, merge_possible, message)
        """
        _ = translator or get_current_request().translate

        if not self._is_merge_enabled(pull_request):
            return None, False, _('Server-side pull request merging is disabled.')

        if pull_request.is_closed():
            return None, False, _('This pull request is closed.')

        merge_possible, msg = self._check_repo_requirements(
            target=pull_request.target_repo, source=pull_request.source_repo,
            translator=_)
        if not merge_possible:
            return None, merge_possible, msg

        try:
            merge_response = self._try_merge(
                pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
            log.debug("Merge response: %s", merge_response)
            return merge_response, merge_response.possible, merge_response.merge_status_message
        except NotImplementedError:
            # backend does not support server-side merging
            return None, False, _('Pull request merging is not supported.')
1699 1699
1700 1700 def _check_repo_requirements(self, target, source, translator):
1701 1701 """
1702 1702 Check if `target` and `source` have compatible requirements.
1703 1703
1704 1704 Currently this is just checking for largefiles.
1705 1705 """
1706 1706 _ = translator
1707 1707 target_has_largefiles = self._has_largefiles(target)
1708 1708 source_has_largefiles = self._has_largefiles(source)
1709 1709 merge_possible = True
1710 1710 message = u''
1711 1711
1712 1712 if target_has_largefiles != source_has_largefiles:
1713 1713 merge_possible = False
1714 1714 if source_has_largefiles:
1715 1715 message = _(
1716 1716 'Target repository large files support is disabled.')
1717 1717 else:
1718 1718 message = _(
1719 1719 'Source repository large files support is disabled.')
1720 1720
1721 1721 return merge_possible, message
1722 1722
1723 1723 def _has_largefiles(self, repo):
1724 1724 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1725 1725 'extensions', 'largefiles')
1726 1726 return largefiles_ui and largefiles_ui[0].active
1727 1727
    def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
        """
        Try to merge the pull request and return the merge status.

        Uses the merge state cached on the pull request when it is still
        valid; otherwise (or when `force_shadow_repo_refresh` is set) runs
        a fresh dry-run merge in the shadow repository.

        :return: MergeResponse describing whether the merge is possible
        """
        log.debug(
            "Trying out if the pull request %s can be merged. Force_refresh=%s",
            pull_request.pull_request_id, force_shadow_repo_refresh)
        target_vcs = pull_request.target_repo.scm_instance()
        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': pull_request.target_ref_parts})
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            locked_by = 'user:{}'.format(target_locked[0])
            log.debug("The target repository is locked by %s.", locked_by)
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
                metadata={'locked_by': locked_by})
        elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
                pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # cached merge state is still valid; rebuild the response from it
            possible = pull_request.last_merge_status == MergeFailureReason.NONE
            metadata = {
                'unresolved_files': '',
                'target_ref': pull_request.target_ref_parts,
                'source_ref': pull_request.source_ref_parts,
            }
            if pull_request.last_merge_metadata:
                metadata.update(pull_request.last_merge_metadata_parsed)

            if not possible and target_ref.type == 'branch':
                # NOTE(marcink): case for mercurial multiple heads on branch
                heads = target_vcs._heads(target_ref.name)
                if len(heads) != 1:
                    # NOTE(review): separator looks reversed — ',\n' was
                    # probably intended; confirm before changing
                    heads = '\n,'.join(target_vcs._heads(target_ref.name))
                    metadata.update({
                        'heads': heads
                    })

            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status, metadata=metadata)

        return merge_state
1781 1781
1782 1782 def _refresh_reference(self, reference, vcs_repository):
1783 1783 if reference.type in self.UPDATABLE_REF_TYPES:
1784 1784 name_or_id = reference.name
1785 1785 else:
1786 1786 name_or_id = reference.commit_id
1787 1787
1788 1788 refreshed_commit = vcs_repository.get_commit(name_or_id)
1789 1789 refreshed_reference = Reference(
1790 1790 reference.type, reference.name, refreshed_commit.raw_id)
1791 1791 return refreshed_reference
1792 1792
1793 1793 def _needs_merge_state_refresh(self, pull_request, target_reference):
1794 1794 return not(
1795 1795 pull_request.revisions and
1796 1796 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1797 1797 target_reference.commit_id == pull_request._last_merge_target_rev)
1798 1798
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge in the shadow repository and cache its outcome
        (last merge revisions, status, metadata, shadow merge ref) on the
        pull request.

        :return: MergeResponse of the dry-run merge
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.last_merge_metadata = merge_state.metadata

            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1824 1824
1825 1825 def _workspace_id(self, pull_request):
1826 1826 workspace_id = 'pr-%s' % pull_request.pull_request_id
1827 1827 return workspace_id
1828 1828
    def generate_repo_data(self, repo, commit_id=None, branch=None,
                           bookmark=None, translator=None):
        """
        Return a serializable description of `repo` (owner, name, link,
        description) together with its selectable refs, both as a raw list
        and as select2-compatible groups, for the pull-request selectors.

        :param commit_id: a commit that must be in the ref list and
            selected by default
        :param branch: a branch that must be in the ref list and selected
            by default
        :param bookmark: a bookmark that must be in the ref list and
            selected by default
        :param translator: optional translation function
        """
        from rhodecode.model.repo import RepoModel

        all_refs, selected_ref = \
            self._get_repo_pullrequest_sources(
                repo.scm_instance(), commit_id=commit_id,
                branch=branch, bookmark=bookmark, translator=translator)

        # reshape refs into select2's {text, children} group format
        refs_select2 = []
        for element in all_refs:
            children = [{'id': x[0], 'text': x[1]} for x in element[0]]
            refs_select2.append({'text': element[1], 'children': children})

        return {
            'user': {
                'user_id': repo.user.user_id,
                'username': repo.user.username,
                'firstname': repo.user.first_name,
                'lastname': repo.user.last_name,
                'gravatar_link': h.gravatar_url(repo.user.email, 14),
            },
            'name': repo.repo_name,
            'link': RepoModel().get_url(repo),
            'description': h.chop_at_smart(repo.description_safe, '\n'),
            'refs': {
                'all_refs': all_refs,
                'selected_ref': selected_ref,
                'select2_refs': refs_select2
            }
        }
1860 1860
1861 1861 def generate_pullrequest_title(self, source, source_ref, target):
1862 1862 return u'{source}#{at_ref} to {target}'.format(
1863 1863 source=source,
1864 1864 at_ref=source_ref,
1865 1865 target=target,
1866 1866 )
1867 1867
1868 1868 def _cleanup_merge_workspace(self, pull_request):
1869 1869 # Merging related cleanup
1870 1870 repo_id = pull_request.target_repo.repo_id
1871 1871 target_scm = pull_request.target_repo.scm_instance()
1872 1872 workspace_id = self._workspace_id(pull_request)
1873 1873
1874 1874 try:
1875 1875 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1876 1876 except NotImplementedError:
1877 1877 pass
1878 1878
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :param translator: optional translation function; defaults to the
            current request's translator
        :return: tuple of (groups, selected) where `groups` is a list of
            ([(ref_key, ref_name), ...], group_label) tuples and `selected`
            is a 'type:name:commit_id' key or None
        :raises CommitDoesNotExistError: when a requested ref is not found
        :raises EmptyRepositoryError: when the repo has no commits at all
        """
        _ = translator or get_current_request().translate

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_unicode(branch) if branch else None
        bookmark = safe_unicode(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # select the first ref matching the requested commit_id
                    # or this group's match value (branch/bookmark name)
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                # an explicit ref was requested but not found anywhere
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                # nothing requested: fall back to the default branch
                selected = u'branch:{}:{}'.format(
                    safe_unicode(repo.DEFAULT_BRANCH_NAME),
                    safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected
1944 1944
1945 1945 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1946 1946 hide_whitespace_changes, diff_context):
1947 1947
1948 1948 return self._get_diff_from_pr_or_version(
1949 1949 source_repo, source_ref_id, target_ref_id,
1950 1950 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1951 1951
    def _get_diff_from_pr_or_version(
            self, source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes, diff_context):
        """
        Compute the diff between `target_ref_id` and `source_ref_id` using
        the source repo's vcs instance.

        :param source_repo: Repository model instance or a plain vcs repo
        :param hide_whitespace_changes: ignore whitespace-only changes
        :param diff_context: number of context lines around each hunk
        :return: raw vcs diff object
        """

        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(
            commit_id=safe_str(source_ref_id), maybe_unreachable=True)
        if isinstance(source_repo, Repository):
            vcs_repo = source_repo.scm_instance()
        else:
            vcs_repo = source_repo

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit,
            ignore_whitespace=hide_whitespace_changes, context=diff_context)
        return vcs_diff
1982 1982
1983 1983 def _is_merge_enabled(self, pull_request):
1984 1984 return self._get_general_setting(
1985 1985 pull_request, 'rhodecode_pr_merge_enabled')
1986 1986
1987 1987 def _use_rebase_for_merging(self, pull_request):
1988 1988 repo_type = pull_request.target_repo.repo_type
1989 1989 if repo_type == 'hg':
1990 1990 return self._get_general_setting(
1991 1991 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1992 1992 elif repo_type == 'git':
1993 1993 return self._get_general_setting(
1994 1994 pull_request, 'rhodecode_git_use_rebase_for_merging')
1995 1995
1996 1996 return False
1997 1997
1998 1998 def _user_name_for_merging(self, pull_request, user):
1999 1999 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2000 2000 if env_user_name_attr and hasattr(user, env_user_name_attr):
2001 2001 user_name_attr = env_user_name_attr
2002 2002 else:
2003 2003 user_name_attr = 'short_contact'
2004 2004
2005 2005 user_name = getattr(user, user_name_attr)
2006 2006 return user_name
2007 2007
2008 2008 def _close_branch_before_merging(self, pull_request):
2009 2009 repo_type = pull_request.target_repo.repo_type
2010 2010 if repo_type == 'hg':
2011 2011 return self._get_general_setting(
2012 2012 pull_request, 'rhodecode_hg_close_branch_before_merging')
2013 2013 elif repo_type == 'git':
2014 2014 return self._get_general_setting(
2015 2015 pull_request, 'rhodecode_git_close_branch_before_merging')
2016 2016
2017 2017 return False
2018 2018
2019 2019 def _get_general_setting(self, pull_request, settings_key, default=False):
2020 2020 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2021 2021 settings = settings_model.get_general_settings()
2022 2022 return settings.get(settings_key, default)
2023 2023
2024 2024 def _log_audit_action(self, action, action_data, user, pull_request):
2025 2025 audit_logger.store(
2026 2026 action=action,
2027 2027 action_data=action_data,
2028 2028 user=user,
2029 2029 repo=pull_request.target_repo)
2030 2030
2031 2031 def get_reviewer_functions(self):
2032 2032 """
2033 2033 Fetches functions for validation and fetching default reviewers.
2034 2034 If available we use the EE package, else we fallback to CE
2035 2035 package functions
2036 2036 """
2037 2037 try:
2038 2038 from rc_reviewers.utils import get_default_reviewers_data
2039 2039 from rc_reviewers.utils import validate_default_reviewers
2040 2040 from rc_reviewers.utils import validate_observers
2041 2041 except ImportError:
2042 2042 from rhodecode.apps.repository.utils import get_default_reviewers_data
2043 2043 from rhodecode.apps.repository.utils import validate_default_reviewers
2044 2044 from rhodecode.apps.repository.utils import validate_observers
2045 2045
2046 2046 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2047 2047
2048 2048
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    # keys used in `error_details` to identify which check produced an error
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'
    WIP_CHECK = 'wip'

    def __init__(self):
        # calculated review status of the pull request, filled by validate()
        self.review_status = None
        # whether a merge is possible; None until a merge simulation ran
        self.merge_possible = None
        self.merge_msg = ''
        self.merge_response = None
        # set to True by push_error() as soon as any check fails
        self.failed = None
        # list of [error_type, message] pairs, in check order
        self.errors = []
        self.error_details = OrderedDict()
        self.source_commit = AttributeDict()
        self.target_commit = AttributeDict()
2070 2070
2071 2071 def __repr__(self):
2072 2072 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2073 2073 self.merge_possible, self.failed, self.errors)
2074 2074
2075 2075 def push_error(self, error_type, message, error_key, details):
2076 2076 self.failed = True
2077 2077 self.errors.append([error_type, message])
2078 2078 self.error_details[error_key] = dict(
2079 2079 details=details,
2080 2080 error_type=error_type,
2081 2081 message=message
2082 2082 )
2083 2083
2084 2084 @classmethod
2085 2085 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2086 2086 force_shadow_repo_refresh=False):
2087 2087 _ = translator
2088 2088 merge_check = cls()
2089 2089
2090 2090 # title has WIP:
2091 2091 if pull_request.work_in_progress:
2092 2092 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2093 2093
2094 2094 msg = _('WIP marker in title prevents from accidental merge.')
2095 2095 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2096 2096 if fail_early:
2097 2097 return merge_check
2098 2098
2099 2099 # permissions to merge
2100 2100 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2101 2101 if not user_allowed_to_merge:
2102 2102 log.debug("MergeCheck: cannot merge, approval is pending.")
2103 2103
2104 2104 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2105 2105 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2106 2106 if fail_early:
2107 2107 return merge_check
2108 2108
2109 2109 # permission to merge into the target branch
2110 2110 target_commit_id = pull_request.target_ref_parts.commit_id
2111 2111 if pull_request.target_ref_parts.type == 'branch':
2112 2112 branch_name = pull_request.target_ref_parts.name
2113 2113 else:
2114 2114 # for mercurial we can always figure out the branch from the commit
2115 2115 # in case of bookmark
2116 2116 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2117 2117 branch_name = target_commit.branch
2118 2118
2119 2119 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2120 2120 pull_request.target_repo.repo_name, branch_name)
2121 2121 if branch_perm and branch_perm == 'branch.none':
2122 2122 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2123 2123 branch_name, rule)
2124 2124 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2125 2125 if fail_early:
2126 2126 return merge_check
2127 2127
2128 2128 # review status, must be always present
2129 2129 review_status = pull_request.calculated_review_status()
2130 2130 merge_check.review_status = review_status
2131 2131
2132 2132 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2133 2133 if not status_approved:
2134 2134 log.debug("MergeCheck: cannot merge, approval is pending.")
2135 2135
2136 2136 msg = _('Pull request reviewer approval is pending.')
2137 2137
2138 2138 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2139 2139
2140 2140 if fail_early:
2141 2141 return merge_check
2142 2142
2143 2143 # left over TODOs
2144 2144 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2145 2145 if todos:
2146 2146 log.debug("MergeCheck: cannot merge, {} "
2147 2147 "unresolved TODOs left.".format(len(todos)))
2148 2148
2149 2149 if len(todos) == 1:
2150 2150 msg = _('Cannot merge, {} TODO still not resolved.').format(
2151 2151 len(todos))
2152 2152 else:
2153 2153 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2154 2154 len(todos))
2155 2155
2156 2156 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2157 2157
2158 2158 if fail_early:
2159 2159 return merge_check
2160 2160
2161 2161 # merge possible, here is the filesystem simulation + shadow repo
2162 2162 merge_response, merge_status, msg = PullRequestModel().merge_status(
2163 2163 pull_request, translator=translator,
2164 2164 force_shadow_repo_refresh=force_shadow_repo_refresh)
2165 2165
2166 2166 merge_check.merge_possible = merge_status
2167 2167 merge_check.merge_msg = msg
2168 2168 merge_check.merge_response = merge_response
2169 2169
2170 2170 source_ref_id = pull_request.source_ref_parts.commit_id
2171 2171 target_ref_id = pull_request.target_ref_parts.commit_id
2172 2172
2173 2173 try:
2174 2174 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2175 2175 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2176 2176 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2177 2177 merge_check.source_commit.current_raw_id = source_commit.raw_id
2178 2178 merge_check.source_commit.previous_raw_id = source_ref_id
2179 2179
2180 2180 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2181 2181 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2182 2182 merge_check.target_commit.current_raw_id = target_commit.raw_id
2183 2183 merge_check.target_commit.previous_raw_id = target_ref_id
2184 2184 except (SourceRefMissing, TargetRefMissing):
2185 2185 pass
2186 2186
2187 2187 if not merge_status:
2188 2188 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2189 2189 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2190 2190
2191 2191 if fail_early:
2192 2192 return merge_check
2193 2193
2194 2194 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2195 2195 return merge_check
2196 2196
2197 2197 @classmethod
2198 2198 def get_merge_conditions(cls, pull_request, translator):
2199 2199 _ = translator
2200 2200 merge_details = {}
2201 2201
2202 2202 model = PullRequestModel()
2203 2203 use_rebase = model._use_rebase_for_merging(pull_request)
2204 2204
2205 2205 if use_rebase:
2206 2206 merge_details['merge_strategy'] = dict(
2207 2207 details={},
2208 2208 message=_('Merge strategy: rebase')
2209 2209 )
2210 2210 else:
2211 2211 merge_details['merge_strategy'] = dict(
2212 2212 details={},
2213 2213 message=_('Merge strategy: explicit merge commit')
2214 2214 )
2215 2215
2216 2216 close_branch = model._close_branch_before_merging(pull_request)
2217 2217 if close_branch:
2218 2218 repo_type = pull_request.target_repo.repo_type
2219 2219 close_msg = ''
2220 2220 if repo_type == 'hg':
2221 2221 close_msg = _('Source branch will be closed before the merge.')
2222 2222 elif repo_type == 'git':
2223 2223 close_msg = _('Source branch will be deleted after the merge.')
2224 2224
2225 2225 merge_details['close_branch'] = dict(
2226 2226 details={},
2227 2227 message=close_msg
2228 2228 )
2229 2229
2230 2230 return merge_details
2231 2231
2232 2232
# Summary of a reviewer-set update: entries added, kept (common), removed,
# and the resulting total count.
ChangeTuple = collections.namedtuple(
    'ChangeTuple', ('added', 'common', 'removed', 'total'))

# Per-category breakdown of file changes: added, modified and removed files.
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ('added', 'modified', 'removed'))
@@ -1,35 +1,40 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import colander
22 22 from rhodecode.model.validation_schema import validators, preparers, types
23 23
# Role assigned to a reviewer entry when the payload does not specify one.
DEFAULT_ROLE = 'reviewer'
# Roles accepted by the ``role`` field of ReviewerSchema.
VALID_ROLES = ['reviewer', 'observer']
26
24 27
class ReviewerSchema(colander.MappingSchema):
    """Validation schema for a single pull-request reviewer entry."""
    # accepts either a username string or a numeric user id
    username = colander.SchemaNode(types.StrOrIntType())
    # free-form reasons explaining why this reviewer was added
    reasons = colander.SchemaNode(colander.List(), missing=['no reason specified'])
    # mandatory flag; presumably marks reviewers whose vote is required -- confirm with callers
    mandatory = colander.SchemaNode(colander.Boolean(), missing=False)
    # identifiers of the review rules that produced this entry
    rules = colander.SchemaNode(colander.List(), missing=[])
    # member role, restricted to VALID_ROLES; defaults to DEFAULT_ROLE
    role = colander.SchemaNode(colander.String(), missing=DEFAULT_ROLE,
                               validator=colander.OneOf(VALID_ROLES))
30 35
31 36
class ReviewerListSchema(colander.SequenceSchema):
    """Sequence schema: a list of ReviewerSchema mappings."""
    reviewers = ReviewerSchema()
34 39
35 40
@@ -1,114 +1,115 b''
1 1 <!DOCTYPE html>
2 2 <html xmlns="http://www.w3.org/1999/xhtml">
3 3 <head>
4 4 <title>Error - 502 Bad Gateway</title>
5 5 <link id="favicon" rel="shortcut icon" type="image/png" href="data:image/png;base64,AAABAAIAEBAAAAEAIABoBAAAJgAAACAgAAABACAAqBAAAI4EAAAoAAAAEAAAACAAAAABACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALMiIiK1OTk5ADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMEsLCz/SUlJ/xUVFcM3NzcAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAM8eHh7/8/Pz//39/f9BQUH/Dw8P0DY2NgMzMzMAMzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAMcjIyP/8vLy////////////9/f3/0RERf8REhTINzc3ADMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAKUWFhb/7Ozs///////o6Of/6Onq///////z+v//NUFh/xgaIao3NjIAMzMzADMzMwAAAAAAAAAAAAAAAGgAAAD/0dHR///////o5+X/7O/2/+v5///j5Oj//+ic/92wMv83MB3/Jys2ajMzMwAzMzMAAAAAAAAAABYAAAD4kZGR///////p6er/7Pf//+jv+//my4D/6q0J9PqkAJz/zAAlXlcoeRshOf8zMzMaMzMzAAAAAAAAAACRMDAw///////09fj/6vj//+Xcwv/vtBns/7UAav+8AAP/vgAAyZUKACotNQAtLzXyMzMzsDMzMwAAAAAKAAAA8aSjov//////6PX//+fOif/2qwCH/7YAAKN7EgBsWSQAU0gqAC4wNAAqLTUANTQyZjMzM/8zMzMOAAAAUBMTEv/x8vb/9f///+nLdfL2ogAz/7kAAG5bIwAFFD81Dhs9ShskOU8qLTZMNTQyKTMzMwAzMzP/MzMzZgAAAIVJSEf//////+nRjeb4pQAV/9sAAKiFFQAADkEAMDE0UzQ0M+IzMzOOMzMzvDMzM2QzMzMAMzMz3zMzM6oAAACeXGV9////7/j/yAAe/70AALiDAAA0NTcALDJAADMzMlEzMzPVMzMzgDMzM30zMzMjMzMzADMzM8MzMzPIAAAAnWBlaf//5V86nGYAACgeAAAAAAAABgcNACsrKQA2NjYKMzMzEDMzMwwzMzMGMzMzDDMzMwAzMzPNMzMzvwAAAG0bFQv/JRgHfQAAAB4AAAAAAAAAAAAAAAADAwMAMjIyADY2NgAzMzMAMzMzADMzMxIzMzOKMzMz/zMzM3EAAAADAAAAjAAAAP8AAAD/AAAA/QAAANAAAACZAgICXzExMV82NjaZMzMz0DMzM/wzMzP/MzMz/zMzM5gzMzMAAAAAAAAAAAAAAAAAAAAAOAAAAIoAAADKAAAA9AICAv8xMTH/NjY29DMzM8ozMzOLMzMzODMzMwAzMzMAMzMzAP5/AAD8PwAA+B8AAPAPAADgBwAA4AcAAMAbAACA+QAAgf0AAIf9AAAPjAAAH5wAAD/8AAC/+QAAgYEAAPAPAAAoAAAAIAAAAEAAAAABACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgICtjExMbk2NjYAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACwAAAM4BAQH/MDAw/zY2NtEzMzMNMzMzADMzMwAzMzMAMzMzADMzMwAzMzMA
MzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACMAAADsAAAA/wEBAf8XFxf/MDAw/zU1Ne4zMzMmMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA2AAAA/QAAAP8AAAD/eXl5/56env8ODg7/Jycn/zY2Nv8zMzM6MzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQQAAAP8AAAD/AAAA/4SEhP///////////6Ghof8QEBD/IiIi/zc3N/8zMzNFMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEEAAAD/AAAA/wAAAP+bm5v//////+/v7//u7u7//////7S0tP8VFRX/ICAg/zc3N/8zMzNFMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA3AAAA/wAAAP8AAAD/p6en///////u7u7/6urq/+rq6v/t7e3//////729vf8WFhb/ICAg/zc3N/8zMzM6MzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIQAAAPgAAAD/AAAA/6ampv//////7e3t/+rq6v/r6+v/6+vr/+rq6v/s7Oz//////729vf8UFBT/IyMj/zY2NvozMzMlMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAUAAADdAAAA/wAAAP+ampr//////+3t7f/q6ur/6+vr/+vr6//r6+v/6+vr/+rq6v/s7Oz//////7Kysf8PDw//KSkp/zU1NeAzMzMIMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAsQAAAP8AAAD/f39////////u7u7/6urq/+vr6//r6+v/6+vr/+vr6//r6+v/6+vr/+rq6f/t7e///////5ynwf8KEy3/MC8t/zQ0M7UzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHUAAAD/AAAA/1paWv//////8vLy/+rq6v/r6+v/6+vr/+vr6//r6+v/6+vr/+vq6f/r7PD/7/f//+v3///o6Oj//+mZ/3FcJv8OGDL/NjUy/zMzM3ozMzMAMzMzADMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAAAAAAAuAAAA/wAAAP8sLCz///////v7+//p6en/6+vr/+vr6//r6+v/6+vr/+vq6f/r6+7/7/j//+r2///h2sf/37tY/9+iA//zpgD//74A/2BRJv8eJTn/MzMz/zMzMzIzMzMAMzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAMUAAAD/AAAA/9DQ0P//////6Ojo/+vr6//r6+v/6+vr/+vr6v/r6uv/7vX+/+v4///i2sb/4LZC/+OfAP/sngD/9K0A/fCuALz/zgBgoIMYRRAZPPUzMzP/MzMzyTMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAABfAAAA/wAAAP9+fn7//////+np6f/r6+v/6+vr/+vr6//r6uj/7O/z/+36///k5OH/
4btP/+WfAP/voQD/9K8AyvCwAGTvrgAQ764AAP/CAABrWSUAFyA6eTMzM/8zMzP/MzMzYzMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAN4AAAD/HR0d//r6+v/4+Pj/6urq/+vr6//r6+v/6+rp/+31///o9P//4sqI/+SjAP/unwD/9K8Aou+vACjurgAA8K8AAPayAAD/xAAA6K0FACwuNAArLjUAMzMz2jMzM/8zMzPiMzMzADMzMwAzMzMAAAAAAAAAAAAAAABgAAAA/wAAAP+dnZ3//////+jo6P/r6+v/6+rq/+zr7f/t+f//5ebi/+OzMv/rnQD/8aoAnfKxABT/ugAA/8IAAP/EAAD/wQAA/LYAAP+5AACNbhoAEh48ADU0MwAzMzNaMzMz/zMzM/8zMzNkMzMzADMzMwAAAAAAAAAAAAAAAMgAAAD/IiIi//z8/P/09PT/6+vr/+vq6f/s7fD/6/r//+TYsf/npQP/8aEAwe+tAB34uAAA/8MAAN+iBAC+jg0Apn8TAHJcIgBYSykAPDkwACcrNgAxMjQAMzMzADMzMwAzMzPYMzMz/zMzM8wzMzMAMzMzAAAAAAAAAAAwAAAA/wAAAP+Hh4f//////+np6f/r6un/7O/z/+r4///lzIf/658A+/KoAFburQAA/8EAAP+/AACCZR0AKSw2ABwkOQAWIDsAEBw8ABoiOgAjKDcALzA0ADU0MgAzMzMAMzMzADMzM3AzMzP/MzMz/zMzMzQzMzMAAAAAAAAAAHoAAAD/AAAA/9ra2v//////6+rp/+zv8//q+P//5cdy/+2eAMvyqwAP7KwAAP+/AADqrQMAUEUrAAcWPwAkKTcAMDE0ADIyMwA0MzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzGTMzM/8zMzP/MzMzfzMzMwAAAAAkAAAAvgAAAP8+Pj7//////+3s6//s7fD/6vj//+fIdP/ungCa8a0AAO6uAAD+uAAA6q0DADAxMwAMGT4ANTQzCDQ0M8gzMzOOMzMzKjMzM8QzMzOQMzMz1DMzM0szMzO9MzMzSTMzMwAzMzMAMzMzvDMzM/8zMzPCMzMzJwAAAFMAAADsAAAA/3d3d///////6urq/+r5///nz4v/7p4AffGsAADvrwAA7asAAP/OAACUdRoAABBAADc1MgAzMzMAMzMzyzMzM6QzMzOFMzMzyDMzM0AzMzPXMzMzLzMzM+gzMzMcMzMzADMzMwAzMzOAMzMz/zMzM+8zMzNXAAAAegAAAP8AAAD/mJiY///////r9///6dyz/+6hAHfwqwAA768AAO2sAAD/vgAA8LQDADUzMgAmKjcAMzMzADMzMwAzMzOdMzMz4zMzM+szMzN4MzMzADMzM+UzMzPPMzMz1DMzMwAzMzMAMzMzADMzM1ozMzP/MzMz/zMzM3wAAACUAAAA/wAAAP+traz//////+ns5//uqguL8KcAAO2tAAD5tAAA/9IAAP/UAABoVCkADho8ADc2MgAzMzMAMzMzADMzM8IzMzOoMzMzdjMzM9ozMzMkMzMz5TMzM5QzMzMmMzMzADMzMwAzMzMAMzMzQjMzM/8zMzP/MzMzkwAAAJ4AAAD/AAAA/7S1tv//////7L5RtfCfAAD8uwAA/9MAAPy9AACxfQAASTgLABYhPwA+Pj0ANDQzADIyMgAzMzMGMzMzwzMzM8kzMzPNMzMzRDMzM24zMzPiMzMzADMzMyEzMzNTMzMzFDMzMwAzMzM5MzMz/zMzM/8zMzOaAAAAlAAAAP8AAAD/q7fS///80O//tgAQ/9MAAPSzAACUagAAIBcAAAAAAAAAAAAABwcHACcnJgA9PT0AOjo6ADIyMgEzMzMBMzMzATMzMwEzMzMAMzMzEDMzMwYzMzMAMzMzRjMzM1wzMzMSMzMzADMzM0IzMzP/MzMz/zMzM5MAAAB5AAAA/wAAAP+fp6r///5ZR96WAAB0VQAA
DgoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8PDwAvLy8ANjY2ADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMBMzMzATMzMwAzMzMAMzMzWzMzM/8zMzP/MzMzegAAAE0AAADmAAAA/1BDKeFvUA4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAAzMzMANjY2ADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzOrMzMz/zMzM+ozMzNRAAAAEgAAAKkAAAD/AAAA/wAAAPUAAACnAAAAVgAAABEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgICADExMQA2NjYAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzDzMzM1UzMzOlMzMz9TMzM/8zMzP/MzMzrjMzMxYAAAAAAAAAMAAAAOoAAAD/AAAA/wAAAP8AAAD/AAAA/QAAAMgAAACQAAAAXgAAADEAAAAKAAAAAAAAAAACAgIAMTExADY2NgAzMzMAMzMzCTMzMzEzMzNdMzMzjzMzM8czMzP8MzMz/zMzM/8zMzP/MzMz/zMzM+wzMzMzMzMzAAAAAAAAAAAAAAAAAAAAAD0AAACaAAAA5wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAPYAAADPAAAArAICAoQxMTGDNjY2qzMzM88zMzP1MzMz/zMzM/8zMzP/MzMz/zMzM/8zMzP/MzMz6TMzM5wzMzM/MzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMgAAAHQAAACvAAAA5QAAAP8AAAD/AAAA/wAAAP8AAAD/AgIC/zExMf82Njb/MzMz/zMzM/8zMzP/MzMz/zMzM+UzMzOvMzMzdjMzMzQzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIgAAAFEAAAB7AAAAowAAAMYCAgLqMTEx6zY2NsczMzOkMzMzfDMzM1EzMzMjMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzAP/+f////D////gf///wD///4Af//8AD//+AAf//AAD//gAAf/wAAD/8AAA/+AAAH/AAAY/wAA/P4AA/x+AA/+fAA//jwA//88Af//OAP5FxgP+FcYH/jHkB/5T4A/+N+Af///iP///5j///8YP//8HAP/wD8AAAD/8AAP//+B//">
6 6 <meta http-equiv="Content-Type" content="text/html;charset=utf-8" />
7 7 <meta name="robots" content="index, nofollow"/>
8 8 <meta http-equiv="Content-Type" content="text/html;charset=utf-8" />
9 9 <style>
10 10 * {
11 11 box-sizing: border-box;
12 12 }
13 13 body {
14 14 background:#eeeeee;
15 15 color: #323232;
16 16 font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen-Sans, Ubuntu, Cantarell, "Helvetica Neue", sans-serif;
17 17 margin: 0 auto;
18 18 max-width: 1000px;
19 19 letter-spacing: .02em;
20 20 font-size: 13px;
21 21 line-height: 1.41em;
22 22 }
23 23 h1 {
24 24 padding: 20px 0;
25 25 font-size: 1.54em;
26 26 }
27 27 ul {
28 28 padding-left: 10px;
29 29 }
30 30 li {
31 31 list-style-type: disc;
32 32 }
33 33 .error_message {
34 34 font-weight: normal;
35 35 }
36 36 .logo-container {
37 37 float: left;
38 38 width: 150px;
39 39 text-align: center;
40 40 }
41 41 a {
42 42 color: #427cc9;
43 43 text-decoration: none;
44 44 outline: none;
45 45 cursor: pointer;
46 46 }
47 47 body {
48 48 padding: 10px;
49 49 padding-top: 10%;
50 50
51 51 }
52 52 .inner-column {
53 53 padding: 10px 30px;
54 54 width: 33%;
55 55 float: left;
56 56 border-right: 1px solid #dbd9da;
57 57
58 58 }
59 59 .inner-column:last-child {
60 60 border: none;
61 61 }
62 62 .side {
63 63 min-height: 220px;
64 64 width: 150px;
65 65 float: left;
66 66 text-align: center;
67 67 border-right: 1px solid #ddd;
68 68 }
69 69 .logo {
70 70 width: 120px;
71 71 height: 150px;
72 72 }
73 73 .main {
74 74 padding-left: 170px;
75 75 }
76 76 @media (max-width: 979px) {
77 77 .inner-column {
78 78 width: 100%;
79 79 }
80 80 }
81 81 </style>
82 82
83 83 </head>
84 84 <body>
85 85 <div class="side">
86 86 <img class="logo" src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAScAAAFxCAYAAAAxjW6rAAAACXBIWXMAABcSAAAXEgFnn9JSAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAM9BJREFUeNrsnQtwHVeZ548etmX5dQMJSZx1LANhA+us5YlhSLyLpcCw2AtYXrY2Do9YKqaAhA22h0fBEtZyDdQACcieLFMkM5TkzBRJpgBLvBwYEstbE2cZEixT4RHiwtd5OInz8JUfsmRb1vb/6hy5dX373u6+fU6f7v7/qrquJV/dc7v7nH9/33e+8x0hCCGEEEIIIYQQQgghhBBCCCGEEEIIIYQQQgghhBBCCCGEkGxRx0tAAtLqHDn577YK7xtyjoL8d14ehFCcSE20yAPis0AKkvpdrQzK1z1SvIYoXoTiREppk1YQxGeZFJ/WGL+PsrbcwuW2wAjFiaRQhJTlsypCK8gkg9K6OuSytoZ4aylOhCJkK0qo9lO0KE4kflpdLlhWRKhW0VI/E4oTiQAVD4JFtFj+u5WXpWb3cMjlHg7yklCcSGVaXOJDayg+K0uJF4PwFCcKkZieO0TsIC9FioJFcaIQkcQI1h66hBSnJNImj2XifOCapNslHHRZWHleEoqTLVaREqI2wWA1mXT93GJF64riZAQ1c7ZKnM+uJqQagy53cFAwdkVxitBFU2JESJSuIMWK4kQxIhQrilPy3LQOihGhWFGc4qalRIwYMyJJEasBkbEAe9rFKedy0zoEp/VJ8lGzgUqs8hSn5FlHa+mqkQy5gANps6rSIk5tUoxoHZGsW1X9YjJW1S8SHqtKsjh1uASJsSNCLkRZVP1JdP+SJE65EkEihARz/3YkVahsFaRO59jpHBM87D7a2tomcrkcr4X9xz7n2GR7CMRWy4kum2W0tLRMOxYvXjzt57KP6qEhUSgUiq/Dw8NicHBw6mdinUXVZ1uMyiZxQkLkRgpSjDegtVU4lo9wLKDiz6tWrSr+jN9HTT6fnzoOHTo0JWQQMBIb/a4YVexCFbc4tUi3bYPgLJtRC0gJEQSokvUTB0q03BaX+h0xgpr1g0UV29MiLnHqFAxsG7OClAumLKJE+yCOYEGk9u/fT2vL0LPCObbH4faZFKcW6bZ10m2L3hKC8CxbtmzKKsrcCJKW1Z49e6YEjLGtyOkzaU2ZECfltrXx3oZHxX5wKBFKgyVk0tKie5gsa0qXOMEy2iQYSwptDbmFCK82xYSSjpoxxKHcQ1pZ4S6lmIxNbRUacqd0iFO3dN/ougUUIgSnVayImAeWFURKzR4ylhUIJVKRqXyU4oQRtZPuG4UojYJFC8s3XdLds0qcIEycfXOBmJAKVNM1S5dgqcC7Siwl01gehQUVlTh1SHHKtFWkhAivWZwxyyoIsEOkVNCd1lVxNq/dFnE6KDIW+Ib4QISUe0ariChUwB3WFcQqo7GrJaLGIHkU4gQTYV+WxAivjBWRMK5ghsQKwfHuuMWpV0zmMqXSTYMYdXR0UIxI5GI1MDCQZjcwL62nWMXpqEhJ2gBESIkR3TRi0g3s7++fsqxSlCRaU2C8VnFKdCAcAuQWJEJsQM0CKssqwfSJydSCWMQpcS4dYkcbNmygdUQSZ1XhNWFpC/iyF8UlTomYpYMQrV27lrEjknjcQpUQ9y+0a1eLOFk7S6dKhVCQSNrdvx07dtguVNucY7Npcep2ji02WkidnZ3suYRCZQf4MqFm7WoRp33SeqLLRoiFQtXX12dLjCpUQmZYcYIKHI3rTFVQGxYSBYkQb2BJYdYv5mB6qMXAYcXJeAqBmvbfuHEjZ9kICYia9YNFFUN6AsqprDMlTj1ispicdmAdKbeNEFI7iEkpt89QfCpUSkFYcdIab4JlBAuJbhshZtw+CJVmAsedwoiT1njTpk2bRE9PD3sNIQZBEL29vV1nXCpw3
Kk+RCPaLKalS5eK6z/ZzZ5CiGEwyaTZKFgV9A/CiFObrm//8Y9/3HFMx8QfCmfYWwgxjOa4bmCjJow4LdP17VevXi1ac/Wi74/D7CmEGEbX1vMmxUnLt4dLt2DBgsl/N4+LX700xt5CiGE074UY6MODihOC4S26xEnxnssaxF1PsGg8IaZB+SCNBNKOoOKkzea78sorp/387kuE+LvfH2NvIcQgmjfmSKY4zZ8/f9rPsJ5+/sxJcfzMOfYYQgyBHEONuYWBzLKg4rRY17d2u3WK/76wXnx1P4PjhKTEekqm5VSO/3Rxg/jFcyMMjhNCcYr2w4OwcuXKC343t1GI297QKL742FH2GEIMoXnJmG/ls0acvEDsafzcOIPjhBhC84ydb+ULIk7ahKlcvMlN1+JG8a3fHRPPjYyz5xCScK8xUeKkki8rWU+XNdU57t2rvLWEaEZzIqYWy0kbpWkEXtYTAuP/eOAEew8hyWWBDnHSJqfXXHNN1ffAesK6O7p3hOhHY7VZ325dY5IuGKynjftPF927vndcwh5EUsPjjz8+7ec3velNYt68eVM/P//88+Lw4cOef1/6/ijEKe5dXIKI0wJdX+L666/3J7mO5YRDuXcfeeNc9mqSeFDT+zOf+cy03919993i2muvnfr5Rz/6kbjnnns8P6P0/TYbZTrculYbzuwL/35G8ZXuHUkDx48fF9/85jervg+WU1o8xkS5ddVm69xg1u6/LawXPzh8ju4dSTywhiq5awr3ez72sY9d8P8LFy6M9Hthxi6GXVrsE6dqeU6lfHTJTPHzI2N070ii+eMf/yjuu+++4r/hkpXGnbwsp3LilEbqk/ilsazl1tc3FP/91f0FlvUlieQb3/iGb8FRlhMsJFhbOH784x8X3cK0unZBLKc2m87uv17eKL733Fnxp5NCfPHxV8X333kpeztJDLCYlKUEq6lSMNttNUGk3IFxzNB9+tOfFu9973uTJk556y2noC6dm1tfP6mtsJxgQRGSBGDtuAWmu7vbl9Xk9Vn4+0ouId26kAQJhpfy1tc0FoPjALGnhw6fYs8n1rN169YpdwyBZ4hPqbggHuW2rB577LGp47vf/e4FS0xU7CoqNC/+9UWQTTUndHwBlErBrqNhGT5zTqz/5ZgYGa8T82bUi++/61JxRXMDRwAJxYoVKwL/DQRDVxuVPvuDH/xgUcSUgCHXKSowU4dNNjWBDx6s9qbGpHemBY4gwb2786nxYknfTz36MuNPlgJLAHsT6gLxF2RKI2iMwYqnf5RZ07aR5nNLhTiB9y2cIX7y/Bnx+xP1U/Gnzy/LUQ0yBlwlCCAOZFQX+8b73lecCbv88sut+Z5eFo5buBHkhtAqUUdQXIktzhMzdW5XMOo8J4pThHzpLU3iLx+fdO8Qf3rrJbPEOxfO5ojNOBApHBAoW/KD/CwzgTCp9yGeVCkhEoKVxtyn+rScyBWz68WHFp0/HZT2Zf4TUWB2DJZJEvOCKs3EQZhgidlkGdJyKsOHF89y3LuT4vBYQzH+hPwnLG9BoJwQDHIssI0ycBwlbuvH7abdeeedRctJBb+VKGHGLs1xtca0ndD/fsss8dHHTosZDSr+NCy+suIijkwyJVCwomx0g7y+U7UkzbSSOpPizfMbxYcXnc+Q6D90ktUzyQUuntcqfyUE6kj7jBgtJ8PcetVs8ZtjI8XZO4DZu4XNDQyQxwwGeqkF4CezGS6OV0wFrk6YOBISGTEjVkqpy4c4VRqzr9MmTkPCkppOfvjim+HejYmxiUmBQoD8inc0iqtzM3jXYwIzUKWD309CItaNVXLDYAVhRg6zWn6FClPx5cSJTBJ3Fcygbl2iFq8tciylDy+qF+cmJhPbJxM0Xym+knQBqwri9cMf/nAqN6gaEDF3gJlM59ChQ4kSp8Rx85ImsWLB+VU3z42cFZ3/9yX2vBS7jUGsoYSWG0kDg5kXJ9C9tEnUT5yd+hkzeNzePL0gpuXXeiJ2E
yTmlMiaJFh79zdLZ4q/+s3ZYnoBwAzeFXMaxK1vns8ekFILKk6Uy6h2TFFr/tQriV6c9jtHR9Rf4JFHHtF+km+/eKZYc+lp8S8vnzcUsUHCwuZG0bG4mb2ARAJm9fwsNUHypG3r/UoZGhqK/TtkJnX69v8wV1zSeHra77BBAuqQk/SgFv+atLDQJjLPkXZQbVMAvBczi1iQHHUNpigpFLQ5SoM6xCnxpSZ7ls+ZFn8Ctz36CtfgpQg/2ywpYYrKxfrQhz4UaqcS1BB31xEn4cVJm533xBNPGDlZpBdg37uxs+cFCqkFmMGjQCUbxHdgvahSKdUorSRZC362dvKimhuYZbfOigzx4eFhY229+/JZ4t9ePSN+9tKEqK+rmxIo5EB9/12v4yJhC1Hr4bxE6cknnwycs2TT2jpYe1GKpeVu3R4d4pRPS2dH/OnX/zosXhmfNfU7lQPFKgZ2ilOUS0hsC0bD8kLGui07qGgUJm1unTZx2rt3r/ET7/3zeWL87PRgOFw7CBSzyNOLqoxpGza5dppdOt86EtRESM3+S8h/+va1zWLkzBkKVEa46aabxJYtW7S3U26Bsx/rMAMunVZx0iKpJnKdyrE0N0N8Ykn9tAC5EihmkacHtTOJ7oW+iBuh2sHu3buL7eHVr5Vm01Ka/fv303JSmAyIl/LRN8wR73jNuakFwgrsgUeBSo846SzWpkrlomKlOz1B1fb2K1C2LEROquWkRVJNpRJ48fXl80Wu7sINObHMhQKVfDDTh40sdYFqCJXED+LkJ+HTFutJY8wpH+TN9To/PAjPPPNMrDfkn67PiZHRkxSolIL8J11BZz/Ck6QyuxSnEp5++ulYbwgC5L1/Pv+CGTwKVPzA8nBvxw0XCgd+H2S/Nr/Z4zpI0oJfjW5dINWzIiAO4kgnKAUB8tveOEMMj1KgbEbFkFSBOaQH+AH5RDZmY9uE5usTqIJdmIC4FlmN23JS3Li42RGoxgtm8JRAMc3APpAe4Ndt2rNnDy9YPC6ddstJm/UUd1DcDWbwrsudLStQqGJAgbKP7u5uX+/jZgVVTBu95XkpTlHQc21OXDP3zAUpBoCJmvaB5Sh+4jpw7by2hSJaLafAXlcYcdImrXElY3px558tEPPECAUqIfh17Wg9eaMx5hRY9ayxnIANQXE3mMG7b+VFYuL0iYoCxXIrFCdaTVUJHOwLI07apNU2y0kJ1LfflhOjYyNl/58CZQ9+K1tSnIxbTSAf9A/C1nPSssGmbXEnBVIMkAPV9ctjorlpzgX/rwrW3XXda8VbL5nFXu5B2L3iECNyC4raLCCs5YS4E0rqKpCKoMqVlAqXn6zt0r8Jm3BZem28zlMXmtfUBTbL6kI21OscnTrOAIslly5dauXg2ouZul8eF5fPm+v5nq+seA03TagwiN2iEBYkYHoJwPvf//7AlSnd69/87EBcDSSKloIlNF4F88Kcpw6WLFmic6ffwFoTtqqaNondtWuXtYPrescq+utrmsXzx094vgebJvzjgRNUopiweUcTm4EoaRSmUP5ivcnG/GBj3MkNkjQhUEdOnPR8z1f3F5hNHhNJWsNmE5rjTaEyX8OKk7awPsQpzhIqfgWqe+nssstcFMwmj4cga+2ISz30Zs6H0otaimVrk1rbUgq8BOoLV8+oKFAqm/y5kXH2frp1VtPf36/VMDMtTtqk9qc//WkibigE6n2XTVQUKKQYfOAXLzLVgG6dtSC/SXMlglAfbqXlZHvcyc1fL8uJd7z2XEWBgmv3gYdedFy9EY4EunbWoTneFPrDG+NotBooPIecJ1tTCkr52xUXiU89dlQ89NKIuLjZO40AM3l/KJwWn3cELauiEcXOJ9XEB20ESSdwW1u6dmZRJV5sFNkdO3bo/PjQHlZdjQ3vdo42HWeEfJgvf/nLiRp8EKgHXxivmAcFkKiJhE3uj0fiBukDyG/SyEVxuHU1qWKaXDu3BfWeyxoq5kEBBMo/8IsjjEORt
Lt0oeNNUYiTtjODWxd3XXGdAqV2GGYcisTJwMCAVu2r5Y8barUKnWOTczTpOLNFixZFspzANKsXzhaz6s6Jnx8eEfNmzfR83+lzE+Lhw6fE4ZFx8U7nbwgxCWbourq6dDbxNef4Q1yWk1br6f7770/sjf/YVXOLmeRPF4bLlltxg4RNzOYxH4qYRHNuU7GJWv44CnHSZhcm1bVTIA/qq63zfAmUyofCZp6EmGD79u3WCpP1llPSrSclUNi0EwJ1ZrzyUhbkQ33q0VeKa/MI0Qlm6TQXl6vZaIlCnPJC41q7++67L/EdAQL17bctEK+eHC67aUIpqGpAN48k3KWr2WiJKtFGm2unEjKTDoLk3/vPF4vCyHExcqZ6CgHdPJJgl25IRLABb1TipFWGUXQrDaCiJgRq+OTxistdSt08lF9hdQMSmXI47pzG2k0gkpTzqMQpEqX0wuYCdGEE6qG/uFS8tmFUvDziL8epOJvHpE2SDKspEpcuSnHSaj2hvlPSA+NuFjU3iB+sukRcNuNM1WRNBZI2EYf6u98f4+gioUFuk+Z4U15EFIOOUpy0rh5MShkVv2BXFwgUssnzRwtVUw0U3/rdMdaIIuEtCEeYNJZHidRIqYv4ix10jhZdZ/3rX/+6mDWeNrBg+AfPjIp/N3+emNXor1AEFg1/8i3zxUfeOJcjjvhm+fLlulMIlttoOWl17UCaXDs3WI/3jT9bUMyFOjF22tffIECOfChaUcQvECXNwhSZS6dDnLS6dmnIefICuVDfue414vipE74D5WCywsGL3PGFVMVAIDzS8V+n4Qtqde3uvfdesXr16tR2oCewg/Cjr4hj443i0rlzRH2d/1uEOlHYN++K5gaORDINxJkuuugi3c0sERHO2uuodqZVntOS8+RFMdXgXa8Ti5rGfS15oRVFLLGaIk8n0mE5tUjrSRtpDYy7GT5zTnxp/7D43qERcfm8eWJuhdIrXlbU5/9jTlztiB0hmnfzBai90hflB+qw/zFP2abTtUPe05o1a1LdmZoa6opLXjArN/D0cPEx0jzDv9CgRtQ/Hzwp4BVCqEh26evr010nXInTaJQfqKuItdYrgYxx2zfejArUheq7/rXizOlR8dyxY77zoRTIi3r3rheKLh/JJgaEqU/UUI7XtDj16/iybsvpnnvuyUznggWFhM3FsyfEwaMFX5UN3KiSwFinxzV62QI1wjXXCQdaFv7rEqeC0JzzlOa0gnIgUA6BetelM4sCdfRU8GoFqHDwF44VxYA5raYIyesa6zrnnA85xyd0ffgxx8W58sorE7O3XRQgDtWxqLm49OVHzxwXY+NnxdyZM0VdgHQD1C3/1xdHxcPPnxKvnzdDXDGnkSM4pSAArrlGOMA0oBbTrE7zF9/nHK26Phwzdpi5yyJ7XxoTnXtfESPn6gIteymlY/Eccetb5jM3KoVAmBAM10ykuU2mLCeAKGyHTutp5cqVRQsqayxyLJ6b3zBHPORYQH8sjASezVP8YfhMcXsqWFSc1UsPSLq85ZZbxOjoqM5m4M5pSzzUveWs1sA4uOOOOzLbAeHePfSuS4szei+fHPG1kUI5ECRXs3qsvJkOkHSpufoA0BrQ0m05QbYvc46362oAZXyzaj0pbrisqRgwxz55L548JWY1NIqZjcFvLURq17OnxK9eHnPcvEbGoxJsNd100026rSa4crfobKDewLXSnjefZetJgXQDVNh884JG8azj7qKIXRgrCiAnCqkHKA/Mige0muIa1yaioLhKCIpfTetJv5u34fVzxTHHAtp7ZEQcGzstmhobxYyGcLcZ8SikHWAyEMtgZjXUceTTalJjOvKM8DjECbzoHJ06G4BArV+/nr1TunkrL5klfvLsiDgyMho6WO62pB7408li0JwiZTdf+9rXxIMPPqi7GRRWe0B3IyZ7mdZSKgAlSGFBkUmweBjpBkg7gAV1+by5oVMOFIhFIfWgY3EzL7CFVhMW+Bpw6bSlD
8RhORXHitCYVkDr6UKQtLm+ZU7R3fuX50dEYbR2KwpB84cPnxIDh0bEvJn1rHqQPatJa/pAXJYTOOocOVpP5kERu42PvVp8jcqKoiWVSaupXWjKCI/TcgKzxWQ5FVpPhnldU8NUsPyXL49GYkW5LSmmH2TCakJBuS+YOifTlhOspoO0nuIFMSjs+PLMybNFK+p1c+fULFIKZJl/8s3zmW2eTqsp8oJyNllO2pMyaT1VB0tfbmxpFmPnJopWFLZGR07UbEeggiwiLgeK3GE5DBYWz2pgTCpFVlNeipMx4ljt+aRzbNItTlmrWBAUBMtVZvnuF8ZEYexMMS9qZn1DqOzyUl4eZeDclNVkIK8JbBYRbvtko1un6BWa856yXLEgKEg52Piro2KXXFc3b9ZM8bo5c8WMhugWEDBwrsnPMlN5AFbTEtPnFledjP26rSdULFiwYIFYsWIFe7APKwp1opQVdez0WXHMeRJjW6rZEcWiVOAcGedM5oxIMczUa4rFaorTcjJiPUGcHn/88eIr8W9F3fm7Y+KepyarZUYdMFdg4wZYUR+5ah5rSYWkvb3dRAnegrSaCqbPL85eod16GhsbE01NTZy5C2hFqeUve18+LV4dO1sMmJ85d07MmVl7wFwB6+k3r54uWlIIosPtu7iJIuUXiNLWrVtNNPU153gwjnOM267Wbj2BLOxzZ8KKanCE6eI5zeKi2bO1tIf0g4+8ca5458LZvPhVWL58uRga0u5pxWY1xW05GbGewNNPPy3WrVvHHh3SikI5ll87Vs6Lo+Pi5OkzYnhsrKZqB17AgkI9Kczw4bH5+nmNjEuVAQFwQztfx2Y12SBOUOQWobHOODhw4ABLqtSAyi7HGj2I1MjZc0VXDxsszG6cIRrqoxUQBM+xCQMqIRw8flZcnZsp5s+o540Qk6kDq1evNpE6kHeOWJ/oNjyWIE4HdTfC1IJoeMaxbr40VJhKOwBw9V7juHr1dfq6E1w+bMaQ9VQExJm6u7tNNGU0G9xGy0lZT+jVbTobYWpBNMB6QtqBCphjrd7ImTOicCra1INyLp87FWHhnMbMWVNIHTAUnsgLw9ngtlpOwMiaO6YWRAsC5n//1Alxx++OTf0OcahL58wRc2fN1N4+AudrHUsqKwF0Q6kDVlhNtlhOAA609ooFSC04cuSIWLNmDZUlAhAwv96xoFAzCqVY4PJhjd4x5zrDmoJQRR00d4N4lAqgY8v1JfNnpNaawmJ2rKEzAKYAb7HhnG2aCoHVhE04W0zcaOY+RY+72oFCx1KYSqjY1DsXNhUTPdMAguBIHYBbZ8JAE4bqNSVJnECnmMx90gqD43pRuVFw+6Zc6qYmcXFzszGRgjDB3UMAPenlWzZv3iy2bdtmoqlBKU5WYFtK7pAUKK2xJwTHkelM60kPcPU6rmx2xGlC/NZx94ou9dmz4uipU8XHIXKk6ur0PhcRNFe7GSfZ7UOipaH1cwDR9hdsOXcbM9zanGO37kYQFN+9ezczxw24egiY43XqiegI00XNs7WnH5QDC47h9t3gWFVJWNNnKBMc9AkLZuhstpxAXgpUi85GEBx/4oknWJROtws9p7EYML/SeX3CsWSQeoCtPlX6Af5twpJSoM4UEjyRkvCk833GHM/zijkNVmaiw5XbsWOHiaaQzrNaaN6HLg3iBPYIA8taWJTOHCjH8rGr5hZNdbh6qMIZp0gBzPYhd+ofnjxunVAh+G2oiByIdZlKktw6RY8JgWLuk3lKFxTb4O65QSAdbl+cM34Gc5rgqSyxsZ/YLE5GEjMB1irde++9VA3DIC8K8agH8ietFCmAmT4lVqZiVEh1MbhQHQ31U5yC0ykMpBYAiBNEipgHCZxf2l+YFjRXIoUUBJRoMZWCUAkE04tCdflsbTXRDe6kAmCatdvaL5JQjwIzd21079JPuZm9qftjOE+qGiiOB6vqBsf1i3L5DCwmWE6GMLKteJrFCeVU9ploiO6dPSIFS+oJm
SNVKlILmmZFXja4ViBQk2IV3v0z7M6hjGa3zf0gCXVRkRSGuNPbdTeEuk/XXHONuOqqq6gQMYL0A9SPcqcfKJDMiVpSmOVrqKuPZBurKMDMn0pRQNInEkDBJU3+Zv8wO2eoTpMQ56sOjNrcD5JSZtBYcJzunX084Ax2uHvuNXsKLCyGu4c1fHEHz71AfOqtF88qWlZvc45yM4AGZ+eK3qOwNAieRHECHc6xk+4dRaqcSKkZvgWzmqyJS/kVq+9862+L6+cM0S9irnCZRnECRoLj4K677mL2eAJFqmj9WhqXKsdo/rfi0Jb/IcZPHjPRHKYAlwuLg+BJFqcWMRkcN+Lece1dskUKGedIQ7DZ5fvTZ95TFChDwDzblpT7m7SNwqD8mGd+j+6GuPbOftSSGLweGT1XTOp0c/bcOXHi9Oni8pjxcxNiZkNj5Jsx1MILvd3i+K9+Zqq5QWFJEbm0ihP4f2Iy/nSZ7oaw9o6lVeznqnkziouLUdccAlUqUli3d0qWbMEsHxwGWFVxMvLbR8Xz9/wvk01aVQ4ljW6dwljuE4B7x8XByQF5Uvc7Ll/psphpT+UYs88RXzpw63Wm4kzA+pymNImTkBd7i4mGEHeCQDG9IFl4rd0rBYFzFUQ38r2+/pfi+L8Zc+dQDGp5Eu9fkjenHzTl3qFy5lNPPcVdgxMGtrHCbsWIS2EzBlWqpZQzMjYFt+/0+Dkx07GkGuv1WFOv/uQ7xcMgq5PmzqVBnMAvneMTJhpC9jj3vUsmapeY266eVzbrXAHZQgZ6YXS0uOU63IrG+obIguiYlYPVZBC4c/cn9b6lYSN6Y+4dYPwpHVRaZOzl9tWSkoD40p8++1/EmSPP0p3LkDgBBMdbTTTE+FO6UHGpB587NW23GC8gUHNnzgocnzIcZxJSmIaSfG8aUtLHjLl3jD+lCxWXuvkNc8RV82d4unyK0+Pj0+JTeLpXW3z80j/3iKM//ye6cxkVJwT8hoWB5EyA+BPzn9IF4lIqqRP5Us4NntrWqhwqPoXdjd1C1dhQP60OOvKZDn/rr0yeyqCwbBeVrLt1CmNr7wB3Dk6/y4c0BORMeS2RqeT6NZ8dFX/65PUm85kStXYua+LUIgytvSu6BFx/lxl2HT7lCNVI8dUv576yXohnnzT5NWEx9aXlmtelsB8ZK60CMHMHC4oB8uxYU7ueOyXuOXCiojU1ce8WMfHoD01+tcSUQvFLQwr7zx+kBWVk9u7IkSPFY82aNRy5GQAB9GtfO3NabOrZk+PTkjshShM/udvk14IbZ92mmBSn8mBTzvWm3DtUL2CCZvZAOWH3TB848Pvfion/8z9Nf5V18qGcKupS3HeMLg4u2tUMkGfb5XvmmWK53eHhYZPNJnJRrx/qU9xXkIC22WSDGzZsKFpRJHtAkG6++WbTwjSYVmFKuzgBVP3rN9lBb7vtNtMdlFjA7bffbvrBhLSBrjRf0/oM9BvcwLypxtBBYUGR7HDHHXeI+++/P9X9muKk7wljdIr1kUceKVpQJP1AlL7+9a+n2iOIi4aM9CGjy1uUBcUZvHSDe3zjjTeabnZIpCyfKeviBFB7HDN4V5tq8OGHH+YOwikWpo6OjuJGGIa9ACxPGc3CNa7LWJ9C3hPSC1pMNQjrCSkGrAGVHjDhAWGKYWa2XUzO0GWC+oz1KxV/KpjuyMiBIRSmGtiaJWHKojgpn32z6Q4dQw4M0UAMKQMAwe/urF3rugz3s17n6DTZIBcJJxvMwMaQMjAk3blC1q53Q4b72oAwtHuLgouEkwtECflMMYUh8lm85g0Z73MPiMnyvk2mGoRLgPgTBSpZwhRT3tpNImNxJorTeTAl+zNhqP64W6CYA5UMkFAbU8Y/AuDfzvK1b2D3KyZoHpIunjGQA3XllVcyxcBi8BBZv3696VwmgAD4LVm//hSnSRB0bBGGCtQpdu3aRYGyWJiQMhDDDKvKAB/N+j2oYzechtENEqYa5UadVgFBQl2mG
HLTEABvFwnfby4q6nkJprEujo4RU1If8RCmGJNmY+l/FKdkoGrkFOIYEBQoO4QppvuAfjfIu0C3rhpt0sUzCtfhxQtcuZiEqU+kvHAcLafoGIyjs3CZS3wgjykmYYqlr1Gckk2fiGGDQsQ6YpolyrQwxbAsBWSmNhPdOj1gg84O041yHV7qhSlVW4dTnOIBNaAQf2o13TAFKtXCxJQBilNkAnVQGNqkkwKVamECXXGEDJIGY07BnnTGy1bEmKmcWu6++24KEy2n1NEmYkgxoAUVHTFWGBCCKQO0nDQyGFfnogVFYaLlRPywyTl6aEFRmHyCwPdy3gVaTibYFlfcgBZUIoWpnXeBlpNpjNchpwWVKGHCBMoSkcH63xQnO8A+eK1xNEyBsl6YmMtEty5WYuuAdPEoTLScSDViyyKnBWWdMKkH1iDvBC0nG4ilDhQtKCuFiXWZKE7WEevmhxQoUdxXzgJh6uNQoFtnK63SxcvF0XhWXbyY18pRmGg50YLyY0Fde+21mSr5a4Ew9VGYKE5JEqjNcTWepZrklggTl6XQrUscnWIyUTMW0l6TnMJEy4kktPMqCwpbaqcJnBeFKf1wx18zLp7x7c4V2EobgzgtOwsrwcV27hQmihNJuECBNGx9bkksjcJEcaJA6RAoxKFWrFhBYQrHoOBuKRQnCpQe4A5h+6k1a9Yk5qKpBNMDBw7Efe9WO8couzHFiQKlcbAnRaCUMB05ciTuexZb7hrFiWROoHDccMMNoqmpyWphinlJDoUpJpjnFC+dIsY8KGDrchcLFvBSmChOFCgKFIWJXAiTMOOnT8Q8NQ33qb293YrlLrfffjuFidByogU1nbiXu1iQ9W3Fw4JQnChQlgiUWo6CPCwKE1Fwts4uYp/Fw3KXHTt2GMsmV8mVe/fupTARihMFqjomlrtYklxJYaI4kSQKlK7lLpYkV1KYKE4kpEANOMd654gtS1LHchcEvW+88caiC0lhIhSnZPKCc/wsboFSy11WrlxZczb53XffLT772c/acG1RqfQL7GL2wtm6ZBDrpgmKWpM1LUkVEIKbEVCcSPoEatGiReLee+8NFCjHjNyGDRtsqchJYUoIzBBPDohBLRExb3EN9w6BbL85Ser9FghTgcKULBhzShaoJfSAc7zHOS6L60sgkL1z586qM3mIVa1evbooUBYIE5ajPMguRIhectLFm4j7WL9+/cRLL710wXHXXXdN2PD9nOOodIkJIQbptUEAVq5cOXHgwIEpYfrc5z5nizDtozARknGBWrp06UR/f3/RkrJImHLsHoTEyyZLBIHCRAi5gE6KUvHoZVcgxD6wFu8ohYkQYiOtGRWoTt56QuynRcZdKEyEEOvIZUCgmMNESILpTakwHaQwEZJ8egRTBQghltIp0jMjR2EiJGW0iWTP5PXwFhKSXlplvIYzcoQQ60jSTN5RafERQjIkULbP5LGqACEZpltwRo4QYimdwq5AOdfIEUKmsGVN3ibeCkJIKXEGyiGMHbwFhJBKAmU6UM7ANyHEN6aqa8a+Fx8hJHnoLl7HjG9CSGhahZ44VCcvLSGkVuB27RSswUQIsZRuwcRKQoildIpwcSgmVhJCtBO0skEnLxkhxBRwz3YLltIlhFiKVwlg5i8RQmIHbps7DsX8JUKINag4FNfHEUIIIYQQQgghhBBCCCGEEEIIIYQQQgghhBBCCCGEEEIIIYQQQgghhBBCCMkadWk9seuuu65FXFi7Ov/oo4/2Zf2mO9emzXlp47VJxL1CH26p9B7nvnUb/k45ObZWod84x34dfacxxfcVN3RLye8GnYMDcFKYeG2SwYYyD5JSjImTI0woKogSzP3OsUONM+f3G53XdkekChQnkjULAoMiVzJQMRCG8OoMiiFeJSMWE4Rps3Msc46N8h60y4cdyjN3JUacpAuxO8Sf5uUB9qATOh2wn10kcy7NWuGj9K/zXmX9DcACjPIJTqbA/eiHC+dcb4zprfL+d
ErBOur8fnNU195my6nF5Wu3yQ5YkK7HVna+VIsS7nevqBJr8XBXcfQ4n4F+spn9JFJy0lBwu5ytajw613xQ/jwYRWP1Cbw4m5zjoHMhWEg/ncLUIy3tlho/qlP2E+6bF73RoNjv8nLU/0X2MKhP6AWCSO2UZj9JjzD1yodPlP1kHx9kkQGLaIOMPQHE+RAU75DWrogy9henOLX7OLqkG+elxr18MqZKmHQ9bHplagmpAUd4BqUg7XSO7fLfanzid5ujbK8x5hP1A4Jvm2UMotwTsEcKGUmuMHVoFCZlQfWyn0QCDIYt8nrmxfldnbuinrBKRCqBDGquczrxzjIC1YanovOePPtNIoUpJ/zvEIwH2oB8YuPvYDUvE/428kQ/aQvwUCTeYxHGwmbptRR0jb2k5Tlt9uiI+N02dp1EAovJj8vVVSYLuV8KnEoMzFX5jA0iopkkEm18qRz1CbsYedUhS1jMrpJYNvp4z9ZKyyPkIFnnx3ri5U4OScwQ31/GegoVFJdP3I2y07Z4uBChE/pK1iC1lXmyD8ljR63uhithsbSdvDyXHVG5NPK8OsT5PJecx7Xrr2Tyy+tfzWrK+1k7hnNzPm+oSl9o8XFuLfLc1nqcm8pK3yPPb6iGfrG2jGCqe9UXsevsPqcWD5d5j+zveYpTvHGO3iqxijZ5bJFZr30B2+iWwlfJ1WiVR6fz/rx0XQYDttMmKicstsiB0CmT5LpqvHaY6t9S5bzUtUMy5DbhnTTrx5IJEmQdqPaggiCWExQpSj2ievwqV9I3cE03+xUpGfzvrXD9ip/tvG+DT2uwWj/f5KMflvb3fnlOsYpUfQaFCZ13n/AXRFWdsVeKja8O4Rz7fAzgciKy2287LmspSMJimzz3ZWE6upzu7wl4XpvkeeVCuuMDAdoq+Lyf5a5jkD5xwTWVou3ngbXT5/VrE/7iaJX6+e4Q/VDI67Av7jSdJFpO5QbWUACh2SnCZR8Xn5KVLBvXwshabiraWeC0s9mnxRSUXMhBuLOGmE2r/Pv2oO54QEuy38dAzJcRpt4I+iWsxGXO9+2q8CDZEuK61SJMuRrOJycfKu1xLapOlDjJwV9ugOwJcbMLsjPvcXXYnIwPdXrcWHTiJRU+v9ejQ+Hzi0lrMjaizkMtmrzA2nDes8crb8TllnpZD30lFoeKrbWEvO49Htcd54XFnxDtvBwUrXIQlrYFV6VbZ+0h6YYEsTw7IhImRae8b30eLqPJkEUugo9TKzGWx7FGMWmWUzmXIh8i+atfxnfKXfB+52Zsl0/6UqFpqRCz6PSwSC5oS/67X7a1w8PU75WWWsHDVWrxsCDL1dSB9bFNuh49ATt7myi/pASfua7kvIoBfhmzKBfT24gYlA2LcasIfC2o++a20LZEJBZ+3ego3bEW+Zndpu9RImJOrnhHOSsjaMo8ZiPWVRogsmN1VfDHy7pjHmLRVaWt4iD3eGp5xTE2elgx7VXa2iaCB8R7Pc7L8xrK33eVulA1uJSmHnReluigPApBr5lHRVYvhmQ7QyHHSUsA11GdU97HezfGcYMaYxQcP/GLFulmdXh0pG0Braa8V0ygzABTFkDpYFrlYTWVs2R8leyQrl5fmU68sfSJJdsqdy26fLaF5UCr/AwY6faEOi9ZQmN7GUttg4i54qZrKr/qg6z0XH1an+5VC37FeJ27L8vx0RPQCvJzTurBkne1VW1dI4yDDtP11OK0nHb7OHorxH82Vwsae1gXQdjjYdGUsrbckylgMHerV6eoJo5SdIO0dcjn+8qd11CAtsp15jYRP34EA/fvAsGX1qcfcd1Y4RpecO9LB768xu0iWAmSDT5FMF/SVpePdlbRrfOn/EtkJzHRVimtPjv7jiANyQ4z6KNTtNXaVo2DeCDgORXKWC5xV5LwJRgh/6/0PvkR420V3GO/+VMtovqER6Wk2GoPHOP3LInihItkTamUCt8jjAm8x0enaAnRscLGL3IRtDXk0/o0STXBK
FSyDuUAryYarT77aH9EEwR+2tof8v+EqL34X2CSmiGOzrU7TOa2BnIenTtMh8OA2OI1kLzidJpW2nt1xt2yXneSqSaOQz7vVauPflqLYEQtTmtlvDGM+GRHnJwBVXXPvJI1XG1lOlhPmWnbOCy5MJ2bxGPp+hlkfu7fcAQiKESEZW0jEjBrsNqtg/UBy8g5EBjs87j5PTF/zShdlLDWFqndIgwqPH7wE0TmgyyJ4lQiVJhRKBfH6UhRCVbuFEJI3G5dSLyKzbWJdOxWmwSRjaLUrW9rAa693/idzAGrNp2+nQ8bipMO6ynvUbMnzkENt2pLjN+nxfA9iNKN3COqB43bhP+ZzxYfn+enDElUOT37RfWcqsj2efNBXgTP9aM4BRSDUnFakBILKExwXZc4FTysk9YIV6n7GShrA4hT1X4gM9dxbrkar+kqn3212nISk1Vcd+hceB01ScxzGvY5qGO1JELulbaqygDOewmGhvMaCiCgtTxoqtEZIKbo95pXFfxKbVaojlF63/yIeJvBa7k2SQM9c8XmNDFUa0eo0OEH3G6th0XTpum8BjW6PJWy4kvZ6eP6tfmweAZLr2klUazwf342/lTJldWsw9Yq60xzNfTBcm11BuyXLXENKopTROZyuae4R/XHSh2+3Pv7fQhG0FXjfl2JgRotGT9s9zmo9nm1Kwf3zgDn48dN3FJuIMu2NgZoy4/49pTrK3JBri9L1VWGpxq91ap24jrjPbKi6+64BlUjdSUS+sSFNXtUvaCqAVjZMct1+HKbKwyUcV+KncnPekM5uDprOK/iYBIB61t7rWrH72Qd7mrWHwbpQfletcxngfw7v65mv7LYfLaJgbzRJTR+98hzL/oe8HG91bn1ybDFYlF+0w0/Qu/n+/XI8+ovCZMsk222+rl3tJwSgBSQ7R7W0yYfwuRVUnVrmbb6PFyFnmomewALw8959foUpZyspLmzQn30zcL/tHubFMwtIlhhtdJdRfxWtGh1tec3prXVLb7CX+Bf1e/aIvzv5Vd6vwZFsJnNTa5zU+dX7npuiGNcUZyiY5uH3w/R2F0aV5CDFp1wn0eH2FphWc7WCk/63tJgPNqWYhKmrrTXeXXK8+rwEkIpSgddMZot5VwzGXzfrPHeFEo/X7a5VUc/KDNJstlgP/RT/iQosSQ6J9Gty3s83WK3npwb2OUhABCmNrlgdlA+tVqquAXdFdpCwbi1Hk/yTikc1QZrLuLzGpKf21rls8u6hPKchIi+dC6+U9kKobjGTpuLA7i51RgqV2NMuq79wkAVUHm/2kXtGxxcIFDC8K7a9SkRp1zIqfuoO0axhneVJ1e1WMKgz3hOlwi/LmuzCJD45/O8WkX5jUNLhWJ7JdEVwQusVesrFXcPkcui+iJoq19Uzp4Pc78KIkRGvs/7FfR75E2Pp8SJkzSZvWIuOQu+X7EYngie9VuQrly7z3K7quP2Be3sUgT2hDyvsIFRtLmkWoa5/P8l0uUq1DCY8PfL/SSMSoFaF3IAFl1GH3Xp1f3qD3ivBsNcB9f9qkV48/I6LokjIG7CrSuI6NPz8eQvN7vV4no6lWt3yMR3Vx1Rxpk2CO8a6Oo7DchYRSFEO11yB5eNFSwXdLIdJW3gvFYFuTbyb9dV2ca99NzQbn+QsjaynW65W7DaRtuPVTYozm+DHvRaqt1wOn221x+0Ldf1a5PXr8PjPPrF9JhjX5jQhat/bHXdr2qfM6juW1z71SnqBDFCuTKqOorESeHIlcRBChrPK1emwxd0dGyPtnQV2/MqfZuPsn5Y6f3SdS5V+kjk50UIIYQQQgghhBBCCCGEEEIIIYQQQgghhBBCCCGEEEIIIYSQ4Px/AQYAeGpRDl1A8RIAAAAASUVORK5CYII=">
87 87 </div>
88 88 <div class="main">
89 89 <h1>
90 90 502 Bad Gateway
91 91 <br/>
92 92 <span class="error_message">Backend server is unreachable</span>
93 93 </h1>
94 94 <div class="inner-column">
95 95 <h4>Possible Causes</h4>
96 96 <ul>
97 97 <li>The server is being restarted.</li>
98 98 <li>The server is overloaded.</li>
99 99 <li>The link may be incorrect.</li>
100 <li><a onclick="window.location.reload()">Reload page</a></li>
100 101 </ul>
101 102 </div>
102 103 <div class="inner-column">
103 104 <h4>Support</h4>
104 105 <p>For support, go to <a href="https://rhodecode.com/help/" target="_blank">Support</a>.
105 106 It may be useful to include your log file; see the log file locations <a href="https://rhodecode.com/r1/enterprise/docs/admin-system-overview/">here</a>.
106 107 </p>
107 108 </div>
108 109 <div class="inner-column">
109 110 <h4>Documentation</h4>
110 111 <p>For more information, see <a href="https://rhodecode.com/r1/enterprise/docs/">docs.rhodecode.com</a>.</p>
111 112 </div>
112 113 </div>
113 114 </body>
114 115 </html>
@@ -1,981 +1,981 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import textwrap
24 24
25 25 import rhodecode
26 26 from rhodecode.lib.utils2 import safe_unicode
27 27 from rhodecode.lib.vcs.backends import get_backend
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 MergeResponse, MergeFailureReason, Reference)
30 30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 31 from rhodecode.lib.vcs.nodes import FileNode
32 32 from rhodecode.model.comment import CommentsModel
33 33 from rhodecode.model.db import PullRequest, Session
34 34 from rhodecode.model.pull_request import PullRequestModel
35 35 from rhodecode.model.user import UserModel
36 36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37 37
38 38
39 39 pytestmark = [
40 40 pytest.mark.backends("git", "hg"),
41 41 ]
42 42
43 43
44 44 @pytest.mark.usefixtures('config_stub')
45 45 class TestPullRequestModel(object):
46 46
47 47 @pytest.fixture()
48 48 def pull_request(self, request, backend, pr_util):
49 49 """
50 50 A pull request combined with multiples patches.
51 51 """
52 52 BackendClass = get_backend(backend.alias)
53 53 merge_resp = MergeResponse(
54 54 False, False, None, MergeFailureReason.UNKNOWN,
55 55 metadata={'exception': 'MockError'})
56 56 self.merge_patcher = mock.patch.object(
57 57 BackendClass, 'merge', return_value=merge_resp)
58 58 self.workspace_remove_patcher = mock.patch.object(
59 59 BackendClass, 'cleanup_merge_workspace')
60 60
61 61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
62 62 self.merge_mock = self.merge_patcher.start()
63 63 self.comment_patcher = mock.patch(
64 64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
65 65 self.comment_patcher.start()
66 66 self.notification_patcher = mock.patch(
67 67 'rhodecode.model.notification.NotificationModel.create')
68 68 self.notification_patcher.start()
69 69 self.helper_patcher = mock.patch(
70 70 'rhodecode.lib.helpers.route_path')
71 71 self.helper_patcher.start()
72 72
73 73 self.hook_patcher = mock.patch.object(PullRequestModel,
74 74 'trigger_pull_request_hook')
75 75 self.hook_mock = self.hook_patcher.start()
76 76
77 77 self.invalidation_patcher = mock.patch(
78 78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
79 79 self.invalidation_mock = self.invalidation_patcher.start()
80 80
81 81 self.pull_request = pr_util.create_pull_request(
82 82 mergeable=True, name_suffix=u'Δ…Δ‡')
83 83 self.source_commit = self.pull_request.source_ref_parts.commit_id
84 84 self.target_commit = self.pull_request.target_ref_parts.commit_id
85 85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
86 86 self.repo_id = self.pull_request.target_repo.repo_id
87 87
88 88 @request.addfinalizer
89 89 def cleanup_pull_request():
90 90 calls = [mock.call(
91 91 self.pull_request, self.pull_request.author, 'create')]
92 92 self.hook_mock.assert_has_calls(calls)
93 93
94 94 self.workspace_remove_patcher.stop()
95 95 self.merge_patcher.stop()
96 96 self.comment_patcher.stop()
97 97 self.notification_patcher.stop()
98 98 self.helper_patcher.stop()
99 99 self.hook_patcher.stop()
100 100 self.invalidation_patcher.stop()
101 101
102 102 return self.pull_request
103 103
104 104 def test_get_all(self, pull_request):
105 105 prs = PullRequestModel().get_all(pull_request.target_repo)
106 106 assert isinstance(prs, list)
107 107 assert len(prs) == 1
108 108
109 109 def test_count_all(self, pull_request):
110 110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
111 111 assert pr_count == 1
112 112
113 113 def test_get_awaiting_review(self, pull_request):
114 114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
115 115 assert isinstance(prs, list)
116 116 assert len(prs) == 1
117 117
118 118 def test_count_awaiting_review(self, pull_request):
119 119 pr_count = PullRequestModel().count_awaiting_review(
120 120 pull_request.target_repo)
121 121 assert pr_count == 1
122 122
123 123 def test_get_awaiting_my_review(self, pull_request):
124 124 PullRequestModel().update_reviewers(
125 pull_request, [(pull_request.author, ['author'], False, [])],
125 pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
126 126 pull_request.author)
127 127 Session().commit()
128 128
129 129 prs = PullRequestModel().get_awaiting_my_review(
130 130 pull_request.target_repo, user_id=pull_request.author.user_id)
131 131 assert isinstance(prs, list)
132 132 assert len(prs) == 1
133 133
134 134 def test_count_awaiting_my_review(self, pull_request):
135 135 PullRequestModel().update_reviewers(
136 pull_request, [(pull_request.author, ['author'], False, [])],
136 pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
137 137 pull_request.author)
138 138 Session().commit()
139 139
140 140 pr_count = PullRequestModel().count_awaiting_my_review(
141 141 pull_request.target_repo, user_id=pull_request.author.user_id)
142 142 assert pr_count == 1
143 143
144 144 def test_delete_calls_cleanup_merge(self, pull_request):
145 145 repo_id = pull_request.target_repo.repo_id
146 146 PullRequestModel().delete(pull_request, pull_request.author)
147 147 Session().commit()
148 148
149 149 self.workspace_remove_mock.assert_called_once_with(
150 150 repo_id, self.workspace_id)
151 151
152 152 def test_close_calls_cleanup_and_hook(self, pull_request):
153 153 PullRequestModel().close_pull_request(
154 154 pull_request, pull_request.author)
155 155 Session().commit()
156 156
157 157 repo_id = pull_request.target_repo.repo_id
158 158
159 159 self.workspace_remove_mock.assert_called_once_with(
160 160 repo_id, self.workspace_id)
161 161 self.hook_mock.assert_called_with(
162 162 self.pull_request, self.pull_request.author, 'close')
163 163
164 164 def test_merge_status(self, pull_request):
165 165 self.merge_mock.return_value = MergeResponse(
166 166 True, False, None, MergeFailureReason.NONE)
167 167
168 168 assert pull_request._last_merge_source_rev is None
169 169 assert pull_request._last_merge_target_rev is None
170 170 assert pull_request.last_merge_status is None
171 171
172 172 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
173 173 assert status is True
174 174 assert msg == 'This pull request can be automatically merged.'
175 175 self.merge_mock.assert_called_with(
176 176 self.repo_id, self.workspace_id,
177 177 pull_request.target_ref_parts,
178 178 pull_request.source_repo.scm_instance(),
179 179 pull_request.source_ref_parts, dry_run=True,
180 180 use_rebase=False, close_branch=False)
181 181
182 182 assert pull_request._last_merge_source_rev == self.source_commit
183 183 assert pull_request._last_merge_target_rev == self.target_commit
184 184 assert pull_request.last_merge_status is MergeFailureReason.NONE
185 185
186 186 self.merge_mock.reset_mock()
187 187 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
188 188 assert status is True
189 189 assert msg == 'This pull request can be automatically merged.'
190 190 assert self.merge_mock.called is False
191 191
192 192 def test_merge_status_known_failure(self, pull_request):
193 193 self.merge_mock.return_value = MergeResponse(
194 194 False, False, None, MergeFailureReason.MERGE_FAILED,
195 195 metadata={'unresolved_files': 'file1'})
196 196
197 197 assert pull_request._last_merge_source_rev is None
198 198 assert pull_request._last_merge_target_rev is None
199 199 assert pull_request.last_merge_status is None
200 200
201 201 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
202 202 assert status is False
203 203 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
204 204 self.merge_mock.assert_called_with(
205 205 self.repo_id, self.workspace_id,
206 206 pull_request.target_ref_parts,
207 207 pull_request.source_repo.scm_instance(),
208 208 pull_request.source_ref_parts, dry_run=True,
209 209 use_rebase=False, close_branch=False)
210 210
211 211 assert pull_request._last_merge_source_rev == self.source_commit
212 212 assert pull_request._last_merge_target_rev == self.target_commit
213 213 assert pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED
214 214
215 215 self.merge_mock.reset_mock()
216 216 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
217 217 assert status is False
218 218 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
219 219 assert self.merge_mock.called is False
220 220
221 221 def test_merge_status_unknown_failure(self, pull_request):
222 222 self.merge_mock.return_value = MergeResponse(
223 223 False, False, None, MergeFailureReason.UNKNOWN,
224 224 metadata={'exception': 'MockError'})
225 225
226 226 assert pull_request._last_merge_source_rev is None
227 227 assert pull_request._last_merge_target_rev is None
228 228 assert pull_request.last_merge_status is None
229 229
230 230 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
231 231 assert status is False
232 232 assert msg == (
233 233 'This pull request cannot be merged because of an unhandled exception. '
234 234 'MockError')
235 235 self.merge_mock.assert_called_with(
236 236 self.repo_id, self.workspace_id,
237 237 pull_request.target_ref_parts,
238 238 pull_request.source_repo.scm_instance(),
239 239 pull_request.source_ref_parts, dry_run=True,
240 240 use_rebase=False, close_branch=False)
241 241
242 242 assert pull_request._last_merge_source_rev is None
243 243 assert pull_request._last_merge_target_rev is None
244 244 assert pull_request.last_merge_status is None
245 245
246 246 self.merge_mock.reset_mock()
247 247 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
248 248 assert status is False
249 249 assert msg == (
250 250 'This pull request cannot be merged because of an unhandled exception. '
251 251 'MockError')
252 252 assert self.merge_mock.called is True
253 253
254 254 def test_merge_status_when_target_is_locked(self, pull_request):
255 255 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
256 256 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
257 257 assert status is False
258 258 assert msg == (
259 259 'This pull request cannot be merged because the target repository '
260 260 'is locked by user:1.')
261 261
262 262 def test_merge_status_requirements_check_target(self, pull_request):
263 263
264 264 def has_largefiles(self, repo):
265 265 return repo == pull_request.source_repo
266 266
267 267 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
268 268 with patcher:
269 269 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
270 270
271 271 assert status is False
272 272 assert msg == 'Target repository large files support is disabled.'
273 273
274 274 def test_merge_status_requirements_check_source(self, pull_request):
275 275
276 276 def has_largefiles(self, repo):
277 277 return repo == pull_request.target_repo
278 278
279 279 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
280 280 with patcher:
281 281 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
282 282
283 283 assert status is False
284 284 assert msg == 'Source repository large files support is disabled.'
285 285
286 286 def test_merge(self, pull_request, merge_extras):
287 287 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
288 288 merge_ref = Reference(
289 289 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
290 290 self.merge_mock.return_value = MergeResponse(
291 291 True, True, merge_ref, MergeFailureReason.NONE)
292 292
293 293 merge_extras['repository'] = pull_request.target_repo.repo_name
294 294 PullRequestModel().merge_repo(
295 295 pull_request, pull_request.author, extras=merge_extras)
296 296 Session().commit()
297 297
298 298 message = (
299 299 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
300 300 u'\n\n {pr_title}'.format(
301 301 pr_id=pull_request.pull_request_id,
302 302 source_repo=safe_unicode(
303 303 pull_request.source_repo.scm_instance().name),
304 304 source_ref_name=pull_request.source_ref_parts.name,
305 305 pr_title=safe_unicode(pull_request.title)
306 306 )
307 307 )
308 308 self.merge_mock.assert_called_with(
309 309 self.repo_id, self.workspace_id,
310 310 pull_request.target_ref_parts,
311 311 pull_request.source_repo.scm_instance(),
312 312 pull_request.source_ref_parts,
313 313 user_name=user.short_contact, user_email=user.email, message=message,
314 314 use_rebase=False, close_branch=False
315 315 )
316 316 self.invalidation_mock.assert_called_once_with(
317 317 pull_request.target_repo.repo_name)
318 318
319 319 self.hook_mock.assert_called_with(
320 320 self.pull_request, self.pull_request.author, 'merge')
321 321
322 322 pull_request = PullRequest.get(pull_request.pull_request_id)
323 323 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
324 324
325 325 def test_merge_with_status_lock(self, pull_request, merge_extras):
326 326 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
327 327 merge_ref = Reference(
328 328 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
329 329 self.merge_mock.return_value = MergeResponse(
330 330 True, True, merge_ref, MergeFailureReason.NONE)
331 331
332 332 merge_extras['repository'] = pull_request.target_repo.repo_name
333 333
334 334 with pull_request.set_state(PullRequest.STATE_UPDATING):
335 335 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
336 336 PullRequestModel().merge_repo(
337 337 pull_request, pull_request.author, extras=merge_extras)
338 338 Session().commit()
339 339
340 340 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
341 341
342 342 message = (
343 343 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
344 344 u'\n\n {pr_title}'.format(
345 345 pr_id=pull_request.pull_request_id,
346 346 source_repo=safe_unicode(
347 347 pull_request.source_repo.scm_instance().name),
348 348 source_ref_name=pull_request.source_ref_parts.name,
349 349 pr_title=safe_unicode(pull_request.title)
350 350 )
351 351 )
352 352 self.merge_mock.assert_called_with(
353 353 self.repo_id, self.workspace_id,
354 354 pull_request.target_ref_parts,
355 355 pull_request.source_repo.scm_instance(),
356 356 pull_request.source_ref_parts,
357 357 user_name=user.short_contact, user_email=user.email, message=message,
358 358 use_rebase=False, close_branch=False
359 359 )
360 360 self.invalidation_mock.assert_called_once_with(
361 361 pull_request.target_repo.repo_name)
362 362
363 363 self.hook_mock.assert_called_with(
364 364 self.pull_request, self.pull_request.author, 'merge')
365 365
366 366 pull_request = PullRequest.get(pull_request.pull_request_id)
367 367 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
368 368
369 369 def test_merge_failed(self, pull_request, merge_extras):
370 370 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
371 371 merge_ref = Reference(
372 372 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
373 373 self.merge_mock.return_value = MergeResponse(
374 374 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
375 375
376 376 merge_extras['repository'] = pull_request.target_repo.repo_name
377 377 PullRequestModel().merge_repo(
378 378 pull_request, pull_request.author, extras=merge_extras)
379 379 Session().commit()
380 380
381 381 message = (
382 382 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
383 383 u'\n\n {pr_title}'.format(
384 384 pr_id=pull_request.pull_request_id,
385 385 source_repo=safe_unicode(
386 386 pull_request.source_repo.scm_instance().name),
387 387 source_ref_name=pull_request.source_ref_parts.name,
388 388 pr_title=safe_unicode(pull_request.title)
389 389 )
390 390 )
391 391 self.merge_mock.assert_called_with(
392 392 self.repo_id, self.workspace_id,
393 393 pull_request.target_ref_parts,
394 394 pull_request.source_repo.scm_instance(),
395 395 pull_request.source_ref_parts,
396 396 user_name=user.short_contact, user_email=user.email, message=message,
397 397 use_rebase=False, close_branch=False
398 398 )
399 399
400 400 pull_request = PullRequest.get(pull_request.pull_request_id)
401 401 assert self.invalidation_mock.called is False
402 402 assert pull_request.merge_rev is None
403 403
404 404 def test_get_commit_ids(self, pull_request):
405 405 # The PR has been not merged yet, so expect an exception
406 406 with pytest.raises(ValueError):
407 407 PullRequestModel()._get_commit_ids(pull_request)
408 408
409 409 # Merge revision is in the revisions list
410 410 pull_request.merge_rev = pull_request.revisions[0]
411 411 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
412 412 assert commit_ids == pull_request.revisions
413 413
414 414 # Merge revision is not in the revisions list
415 415 pull_request.merge_rev = 'f000' * 10
416 416 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
417 417 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
418 418
419 419 def test_get_diff_from_pr_version(self, pull_request):
420 420 source_repo = pull_request.source_repo
421 421 source_ref_id = pull_request.source_ref_parts.commit_id
422 422 target_ref_id = pull_request.target_ref_parts.commit_id
423 423 diff = PullRequestModel()._get_diff_from_pr_or_version(
424 424 source_repo, source_ref_id, target_ref_id,
425 425 hide_whitespace_changes=False, diff_context=6)
426 426 assert 'file_1' in diff.raw
427 427
428 428 def test_generate_title_returns_unicode(self):
429 429 title = PullRequestModel().generate_pullrequest_title(
430 430 source='source-dummy',
431 431 source_ref='source-ref-dummy',
432 432 target='target-dummy',
433 433 )
434 434 assert type(title) == unicode
435 435
436 436 @pytest.mark.parametrize('title, has_wip', [
437 437 ('hello', False),
438 438 ('hello wip', False),
439 439 ('hello wip: xxx', False),
440 440 ('[wip] hello', True),
441 441 ('[wip] hello', True),
442 442 ('wip: hello', True),
443 443 ('wip hello', True),
444 444
445 445 ])
446 446 def test_wip_title_marker(self, pull_request, title, has_wip):
447 447 pull_request.title = title
448 448 assert pull_request.work_in_progress == has_wip
449 449
450 450
451 451 @pytest.mark.usefixtures('config_stub')
452 452 class TestIntegrationMerge(object):
453 453 @pytest.mark.parametrize('extra_config', (
454 454 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
455 455 ))
456 456 def test_merge_triggers_push_hooks(
457 457 self, pr_util, user_admin, capture_rcextensions, merge_extras,
458 458 extra_config):
459 459
460 460 pull_request = pr_util.create_pull_request(
461 461 approved=True, mergeable=True)
462 462 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
463 463 merge_extras['repository'] = pull_request.target_repo.repo_name
464 464 Session().commit()
465 465
466 466 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
467 467 merge_state = PullRequestModel().merge_repo(
468 468 pull_request, user_admin, extras=merge_extras)
469 469 Session().commit()
470 470
471 471 assert merge_state.executed
472 472 assert '_pre_push_hook' in capture_rcextensions
473 473 assert '_push_hook' in capture_rcextensions
474 474
475 475 def test_merge_can_be_rejected_by_pre_push_hook(
476 476 self, pr_util, user_admin, capture_rcextensions, merge_extras):
477 477 pull_request = pr_util.create_pull_request(
478 478 approved=True, mergeable=True)
479 479 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
480 480 merge_extras['repository'] = pull_request.target_repo.repo_name
481 481 Session().commit()
482 482
483 483 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
484 484 pre_pull.side_effect = RepositoryError("Disallow push!")
485 485 merge_status = PullRequestModel().merge_repo(
486 486 pull_request, user_admin, extras=merge_extras)
487 487 Session().commit()
488 488
489 489 assert not merge_status.executed
490 490 assert 'pre_push' not in capture_rcextensions
491 491 assert 'post_push' not in capture_rcextensions
492 492
493 493 def test_merge_fails_if_target_is_locked(
494 494 self, pr_util, user_regular, merge_extras):
495 495 pull_request = pr_util.create_pull_request(
496 496 approved=True, mergeable=True)
497 497 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
498 498 pull_request.target_repo.locked = locked_by
499 499 # TODO: johbo: Check if this can work based on the database, currently
500 500 # all data is pre-computed, that's why just updating the DB is not
501 501 # enough.
502 502 merge_extras['locked_by'] = locked_by
503 503 merge_extras['repository'] = pull_request.target_repo.repo_name
504 504 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
505 505 Session().commit()
506 506 merge_status = PullRequestModel().merge_repo(
507 507 pull_request, user_regular, extras=merge_extras)
508 508 Session().commit()
509 509
510 510 assert not merge_status.executed
511 511
512 512
513 513 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
514 514 (False, 1, 0),
515 515 (True, 0, 1),
516 516 ])
517 517 def test_outdated_comments(
518 518 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
519 519 pull_request = pr_util.create_pull_request()
520 520 pr_util.create_inline_comment(file_path='not_in_updated_diff')
521 521
522 522 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
523 523 pr_util.add_one_commit()
524 524 assert_inline_comments(
525 525 pull_request, visible=inlines_count, outdated=outdated_count)
526 526 outdated_comment_mock.assert_called_with(pull_request)
527 527
528 528
529 529 @pytest.mark.parametrize('mr_type, expected_msg', [
530 530 (MergeFailureReason.NONE,
531 531 'This pull request can be automatically merged.'),
532 532 (MergeFailureReason.UNKNOWN,
533 533 'This pull request cannot be merged because of an unhandled exception. CRASH'),
534 534 (MergeFailureReason.MERGE_FAILED,
535 535 'This pull request cannot be merged because of merge conflicts. CONFLICT_FILE'),
536 536 (MergeFailureReason.PUSH_FAILED,
537 537 'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
538 538 (MergeFailureReason.TARGET_IS_NOT_HEAD,
539 539 'This pull request cannot be merged because the target `ref_name` is not a head.'),
540 540 (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
541 541 'This pull request cannot be merged because the source contains more branches than the target.'),
542 542 (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
543 543 'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
544 544 (MergeFailureReason.TARGET_IS_LOCKED,
545 545 'This pull request cannot be merged because the target repository is locked by user:123.'),
546 546 (MergeFailureReason.MISSING_TARGET_REF,
547 547 'This pull request cannot be merged because the target reference `ref_name` is missing.'),
548 548 (MergeFailureReason.MISSING_SOURCE_REF,
549 549 'This pull request cannot be merged because the source reference `ref_name` is missing.'),
550 550 (MergeFailureReason.SUBREPO_MERGE_FAILED,
551 551 'This pull request cannot be merged because of conflicts related to sub repositories.'),
552 552
553 553 ])
554 554 def test_merge_response_message(mr_type, expected_msg):
555 555 merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
556 556 metadata = {
557 557 'unresolved_files': 'CONFLICT_FILE',
558 558 'exception': "CRASH",
559 559 'target': 'some-repo',
560 560 'merge_commit': 'merge_commit',
561 561 'target_ref': merge_ref,
562 562 'source_ref': merge_ref,
563 563 'heads': ','.join(['a', 'b', 'c']),
564 564 'locked_by': 'user:123'
565 565 }
566 566
567 567 merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
568 568 assert merge_response.merge_status_message == expected_msg
569 569
570 570
571 571 @pytest.fixture()
572 572 def merge_extras(user_regular):
573 573 """
574 574 Context for the vcs operation when running a merge.
575 575 """
576 576 extras = {
577 577 'ip': '127.0.0.1',
578 578 'username': user_regular.username,
579 579 'user_id': user_regular.user_id,
580 580 'action': 'push',
581 581 'repository': 'fake_target_repo_name',
582 582 'scm': 'git',
583 583 'config': 'fake_config_ini_path',
584 584 'repo_store': '',
585 585 'make_lock': None,
586 586 'locked_by': [None, None, None],
587 587 'server_url': 'http://test.example.com:5000',
588 588 'hooks': ['push', 'pull'],
589 589 'is_shadow_repo': False,
590 590 }
591 591 return extras
592 592
593 593
594 594 @pytest.mark.usefixtures('config_stub')
595 595 class TestUpdateCommentHandling(object):
596 596
597 597 @pytest.fixture(autouse=True, scope='class')
598 598 def enable_outdated_comments(self, request, baseapp):
599 599 config_patch = mock.patch.dict(
600 600 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
601 601 config_patch.start()
602 602
603 603 @request.addfinalizer
604 604 def cleanup():
605 605 config_patch.stop()
606 606
607 607 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
608 608 commits = [
609 609 {'message': 'a'},
610 610 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
611 611 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
612 612 ]
613 613 pull_request = pr_util.create_pull_request(
614 614 commits=commits, target_head='a', source_head='b', revisions=['b'])
615 615 pr_util.create_inline_comment(file_path='file_b')
616 616 pr_util.add_one_commit(head='c')
617 617
618 618 assert_inline_comments(pull_request, visible=1, outdated=0)
619 619
620 620 def test_comment_stays_unflagged_on_change_above(self, pr_util):
621 621 original_content = ''.join(
622 622 ['line {}\n'.format(x) for x in range(1, 11)])
623 623 updated_content = 'new_line_at_top\n' + original_content
624 624 commits = [
625 625 {'message': 'a'},
626 626 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
627 627 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
628 628 ]
629 629 pull_request = pr_util.create_pull_request(
630 630 commits=commits, target_head='a', source_head='b', revisions=['b'])
631 631
632 632 with outdated_comments_patcher():
633 633 comment = pr_util.create_inline_comment(
634 634 line_no=u'n8', file_path='file_b')
635 635 pr_util.add_one_commit(head='c')
636 636
637 637 assert_inline_comments(pull_request, visible=1, outdated=0)
638 638 assert comment.line_no == u'n9'
639 639
640 640 def test_comment_stays_unflagged_on_change_below(self, pr_util):
641 641 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
642 642 updated_content = original_content + 'new_line_at_end\n'
643 643 commits = [
644 644 {'message': 'a'},
645 645 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
646 646 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
647 647 ]
648 648 pull_request = pr_util.create_pull_request(
649 649 commits=commits, target_head='a', source_head='b', revisions=['b'])
650 650 pr_util.create_inline_comment(file_path='file_b')
651 651 pr_util.add_one_commit(head='c')
652 652
653 653 assert_inline_comments(pull_request, visible=1, outdated=0)
654 654
655 655 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
656 656 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
657 657 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
658 658 change_lines = list(base_lines)
659 659 change_lines.insert(6, 'line 6a added\n')
660 660
661 661 # Changes on the last line of sight
662 662 update_lines = list(change_lines)
663 663 update_lines[0] = 'line 1 changed\n'
664 664 update_lines[-1] = 'line 12 changed\n'
665 665
666 666 def file_b(lines):
667 667 return FileNode('file_b', ''.join(lines))
668 668
669 669 commits = [
670 670 {'message': 'a', 'added': [file_b(base_lines)]},
671 671 {'message': 'b', 'changed': [file_b(change_lines)]},
672 672 {'message': 'c', 'changed': [file_b(update_lines)]},
673 673 ]
674 674
675 675 pull_request = pr_util.create_pull_request(
676 676 commits=commits, target_head='a', source_head='b', revisions=['b'])
677 677 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
678 678
679 679 with outdated_comments_patcher():
680 680 pr_util.add_one_commit(head='c')
681 681 assert_inline_comments(pull_request, visible=0, outdated=1)
682 682
683 683 @pytest.mark.parametrize("change, content", [
684 684 ('changed', 'changed\n'),
685 685 ('removed', ''),
686 686 ], ids=['changed', 'removed'])
687 687 def test_comment_flagged_on_change(self, pr_util, change, content):
688 688 commits = [
689 689 {'message': 'a'},
690 690 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
691 691 {'message': 'c', change: [FileNode('file_b', content)]},
692 692 ]
693 693 pull_request = pr_util.create_pull_request(
694 694 commits=commits, target_head='a', source_head='b', revisions=['b'])
695 695 pr_util.create_inline_comment(file_path='file_b')
696 696
697 697 with outdated_comments_patcher():
698 698 pr_util.add_one_commit(head='c')
699 699 assert_inline_comments(pull_request, visible=0, outdated=1)
700 700
701 701
@pytest.mark.usefixtures('config_stub')
class TestUpdateChangedFiles(object):
    """
    Tests for the per-file change summary (`assert_pr_file_changes`) that
    is recalculated against the original PR version after the source head
    of an open pull request moves forward.
    """

    def test_no_changes_on_unchanged_diff(self, pr_util):
        """Adding a new file reports only that file as added."""
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode('file_b', 'test_content b\n')]},
            {'message': 'c',
             'added': [FileNode('file_c', 'test_content c\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # modify PR adding new file file_c
        pr_util.add_one_commit(head='c')

        # file_b is untouched between the two versions, so only file_c shows
        assert_pr_file_changes(
            pull_request,
            added=['file_c'],
            modified=[],
            removed=[])

    def test_modify_and_undo_modification_diff(self, pr_util):
        """A modification that is later reverted must end up as 'no changes'."""
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode('file_b', 'test_content b\n')]},
            {'message': 'c',
             'changed': [FileNode('file_b', 'test_content b modified\n')]},
            {'message': 'd',
             'changed': [FileNode('file_b', 'test_content b\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # modify PR modifying file file_b
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_b'],
            removed=[])

        # move the head again to d, which rollbacks change,
        # meaning we should indicate no changes
        pr_util.add_one_commit(head='d')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=[])

    def test_updated_all_files_in_pr(self, pr_util):
        """Changing every file of the PR reports all of them as modified."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode('file_a', 'test_content a\n'),
                FileNode('file_b', 'test_content b\n'),
                FileNode('file_c', 'test_content c\n')]},
            {'message': 'c', 'changed': [
                FileNode('file_a', 'test_content a changed\n'),
                FileNode('file_b', 'test_content b changed\n'),
                FileNode('file_c', 'test_content c changed\n')]},
        ]
        # open a PR from a to b, changing 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_a', 'file_b', 'file_c'],
            removed=[])

    def test_updated_and_removed_all_files_in_pr(self, pr_util):
        """Removing every file of the PR reports all of them as removed."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode('file_a', 'test_content a\n'),
                FileNode('file_b', 'test_content b\n'),
                FileNode('file_c', 'test_content c\n')]},
            {'message': 'c', 'removed': [
                FileNode('file_a', 'test_content a changed\n'),
                FileNode('file_b', 'test_content b changed\n'),
                FileNode('file_c', 'test_content c changed\n')]},
        ]
        # open a PR from a to b, removing 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=['file_a', 'file_b', 'file_c'])
810 810
811 811
def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
    """After a real source change, updating commits must snapshot one version."""
    pr_model = PullRequestModel()
    pull_request = pr_util.create_pull_request()

    # push a new commit to the source repo so the update is not a no-op
    pr_util.update_source_repository()
    pr_model.update_commits(pull_request, pull_request.author)

    versions = pr_model.get_versions(pull_request)
    assert len(versions) == 1
821 821
822 822
def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
    """Updating commits without any source change must not create a version."""
    pull_request = pr_util.create_pull_request()
    pr_model = PullRequestModel()

    pr_model.update_commits(pull_request, pull_request.author)

    # nothing changed, so no snapshot version may exist
    assert len(pr_model.get_versions(pull_request)) == 0
830 830
831 831
def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
    """Existing comments get linked to the snapshot version on update."""
    pr_model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    comment = pr_util.create_comment()

    pr_util.update_source_repository()
    pr_model.update_commits(pull_request, pull_request.author)

    # the pre-existing comment now belongs to the single snapshot version
    first_version = pr_model.get_versions(pull_request)[0]
    assert comment.pull_request_version == first_version
842 842
843 843
def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
    """An update must leave an auto status-change comment summarizing it."""
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()
    pr_util.update_source_repository()

    update_response = model.update_commits(pull_request, pull_request.author)

    commit_id = update_response.common_ancestor_id
    # Expect to find a new comment about the change
    # NOTE(review): the indentation inside this literal must match the
    # rendered comment text exactly -- do not reflow it.
    expected_message = textwrap.dedent(
        """\
        Pull request updated. Auto status change to |under_review|

        .. role:: added
        .. role:: removed
        .. parsed-literal::

          Changed commits:
            * :added:`1 added`
            * :removed:`0 removed`

          Changed files:
            * `A file_2 <#a_c-{}-92ed3b5f07b4>`_

        .. |under_review| replace:: *"Under Review"*"""
    ).format(commit_id[:12])
    # the update comment is the most recently modified one
    pull_request_comments = sorted(
        pull_request.comments, key=lambda c: c.modified_at)
    update_comment = pull_request_comments[-1]
    assert update_comment.text == expected_message
875 875
876 876
def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
    """A snapshot version must copy all relevant PR attributes verbatim."""
    pull_request = pr_util.create_pull_request()

    # Avoiding default values, so the copy is distinguishable from defaults
    pull_request.status = PullRequest.STATUS_CLOSED
    pull_request._last_merge_source_rev = "0" * 40
    pull_request._last_merge_target_rev = "1" * 40
    pull_request.last_merge_status = 1
    pull_request.merge_rev = "2" * 40

    # Remember automatic values
    created_on = pull_request.created_on
    updated_on = pull_request.updated_on

    # Create a new version of the pull request
    version = PullRequestModel()._create_version_from_snapshot(pull_request)

    # Check attributes
    assert version.title == pr_util.create_parameters['title']
    assert version.description == pr_util.create_parameters['description']
    assert version.status == PullRequest.STATUS_CLOSED

    # versions get updated created_on
    assert version.created_on != created_on

    assert version.updated_on == updated_on
    assert version.user_id == pull_request.user_id
    assert version.revisions == pr_util.create_parameters['revisions']
    assert version.source_repo == pr_util.source_repository
    assert version.source_ref == pr_util.create_parameters['source_ref']
    assert version.target_repo == pr_util.target_repository
    assert version.target_ref == pr_util.create_parameters['target_ref']
    assert version._last_merge_source_rev == pull_request._last_merge_source_rev
    assert version._last_merge_target_rev == pull_request._last_merge_target_rev
    assert version.last_merge_status == pull_request.last_merge_status
    assert version.merge_rev == pull_request.merge_rev
    assert version.pull_request == pull_request
914 914
915 915
def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
    """_link_comments_to_version must not re-link already versioned comments."""
    version1 = pr_util.create_version_of_pull_request()
    comment_linked = pr_util.create_comment(linked_to=version1)
    comment_unlinked = pr_util.create_comment()
    version2 = pr_util.create_version_of_pull_request()

    PullRequestModel()._link_comments_to_version(version2)
    Session().commit()

    # only the comment that had no version gets attached to version2
    assert comment_unlinked.pull_request_version_id == \
        version2.pull_request_version_id
    # the previously linked comment keeps its original version
    assert comment_linked.pull_request_version_id == \
        version1.pull_request_version_id
    assert comment_unlinked.pull_request_version_id != \
        comment_linked.pull_request_version_id
935 935
936 936
def test_calculate_commits():
    """_calculate_commit_id_changes classifies ids as added/common/removed."""
    before = [1, 2, 3]
    after = [1, 3, 4, 5]

    change = PullRequestModel()._calculate_commit_id_changes(before, after)

    assert change.added == [4, 5]
    assert change.common == [1, 3]
    assert change.removed == [2]
    assert change.total == [1, 3, 4, 5]
945 945
946 946
def assert_inline_comments(pull_request, visible=None, outdated=None):
    """
    Check the number of visible and/or outdated inline comments on a PR.

    Each count is only verified when the corresponding argument is not None.
    """
    comments_model = CommentsModel()
    repo_id = pull_request.target_repo.repo_id

    if visible is not None:
        grouped = comments_model.get_inline_comments(
            repo_id, pull_request=pull_request)
        flat = comments_model.get_inline_comments_as_list(grouped)
        assert len(flat) == visible

    if outdated is not None:
        outdated_comments = comments_model.get_outdated_comments(
            repo_id, pull_request)
        assert len(outdated_comments) == outdated
958 958
959 959
def assert_pr_file_changes(
        pull_request, added=None, modified=None, removed=None):
    """
    Compare expected added/modified/removed files against what the PR
    update diff reports.
    """
    model = PullRequestModel()
    pr_versions = model.get_versions(pull_request)
    # always use first version, ie original PR to calculate changes
    initial_version = pr_versions[0]
    old_diff_data, new_diff_data = model._generate_update_diffs(
        pull_request, initial_version)
    file_changes = model._calculate_file_changes(old_diff_data, new_diff_data)

    checks = [
        ('added', added, file_changes.added),
        ('modified', modified, file_changes.modified),
        ('removed', removed, file_changes.removed),
    ]
    for label, expected, actual in checks:
        assert expected == actual, \
            'expected %s:%s vs value:%s' % (label, expected, actual)
976 976
977 977
def outdated_comments_patcher(use_outdated=True):
    """Patch CommentsModel.use_outdated_comments to a fixed return value."""
    patcher = mock.patch.object(
        CommentsModel, 'use_outdated_comments', return_value=use_outdated)
    return patcher
@@ -1,1833 +1,1840 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess32
30 30 import time
31 31 import uuid
32 32 import dateutil.tz
33 33
34 34 import mock
35 35 import pyramid.testing
36 36 import pytest
37 37 import colander
38 38 import requests
39 39 import pyramid.paster
40 40
41 41 import rhodecode
42 42 from rhodecode.lib.utils2 import AttributeDict
43 43 from rhodecode.model.changeset_status import ChangesetStatusModel
44 44 from rhodecode.model.comment import CommentsModel
45 45 from rhodecode.model.db import (
46 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
47 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
46 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
47 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 48 from rhodecode.model.meta import Session
49 49 from rhodecode.model.pull_request import PullRequestModel
50 50 from rhodecode.model.repo import RepoModel
51 51 from rhodecode.model.repo_group import RepoGroupModel
52 52 from rhodecode.model.user import UserModel
53 53 from rhodecode.model.settings import VcsSettingsModel
54 54 from rhodecode.model.user_group import UserGroupModel
55 55 from rhodecode.model.integration import IntegrationModel
56 56 from rhodecode.integrations import integration_type_registry
57 57 from rhodecode.integrations.types.base import IntegrationTypeBase
58 58 from rhodecode.lib.utils import repo2db_mapper
59 59 from rhodecode.lib.vcs.backends import get_backend
60 60 from rhodecode.lib.vcs.nodes import FileNode
61 61 from rhodecode.tests import (
62 62 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
63 63 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
64 64 TEST_USER_REGULAR_PASS)
65 65 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
66 66 from rhodecode.tests.fixture import Fixture
67 67 from rhodecode.config import utils as config_utils
68 68
69 69
70 70 def _split_comma(value):
71 71 return value.split(',')
72 72
73 73
def pytest_addoption(parser):
    """
    Register RhodeCode specific command line options with pytest:
    backend/database selection, appenlight statistics tracking,
    per-database connection strings and the performance-test repeat count.
    """
    parser.addoption(
        '--keep-tmp-path', action='store_true',
        help="Keep the test temporary directories")
    parser.addoption(
        '--backends', action='store', type=_split_comma,
        default=['git', 'hg', 'svn'],
        help="Select which backends to test for backend specific tests.")
    parser.addoption(
        '--dbs', action='store', type=_split_comma,
        default=['sqlite'],
        help="Select which database to test for database specific tests. "
             "Possible options are sqlite,postgres,mysql")
    parser.addoption(
        '--appenlight', '--ae', action='store_true',
        help="Track statistics in appenlight.")
    parser.addoption(
        '--appenlight-api-key', '--ae-key',
        help="API key for Appenlight.")
    parser.addoption(
        '--appenlight-url', '--ae-url',
        default="https://ae.rhodecode.com",
        help="Appenlight service URL, defaults to https://ae.rhodecode.com")
    parser.addoption(
        '--sqlite-connection-string', action='store',
        default='', help="Connection string for the dbs tests with SQLite")
    parser.addoption(
        '--postgres-connection-string', action='store',
        default='', help="Connection string for the dbs tests with Postgres")
    parser.addoption(
        '--mysql-connection-string', action='store',
        default='', help="Connection string for the dbs tests with MySQL")
    parser.addoption(
        '--repeat', type=int, default=100,
        help="Number of repetitions in performance tests.")
109 109
110 110
def pytest_configure(config):
    # NOTE(review): imported only for its import-time side effects -- the
    # name is otherwise unused; presumably the module applies monkey
    # patches needed before the test run starts (confirm in
    # rhodecode.config.patches).
    from rhodecode.config import patches
114 114
def pytest_collection_modifyitems(session, config, items):
    """
    Drop items whose object sets ``__test__ = False`` (nose-style opt-out,
    kept for the nose-to-pytest transition) and push the slow database and
    vcs_operations suites to the end of the collection for faster feedback.
    """
    items[:] = [item for item in items if getattr(item.obj, '__test__', True)]

    def _ordering(item):
        node_id = item._nodeid
        if node_id.startswith('rhodecode/tests/database'):
            return 1
        if node_id.startswith('rhodecode/tests/vcs_operations'):
            return 2
        return 0

    items.sort(key=_ordering)
134 134
135 135
def pytest_generate_tests(metafunc):
    """
    Parametrize tests that use the ``backend_alias`` fixture with the
    backends selected via ``--backends``; skip tests whose supported
    backends do not overlap with the selection.
    """

    # Support test generation based on --backend parameter
    if 'backend_alias' in metafunc.fixturenames:
        backends = get_backends_from_metafunc(metafunc)
        scope = None
        if not backends:
            pytest.skip("Not enabled for any of selected backends")

        metafunc.parametrize('backend_alias', backends, scope=scope)

    # also skip tests marked with pytest.mark.backends that match nothing
    backend_mark = metafunc.definition.get_closest_marker('backends')
    if backend_mark:
        backends = get_backends_from_metafunc(metafunc)
        if not backends:
            pytest.skip("Not enabled for any of selected backends")
152 152
153 153
def get_backends_from_metafunc(metafunc):
    """
    Return the set of backend aliases a test supports, limited to the
    backends selected through the ``--backends`` command line option.

    The backends a test supports come from, in priority order: the
    ``pytest.mark.backends`` marker, a legacy ``backend_alias`` class
    attribute, or (by default) everything that was requested.
    """
    selected = set(metafunc.config.getoption('--backends'))

    marker = metafunc.definition.get_closest_marker('backends')
    if marker:
        # created from pytest.mark.backends(...)
        supported = marker.args
    elif hasattr(metafunc.cls, 'backend_alias'):
        # legacy class attribute for tests not yet using the marker
        supported = [metafunc.cls.backend_alias]
    else:
        supported = metafunc.config.getoption('--backends')

    return selected.intersection(supported)
168 168
169 169
@pytest.fixture(scope='session', autouse=True)
def activate_example_rcextensions(request):
    """
    Patch in an example rcextensions module which verifies passed in kwargs.
    """
    from rhodecode.config import rcextensions

    old_extensions = rhodecode.EXTENSIONS
    rhodecode.EXTENSIONS = rcextensions
    # record every hook invocation here, keyed by hook name; see the
    # capture_rcextensions fixture which reads this mapping
    rhodecode.EXTENSIONS.calls = collections.defaultdict(list)

    @request.addfinalizer
    def cleanup():
        # restore the real extensions module after the test session
        rhodecode.EXTENSIONS = old_extensions
184 184
185 185
@pytest.fixture()
def capture_rcextensions():
    """
    Returns the recorded calls to entry points in rcextensions.

    Relies on the session-wide ``activate_example_rcextensions`` fixture
    having installed the ``calls`` mapping.
    """
    calls = rhodecode.EXTENSIONS.calls
    calls.clear()
    # Note: At this moment, it is still the empty dict, but that will
    # be filled during the test run and since it is a reference this
    # is enough to make it work.
    return calls
197 197
198 198
@pytest.fixture(scope='session')
def http_environ_session():
    """
    Allow to use "http_environ" in session scope.
    """
    return plain_http_environ()


def plain_http_host_stub():
    """
    Value of HTTP_HOST in the test run (host:port).
    """
    return 'example.com:80'


@pytest.fixture()
def http_host_stub():
    """
    Value of HTTP_HOST in the test run (host:port).
    """
    return plain_http_host_stub()


def plain_http_host_only_stub():
    """
    Host part (without the port) of HTTP_HOST in the test run.
    """
    return plain_http_host_stub().split(':')[0]


@pytest.fixture()
def http_host_only_stub():
    """
    Host part (without the port) of HTTP_HOST in the test run.
    """
    return plain_http_host_only_stub()


def plain_http_environ():
    """
    HTTP extra environ keys.

    Used by the test application and as well for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    return {
        'SERVER_NAME': plain_http_host_only_stub(),
        'SERVER_PORT': plain_http_host_stub().split(':')[1],
        'HTTP_HOST': plain_http_host_stub(),
        'HTTP_USER_AGENT': 'rc-test-agent',
        'REQUEST_METHOD': 'GET'
    }


@pytest.fixture()
def http_environ():
    """
    HTTP extra environ keys.

    Used by the test application and as well for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    return plain_http_environ()
264 264
265 265
@pytest.fixture(scope='session')
def baseapp(ini_config, vcsserver, http_environ_session):
    """Session wide pyramid application built from the test ini file."""
    from rhodecode.lib.pyramid_utils import get_app_config
    from rhodecode.config.middleware import make_pyramid_app

    print("Using the RhodeCode configuration:{}".format(ini_config))
    pyramid.paster.setup_logging(ini_config)

    settings = get_app_config(ini_config)
    app = make_pyramid_app({'__file__': ini_config}, **settings)

    return app


@pytest.fixture(scope='function')
def app(request, config_stub, baseapp, http_environ):
    """Per-test WSGI test application wrapping the session ``baseapp``."""
    app = CustomTestApp(
        baseapp,
        extra_environ=http_environ)
    if request.cls:
        # make the app available as ``self.app`` in class based tests
        request.cls.app = app
    return app


@pytest.fixture(scope='session')
def app_settings(baseapp, ini_config):
    """
    Settings dictionary used to create the app.

    Parses the ini file and passes the result through the sanitize and apply
    defaults mechanism in `rhodecode.config.middleware`.
    """
    return baseapp.config.get_settings()
299 299
300 300
@pytest.fixture(scope='session')
def db_connection(ini_settings):
    # Initialize the database connection.
    config_utils.initialize_database(ini_settings)


# csrf token plus the logged-in user, as returned by the autologin fixtures
LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))


def _autologin_user(app, *args):
    """Log the test app in (admin by default) and return the LoginData."""
    session = login_user_session(app, *args)
    csrf_token = rhodecode.lib.auth.get_csrf_token(session)
    return LoginData(csrf_token, session['rhodecode_user'])
314 314
315 315
@pytest.fixture()
def autologin_user(app):
    """
    Utility fixture which makes sure that the admin user is logged in
    """
    return _autologin_user(app)


@pytest.fixture()
def autologin_regular_user(app):
    """
    Utility fixture which makes sure that the regular user is logged in
    """
    return _autologin_user(
        app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)


@pytest.fixture(scope='function')
def csrf_token(request, autologin_user):
    # CSRF token of the admin session created by ``autologin_user``
    return autologin_user.csrf_token


@pytest.fixture(scope='function')
def xhr_header(request):
    # header that makes a test request look like an AJAX/XHR call
    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
341 341
342 342
@pytest.fixture()
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    During the test run the crypto backend is replaced with a faster
    implementation based on the MD5 algorithm.
    """
    monkeypatch.setattr(rhodecode, 'is_test', False)


@pytest.fixture(scope='class')
def index_location(request, baseapp):
    """Full text search index location taken from the app settings."""
    index_location = baseapp.config.get_settings()['search.location']
    if request.cls:
        # expose as ``self.index_location`` for class based tests
        request.cls.index_location = index_location
    return index_location
360 360
361 361
@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Create temporary directory to be used during the test session.

    The directory is removed at the end of the session unless the
    ``--keep-tmp-path`` command line option was given.
    """
    if not os.path.exists(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)

    keep = request.config.getoption('--keep-tmp-path')
    if not keep:
        request.addfinalizer(lambda: shutil.rmtree(TESTS_TMP_PATH))

    return TESTS_TMP_PATH
376 376
377 377
@pytest.fixture()
def test_repo_group(request):
    """
    Create a temporary repository group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    # unique name derived from the current timestamp
    group_id = 'test_repo_group_%s' % str(time.time()).replace('.', '')
    repo_group = fixture.create_repo_group(group_id)

    request.addfinalizer(lambda: fixture.destroy_repo_group(group_id))
    return repo_group
393 393
394 394
@pytest.fixture()
def test_user_group(request):
    """
    Create a temporary user group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    # unique name derived from the current timestamp
    group_id = 'test_user_group_%s' % str(time.time()).replace('.', '')
    user_group = fixture.create_user_group(group_id)

    request.addfinalizer(lambda: fixture.destroy_user_group(user_group))
    return user_group
410 410
411 411
@pytest.fixture(scope='session')
def test_repo(request):
    """Session wide :class:`TestRepoContainer` serving read-only test repos."""
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container


class TestRepoContainer(object):
    """
    Container for test repositories which are used read only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestContainer()
        repo = test_repo('minimal', 'svn')

    """

    # maps backend alias to the helper restoring a repository from a dump
    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        self._cleanup_repos = []  # repo names to destroy in _cleanup
        self._fixture = Fixture()
        self._repos = {}  # (dump_name, backend_alias) -> repo_id cache

    def __call__(self, dump_name, backend_alias, config=None):
        # create on first request, then serve the cached repository
        key = (dump_name, backend_alias)
        if key not in self._repos:
            repo = self._create_repo(dump_name, backend_alias, config)
            self._repos[key] = repo.repo_id
        return Repository.get(self._repos[key])

    def _create_repo(self, dump_name, backend_alias, config):
        # extract the dump to disk, then register the repo in the database
        repo_name = '%s-%s' % (backend_alias, dump_name)
        backend = get_backend(backend_alias)
        dump_extractor = self.dump_extractors[backend_alias]
        repo_path = dump_extractor(dump_name, repo_name)

        vcs_repo = backend(repo_path, config=config)
        repo2db_mapper({repo_name: vcs_repo})

        repo = RepoModel().get_by_repo_name(repo_name)
        self._cleanup_repos.append(repo_name)
        return repo

    def _cleanup(self):
        # destroy in reverse creation order
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)
467 467
468 468
def backend_base(request, backend_alias, baseapp, test_repo):
    """Build a :class:`Backend` for ``backend_alias``, or skip the test."""
    # skip tests for backends that were not selected via --backends
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    # honor pytest.mark.xfail_backends / skip_backends markers
    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    backend = Backend(
        alias=backend_alias,
        repo_name=repo_name,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
484 484
485 485
@pytest.fixture()
def backend(request, backend_alias, baseapp, test_repo):
    """
    Parametrized fixture which represents a single backend implementation.

    It respects the option `--backends` to focus the test run on specific
    backend implementations.

    It also supports `pytest.mark.xfail_backends` to mark tests as failing
    for specific backends. This is intended as a utility for incremental
    development of a new backend implementation.
    """
    return backend_base(request, backend_alias, baseapp, test_repo)


@pytest.fixture()
def backend_git(request, baseapp, test_repo):
    """Non-parametrized git backend."""
    return backend_base(request, 'git', baseapp, test_repo)


@pytest.fixture()
def backend_hg(request, baseapp, test_repo):
    """Non-parametrized hg backend."""
    return backend_base(request, 'hg', baseapp, test_repo)


@pytest.fixture()
def backend_svn(request, baseapp, test_repo):
    """Non-parametrized svn backend."""
    return backend_base(request, 'svn', baseapp, test_repo)
514 514
515 515
@pytest.fixture()
def backend_random(backend_git):
    """
    Use this to express that your tests need "a backend.

    A few of our tests need a backend, so that we can run the code. This
    fixture is intended to be used for such cases. It will pick one of the
    backends and run the tests.

    The fixture `backend` would run the test multiple times for each
    available backend which is a pure waste of time if the test is
    independent of the backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git


@pytest.fixture()
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git


@pytest.fixture()
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub
    """
    return backend_stub.create_repo()
550 550
551 551
552 552 class Backend(object):
553 553 """
554 554 Represents the test configuration for one supported backend
555 555
556 556 Provides easy access to different test repositories based on
557 557 `__getitem__`. Such repositories will only be created once per test
558 558 session.
559 559 """
560 560
561 561 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
562 562 _master_repo = None
563 563 _master_repo_path = ''
564 564 _commit_ids = {}
565 565
566 566 def __init__(self, alias, repo_name, test_name, test_repo_container):
567 567 self.alias = alias
568 568 self.repo_name = repo_name
569 569 self._cleanup_repos = []
570 570 self._test_name = test_name
571 571 self._test_repo_container = test_repo_container
572 572 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
573 573 # Fixture will survive in the end.
574 574 self._fixture = Fixture()
575 575
576 576 def __getitem__(self, key):
577 577 return self._test_repo_container(key, self.alias)
578 578
579 579 def create_test_repo(self, key, config=None):
580 580 return self._test_repo_container(key, self.alias, config)
581 581
582 582 @property
583 583 def repo(self):
584 584 """
585 585 Returns the "current" repository. This is the vcs_test repo or the
586 586 last repo which has been created with `create_repo`.
587 587 """
588 588 from rhodecode.model.db import Repository
589 589 return Repository.get_by_repo_name(self.repo_name)
590 590
591 591 @property
592 592 def default_branch_name(self):
593 593 VcsRepository = get_backend(self.alias)
594 594 return VcsRepository.DEFAULT_BRANCH_NAME
595 595
596 596 @property
597 597 def default_head_id(self):
598 598 """
599 599 Returns the default head id of the underlying backend.
600 600
601 601 This will be the default branch name in case the backend does have a
602 602 default branch. In the other cases it will point to a valid head
603 603 which can serve as the base to create a new commit on top of it.
604 604 """
605 605 vcsrepo = self.repo.scm_instance()
606 606 head_id = (
607 607 vcsrepo.DEFAULT_BRANCH_NAME or
608 608 vcsrepo.commit_ids[-1])
609 609 return head_id
610 610
611 611 @property
612 612 def commit_ids(self):
613 613 """
614 614 Returns the list of commits for the last created repository
615 615 """
616 616 return self._commit_ids
617 617
618 618 def create_master_repo(self, commits):
619 619 """
620 620 Create a repository and remember it as a template.
621 621
622 622 This allows to easily create derived repositories to construct
623 623 more complex scenarios for diff, compare and pull requests.
624 624
625 625 Returns a commit map which maps from commit message to raw_id.
626 626 """
627 627 self._master_repo = self.create_repo(commits=commits)
628 628 self._master_repo_path = self._master_repo.repo_full_path
629 629
630 630 return self._commit_ids
631 631
632 632 def create_repo(
633 633 self, commits=None, number_of_commits=0, heads=None,
634 634 name_suffix=u'', bare=False, **kwargs):
635 635 """
636 636 Create a repository and record it for later cleanup.
637 637
638 638 :param commits: Optional. A sequence of dict instances.
639 639 Will add a commit per entry to the new repository.
640 640 :param number_of_commits: Optional. If set to a number, this number of
641 641 commits will be added to the new repository.
642 642 :param heads: Optional. Can be set to a sequence of of commit
643 643 names which shall be pulled in from the master repository.
644 644 :param name_suffix: adds special suffix to generated repo name
645 645 :param bare: set a repo as bare (no checkout)
646 646 """
647 647 self.repo_name = self._next_repo_name() + name_suffix
648 648 repo = self._fixture.create_repo(
649 649 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
650 650 self._cleanup_repos.append(repo.repo_name)
651 651
652 652 commits = commits or [
653 653 {'message': 'Commit %s of %s' % (x, self.repo_name)}
654 654 for x in range(number_of_commits)]
655 655 vcs_repo = repo.scm_instance()
656 656 vcs_repo.count()
657 657 self._add_commits_to_repo(vcs_repo, commits)
658 658 if heads:
659 659 self.pull_heads(repo, heads)
660 660
661 661 return repo
662 662
663 663 def pull_heads(self, repo, heads):
664 664 """
665 665 Make sure that repo contains all commits mentioned in `heads`
666 666 """
667 667 vcsrepo = repo.scm_instance()
668 668 vcsrepo.config.clear_section('hooks')
669 669 commit_ids = [self._commit_ids[h] for h in heads]
670 670 vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)
671 671
672 672 def create_fork(self):
673 673 repo_to_fork = self.repo_name
674 674 self.repo_name = self._next_repo_name()
675 675 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
676 676 self._cleanup_repos.append(self.repo_name)
677 677 return repo
678 678
679 679 def new_repo_name(self, suffix=u''):
680 680 self.repo_name = self._next_repo_name() + suffix
681 681 self._cleanup_repos.append(self.repo_name)
682 682 return self.repo_name
683 683
684 684 def _next_repo_name(self):
685 685 return u"%s_%s" % (
686 686 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
687 687
688 688 def ensure_file(self, filename, content='Test content\n'):
689 689 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
690 690 commits = [
691 691 {'added': [
692 692 FileNode(filename, content=content),
693 693 ]},
694 694 ]
695 695 self._add_commits_to_repo(self.repo.scm_instance(), commits)
696 696
697 697 def enable_downloads(self):
698 698 repo = self.repo
699 699 repo.enable_downloads = True
700 700 Session().add(repo)
701 701 Session().commit()
702 702
703 703 def cleanup(self):
704 704 for repo_name in reversed(self._cleanup_repos):
705 705 self._fixture.destroy_repo(repo_name)
706 706
707 707 def _add_commits_to_repo(self, repo, commits):
708 708 commit_ids = _add_commits_to_repo(repo, commits)
709 709 if not commit_ids:
710 710 return
711 711 self._commit_ids = commit_ids
712 712
713 713 # Creating refs for Git to allow fetching them from remote repository
714 714 if self.alias == 'git':
715 715 refs = {}
716 716 for message in self._commit_ids:
717 717 # TODO: mikhail: do more special chars replacements
718 718 ref_name = 'refs/test-refs/{}'.format(
719 719 message.replace(' ', ''))
720 720 refs[ref_name] = self._commit_ids[message]
721 721 self._create_refs(repo, refs)
722 722
723 723 def _create_refs(self, repo, refs):
724 724 for ref_name in refs:
725 725 repo.set_refs(ref_name, refs[ref_name])
726 726
727 727
def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """Build a `VcsBackend` for `backend_alias`, honoring test markers."""
    selected_backends = request.config.getoption('--backends')
    if backend_alias not in selected_backends:
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    backend = VcsBackend(
        alias=backend_alias,
        repo_path=os.path.join(tests_tmp_path, repo_name),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
744 744
745 745
@pytest.fixture()
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level, so it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    return vcsbackend_base(
        request, backend_alias, tests_tmp_path, baseapp, test_repo)
758 758
759 759
@pytest.fixture()
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    """Git-only variant of the `vcsbackend` fixture."""
    return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
763 763
764 764
@pytest.fixture()
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    """Mercurial-only variant of the `vcsbackend` fixture."""
    return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
768 768
769 769
@pytest.fixture()
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    """Subversion-only variant of the `vcsbackend` fixture."""
    return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
773 773
774 774
@pytest.fixture()
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git
783 783
784 784
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    # Characters not allowed in repository names get collapsed to "_".
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        """Look up a pre-created test repository by name."""
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        The "current" repository: the vcs_test repo of the last repository
        which has been created.
        """
        backend_cls = get_backend(self.alias)
        return backend_cls(self._repo_path)

    @property
    def backend(self):
        """The backend implementation class."""
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
                    bare=False):
        """Create a new vcs repository, optionally cloned and pre-filled."""
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        backend_cls = get_backend(self.alias)
        src_url = _clone_repo.path if _clone_repo else None
        repo = backend_cls(
            self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        if not commits:
            commits = [
                {'message': 'Commit %s of %s' % (idx, repo_name)}
                for idx in xrange(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        """Create a clone of `repo` and register it for cleanup."""
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        """Remove every repository created through this backend."""
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        """Reserve and return a fresh repository path (nothing created)."""
        self._repo_path = get_new_dir(self._next_repo_name())
        return self._repo_path

    def _next_repo_name(self):
        """Derive a unique repository name from the running test's name."""
        sanitized = self.invalid_repo_name.sub('_', self._test_name)
        return "%s_%s" % (sanitized, len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        """Commit a single file into `repo`."""
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic <automatic@rhodecode.com>')

    def ensure_file(self, filename, content='Test content\n'):
        """Commit `filename` into the current repository."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
862 862
863 863
def _add_commits_to_repo(vcs_repo, commits):
    """
    Commit each entry of `commits` into `vcs_repo`.

    Each entry may carry 'message', 'added', 'changed', 'removed',
    'parents', 'author', 'date' and 'branch' keys. An entry without any
    file operation gets an auto-generated file, so the commit is never
    empty.

    Returns a mapping from commit message to raw commit id; empty when no
    commits were given.
    """
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit
    commit = None

    for idx, commit_spec in enumerate(commits):
        message = unicode(commit_spec.get('message', 'Commit %s' % idx))

        for node in commit_spec.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit_spec.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit_spec.get('removed', []):
            imc.remove(FileNode(node.path))

        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[parent])
            for parent in commit_spec.get('parents', [])]

        # Guarantee a non-empty commit when no file operation was given.
        if not any(commit_spec.get(op) for op in ('added', 'changed', 'removed')):
            imc.add(FileNode('file_%s' % idx, content=message))

        commit = imc.commit(
            message=message,
            author=unicode(commit_spec.get(
                'author', 'Automatic <automatic@rhodecode.com>')),
            date=commit_spec.get('date'),
            branch=commit_spec.get('branch'),
            parents=parents)

        commit_ids[commit.message] = commit.raw_id

    return commit_ids
900 900
901 901
@pytest.fixture()
def reposerver(request):
    """
    Allows to serve a backend repository
    """
    server = RepoServer()
    request.addfinalizer(server.cleanup)
    return server
911 911
912 912
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # Public URL of the served repository; set by `serve`.
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """Start an `svnserve` daemon for `vcsrepo` and remember it."""
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess32.Popen([
            'svnserve', '-d', '--foreground',
            '--listen-host', 'localhost',
            '--root', vcsrepo.path])
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        """Terminate every server process started via `serve`."""
        for proc in self._cleanup_servers:
            proc.terminate()
938 938
939 939
@pytest.fixture()
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """
    utility = PRTestUtility(backend)
    request.addfinalizer(utility.cleanup)
    return utility
955 955
956 956
class PRTestUtility(object):
    """
    Helper around a single pull request for model and functional tests.

    The pull request (together with its source/target repositories) is
    created lazily on the first :meth:`create_pull_request` call and torn
    down again in :meth:`cleanup`.
    """

    # Lazily initialized state, see `create_pull_request` / `set_mergeable`.
    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None, observers=None,
            title=u"Test", description=u"Description"):
        """
        Create (only once) and return the pull request under test.

        :param commits: commit definitions for the master repository; when
            omitted a default three-commit scenario (c1..c3) is used.
        :param target_head: commit message naming the target head.
        :param source_head: commit message naming the source head.
        :param revisions: commit messages included in the pull request.
        :param approved: when True, every reviewer votes "approved".
        :param author: creating user; defaults to the admin test user.
        :param mergeable: value for the mocked merge-enabled setting.
        :param enable_notifications: when False, notification creation is
            mocked out.
        :param name_suffix: suffix for the generated repository names.
        :param reviewers: reviewer tuples; defaults to `_get_reviewers`.
        :param observers: observer tuples; defaults to `_get_observers`.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'observers': observers or self._get_observers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # A freshly created pull request must not have versions yet.
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        """Vote "approved" on behalf of every reviewer."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        """Close the pull request as its author."""
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        """Build a `branch:<default branch>:<commit id>` reference string."""
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        # Default reviewers: the two regular test users, reviewer role,
        # not mandatory, no rules.
        role = PullRequestReviewers.ROLE_REVIEWER
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
        ]

    def _get_observers(self):
        # No default observers.
        return [

        ]

    def update_source_repository(self, head=None):
        """Pull `head` (defaults to 'c3') from the master into the source repo."""
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        """Add one commit to the source repository; return its commit id."""
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        """Strip the tip commit from the source repository; return its id."""
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """Create a general comment; optionally link it to a PR version."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        """Create an inline comment; optionally link it to a PR version."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        """Create the pull request and snapshot it as a new version."""
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        """Set `status` on the pull request for each given reviewer."""
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        """Mock the global "merge enabled" vcs setting to `value`."""
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        """Delete the pull request (if still present) and stop patchers."""
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1139 1146
1140 1147
@pytest.fixture()
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1148 1155
1149 1156
@pytest.fixture()
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1157 1164
1158 1165
@pytest.fixture()
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    util = UserUtility(test_name=request.node.name)
    request.addfinalizer(util.cleanup)
    return util
1167 1174
1168 1175
1169 1176 # TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):
    """
    Test helper that creates users, user groups, repositories, repo groups
    and permissions, and destroys all of them again in :meth:`cleanup`.
    """

    def __init__(self, test_name="test"):
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        # Ids of created entities, remembered for cleanup.
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        # (target id, subject id) pairs of granted permissions.
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        """Replace characters that are invalid in entity names."""
        for char in ['[', ']']:
            name = name.replace(char, '_')
        return name

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        """Create a repository group owned by `owner`."""
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
                    auto_cleanup=True, repo_type='hg', bare=False):
        """Create a repository, optionally inside the `parent` group."""
        repo_name = "{prefix}_repository_{count}".format(
            prefix=self._test_name,
            count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        """Create a user; extra kwargs are forwarded to the fixture."""
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_additional_user_email(self, user, email):
        """Attach an additional email address to `user`."""
        uem = self.fixture.create_additional_user_email(user=user, email=email)
        return uem

    def create_user_with_group(self):
        """Create a user plus a user group containing that user."""
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
                          auto_cleanup=True, **kwargs):
        """Create a user group, optionally adding `members` to it."""
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(
            group_name, cur_user=owner, **kwargs)

        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        """Record a global permission grant for later cleanup.

        NOTE(review): this only disables inherited defaults and records
        the pair — no explicit grant call is visible here; confirm the
        grant happens elsewhere.
        """
        self.inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        """Grant `user` a permission on `repo_group`; tracked for cleanup."""
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        """Grant `user_group` a permission on `repo_group`; tracked for cleanup."""
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        """Grant `user` a permission on `repo`; tracked for cleanup."""
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        """Grant `user_group` a permission on `repo`; tracked for cleanup."""
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        """Grant `user` a permission on `target_user_group`; tracked for cleanup."""
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        """Grant `user_group` a permission on `target_user_group`; tracked for cleanup."""
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        """Revoke a global permission and re-enable inherited defaults."""
        self.inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def inherit_default_user_permissions(self, user_name, value):
        """Toggle whether `user_name` inherits the default permissions."""
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        """Destroy everything created by this utility, in dependency order."""
        self._cleanup_permissions()
        self._cleanup_repos()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        """Revoke all recorded permission grants."""
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        """Destroy repo groups, deepest paths first."""
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            # NOTE: `cmp` and `sorted(cmp=...)` below are Python 2 only.
            return cmp(second_group_parts, first_group_parts)

        sorted_repo_group_ids = sorted(
            self.repo_group_ids, cmp=_repo_group_compare)
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_repos(self):
        """Destroy all created repositories."""
        sorted_repos_ids = sorted(self.repos_ids)
        for repo_id in sorted_repos_ids:
            self.fixture.destroy_repo(repo_id)

    def _cleanup_user_groups(self):
        """Destroy user groups, deepest names first."""
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            # NOTE: `cmp` and `sorted(cmp=...)` below are Python 2 only.
            return cmp(second_group_parts, first_group_parts)

        sorted_user_group_ids = sorted(
            self.user_group_ids, cmp=_user_group_compare)
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        """Destroy all created users."""
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)
1386 1393
1387 1394
1388 1395 # TODO: Think about moving this into a pytest-pyro package and make it a
1389 1396 # pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Adding the remote traceback if the exception has this information.

    VCSServer attaches this information as the attribute
    `_vcs_server_traceback` to the exception instance.
    """
    hook_outcome = yield
    test_report = hook_outcome.get_result()
    if call.excinfo:
        _add_vcsserver_remote_traceback(test_report, call.excinfo.value)
1402 1409
1403 1410
1404 1411 def _add_vcsserver_remote_traceback(report, exc):
1405 1412 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1406 1413
1407 1414 if vcsserver_traceback:
1408 1415 section = 'VCSServer remote traceback ' + report.when
1409 1416 report.sections.append((section, vcsserver_traceback))
1410 1417
1411 1418
@pytest.fixture(scope='session')
def testrun():
    """Session-wide metadata identifying this test run."""
    return {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }
1419 1426
1420 1427
class AppenlightClient(object):
    """Collects stats/tag records and pushes them to an appenlight server."""

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        """Record a tag value captured before the measured action."""
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        """Record a tag value captured after the measured action."""
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue one stats record, filling in enabled defaults in place."""
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        """Summarize all tags into one record and POST everything collected."""
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                # Missing "after" value or non-numeric tag: skip the delta.
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if response.status_code != 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
1490 1497
1491 1498
@pytest.fixture()
def gist_util(request, db_connection):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    util = GistUtility()
    request.addfinalizer(util.cleanup)
    return util
1500 1507
1501 1508
class GistUtility(object):
    """Creates gists through `Fixture` and tracks them for later cleanup."""

    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        """Create a gist and remember its id so `cleanup` can destroy it."""
        new_gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(new_gist.gist_id)
        return new_gist

    def cleanup(self):
        """Destroy every gist created through this utility."""
        for gist_id in self.gist_ids:
            self.fixture.destroy_gists(str(gist_id))
1515 1522
1516 1523
@pytest.fixture()
def enabled_backends(request):
    """Return a copy of the backend names enabled via the --backends option."""
    return list(request.config.option.backends)
1521 1528
1522 1529
@pytest.fixture()
def settings_util(request, db_connection):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    util = SettingsUtility()
    request.addfinalizer(util.cleanup)
    return util
1531 1538
1532 1539
class SettingsUtility(object):
    """
    Creates RhodeCode ui and app settings — both global and per-repository —
    and deletes everything it created again in :meth:`cleanup`.
    """

    def __init__(self):
        # Ids of created rows, grouped per table, so cleanup can remove them.
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        """
        Create and commit a `RepoRhodeCodeUi` row scoped to *repo*.

        :param key: ui key; derived from section/value/repo when not given.
        :param cleanup: when True the row is removed again by `cleanup()`.
        """
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        """
        Create and commit a global `RhodeCodeUi` row.

        :param key: ui key; derived from section/value when not given.
        :param cleanup: when True the row is removed again by `cleanup()`.
        """
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        """Create and commit a `RepoRhodeCodeSetting` row scoped to *repo*."""
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        """Create and commit a global `RhodeCodeSetting` row."""
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        """Delete every row registered for cleanup and commit once."""
        # One data-driven pass instead of four copy-pasted loops.
        tracked = [
            (RhodeCodeUi, self.rhodecode_ui_ids),
            (RhodeCodeSetting, self.rhodecode_setting_ids),
            (RepoRhodeCodeUi, self.repo_rhodecode_ui_ids),
            (RepoRhodeCodeSetting, self.repo_rhodecode_setting_ids),
        ]
        for model, ids in tracked:
            for id_ in ids:
                Session().delete(model.get(id_))

        Session().commit()
1613 1620
1614 1621
@pytest.fixture()
def no_notifications(request):
    """Silence notification creation for the duration of a test."""
    patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    patcher.start()
    request.addfinalizer(patcher.stop)
1621 1628
1622 1629
@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. It is chosen in a way so that the
    tests are not too slow in our default test suite.
    """
    return request.config.getoption('--repeat')
1632 1639
1633 1640
@pytest.fixture()
def rhodecode_fixtures():
    """Provide a fresh `Fixture` helper instance."""
    return Fixture()
1637 1644
1638 1645
@pytest.fixture()
def context_stub():
    """
    Stub context object.
    """
    return pyramid.testing.DummyResource()
1646 1653
1647 1654
@pytest.fixture()
def request_stub():
    """
    Stub request object.
    """
    from rhodecode.lib.base import bootstrap_request
    return bootstrap_request(scheme='https')
1656 1663
1657 1664
@pytest.fixture()
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator.
    """
    from rhodecode.lib.base import bootstrap_config
    config = bootstrap_config(request=request_stub)

    # Tear pyramid.testing down again once the test is over.
    request.addfinalizer(pyramid.testing.tearDown)

    return config
1671 1678
1672 1679
@pytest.fixture()
def StubIntegrationType():
    """Create, globally register and return a throwaway integration type."""
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'

        @classmethod
        def icon(cls):
            return 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # for testing

        def send_event(self, event):
            # Record instead of delivering, so tests can inspect the events.
            self.sent_events.append(event)

        def settings_schema(self):
            # NOTE(review): field order presumably matters for form
            # rendering in colander declarative schemas — keep as-is.
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()


    # Side effect: the stub stays registered for the rest of the session.
    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
1709 1716
@pytest.fixture()
def stub_integration_settings():
    """Settings dict matching the schema of `StubIntegrationType`."""
    return {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }
1716 1723
1717 1724
@pytest.fixture()
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    """Repository-scoped stub integration, removed again on teardown."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1731 1738
1732 1739
@pytest.fixture()
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    """Repo-group-scoped stub integration (direct children only)."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1746 1753
1747 1754
@pytest.fixture()
def repogroup_recursive_integration_stub(request, test_repo_group,
                                         StubIntegrationType, stub_integration_settings):
    """Repo-group-scoped stub integration that also covers nested repos."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1761 1768
1762 1769
@pytest.fixture()
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    """Globally-scoped stub integration, removed again on teardown."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1776 1783
1777 1784
@pytest.fixture()
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    """Globally-scoped stub integration limited to root-level repositories."""
    # NOTE(review): the name duplicates global_integration_stub's
    # 'test global integration' — looks like copy-paste; confirm whether
    # integration names must be unique before changing it.
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1791 1798
1792 1799
@pytest.fixture()
def local_dt_to_utc():
    """Return a converter turning naive local datetimes into naive UTC ones."""
    def _factory(dt):
        localized = dt.replace(tzinfo=dateutil.tz.tzlocal())
        return localized.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)
    return _factory
1799 1806
1800 1807
@pytest.fixture()
def disable_anonymous_user(request, baseapp):
    """Turn anonymous access off for one test, restoring it afterwards."""
    set_anonymous_access(False)
    request.addfinalizer(lambda: set_anonymous_access(True))
1808 1815
1809 1816
@pytest.fixture(scope='module')
def rc_fixture(request):
    """Module-scoped `Fixture` helper instance."""
    return Fixture()
1813 1820
1814 1821
@pytest.fixture()
def repo_groups(request):
    """Create a zombie/parent/child repo group triple with cleanup."""
    fixture = Fixture()

    session = Session()
    zombie_group = fixture.create_repo_group('zombie')
    parent_group = fixture.create_repo_group('parent')
    child_group = fixture.create_repo_group('parent/child')

    # Sanity-check what was just created.
    created = session.query(RepoGroup).all()
    assert len(created) == 3
    assert child_group.group_parent_id == parent_group.group_id

    def cleanup():
        # The child group must be destroyed before its parent.
        fixture.destroy_repo_group(zombie_group)
        fixture.destroy_repo_group(child_group)
        fixture.destroy_repo_group(parent_group)
    request.addfinalizer(cleanup)

    return zombie_group, parent_group, child_group
General Comments 0
You need to be logged in to leave comments. Login now