feat(task-for-automatic-pr-merge): added functionality to support pull request auto merge for EE. Fixes: RCCE-67
ilin.s
r5657:027566d5 default
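At a glance, this change adds a new 'rhodecode_auto_merge_enabled' VCS setting (exercised by the repository VCS settings tests in the first hunk) and registers an EE-only subscriber, auto_merge_pr_if_needed, for pull request review events (second hunk). The subscriber implementation itself lives in rhodecode.subscribers and is not part of this diff; the sketch below only illustrates the general shape such a Pyramid subscriber takes, and the event attribute names used are assumptions.

# Illustrative sketch only: the real auto_merge_pr_if_needed is implemented in
# rhodecode.subscribers (EE) and is not shown in this diff.
import logging

log = logging.getLogger(__name__)


def auto_merge_pr_if_needed(event):
    # A Pyramid subscriber is a plain callable that receives the event object.
    # `event.pullrequest` is an assumed attribute name; the concrete event API may differ.
    pull_request = event.pullrequest
    log.debug('Evaluating auto-merge conditions for pull request %s', pull_request)
    # EE logic would check that the repository has rhodecode_auto_merge_enabled set
    # and that the review outcome allows merging before scheduling the merge task.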
@@ -1,668 +1,669
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import mock
21 21 import pytest
22 22
23 23 from rhodecode.lib import auth
24 24 from rhodecode.lib.utils2 import str2bool
25 25 from rhodecode.model.db import (
26 26 Repository, UserRepoToPerm, User)
27 27 from rhodecode.model.meta import Session
28 28 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
29 29 from rhodecode.model.user import UserModel
30 30 from rhodecode.tests import (
31 31 login_user_session, logout_user_session,
32 32 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
33 33 from rhodecode.tests.fixture import Fixture
34 34 from rhodecode.tests.utils import AssertResponse
35 35 from rhodecode.tests.routes import route_path
36 36
37 37 fixture = Fixture()
38 38
39 39
40 40 @pytest.mark.usefixtures("app")
41 41 class TestVcsSettings(object):
42 42 FORM_DATA = {
43 43 'inherit_global_settings': False,
44 44 'hooks_changegroup_repo_size': False,
45 45 'hooks_changegroup_push_logger': False,
46 46 'hooks_outgoing_pull_logger': False,
47 47 'extensions_largefiles': False,
48 48 'extensions_evolve': False,
49 49 'phases_publish': 'False',
50 50 'rhodecode_pr_merge_enabled': False,
51 'rhodecode_auto_merge_enabled': False,
51 52 'rhodecode_use_outdated_comments': False,
52 53 'new_svn_branch': '',
53 54 'new_svn_tag': ''
54 55 }
55 56
56 57 @pytest.mark.skip_backends('svn')
57 58 def test_global_settings_initial_values(self, autologin_user, backend):
58 59 repo_name = backend.repo_name
59 60 response = self.app.get(route_path('edit_repo_vcs', repo_name=repo_name))
60 61
61 62 expected_settings = (
62 'rhodecode_use_outdated_comments', 'rhodecode_pr_merge_enabled',
63 'rhodecode_use_outdated_comments', 'rhodecode_pr_merge_enabled', 'rhodecode_auto_merge_enabled',
63 64 'hooks_changegroup_repo_size', 'hooks_changegroup_push_logger',
64 65 'hooks_outgoing_pull_logger'
65 66 )
66 67 for setting in expected_settings:
67 68 self.assert_repo_value_equals_global_value(response, setting)
68 69
69 70 def test_show_settings_requires_repo_admin_permission(
70 71 self, backend, user_util, settings_util):
71 72 repo = backend.create_repo()
72 73 repo_name = repo.repo_name
73 74 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
74 75 user_util.grant_user_permission_to_repo(repo, user, 'repository.admin')
75 76 login_user_session(
76 77 self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
77 78 self.app.get(route_path('edit_repo_vcs', repo_name=repo_name), status=200)
78 79
79 80 def test_inherit_global_settings_flag_is_true_by_default(
80 81 self, autologin_user, backend):
81 82 repo_name = backend.repo_name
82 83 response = self.app.get(route_path('edit_repo_vcs', repo_name=repo_name))
83 84
84 85 assert_response = response.assert_response()
85 86 element = assert_response.get_element('#inherit_global_settings')
86 87 assert element.checked
87 88
88 89 @pytest.mark.parametrize('checked_value', [True, False])
89 90 def test_inherit_global_settings_value(
90 91 self, autologin_user, backend, checked_value, settings_util):
91 92 repo = backend.create_repo()
92 93 repo_name = repo.repo_name
93 94 settings_util.create_repo_rhodecode_setting(
94 95 repo, 'inherit_vcs_settings', checked_value, 'bool')
95 96 response = self.app.get(route_path('edit_repo_vcs', repo_name=repo_name))
96 97
97 98 assert_response = response.assert_response()
98 99 element = assert_response.get_element('#inherit_global_settings')
99 100 assert element.checked == checked_value
100 101
101 102 @pytest.mark.skip_backends('svn')
102 103 def test_hooks_settings_are_created(
103 104 self, autologin_user, backend, csrf_token):
104 105 repo_name = backend.repo_name
105 106 data = self.FORM_DATA.copy()
106 107 data['csrf_token'] = csrf_token
107 108 self.app.post(
108 109 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
109 110 settings = SettingsModel(repo=repo_name)
110 111 try:
111 112 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
112 113 ui = settings.get_ui_by_section_and_key(section, key)
113 114 assert ui.ui_active is False
114 115 finally:
115 116 self._cleanup_repo_settings(settings)
116 117
117 118 def test_hooks_settings_are_not_created_for_svn(
118 119 self, autologin_user, backend_svn, csrf_token):
119 120 repo_name = backend_svn.repo_name
120 121 data = self.FORM_DATA.copy()
121 122 data['csrf_token'] = csrf_token
122 123 self.app.post(
123 124 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
124 125 settings = SettingsModel(repo=repo_name)
125 126 try:
126 127 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
127 128 ui = settings.get_ui_by_section_and_key(section, key)
128 129 assert ui is None
129 130 finally:
130 131 self._cleanup_repo_settings(settings)
131 132
132 133 @pytest.mark.skip_backends('svn')
133 134 def test_hooks_settings_are_updated(
134 135 self, autologin_user, backend, csrf_token):
135 136 repo_name = backend.repo_name
136 137 settings = SettingsModel(repo=repo_name)
137 138 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
138 139 settings.create_ui_section_value(section, '', key=key, active=True)
139 140
140 141 data = self.FORM_DATA.copy()
141 142 data['csrf_token'] = csrf_token
142 143 self.app.post(
143 144 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
144 145 try:
145 146 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
146 147 ui = settings.get_ui_by_section_and_key(section, key)
147 148 assert ui.ui_active is False
148 149 finally:
149 150 self._cleanup_repo_settings(settings)
150 151
151 152 def test_hooks_settings_are_not_updated_for_svn(
152 153 self, autologin_user, backend_svn, csrf_token):
153 154 repo_name = backend_svn.repo_name
154 155 settings = SettingsModel(repo=repo_name)
155 156 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
156 157 settings.create_ui_section_value(section, '', key=key, active=True)
157 158
158 159 data = self.FORM_DATA.copy()
159 160 data['csrf_token'] = csrf_token
160 161 self.app.post(
161 162 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
162 163 try:
163 164 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
164 165 ui = settings.get_ui_by_section_and_key(section, key)
165 166 assert ui.ui_active is True
166 167 finally:
167 168 self._cleanup_repo_settings(settings)
168 169
169 170 @pytest.mark.skip_backends('svn')
170 171 def test_pr_settings_are_created(
171 172 self, autologin_user, backend, csrf_token):
172 173 repo_name = backend.repo_name
173 174 data = self.FORM_DATA.copy()
174 175 data['csrf_token'] = csrf_token
175 176 self.app.post(
176 177 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
177 178 settings = SettingsModel(repo=repo_name)
178 179 try:
179 180 for name in VcsSettingsModel.GENERAL_SETTINGS:
180 181 setting = settings.get_setting_by_name(name)
181 182 assert setting.app_settings_value is False
182 183 finally:
183 184 self._cleanup_repo_settings(settings)
184 185
185 186 def test_pr_settings_are_not_created_for_svn(
186 187 self, autologin_user, backend_svn, csrf_token):
187 188 repo_name = backend_svn.repo_name
188 189 data = self.FORM_DATA.copy()
189 190 data['csrf_token'] = csrf_token
190 191 self.app.post(
191 192 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
192 193 settings = SettingsModel(repo=repo_name)
193 194 try:
194 195 for name in VcsSettingsModel.GENERAL_SETTINGS:
195 196 setting = settings.get_setting_by_name(name)
196 197 assert setting is None
197 198 finally:
198 199 self._cleanup_repo_settings(settings)
199 200
200 201 def test_pr_settings_creation_requires_repo_admin_permission(
201 202 self, backend, user_util, settings_util, csrf_token):
202 203 repo = backend.create_repo()
203 204 repo_name = repo.repo_name
204 205
205 206 logout_user_session(self.app, csrf_token)
206 207 session = login_user_session(
207 208 self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
208 209 new_csrf_token = auth.get_csrf_token(session)
209 210
210 211 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
211 212 repo = Repository.get_by_repo_name(repo_name)
212 213 user_util.grant_user_permission_to_repo(repo, user, 'repository.admin')
213 214 data = self.FORM_DATA.copy()
214 215 data['csrf_token'] = new_csrf_token
215 216 settings = SettingsModel(repo=repo_name)
216 217
217 218 try:
218 219 self.app.post(
219 220 route_path('edit_repo_vcs_update', repo_name=repo_name), data,
220 221 status=302)
221 222 finally:
222 223 self._cleanup_repo_settings(settings)
223 224
224 225 @pytest.mark.skip_backends('svn')
225 226 def test_pr_settings_are_updated(
226 227 self, autologin_user, backend, csrf_token):
227 228 repo_name = backend.repo_name
228 229 settings = SettingsModel(repo=repo_name)
229 230 for name in VcsSettingsModel.GENERAL_SETTINGS:
230 231 settings.create_or_update_setting(name, True, 'bool')
231 232
232 233 data = self.FORM_DATA.copy()
233 234 data['csrf_token'] = csrf_token
234 235 self.app.post(
235 236 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
236 237 try:
237 238 for name in VcsSettingsModel.GENERAL_SETTINGS:
238 239 setting = settings.get_setting_by_name(name)
239 240 assert setting.app_settings_value is False
240 241 finally:
241 242 self._cleanup_repo_settings(settings)
242 243
243 244 def test_pr_settings_are_not_updated_for_svn(
244 245 self, autologin_user, backend_svn, csrf_token):
245 246 repo_name = backend_svn.repo_name
246 247 settings = SettingsModel(repo=repo_name)
247 248 for name in VcsSettingsModel.GENERAL_SETTINGS:
248 249 settings.create_or_update_setting(name, True, 'bool')
249 250
250 251 data = self.FORM_DATA.copy()
251 252 data['csrf_token'] = csrf_token
252 253 self.app.post(
253 254 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
254 255 try:
255 256 for name in VcsSettingsModel.GENERAL_SETTINGS:
256 257 setting = settings.get_setting_by_name(name)
257 258 assert setting.app_settings_value is True
258 259 finally:
259 260 self._cleanup_repo_settings(settings)
260 261
261 262 def test_svn_settings_are_created(
262 263 self, autologin_user, backend_svn, csrf_token, settings_util):
263 264 repo_name = backend_svn.repo_name
264 265 data = self.FORM_DATA.copy()
265 266 data['new_svn_tag'] = 'svn-tag'
266 267 data['new_svn_branch'] = 'svn-branch'
267 268 data['csrf_token'] = csrf_token
268 269
269 270 # Create a few global settings to make sure that uniqueness validators
270 271 # are not triggered
271 272 settings_util.create_rhodecode_ui(
272 273 VcsSettingsModel.SVN_BRANCH_SECTION, 'svn-branch')
273 274 settings_util.create_rhodecode_ui(
274 275 VcsSettingsModel.SVN_TAG_SECTION, 'svn-tag')
275 276
276 277 self.app.post(
277 278 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
278 279 settings = SettingsModel(repo=repo_name)
279 280 try:
280 281 svn_branches = settings.get_ui_by_section(
281 282 VcsSettingsModel.SVN_BRANCH_SECTION)
282 283 svn_branch_names = [b.ui_value for b in svn_branches]
283 284 svn_tags = settings.get_ui_by_section(
284 285 VcsSettingsModel.SVN_TAG_SECTION)
285 286 svn_tag_names = [b.ui_value for b in svn_tags]
286 287 assert 'svn-branch' in svn_branch_names
287 288 assert 'svn-tag' in svn_tag_names
288 289 finally:
289 290 self._cleanup_repo_settings(settings)
290 291
291 292 def test_svn_settings_are_unique(
292 293 self, autologin_user, backend_svn, csrf_token, settings_util):
293 294 repo = backend_svn.repo
294 295 repo_name = repo.repo_name
295 296 data = self.FORM_DATA.copy()
296 297 data['new_svn_tag'] = 'test_tag'
297 298 data['new_svn_branch'] = 'test_branch'
298 299 data['csrf_token'] = csrf_token
299 300 settings_util.create_repo_rhodecode_ui(
300 301 repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch')
301 302 settings_util.create_repo_rhodecode_ui(
302 303 repo, VcsSettingsModel.SVN_TAG_SECTION, 'test_tag')
303 304
304 305 response = self.app.post(
305 306 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=200)
306 307 response.mustcontain('Pattern already exists')
307 308
308 309 def test_svn_settings_with_empty_values_are_not_created(
309 310 self, autologin_user, backend_svn, csrf_token):
310 311 repo_name = backend_svn.repo_name
311 312 data = self.FORM_DATA.copy()
312 313 data['csrf_token'] = csrf_token
313 314 self.app.post(
314 315 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
315 316 settings = SettingsModel(repo=repo_name)
316 317 try:
317 318 svn_branches = settings.get_ui_by_section(
318 319 VcsSettingsModel.SVN_BRANCH_SECTION)
319 320 svn_tags = settings.get_ui_by_section(
320 321 VcsSettingsModel.SVN_TAG_SECTION)
321 322 assert len(svn_branches) == 0
322 323 assert len(svn_tags) == 0
323 324 finally:
324 325 self._cleanup_repo_settings(settings)
325 326
326 327 def test_svn_settings_are_shown_for_svn_repository(
327 328 self, autologin_user, backend_svn, csrf_token):
328 329 repo_name = backend_svn.repo_name
329 330 response = self.app.get(
330 331 route_path('edit_repo_vcs', repo_name=repo_name), status=200)
331 332 response.mustcontain('Subversion Settings')
332 333
333 334 @pytest.mark.skip_backends('svn')
334 335 def test_svn_settings_are_not_created_for_not_svn_repository(
335 336 self, autologin_user, backend, csrf_token):
336 337 repo_name = backend.repo_name
337 338 data = self.FORM_DATA.copy()
338 339 data['csrf_token'] = csrf_token
339 340 self.app.post(
340 341 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
341 342 settings = SettingsModel(repo=repo_name)
342 343 try:
343 344 svn_branches = settings.get_ui_by_section(
344 345 VcsSettingsModel.SVN_BRANCH_SECTION)
345 346 svn_tags = settings.get_ui_by_section(
346 347 VcsSettingsModel.SVN_TAG_SECTION)
347 348 assert len(svn_branches) == 0
348 349 assert len(svn_tags) == 0
349 350 finally:
350 351 self._cleanup_repo_settings(settings)
351 352
352 353 @pytest.mark.skip_backends('svn')
353 354 def test_svn_settings_are_shown_only_for_svn_repository(
354 355 self, autologin_user, backend, csrf_token):
355 356 repo_name = backend.repo_name
356 357 response = self.app.get(
357 358 route_path('edit_repo_vcs', repo_name=repo_name), status=200)
358 359 response.mustcontain(no='Subversion Settings')
359 360
360 361 def test_hg_settings_are_created(
361 362 self, autologin_user, backend_hg, csrf_token):
362 363 repo_name = backend_hg.repo_name
363 364 data = self.FORM_DATA.copy()
364 365 data['new_svn_tag'] = 'svn-tag'
365 366 data['new_svn_branch'] = 'svn-branch'
366 367 data['csrf_token'] = csrf_token
367 368 self.app.post(
368 369 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
369 370 settings = SettingsModel(repo=repo_name)
370 371 try:
371 372 largefiles_ui = settings.get_ui_by_section_and_key(
372 373 'extensions', 'largefiles')
373 374 assert largefiles_ui.ui_active is False
374 375 phases_ui = settings.get_ui_by_section_and_key(
375 376 'phases', 'publish')
376 377 assert str2bool(phases_ui.ui_value) is False
377 378 finally:
378 379 self._cleanup_repo_settings(settings)
379 380
380 381 def test_hg_settings_are_updated(
381 382 self, autologin_user, backend_hg, csrf_token):
382 383 repo_name = backend_hg.repo_name
383 384 settings = SettingsModel(repo=repo_name)
384 385 settings.create_ui_section_value(
385 386 'extensions', '', key='largefiles', active=True)
386 387 settings.create_ui_section_value(
387 388 'phases', '1', key='publish', active=True)
388 389
389 390 data = self.FORM_DATA.copy()
390 391 data['csrf_token'] = csrf_token
391 392 self.app.post(
392 393 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
393 394 try:
394 395 largefiles_ui = settings.get_ui_by_section_and_key(
395 396 'extensions', 'largefiles')
396 397 assert largefiles_ui.ui_active is False
397 398 phases_ui = settings.get_ui_by_section_and_key(
398 399 'phases', 'publish')
399 400 assert str2bool(phases_ui.ui_value) is False
400 401 finally:
401 402 self._cleanup_repo_settings(settings)
402 403
403 404 def test_hg_settings_are_shown_for_hg_repository(
404 405 self, autologin_user, backend_hg, csrf_token):
405 406 repo_name = backend_hg.repo_name
406 407 response = self.app.get(
407 408 route_path('edit_repo_vcs', repo_name=repo_name), status=200)
408 409 response.mustcontain('Mercurial Settings')
409 410
410 411 @pytest.mark.skip_backends('hg')
411 412 def test_hg_settings_are_created_only_for_hg_repository(
412 413 self, autologin_user, backend, csrf_token):
413 414 repo_name = backend.repo_name
414 415 data = self.FORM_DATA.copy()
415 416 data['csrf_token'] = csrf_token
416 417 self.app.post(
417 418 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
418 419 settings = SettingsModel(repo=repo_name)
419 420 try:
420 421 largefiles_ui = settings.get_ui_by_section_and_key(
421 422 'extensions', 'largefiles')
422 423 assert largefiles_ui is None
423 424 phases_ui = settings.get_ui_by_section_and_key(
424 425 'phases', 'publish')
425 426 assert phases_ui is None
426 427 finally:
427 428 self._cleanup_repo_settings(settings)
428 429
429 430 @pytest.mark.skip_backends('hg')
430 431 def test_hg_settings_are_shown_only_for_hg_repository(
431 432 self, autologin_user, backend, csrf_token):
432 433 repo_name = backend.repo_name
433 434 response = self.app.get(
434 435 route_path('edit_repo_vcs', repo_name=repo_name), status=200)
435 436 response.mustcontain(no='Mercurial Settings')
436 437
437 438 @pytest.mark.skip_backends('hg')
438 439 def test_hg_settings_are_updated_only_for_hg_repository(
439 440 self, autologin_user, backend, csrf_token):
440 441 repo_name = backend.repo_name
441 442 settings = SettingsModel(repo=repo_name)
442 443 settings.create_ui_section_value(
443 444 'extensions', '', key='largefiles', active=True)
444 445 settings.create_ui_section_value(
445 446 'phases', '1', key='publish', active=True)
446 447
447 448 data = self.FORM_DATA.copy()
448 449 data['csrf_token'] = csrf_token
449 450 self.app.post(
450 451 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
451 452 try:
452 453 largefiles_ui = settings.get_ui_by_section_and_key(
453 454 'extensions', 'largefiles')
454 455 assert largefiles_ui.ui_active is True
455 456 phases_ui = settings.get_ui_by_section_and_key(
456 457 'phases', 'publish')
457 458 assert phases_ui.ui_value == '1'
458 459 finally:
459 460 self._cleanup_repo_settings(settings)
460 461
461 462 def test_per_repo_svn_settings_are_displayed(
462 463 self, autologin_user, backend_svn, settings_util):
463 464 repo = backend_svn.create_repo()
464 465 repo_name = repo.repo_name
465 466 branches = [
466 467 settings_util.create_repo_rhodecode_ui(
467 468 repo, VcsSettingsModel.SVN_BRANCH_SECTION,
468 469 'branch_{}'.format(i))
469 470 for i in range(10)]
470 471 tags = [
471 472 settings_util.create_repo_rhodecode_ui(
472 473 repo, VcsSettingsModel.SVN_TAG_SECTION, 'tag_{}'.format(i))
473 474 for i in range(10)]
474 475
475 476 response = self.app.get(
476 477 route_path('edit_repo_vcs', repo_name=repo_name), status=200)
477 478 assert_response = response.assert_response()
478 479 for branch in branches:
479 480 css_selector = '[name=branch_value_{}]'.format(branch.ui_id)
480 481 element = assert_response.get_element(css_selector)
481 482 assert element.value == branch.ui_value
482 483 for tag in tags:
483 484 css_selector = '[name=tag_ui_value_new_{}]'.format(tag.ui_id)
484 485 element = assert_response.get_element(css_selector)
485 486 assert element.value == tag.ui_value
486 487
487 488 def test_per_repo_hg_and_pr_settings_are_not_displayed_for_svn(
488 489 self, autologin_user, backend_svn, settings_util):
489 490 repo = backend_svn.create_repo()
490 491 repo_name = repo.repo_name
491 492 response = self.app.get(
492 493 route_path('edit_repo_vcs', repo_name=repo_name), status=200)
493 494 response.mustcontain(no='<label>Hooks:</label>')
494 495 response.mustcontain(no='<label>Pull Request Settings:</label>')
495 496
496 497 def test_inherit_global_settings_value_is_saved(
497 498 self, autologin_user, backend, csrf_token):
498 499 repo_name = backend.repo_name
499 500 data = self.FORM_DATA.copy()
500 501 data['csrf_token'] = csrf_token
501 502 data['inherit_global_settings'] = True
502 503 self.app.post(
503 504 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
504 505
505 506 settings = SettingsModel(repo=repo_name)
506 507 vcs_settings = VcsSettingsModel(repo=repo_name)
507 508 try:
508 509 assert vcs_settings.inherit_global_settings is True
509 510 finally:
510 511 self._cleanup_repo_settings(settings)
511 512
512 513 def test_repo_cache_is_invalidated_when_settings_are_updated(
513 514 self, autologin_user, backend, csrf_token):
514 515 repo_name = backend.repo_name
515 516 data = self.FORM_DATA.copy()
516 517 data['csrf_token'] = csrf_token
517 518 data['inherit_global_settings'] = True
518 519 settings = SettingsModel(repo=repo_name)
519 520
520 521 invalidation_patcher = mock.patch(
521 522 'rhodecode.model.scm.ScmModel.mark_for_invalidation')
522 523 with invalidation_patcher as invalidation_mock:
523 524 self.app.post(
524 525 route_path('edit_repo_vcs_update', repo_name=repo_name), data,
525 526 status=302)
526 527 try:
527 528 invalidation_mock.assert_called_once_with(repo_name, delete=True)
528 529 finally:
529 530 self._cleanup_repo_settings(settings)
530 531
531 532 def test_other_settings_not_saved_inherit_global_settings_is_true(
532 533 self, autologin_user, backend, csrf_token):
533 534 repo_name = backend.repo_name
534 535 data = self.FORM_DATA.copy()
535 536 data['csrf_token'] = csrf_token
536 537 data['inherit_global_settings'] = True
537 538 self.app.post(
538 539 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
539 540
540 541 settings = SettingsModel(repo=repo_name)
541 542 ui_settings = (
542 543 VcsSettingsModel.HOOKS_SETTINGS + VcsSettingsModel.HG_SETTINGS)
543 544
544 545 vcs_settings = []
545 546 try:
546 547 for section, key in ui_settings:
547 548 ui = settings.get_ui_by_section_and_key(section, key)
548 549 if ui:
549 550 vcs_settings.append(ui)
550 551 vcs_settings.extend(settings.get_ui_by_section(
551 552 VcsSettingsModel.SVN_BRANCH_SECTION))
552 553 vcs_settings.extend(settings.get_ui_by_section(
553 554 VcsSettingsModel.SVN_TAG_SECTION))
554 555 for name in VcsSettingsModel.GENERAL_SETTINGS:
555 556 setting = settings.get_setting_by_name(name)
556 557 if setting:
557 558 vcs_settings.append(setting)
558 559 assert vcs_settings == []
559 560 finally:
560 561 self._cleanup_repo_settings(settings)
561 562
562 563 def test_delete_svn_branch_and_tag_patterns(
563 564 self, autologin_user, backend_svn, settings_util, csrf_token, xhr_header):
564 565 repo = backend_svn.create_repo()
565 566 repo_name = repo.repo_name
566 567 branch = settings_util.create_repo_rhodecode_ui(
567 568 repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch',
568 569 cleanup=False)
569 570 tag = settings_util.create_repo_rhodecode_ui(
570 571 repo, VcsSettingsModel.SVN_TAG_SECTION, 'test_tag', cleanup=False)
571 572 data = {
572 573 'csrf_token': csrf_token
573 574 }
574 575 for id_ in (branch.ui_id, tag.ui_id):
575 576 data['delete_svn_pattern'] = id_,
576 577 self.app.post(
577 578 route_path('edit_repo_vcs_svn_pattern_delete', repo_name=repo_name),
578 579 data, extra_environ=xhr_header, status=200)
579 580 settings = VcsSettingsModel(repo=repo_name)
580 581 assert settings.get_repo_svn_branch_patterns() == []
581 582
582 583 def test_delete_svn_branch_requires_repo_admin_permission(
583 584 self, backend_svn, user_util, settings_util, csrf_token, xhr_header):
584 585 repo = backend_svn.create_repo()
585 586 repo_name = repo.repo_name
586 587
587 588 logout_user_session(self.app, csrf_token)
588 589 session = login_user_session(
589 590 self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
590 591 csrf_token = auth.get_csrf_token(session)
591 592
592 593 repo = Repository.get_by_repo_name(repo_name)
593 594 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
594 595 user_util.grant_user_permission_to_repo(repo, user, 'repository.admin')
595 596 branch = settings_util.create_repo_rhodecode_ui(
596 597 repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch',
597 598 cleanup=False)
598 599 data = {
599 600 'csrf_token': csrf_token,
600 601 'delete_svn_pattern': branch.ui_id
601 602 }
602 603 self.app.post(
603 604 route_path('edit_repo_vcs_svn_pattern_delete', repo_name=repo_name),
604 605 data, extra_environ=xhr_header, status=200)
605 606
606 607 def test_delete_svn_branch_raises_400_when_not_found(
607 608 self, autologin_user, backend_svn, settings_util, csrf_token, xhr_header):
608 609 repo_name = backend_svn.repo_name
609 610 data = {
610 611 'delete_svn_pattern': 123,
611 612 'csrf_token': csrf_token
612 613 }
613 614 self.app.post(
614 615 route_path('edit_repo_vcs_svn_pattern_delete', repo_name=repo_name),
615 616 data, extra_environ=xhr_header, status=400)
616 617
617 618 def test_delete_svn_branch_raises_400_when_no_id_specified(
618 619 self, autologin_user, backend_svn, settings_util, csrf_token, xhr_header):
619 620 repo_name = backend_svn.repo_name
620 621 data = {
621 622 'csrf_token': csrf_token
622 623 }
623 624 self.app.post(
624 625 route_path('edit_repo_vcs_svn_pattern_delete', repo_name=repo_name),
625 626 data, extra_environ=xhr_header, status=400)
626 627
627 628 def _cleanup_repo_settings(self, settings_model):
628 629 cleanup = []
629 630 ui_settings = (
630 631 VcsSettingsModel.HOOKS_SETTINGS + VcsSettingsModel.HG_SETTINGS)
631 632
632 633 for section, key in ui_settings:
633 634 ui = settings_model.get_ui_by_section_and_key(section, key)
634 635 if ui:
635 636 cleanup.append(ui)
636 637
637 638 cleanup.extend(settings_model.get_ui_by_section(
638 639 VcsSettingsModel.INHERIT_SETTINGS))
639 640 cleanup.extend(settings_model.get_ui_by_section(
640 641 VcsSettingsModel.SVN_BRANCH_SECTION))
641 642 cleanup.extend(settings_model.get_ui_by_section(
642 643 VcsSettingsModel.SVN_TAG_SECTION))
643 644
644 645 for name in VcsSettingsModel.GENERAL_SETTINGS:
645 646 setting = settings_model.get_setting_by_name(name)
646 647 if setting:
647 648 cleanup.append(setting)
648 649
649 650 for object_ in cleanup:
650 651 Session().delete(object_)
651 652 Session().commit()
652 653
653 654 def assert_repo_value_equals_global_value(self, response, setting):
654 655 assert_response = response.assert_response()
655 656 global_css_selector = '[name={}_inherited]'.format(setting)
656 657 repo_css_selector = '[name={}]'.format(setting)
657 658 repo_element = assert_response.get_element(repo_css_selector)
658 659 global_element = assert_response.get_element(global_css_selector)
659 660 assert repo_element.value == global_element.value
660 661
661 662
662 663 def _get_permission_for_user(user, repo):
663 664 perm = UserRepoToPerm.query()\
664 665 .filter(UserRepoToPerm.repository ==
665 666 Repository.get_by_repo_name(repo))\
666 667 .filter(UserRepoToPerm.user == User.get_by_username(user))\
667 668 .all()
668 669 return perm
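For context on what the updated tests exercise: the new flag is stored and read like the other general VCS settings, so after the settings form is posted the per-repository value can be fetched through SettingsModel. A minimal sketch, assuming the key behaves like the existing rhodecode_pr_merge_enabled setting:

# Minimal sketch reusing the SettingsModel API already used by the tests above.
from rhodecode.model.settings import SettingsModel


def repo_auto_merge_enabled(repo_name):
    settings = SettingsModel(repo=repo_name)
    setting = settings.get_setting_by_name('rhodecode_auto_merge_enabled')
    # No row means the repository has no override of its own (e.g. it inherits
    # the global value); otherwise the stored boolean is returned.
    return setting.app_settings_value if setting else None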
@@ -1,471 +1,472
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import sys
21 21 import collections
22 22
23 23 import time
24 24 import logging.config
25 25
26 26 from paste.gzipper import make_gzip_middleware
27 27 import pyramid.events
28 28 from pyramid.wsgi import wsgiapp
29 29 from pyramid.config import Configurator
30 30 from pyramid.settings import asbool, aslist
31 31 from pyramid.httpexceptions import (
32 32 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
33 33 from pyramid.renderers import render_to_response
34 34
35 35 from rhodecode.model import meta
36 36 from rhodecode.config import patches
37 37
38 38 from rhodecode.config.environment import load_pyramid_environment, propagate_rhodecode_config
39 39
40 40 import rhodecode.events
41 41 from rhodecode.config.config_maker import sanitize_settings_and_apply_defaults
42 42 from rhodecode.lib.middleware.vcs import VCSMiddleware
43 43 from rhodecode.lib.request import Request
44 44 from rhodecode.lib.vcs import VCSCommunicationError
45 45 from rhodecode.lib.exceptions import VCSServerUnavailable
46 46 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
47 47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
48 48 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
49 49 from rhodecode.lib.utils2 import AttributeDict
50 50 from rhodecode.lib.exc_tracking import store_exception, format_exc
51 51 from rhodecode.subscribers import (
52 scan_repositories_if_enabled, write_js_routes_if_enabled,
52 auto_merge_pr_if_needed, scan_repositories_if_enabled, write_js_routes_if_enabled,
53 53 write_metadata_if_needed, write_usage_data, import_license_if_present)
54 54 from rhodecode.lib.statsd_client import StatsdClient
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 def is_http_error(response):
60 60 # error which should have traceback
61 61 return response.status_code > 499
62 62
63 63
64 64 def should_load_all():
65 65 """
66 66 Returns whether all application components should be loaded. In some cases it's
67 67 desirable to skip loading apps for faster shell script execution.
68 68 """
69 69 ssh_cmd = os.environ.get('RC_CMD_SSH_WRAPPER')
70 70 if ssh_cmd:
71 71 return False
72 72
73 73 return True
74 74
75 75
76 76 def make_pyramid_app(global_config, **settings):
77 77 """
78 78 Constructs the WSGI application based on Pyramid.
79 79
80 80 Specials:
81 81
82 82 * The application can also be integrated like a plugin via the call to
83 83 `includeme`. This is accompanied with the other utility functions which
84 84 are called. Changing this should be done with great care to not break
85 85 cases when these fragments are assembled from another place.
86 86
87 87 """
88 88 start_time = time.time()
89 89 log.info('Pyramid app config starting')
90 90
91 91 sanitize_settings_and_apply_defaults(global_config, settings)
92 92
93 93 # init and bootstrap StatsdClient
94 94 StatsdClient.setup(settings)
95 95
96 96 config = Configurator(settings=settings)
97 97 # Init our statsd at very start
98 98 config.registry.statsd = StatsdClient.statsd
99 99
100 100 # Apply compatibility patches
101 101 patches.inspect_getargspec()
102 102 patches.repoze_sendmail_lf_fix()
103 103
104 104 load_pyramid_environment(global_config, settings)
105 105
106 106 # Static file view comes first
107 107 includeme_first(config)
108 108
109 109 includeme(config)
110 110
111 111 pyramid_app = config.make_wsgi_app()
112 112 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
113 113 pyramid_app.config = config
114 114
115 115 celery_settings = get_celery_config(settings)
116 116 config.configure_celery(celery_settings)
117 117
118 118 # final config set...
119 119 propagate_rhodecode_config(global_config, settings, config.registry.settings)
120 120
121 121 # creating the app uses a connection - return it after we are done
122 122 meta.Session.remove()
123 123
124 124 total_time = time.time() - start_time
125 125 log.info('Pyramid app created and configured in %.2fs', total_time)
126 126 return pyramid_app
127 127
128 128
129 129 def get_celery_config(settings):
130 130 """
131 131 Converts basic ini configuration into celery 4.X options
132 132 """
133 133
134 134 def key_converter(key_name):
135 135 pref = 'celery.'
136 136 if key_name.startswith(pref):
137 137 return key_name[len(pref):].replace('.', '_').lower()
138 138
139 139 def type_converter(parsed_key, value):
140 140 # cast to int
141 141 if value.isdigit():
142 142 return int(value)
143 143
144 144 # cast to bool
145 145 if value.lower() in ['true', 'false', 'True', 'False']:
146 146 return value.lower() == 'true'
147 147 return value
148 148
149 149 celery_config = {}
150 150 for k, v in settings.items():
151 151 pref = 'celery.'
152 152 if k.startswith(pref):
153 153 celery_config[key_converter(k)] = type_converter(key_converter(k), v)
154 154
155 155 # TODO:rethink if we want to support celerybeat based file config, probably NOT
156 156 # beat_config = {}
157 157 # for section in parser.sections():
158 158 # if section.startswith('celerybeat:'):
159 159 # name = section.split(':', 1)[1]
160 160 # beat_config[name] = get_beat_config(parser, section)
161 161
162 162 # final compose of settings
163 163 celery_settings = {}
164 164
165 165 if celery_config:
166 166 celery_settings.update(celery_config)
167 167 # if beat_config:
168 168 # celery_settings.update({'beat_schedule': beat_config})
169 169
170 170 return celery_settings
171 171
172 172
173 173 def not_found_view(request):
174 174 """
175 175 This creates the view which should be registered as not-found-view to
176 176 pyramid.
177 177 """
178 178
179 179 if not getattr(request, 'vcs_call', None):
180 180 # handle like regular case with our error_handler
181 181 return error_handler(HTTPNotFound(), request)
182 182
183 183 # handle not found view as a vcs call
184 184 settings = request.registry.settings
185 185 ae_client = getattr(request, 'ae_client', None)
186 186 vcs_app = VCSMiddleware(
187 187 HTTPNotFound(), request.registry, settings,
188 188 appenlight_client=ae_client)
189 189
190 190 return wsgiapp(vcs_app)(None, request)
191 191
192 192
193 193 def error_handler(exception, request):
194 194 import rhodecode
195 195 from rhodecode.lib import helpers
196 196
197 197 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
198 198
199 199 base_response = HTTPInternalServerError()
200 200 # prefer original exception for the response since it may have headers set
201 201 if isinstance(exception, HTTPException):
202 202 base_response = exception
203 203 elif isinstance(exception, VCSCommunicationError):
204 204 base_response = VCSServerUnavailable()
205 205
206 206 if is_http_error(base_response):
207 207 traceback_info = format_exc(request.exc_info)
208 208 log.error(
209 209 'error occurred handling this request for path: %s, \n%s',
210 210 request.path, traceback_info)
211 211
212 212 error_explanation = base_response.explanation or str(base_response)
213 213 if base_response.status_code == 404:
214 214 error_explanation += " Optionally you don't have permission to access this page."
215 215 c = AttributeDict()
216 216 c.error_message = base_response.status
217 217 c.error_explanation = error_explanation
218 218 c.visual = AttributeDict()
219 219
220 220 c.visual.rhodecode_support_url = (
221 221 request.registry.settings.get('rhodecode_support_url') or
222 222 request.route_url('rhodecode_support')
223 223 )
224 224 c.redirect_time = 0
225 225 c.rhodecode_name = rhodecode_title
226 226 if not c.rhodecode_name:
227 227 c.rhodecode_name = 'Rhodecode'
228 228
229 229 c.causes = []
230 230 if is_http_error(base_response):
231 231 c.causes.append('Server is overloaded.')
232 232 c.causes.append('Server database connection is lost.')
233 233 c.causes.append('Server expected unhandled error.')
234 234
235 235 if hasattr(base_response, 'causes'):
236 236 c.causes = base_response.causes
237 237
238 238 c.messages = helpers.flash.pop_messages(request=request)
239 239 exc_info = sys.exc_info()
240 240 c.exception_id = id(exc_info)
241 241 c.show_exception_id = isinstance(base_response, VCSServerUnavailable) \
242 242 or base_response.status_code > 499
243 243 c.exception_id_url = request.route_url(
244 244 'admin_settings_exception_tracker_show', exception_id=c.exception_id)
245 245
246 246 debug_mode = rhodecode.ConfigGet().get_bool('debug')
247 247 if c.show_exception_id:
248 248 store_exception(c.exception_id, exc_info)
249 249 c.exception_debug = debug_mode
250 250 c.exception_config_ini = rhodecode.CONFIG.get('__file__')
251 251
252 252 if debug_mode:
253 253 try:
254 254 from rich.traceback import install
255 255 install(show_locals=True)
256 256 log.debug('Installing rich tracebacks...')
257 257 except ImportError:
258 258 pass
259 259
260 260 response = render_to_response(
261 261 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
262 262 response=base_response)
263 263
264 264 response.headers["X-RC-Exception-Id"] = str(c.exception_id)
265 265
266 266 statsd = request.registry.statsd
267 267 if statsd and base_response.status_code > 499:
268 268 exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
269 269 statsd.incr('rhodecode_exception_total',
270 270 tags=["exc_source:web",
271 271 f"http_code:{base_response.status_code}",
272 272 f"type:{exc_type}"])
273 273
274 274 return response
275 275
276 276
277 277 def includeme_first(config):
278 278 # redirect automatic browser favicon.ico requests to correct place
279 279 def favicon_redirect(context, request):
280 280 return HTTPFound(
281 281 request.static_path('rhodecode:public/images/favicon.ico'))
282 282
283 283 config.add_view(favicon_redirect, route_name='favicon')
284 284 config.add_route('favicon', '/favicon.ico')
285 285
286 286 def robots_redirect(context, request):
287 287 return HTTPFound(
288 288 request.static_path('rhodecode:public/robots.txt'))
289 289
290 290 config.add_view(robots_redirect, route_name='robots')
291 291 config.add_route('robots', '/robots.txt')
292 292
293 293 config.add_static_view(
294 294 '_static/deform', 'deform:static')
295 295 config.add_static_view(
296 296 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
297 297
298 298
299 299 ce_auth_resources = [
300 300 'rhodecode.authentication.plugins.auth_crowd',
301 301 'rhodecode.authentication.plugins.auth_headers',
302 302 'rhodecode.authentication.plugins.auth_jasig_cas',
303 303 'rhodecode.authentication.plugins.auth_ldap',
304 304 'rhodecode.authentication.plugins.auth_pam',
305 305 'rhodecode.authentication.plugins.auth_rhodecode',
306 306 'rhodecode.authentication.plugins.auth_token',
307 307 ]
308 308
309 309
310 310 def includeme(config, auth_resources=None):
311 311 from rhodecode.lib.celerylib.loader import configure_celery
312 312 log.debug('Initializing main includeme from %s', os.path.basename(__file__))
313 313 settings = config.registry.settings
314 314 config.set_request_factory(Request)
315 315
316 316 # plugin information
317 317 config.registry.rhodecode_plugins = collections.OrderedDict()
318 318
319 319 config.add_directive(
320 320 'register_rhodecode_plugin', register_rhodecode_plugin)
321 321
322 322 config.add_directive('configure_celery', configure_celery)
323 323
324 324 if settings.get('appenlight', False):
325 325 config.include('appenlight_client.ext.pyramid_tween')
326 326
327 327 load_all = should_load_all()
328 328
329 329 # Includes which are required. The application would fail without them.
330 330 config.include('pyramid_mako')
331 331 config.include('rhodecode.lib.rc_beaker')
332 332 config.include('rhodecode.lib.rc_cache')
333 333 config.include('rhodecode.lib.archive_cache')
334 334
335 335 config.include('rhodecode.apps._base.navigation')
336 336 config.include('rhodecode.apps._base.subscribers')
337 337 config.include('rhodecode.tweens')
338 338 config.include('rhodecode.authentication')
339 339
340 340 if load_all:
341 341
342 342 # load CE authentication plugins
343 343
344 344 if auth_resources:
345 345 ce_auth_resources.extend(auth_resources)
346 346
347 347 for resource in ce_auth_resources:
348 348 config.include(resource)
349 349
350 350 # Auto discover authentication plugins and include their configuration.
351 351 if asbool(settings.get('auth_plugin.import_legacy_plugins', 'true')):
352 352 from rhodecode.authentication import discover_legacy_plugins
353 353 discover_legacy_plugins(config)
354 354
355 355 # apps
356 356 if load_all:
357 357 log.debug('Starting config.include() calls')
358 358 config.include('rhodecode.api.includeme')
359 359 config.include('rhodecode.apps._base.includeme')
360 360 config.include('rhodecode.apps._base.navigation.includeme')
361 361 config.include('rhodecode.apps._base.subscribers.includeme')
362 362 config.include('rhodecode.apps.hovercards.includeme')
363 363 config.include('rhodecode.apps.ops.includeme')
364 364 config.include('rhodecode.apps.channelstream.includeme')
365 365 config.include('rhodecode.apps.file_store.includeme')
366 366 config.include('rhodecode.apps.admin.includeme')
367 367 config.include('rhodecode.apps.login.includeme')
368 368 config.include('rhodecode.apps.home.includeme')
369 369 config.include('rhodecode.apps.journal.includeme')
370 370
371 371 config.include('rhodecode.apps.repository.includeme')
372 372 config.include('rhodecode.apps.repo_group.includeme')
373 373 config.include('rhodecode.apps.user_group.includeme')
374 374 config.include('rhodecode.apps.search.includeme')
375 375 config.include('rhodecode.apps.user_profile.includeme')
376 376 config.include('rhodecode.apps.user_group_profile.includeme')
377 377 config.include('rhodecode.apps.my_account.includeme')
378 378 config.include('rhodecode.apps.gist.includeme')
379 379
380 380 config.include('rhodecode.apps.svn_support.includeme')
381 381 config.include('rhodecode.apps.ssh_support.includeme')
382 382 config.include('rhodecode.apps.debug_style')
383 383
384 384 if load_all:
385 385 config.include('rhodecode.integrations.includeme')
386 386 config.include('rhodecode.integrations.routes.includeme')
387 387
388 388 config.add_route('rhodecode_support', 'https://rhodecode.com/help/', static=True)
389 389 settings['default_locale_name'] = settings.get('lang', 'en')
390 390 config.add_translation_dirs('rhodecode:i18n/')
391 391
392 392 # Add subscribers.
393 393 if load_all:
394 394 log.debug('Adding subscribers...')
395 config.add_subscriber(auto_merge_pr_if_needed, rhodecode.events.PullRequestReviewEvent)
395 396 config.add_subscriber(scan_repositories_if_enabled,
396 397 pyramid.events.ApplicationCreated)
397 398 config.add_subscriber(write_metadata_if_needed,
398 399 pyramid.events.ApplicationCreated)
399 400 config.add_subscriber(write_usage_data,
400 401 pyramid.events.ApplicationCreated)
401 402 config.add_subscriber(write_js_routes_if_enabled,
402 403 pyramid.events.ApplicationCreated)
403 404 config.add_subscriber(import_license_if_present,
404 405 pyramid.events.ApplicationCreated)
405 406
406 407 # Set the default renderer for HTML templates to mako.
407 408 config.add_mako_renderer('.html')
408 409
409 410 config.add_renderer(
410 411 name='json_ext',
411 412 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
412 413
413 414 config.add_renderer(
414 415 name='string_html',
415 416 factory='rhodecode.lib.string_renderer.html')
416 417
417 418 # include RhodeCode plugins
418 419 includes = aslist(settings.get('rhodecode.includes', []))
419 420 log.debug('processing rhodecode.includes data...')
420 421 for inc in includes:
421 422 config.include(inc)
422 423
423 424 # custom not found view, if our pyramid app doesn't know how to handle
424 425 # the request, pass it to the potential VCS handling app
425 426 config.add_notfound_view(not_found_view)
426 427 if not settings.get('debugtoolbar.enabled', False):
427 428 # when debugtoolbar is disabled, handle all exceptions via the error_handlers
428 429 config.add_view(error_handler, context=Exception)
429 430
430 431 # all errors including 403/404/50X
431 432 config.add_view(error_handler, context=HTTPError)
432 433
433 434
434 435 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
435 436 """
436 437 Apply outer WSGI middlewares around the application.
437 438 """
438 439 registry = config.registry
439 440 settings = registry.settings
440 441
441 442 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
442 443 pyramid_app = HttpsFixup(pyramid_app, settings)
443 444
444 445 pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
445 446 pyramid_app, settings)
446 447 registry.ae_client = _ae_client
447 448
448 449 if settings['gzip_responses']:
449 450 pyramid_app = make_gzip_middleware(
450 451 pyramid_app, settings, compress_level=1)
451 452
452 453 # this should be the outermost middleware in the wsgi stack since
453 454 # middleware like Routes make database calls
454 455 def pyramid_app_with_cleanup(environ, start_response):
455 456 start = time.time()
456 457 try:
457 458 return pyramid_app(environ, start_response)
458 459 finally:
459 460 # Dispose current database session and rollback uncommitted
460 461 # transactions.
461 462 meta.Session.remove()
462 463
463 464 # In single-threaded server mode, on a non-sqlite db we should have
464 465 # '0 Current Checked out connections' at the end of a request,
465 466 # if not, then something, somewhere is leaving a connection open
466 467 pool = meta.get_engine().pool
467 468 log.debug('sa pool status: %s', pool.status())
468 469 total = time.time() - start
469 470 log.debug('Request processing finalized: %.4fs', total)
470 471
471 472 return pyramid_app_with_cleanup
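The add_subscriber call above means the handler runs whenever core dispatches a pull request review event. A rough sketch of the trigger side, with the event constructor arguments shown purely as an assumption:

# Rough sketch of how such an event is dispatched; constructor arguments are illustrative.
from rhodecode import events


def fire_review_event(pull_request, status):
    # events.trigger dispatches the event to registered subscribers,
    # which now include auto_merge_pr_if_needed.
    events.trigger(events.PullRequestReviewEvent(pull_request, status))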
@@ -1,662 +1,663
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 this module contains form validation classes
21 21 see http://formencode.org/module-formencode.validators.html
22 22 for a list of all available validators
23 23
24 24 we can create our own validators
25 25
26 26 The table below outlines the options which can be used in a schema in addition to the validators themselves
27 27 pre_validators [] These validators will be applied before the schema
28 28 chained_validators [] These validators will be applied after the schema
29 29 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
30 30 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
31 31 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default value has been specified and therefore missing keys shouldn't take a default value.
32 32 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
33 33
34 34
35 35 <name> = formencode.validators.<name of validator>
36 36 <name> must equal form name
37 37 list=[1,2,3,4,5]
38 38 for SELECT use formencode.All(OneOf(list), Int())
39 39
40 40 """
41 41
42 42 import deform
43 43 import logging
44 44 import formencode
45 45
46 46 from pkg_resources import resource_filename
47 47 from formencode import All, Pipe
48 48
49 49 from pyramid.threadlocal import get_current_request
50 50
51 51 from rhodecode import BACKENDS
52 52 from rhodecode.lib import helpers
53 53 from rhodecode.model import validators as v
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 deform_templates = resource_filename('deform', 'templates')
59 59 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
60 60 search_path = (rhodecode_templates, deform_templates)
61 61
62 62
63 63 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
64 64 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
65 65 def __call__(self, template_name, **kw):
66 66 kw['h'] = helpers
67 67 kw['request'] = get_current_request()
68 68 return self.load(template_name)(**kw)
69 69
70 70
71 71 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
72 72 deform.Form.set_default_renderer(form_renderer)
73 73
74 74
75 75 def LoginForm(localizer):
76 76 _ = localizer
77 77
78 78 class _LoginForm(formencode.Schema):
79 79 allow_extra_fields = True
80 80 filter_extra_fields = True
81 81 username = v.UnicodeString(
82 82 strip=True,
83 83 min=1,
84 84 not_empty=True,
85 85 messages={
86 86 'empty': _('Please enter a login'),
87 87 'tooShort': _('Enter a value %(min)i characters long or more')
88 88 }
89 89 )
90 90
91 91 password = v.UnicodeString(
92 92 strip=False,
93 93 min=3,
94 94 max=72,
95 95 not_empty=True,
96 96 messages={
97 97 'empty': _('Please enter a password'),
98 98 'tooShort': _('Enter %(min)i characters or more')}
99 99 )
100 100
101 101 remember = v.StringBoolean(if_missing=False)
102 102
103 103 chained_validators = [v.ValidAuth(localizer)]
104 104 return _LoginForm
105 105
106 106
107 107 def TOTPForm(localizer, user, allow_recovery_code_use=False):
108 108 _ = localizer
109 109
110 110 class _TOTPForm(formencode.Schema):
111 111 allow_extra_fields = True
112 112 filter_extra_fields = False
113 113 totp = v.Regex(r'^(?:\d{6}|[A-Z0-9]{32})$')
114 114 secret_totp = v.String()
115 115
116 116 def to_python(self, value, state=None):
117 117 validation_checks = [user.is_totp_valid]
118 118 if allow_recovery_code_use:
119 119 validation_checks.append(user.is_2fa_recovery_code_valid)
120 120 form_data = super().to_python(value, state)
121 121 received_code = form_data['totp']
122 122 secret = form_data.get('secret_totp')
123 123
124 124 if not any(map(lambda func: func(received_code, secret), validation_checks)):
125 125 error_msg = _('Code is invalid. Try again!')
126 126 raise formencode.Invalid(error_msg, v, state, error_dict={'totp': error_msg})
127 127 return form_data
128 128
129 129 return _TOTPForm
130 130
131 131
132 132 def WhitelistedVcsClientsForm(localizer):
133 133 _ = localizer
134 134
135 135 class _WhitelistedVcsClientsForm(formencode.Schema):
136 136 regexp = r'^(?:\s*[<>=~^!]*\s*\d{1,2}\.\d{1,2}(?:\.\d{1,2})?\s*|\*)\s*(?:,\s*[<>=~^!]*\s*\d{1,2}\.\d{1,2}(?:\.\d{1,2})?\s*|\s*\*\s*)*$'
137 137 allow_extra_fields = True
138 138 filter_extra_fields = True
139 139 git = v.Regex(regexp)
140 140 hg = v.Regex(regexp)
141 141 svn = v.Regex(regexp)
142 142
143 143 return _WhitelistedVcsClientsForm
144 144
145 145
146 146 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
147 147 old_data = old_data or {}
148 148 available_languages = available_languages or []
149 149 _ = localizer
150 150
151 151 class _UserForm(formencode.Schema):
152 152 allow_extra_fields = True
153 153 filter_extra_fields = True
154 154 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
155 155 v.ValidUsername(localizer, edit, old_data))
156 156 if edit:
157 157 new_password = All(
158 158 v.ValidPassword(localizer),
159 159 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
160 160 )
161 161 password_confirmation = All(
162 162 v.ValidPassword(localizer),
163 163 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
164 164 )
165 165 admin = v.StringBoolean(if_missing=False)
166 166 else:
167 167 password = All(
168 168 v.ValidPassword(localizer),
169 169 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
170 170 )
171 171 password_confirmation = All(
172 172 v.ValidPassword(localizer),
173 173 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
174 174 )
175 175
176 176 password_change = v.StringBoolean(if_missing=False)
177 177 create_repo_group = v.StringBoolean(if_missing=False)
178 178
179 179 active = v.StringBoolean(if_missing=False)
180 180 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
181 181 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
182 182 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
183 183 description = v.UnicodeString(strip=True, min=1, max=250, not_empty=False,
184 184 if_missing='')
185 185 extern_name = v.UnicodeString(strip=True)
186 186 extern_type = v.UnicodeString(strip=True)
187 187 language = v.OneOf(available_languages, hideList=False,
188 188 testValueList=True, if_missing=None)
189 189 chained_validators = [v.ValidPasswordsMatch(localizer)]
190 190 return _UserForm
191 191
192 192
193 193 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
194 194 old_data = old_data or {}
195 195 _ = localizer
196 196
197 197 class _UserGroupForm(formencode.Schema):
198 198 allow_extra_fields = True
199 199 filter_extra_fields = True
200 200
201 201 users_group_name = All(
202 202 v.UnicodeString(strip=True, min=1, not_empty=True),
203 203 v.ValidUserGroup(localizer, edit, old_data)
204 204 )
205 205 user_group_description = v.UnicodeString(strip=True, min=1,
206 206 not_empty=False)
207 207
208 208 users_group_active = v.StringBoolean(if_missing=False)
209 209
210 210 if edit:
211 211 # this is user group owner
212 212 user = All(
213 213 v.UnicodeString(not_empty=True),
214 214 v.ValidRepoUser(localizer, allow_disabled))
215 215 return _UserGroupForm
216 216
217 217
218 218 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
219 219 can_create_in_root=False, allow_disabled=False):
220 220 _ = localizer
221 221 old_data = old_data or {}
222 222 available_groups = available_groups or []
223 223
224 224 class _RepoGroupForm(formencode.Schema):
225 225 allow_extra_fields = True
226 226 filter_extra_fields = False
227 227
228 228 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
229 229 v.SlugifyName(localizer),)
230 230 group_description = v.UnicodeString(strip=True, min=1,
231 231 not_empty=False)
232 232 group_copy_permissions = v.StringBoolean(if_missing=False)
233 233
234 234 group_parent_id = v.OneOf(available_groups, hideList=False,
235 235 testValueList=True, not_empty=True)
236 236 enable_locking = v.StringBoolean(if_missing=False)
237 237 chained_validators = [
238 238 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
239 239
240 240 if edit:
241 241 # this is repo group owner
242 242 user = All(
243 243 v.UnicodeString(not_empty=True),
244 244 v.ValidRepoUser(localizer, allow_disabled))
245 245 return _RepoGroupForm
246 246
247 247
248 248 def RegisterForm(localizer, edit=False, old_data=None):
249 249 _ = localizer
250 250 old_data = old_data or {}
251 251
252 252 class _RegisterForm(formencode.Schema):
253 253 allow_extra_fields = True
254 254 filter_extra_fields = True
255 255 username = All(
256 256 v.ValidUsername(localizer, edit, old_data),
257 257 v.UnicodeString(strip=True, min=1, not_empty=True)
258 258 )
259 259 password = All(
260 260 v.ValidPassword(localizer),
261 261 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
262 262 )
263 263 password_confirmation = All(
264 264 v.ValidPassword(localizer),
265 265 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
266 266 )
267 267 active = v.StringBoolean(if_missing=False)
268 268 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
269 269 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
270 270 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
271 271
272 272 chained_validators = [v.ValidPasswordsMatch(localizer)]
273 273 return _RegisterForm
274 274
275 275
276 276 def PasswordResetForm(localizer):
277 277 _ = localizer
278 278
279 279 class _PasswordResetForm(formencode.Schema):
280 280 allow_extra_fields = True
281 281 filter_extra_fields = True
282 282 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
283 283 return _PasswordResetForm
284 284
285 285
286 286 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None, allow_disabled=False):
287 287 _ = localizer
288 288 old_data = old_data or {}
289 289 repo_groups = repo_groups or []
290 290 supported_backends = BACKENDS.keys()
291 291
292 292 class _RepoForm(formencode.Schema):
293 293 allow_extra_fields = True
294 294 filter_extra_fields = False
295 295 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
296 296 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
297 297 repo_group = All(v.CanWriteGroup(localizer, old_data),
298 298 v.OneOf(repo_groups, hideList=True))
299 299 repo_type = v.OneOf(supported_backends, required=False,
300 300 if_missing=old_data.get('repo_type'))
301 301 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
302 302 repo_private = v.StringBoolean(if_missing=False)
303 303 repo_copy_permissions = v.StringBoolean(if_missing=False)
304 304 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
305 305
306 306 repo_enable_statistics = v.StringBoolean(if_missing=False)
307 307 repo_enable_downloads = v.StringBoolean(if_missing=False)
308 308 repo_enable_locking = v.StringBoolean(if_missing=False)
309 309
310 310 if edit:
311 311 # this is repo owner
312 312 user = All(
313 313 v.UnicodeString(not_empty=True),
314 314 v.ValidRepoUser(localizer, allow_disabled))
315 315 clone_uri_change = v.UnicodeString(
316 316 not_empty=False, if_missing=v.Missing)
317 317
318 318 chained_validators = [v.ValidCloneUri(localizer),
319 319 v.ValidRepoName(localizer, edit, old_data)]
320 320 return _RepoForm
321 321
322 322
323 323 def RepoPermsForm(localizer):
324 324 _ = localizer
325 325
326 326 class _RepoPermsForm(formencode.Schema):
327 327 allow_extra_fields = True
328 328 filter_extra_fields = False
329 329 chained_validators = [v.ValidPerms(localizer, type_='repo')]
330 330 return _RepoPermsForm
331 331
332 332
333 333 def RepoGroupPermsForm(localizer, valid_recursive_choices):
334 334 _ = localizer
335 335
336 336 class _RepoGroupPermsForm(formencode.Schema):
337 337 allow_extra_fields = True
338 338 filter_extra_fields = False
339 339 recursive = v.OneOf(valid_recursive_choices)
340 340 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
341 341 return _RepoGroupPermsForm
342 342
343 343
344 344 def UserGroupPermsForm(localizer):
345 345 _ = localizer
346 346
347 347 class _UserPermsForm(formencode.Schema):
348 348 allow_extra_fields = True
349 349 filter_extra_fields = False
350 350 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
351 351 return _UserPermsForm
352 352
353 353
354 354 def RepoFieldForm(localizer):
355 355 _ = localizer
356 356
357 357 class _RepoFieldForm(formencode.Schema):
358 358 filter_extra_fields = True
359 359 allow_extra_fields = True
360 360
361 361 new_field_key = All(v.FieldKey(localizer),
362 362 v.UnicodeString(strip=True, min=3, not_empty=True))
363 363 new_field_value = v.UnicodeString(not_empty=False, if_missing='')
364 364 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
365 365 if_missing='str')
366 366 new_field_label = v.UnicodeString(not_empty=False)
367 367 new_field_desc = v.UnicodeString(not_empty=False)
368 368 return _RepoFieldForm
369 369
370 370
371 371 def RepoForkForm(localizer, edit=False, old_data=None,
372 372 supported_backends=BACKENDS.keys(), repo_groups=None):
373 373 _ = localizer
374 374 old_data = old_data or {}
375 375 repo_groups = repo_groups or []
376 376
377 377 class _RepoForkForm(formencode.Schema):
378 378 allow_extra_fields = True
379 379 filter_extra_fields = False
380 380 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
381 381 v.SlugifyName(localizer))
382 382 repo_group = All(v.CanWriteGroup(localizer, ),
383 383 v.OneOf(repo_groups, hideList=True))
384 384 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
385 385 description = v.UnicodeString(strip=True, min=1, not_empty=True)
386 386 private = v.StringBoolean(if_missing=False)
387 387 copy_permissions = v.StringBoolean(if_missing=False)
388 388 fork_parent_id = v.UnicodeString()
389 389 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
390 390 return _RepoForkForm
391 391
392 392
393 393 def ApplicationSettingsForm(localizer):
394 394 _ = localizer
395 395
396 396 class _ApplicationSettingsForm(formencode.Schema):
397 397 allow_extra_fields = True
398 398 filter_extra_fields = False
399 399 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
400 400 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
401 401 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
402 402 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
403 403 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
404 404 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
405 405 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
406 406 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
407 407 return _ApplicationSettingsForm
408 408
409 409
410 410 def ApplicationVisualisationForm(localizer):
411 411 from rhodecode.model.db import Repository
412 412 _ = localizer
413 413
414 414 class _ApplicationVisualisationForm(formencode.Schema):
415 415 allow_extra_fields = True
416 416 filter_extra_fields = False
417 417 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
418 418 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
419 419 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
420 420
421 421 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
422 422 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
423 423 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
424 424 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
425 425 rhodecode_show_version = v.StringBoolean(if_missing=False)
426 426 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
427 427 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
428 428 rhodecode_gravatar_url = v.UnicodeString(min=3)
429 429 rhodecode_clone_uri_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI)
430 430 rhodecode_clone_uri_id_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_ID)
431 431 rhodecode_clone_uri_ssh_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_SSH)
432 432 rhodecode_support_url = v.UnicodeString()
433 433 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
434 434 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
435 435 return _ApplicationVisualisationForm
436 436
437 437
438 438 class _BaseVcsSettingsForm(formencode.Schema):
439 439
440 440 allow_extra_fields = True
441 441 filter_extra_fields = False
442 442 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
443 443 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
444 444 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
445 445
446 446 # PR/Code-review
447 447 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
448 rhodecode_auto_merge_enabled = v.StringBoolean(if_missing=False)
448 449 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
449 450
450 451 # hg
451 452 extensions_largefiles = v.StringBoolean(if_missing=False)
452 453 extensions_evolve = v.StringBoolean(if_missing=False)
453 454 phases_publish = v.StringBoolean(if_missing=False)
454 455
455 456 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
456 457 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
457 458
458 459 # git
459 460 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
460 461 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
461 462 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
462 463
463 464 # cache
464 465 rhodecode_diff_cache = v.StringBoolean(if_missing=False)
465 466
466 467
467 468 def ApplicationUiSettingsForm(localizer):
468 469 _ = localizer
469 470
470 471 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
471 472 extensions_hggit = v.StringBoolean(if_missing=False)
472 473 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
473 474 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
474 475 return _ApplicationUiSettingsForm
475 476
476 477
477 478 def RepoVcsSettingsForm(localizer, repo_name):
478 479 _ = localizer
479 480
480 481 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
481 482 inherit_global_settings = v.StringBoolean(if_missing=False)
482 483 new_svn_branch = v.ValidSvnPattern(localizer,
483 484 section='vcs_svn_branch', repo_name=repo_name)
484 485 new_svn_tag = v.ValidSvnPattern(localizer,
485 486 section='vcs_svn_tag', repo_name=repo_name)
486 487 return _RepoVcsSettingsForm
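Below is a minimal, hedged sketch of how a repo-level VCS settings form built on _BaseVcsSettingsForm can be driven, including the new rhodecode_auto_merge_enabled flag; the localizer stub, repository name and POST payload are illustrative assumptions, not values taken from this changeset:

    import formencode
    from rhodecode.model.forms import RepoVcsSettingsForm

    _ = lambda s: s  # stand-in localizer for this sketch; any translator callable works

    post_data = {
        'inherit_global_settings': 'false',
        'rhodecode_pr_merge_enabled': 'true',
        'rhodecode_auto_merge_enabled': 'true',   # new auto-merge flag validated by the schema
        'rhodecode_use_outdated_comments': 'false',
        'new_svn_branch': '',
        'new_svn_tag': '',
    }
    try:
        # StringBoolean fields coerce 'true'/'false' strings to bool; fields declared with
        # if_missing=False simply take their default when absent from the POST data
        cleaned = RepoVcsSettingsForm(_, repo_name='some-repo')().to_python(post_data)
    except formencode.Invalid as exc:
        cleaned = None  # exc.unpack_errors() describes the per-field validation problems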
487 488
488 489
489 490 def LabsSettingsForm(localizer):
490 491 _ = localizer
491 492
492 493 class _LabSettingsForm(formencode.Schema):
493 494 allow_extra_fields = True
494 495 filter_extra_fields = False
495 496 return _LabSettingsForm
496 497
497 498
498 499 def ApplicationPermissionsForm(
499 500 localizer, register_choices, password_reset_choices,
500 501 extern_activate_choices):
501 502 _ = localizer
502 503
503 504 class _DefaultPermissionsForm(formencode.Schema):
504 505 allow_extra_fields = True
505 506 filter_extra_fields = True
506 507
507 508 anonymous = v.StringBoolean(if_missing=False)
508 509 default_register = v.OneOf(register_choices)
509 510 default_register_message = v.UnicodeString()
510 511 default_password_reset = v.OneOf(password_reset_choices)
511 512 default_extern_activate = v.OneOf(extern_activate_choices)
512 513 return _DefaultPermissionsForm
513 514
514 515
515 516 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
516 517 user_group_perms_choices):
517 518 _ = localizer
518 519
519 520 class _ObjectPermissionsForm(formencode.Schema):
520 521 allow_extra_fields = True
521 522 filter_extra_fields = True
522 523 overwrite_default_repo = v.StringBoolean(if_missing=False)
523 524 overwrite_default_group = v.StringBoolean(if_missing=False)
524 525 overwrite_default_user_group = v.StringBoolean(if_missing=False)
525 526
526 527 default_repo_perm = v.OneOf(repo_perms_choices)
527 528 default_group_perm = v.OneOf(group_perms_choices)
528 529 default_user_group_perm = v.OneOf(user_group_perms_choices)
529 530
530 531 return _ObjectPermissionsForm
531 532
532 533
533 534 def BranchPermissionsForm(localizer, branch_perms_choices):
534 535 _ = localizer
535 536
536 537 class _BranchPermissionsForm(formencode.Schema):
537 538 allow_extra_fields = True
538 539 filter_extra_fields = True
539 540 overwrite_default_branch = v.StringBoolean(if_missing=False)
540 541 default_branch_perm = v.OneOf(branch_perms_choices)
541 542
542 543 return _BranchPermissionsForm
543 544
544 545
545 546 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
546 547 repo_group_create_choices, user_group_create_choices,
547 548 fork_choices, inherit_default_permissions_choices):
548 549 _ = localizer
549 550
550 551 class _DefaultPermissionsForm(formencode.Schema):
551 552 allow_extra_fields = True
552 553 filter_extra_fields = True
553 554
554 555 anonymous = v.StringBoolean(if_missing=False)
555 556
556 557 default_repo_create = v.OneOf(create_choices)
557 558 default_repo_create_on_write = v.OneOf(create_on_write_choices)
558 559 default_user_group_create = v.OneOf(user_group_create_choices)
559 560 default_repo_group_create = v.OneOf(repo_group_create_choices)
560 561 default_fork_create = v.OneOf(fork_choices)
561 562 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
562 563 return _DefaultPermissionsForm
563 564
564 565
565 566 def UserIndividualPermissionsForm(localizer):
566 567 _ = localizer
567 568
568 569 class _DefaultPermissionsForm(formencode.Schema):
569 570 allow_extra_fields = True
570 571 filter_extra_fields = True
571 572
572 573 inherit_default_permissions = v.StringBoolean(if_missing=False)
573 574 return _DefaultPermissionsForm
574 575
575 576
576 577 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
577 578 _ = localizer
578 579 old_data = old_data or {}
579 580
580 581 class _DefaultsForm(formencode.Schema):
581 582 allow_extra_fields = True
582 583 filter_extra_fields = True
583 584 default_repo_type = v.OneOf(supported_backends)
584 585 default_repo_private = v.StringBoolean(if_missing=False)
585 586 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
586 587 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
587 588 default_repo_enable_locking = v.StringBoolean(if_missing=False)
588 589 return _DefaultsForm
589 590
590 591
591 592 def AuthSettingsForm(localizer):
592 593 _ = localizer
593 594
594 595 class _AuthSettingsForm(formencode.Schema):
595 596 allow_extra_fields = True
596 597 filter_extra_fields = True
597 598 auth_plugins = All(v.ValidAuthPlugins(localizer),
598 599 v.UniqueListFromString(localizer)(not_empty=True))
599 600 return _AuthSettingsForm
600 601
601 602
602 603 def UserExtraEmailForm(localizer):
603 604 _ = localizer
604 605
605 606 class _UserExtraEmailForm(formencode.Schema):
606 607 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
607 608 return _UserExtraEmailForm
608 609
609 610
610 611 def UserExtraIpForm(localizer):
611 612 _ = localizer
612 613
613 614 class _UserExtraIpForm(formencode.Schema):
614 615 ip = v.ValidIp(localizer)(not_empty=True)
615 616 return _UserExtraIpForm
616 617
617 618
618 619 def PullRequestForm(localizer, repo_id):
619 620 _ = localizer
620 621
621 622 class ReviewerForm(formencode.Schema):
622 623 user_id = v.Int(not_empty=True)
623 624 reasons = All()
624 625 rules = All(v.UniqueList(localizer, convert=int)())
625 626 mandatory = v.StringBoolean()
626 627 role = v.String(if_missing='reviewer')
627 628
628 629 class ObserverForm(formencode.Schema):
629 630 user_id = v.Int(not_empty=True)
630 631 reasons = All()
631 632 rules = All(v.UniqueList(localizer, convert=int)())
632 633 mandatory = v.StringBoolean()
633 634 role = v.String(if_missing='observer')
634 635
635 636 class _PullRequestForm(formencode.Schema):
636 637 allow_extra_fields = True
637 638 filter_extra_fields = True
638 639
639 640 common_ancestor = v.UnicodeString(strip=True, required=True)
640 641 source_repo = v.UnicodeString(strip=True, required=True)
641 642 source_ref = v.UnicodeString(strip=True, required=True)
642 643 target_repo = v.UnicodeString(strip=True, required=True)
643 644 target_ref = v.UnicodeString(strip=True, required=True)
644 645 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
645 646 v.UniqueList(localizer)(not_empty=True))
646 647 review_members = formencode.ForEach(ReviewerForm())
647 648 observer_members = formencode.ForEach(ObserverForm())
648 649 pullrequest_title = v.UnicodeString(strip=True, required=True, min=1, max=255)
649 650 pullrequest_desc = v.UnicodeString(strip=True, required=False)
650 651 description_renderer = v.UnicodeString(strip=True, required=False)
651 652
652 653 return _PullRequestForm
653 654
654 655
655 656 def IssueTrackerPatternsForm(localizer):
656 657 _ = localizer
657 658
658 659 class _IssueTrackerPatternsForm(formencode.Schema):
659 660 allow_extra_fields = True
660 661 filter_extra_fields = False
661 662 chained_validators = [v.ValidPattern(localizer)]
662 663 return _IssueTrackerPatternsForm
@@ -1,2389 +1,2393
1 1 # Copyright (C) 2012-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 """
21 21 pull request model for RhodeCode
22 22 """
23 23
24 24 import logging
25 25 import os
26 26
27 27 import datetime
28 28 import urllib.request
29 29 import urllib.parse
30 30 import urllib.error
31 31 import collections
32 32
33 33 import dataclasses as dataclasses
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode.lib.vcs.nodes import FileNode
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from collections import OrderedDict
41 41 from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
42 42 from rhodecode.lib.ext_json import sjson as json
43 43 from rhodecode.lib.markup_renderer import (
44 44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 45 from rhodecode.lib.hash_utils import md5_safe
46 46 from rhodecode.lib.str_utils import safe_str
47 47 from rhodecode.lib.utils2 import AttributeDict, get_current_rhodecode_user
48 48 from rhodecode.lib.vcs.backends.base import (
49 49 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
50 50 TargetRefMissing, SourceRefMissing)
51 51 from rhodecode.lib.vcs.conf import settings as vcs_settings
52 52 from rhodecode.lib.vcs.exceptions import (
53 53 CommitDoesNotExistError, EmptyRepositoryError)
54 54 from rhodecode.model import BaseModel
55 55 from rhodecode.model.changeset_status import ChangesetStatusModel
56 56 from rhodecode.model.comment import CommentsModel
57 57 from rhodecode.model.db import (
58 58 aliased, null, lazyload, and_, or_, select, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
59 59 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
60 60 from rhodecode.model.meta import Session
61 61 from rhodecode.model.notification import NotificationModel, \
62 62 EmailNotificationModel
63 63 from rhodecode.model.scm import ScmModel
64 64 from rhodecode.model.settings import VcsSettingsModel
65 65
66 66
67 67 log = logging.getLogger(__name__)
68 68
69 69
70 70 # Data structure to hold the response data when updating commits during a pull
71 71 # request update.
72 72 class UpdateResponse(object):
73 73
74 74 def __init__(self, executed, reason, new, old, common_ancestor_id,
75 75 commit_changes, source_changed, target_changed):
76 76
77 77 self.executed = executed
78 78 self.reason = reason
79 79 self.new = new
80 80 self.old = old
81 81 self.common_ancestor_id = common_ancestor_id
82 82 self.changes = commit_changes
83 83 self.source_changed = source_changed
84 84 self.target_changed = target_changed
85 85
86 86
87 87 def get_diff_info(
88 88 source_repo, source_ref, target_repo, target_ref, get_authors=False,
89 89 get_commit_authors=True):
90 90 """
91 91     Calculates detailed diff information for use when previewing the creation of a pull request.
92 92     This is also used for the default reviewers logic.
93 93 """
94 94
95 95 source_scm = source_repo.scm_instance()
96 96 target_scm = target_repo.scm_instance()
97 97
98 98 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
99 99 if not ancestor_id:
100 100 raise ValueError(
101 101 'cannot calculate diff info without a common ancestor. '
102 102 'Make sure both repositories are related, and have a common forking commit.')
103 103
104 104     # the case here is that we want a simple diff without incoming commits,
105 105 # previewing what will be merged based only on commits in the source.
106 106 log.debug('Using ancestor %s as source_ref instead of %s',
107 107 ancestor_id, source_ref)
108 108
109 109 # source of changes now is the common ancestor
110 110 source_commit = source_scm.get_commit(commit_id=ancestor_id)
111 111     # the target commit becomes the source ref, as it is the last commit;
112 112     # for diff generation this logic gives a proper diff
113 113 target_commit = source_scm.get_commit(commit_id=source_ref)
114 114
115 115 vcs_diff = \
116 116 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
117 117 ignore_whitespace=False, context=3)
118 118
119 119 diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff',
120 120 diff_limit=0, file_limit=0, show_full_diff=True)
121 121
122 122 _parsed = diff_processor.prepare()
123 123
124 124 all_files = []
125 125 all_files_changes = []
126 126 changed_lines = {}
127 127 stats = [0, 0]
128 128 for f in _parsed:
129 129 all_files.append(f['filename'])
130 130 all_files_changes.append({
131 131 'filename': f['filename'],
132 132 'stats': f['stats']
133 133 })
134 134 stats[0] += f['stats']['added']
135 135 stats[1] += f['stats']['deleted']
136 136
137 137 changed_lines[f['filename']] = []
138 138 if len(f['chunks']) < 2:
139 139 continue
140 140 # first line is "context" information
141 141 for chunks in f['chunks'][1:]:
142 142 for chunk in chunks['lines']:
143 143 if chunk['action'] not in ('del', 'mod'):
144 144 continue
145 145 changed_lines[f['filename']].append(chunk['old_lineno'])
146 146
147 147 commit_authors = []
148 148 user_counts = {}
149 149 email_counts = {}
150 150 author_counts = {}
151 151 _commit_cache = {}
152 152
153 153 commits = []
154 154 if get_commit_authors:
155 155 log.debug('Obtaining commit authors from set of commits')
156 156 _compare_data = target_scm.compare(
157 157 target_ref, source_ref, source_scm, merge=True,
158 158 pre_load=["author", "date", "message"]
159 159 )
160 160
161 161 for commit in _compare_data:
162 162             # NOTE(marcink): we serialize here so we don't produce more vcsserver calls on the data
163 163             # returned from this function, which is later passed through JSON serialization
164 164 serialized_commit = dict(
165 165 author=commit.author,
166 166 date=commit.date,
167 167 message=commit.message,
168 168 commit_id=commit.raw_id,
169 169 raw_id=commit.raw_id
170 170 )
171 171 commits.append(serialized_commit)
172 172 user = User.get_from_cs_author(serialized_commit['author'])
173 173 if user and user not in commit_authors:
174 174 commit_authors.append(user)
175 175
176 176 # lines
177 177 if get_authors:
178 178 log.debug('Calculating authors of changed files')
179 179 target_commit = source_repo.get_commit(ancestor_id)
180 180
181 181 for fname, lines in changed_lines.items():
182 182
183 183 try:
184 184 node = target_commit.get_node(fname, pre_load=["is_binary"])
185 185 except Exception:
186 186 log.exception("Failed to load node with path %s", fname)
187 187 continue
188 188
189 189 if not isinstance(node, FileNode):
190 190 continue
191 191
192 192 # NOTE(marcink): for binary node we don't do annotation, just use last author
193 193 if node.is_binary:
194 194 author = node.last_commit.author
195 195 email = node.last_commit.author_email
196 196
197 197 user = User.get_from_cs_author(author)
198 198 if user:
199 199 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
200 200 author_counts[author] = author_counts.get(author, 0) + 1
201 201 email_counts[email] = email_counts.get(email, 0) + 1
202 202
203 203 continue
204 204
205 205 for annotation in node.annotate:
206 206 line_no, commit_id, get_commit_func, line_text = annotation
207 207 if line_no in lines:
208 208 if commit_id not in _commit_cache:
209 209 _commit_cache[commit_id] = get_commit_func()
210 210 commit = _commit_cache[commit_id]
211 211 author = commit.author
212 212 email = commit.author_email
213 213 user = User.get_from_cs_author(author)
214 214 if user:
215 215 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
216 216 author_counts[author] = author_counts.get(author, 0) + 1
217 217 email_counts[email] = email_counts.get(email, 0) + 1
218 218
219 219 log.debug('Default reviewers processing finished')
220 220
221 221 return {
222 222 'commits': commits,
223 223 'files': all_files_changes,
224 224 'stats': stats,
225 225 'ancestor': ancestor_id,
226 226 # original authors of modified files
227 227 'original_authors': {
228 228 'users': user_counts,
229 229 'authors': author_counts,
230 230 'emails': email_counts,
231 231 },
232 232 'commit_authors': commit_authors
233 233 }
234 234
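A hedged usage sketch of get_diff_info above; the repository objects and commit ids are assumptions for illustration only:

    # `source_repo` and `target_repo` are assumed to be related Repository instances,
    # and both refs are commit ids that exist in their respective repositories
    diff_info = get_diff_info(
        source_repo, '<source-branch-head-commit-id>',
        target_repo, '<target-branch-head-commit-id>',
        get_authors=True)

    added, deleted = diff_info['stats']                  # aggregated added/deleted line counts
    touched_files = [f['filename'] for f in diff_info['files']]
    # user_id -> number of changed lines originally authored by that user,
    # which is what the default reviewers logic consumes
    line_authors = diff_info['original_authors']['users']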
235 235
236 236 class PullRequestModel(BaseModel):
237 237
238 238 cls = PullRequest
239 239
240 240 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
241 241
242 242 UPDATE_STATUS_MESSAGES = {
243 243 UpdateFailureReason.NONE: lazy_ugettext(
244 244 'Pull request update successful.'),
245 245 UpdateFailureReason.UNKNOWN: lazy_ugettext(
246 246 'Pull request update failed because of an unknown error.'),
247 247 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
248 248 'No update needed because the source and target have not changed.'),
249 249 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
250 250 'Pull request cannot be updated because the reference type is '
251 251 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
252 252 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
253 253 'This pull request cannot be updated because the target '
254 254 'reference is missing.'),
255 255 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
256 256 'This pull request cannot be updated because the source '
257 257 'reference is missing.'),
258 258 }
259 259 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
260 260 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
261 261
262 262 def __get_pull_request(self, pull_request):
263 263 return self._get_instance((
264 264 PullRequest, PullRequestVersion), pull_request)
265 265
266 266 def _check_perms(self, perms, pull_request, user, api=False):
267 267 if not api:
268 268 return h.HasRepoPermissionAny(*perms)(
269 269 user=user, repo_name=pull_request.target_repo.repo_name)
270 270 else:
271 271 return h.HasRepoPermissionAnyApi(*perms)(
272 272 user=user, repo_name=pull_request.target_repo.repo_name)
273 273
274 274 def check_user_read(self, pull_request, user, api=False):
275 275 _perms = ('repository.admin', 'repository.write', 'repository.read',)
276 276 return self._check_perms(_perms, pull_request, user, api)
277 277
278 278 def check_user_merge(self, pull_request, user, api=False):
279 279 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
280 280 return self._check_perms(_perms, pull_request, user, api)
281 281
282 282 def check_user_update(self, pull_request, user, api=False):
283 283 owner = user.user_id == pull_request.user_id
284 284 return self.check_user_merge(pull_request, user, api) or owner
285 285
286 286 def check_user_delete(self, pull_request, user):
287 287 owner = user.user_id == pull_request.user_id
288 288 _perms = ('repository.admin',)
289 289 return self._check_perms(_perms, pull_request, user) or owner
290 290
291 291 def is_user_reviewer(self, pull_request, user):
292 292 return user.user_id in [
293 293 x.user_id for x in
294 294 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
295 295 if x.user
296 296 ]
297 297
298 298 def check_user_change_status(self, pull_request, user, api=False):
299 299 return self.check_user_update(pull_request, user, api) \
300 300 or self.is_user_reviewer(pull_request, user)
301 301
302 302 def check_user_comment(self, pull_request, user):
303 303 owner = user.user_id == pull_request.user_id
304 304 return self.check_user_read(pull_request, user) or owner
305 305
306 306 def get(self, pull_request):
307 307 return self.__get_pull_request(pull_request)
308 308
309 309 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
310 310 statuses=None, opened_by=None, order_by=None,
311 311 order_dir='desc', only_created=False):
312 312 repo = None
313 313 if repo_name:
314 314 repo = self._get_repo(repo_name)
315 315
316 316 q = PullRequest.query()
317 317
318 318 if search_q:
319 319 like_expression = u'%{}%'.format(safe_str(search_q))
320 320 q = q.join(User, User.user_id == PullRequest.user_id)
321 321 q = q.filter(or_(
322 322 cast(PullRequest.pull_request_id, String).ilike(like_expression),
323 323 User.username.ilike(like_expression),
324 324 PullRequest.title.ilike(like_expression),
325 325 PullRequest.description.ilike(like_expression),
326 326 ))
327 327
328 328 # source or target
329 329 if repo and source:
330 330 q = q.filter(PullRequest.source_repo == repo)
331 331 elif repo:
332 332 q = q.filter(PullRequest.target_repo == repo)
333 333
334 334 # closed,opened
335 335 if statuses:
336 336 q = q.filter(PullRequest.status.in_(statuses))
337 337
338 338 # opened by filter
339 339 if opened_by:
340 340 q = q.filter(PullRequest.user_id.in_(opened_by))
341 341
342 342 # only get those that are in "created" state
343 343 if only_created:
344 344 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
345 345
346 346 order_map = {
347 347 'name_raw': PullRequest.pull_request_id,
348 348 'id': PullRequest.pull_request_id,
349 349 'title': PullRequest.title,
350 350 'updated_on_raw': PullRequest.updated_on,
351 351 'target_repo': PullRequest.target_repo_id
352 352 }
353 353 if order_by and order_by in order_map:
354 354 if order_dir == 'asc':
355 355 q = q.order_by(order_map[order_by].asc())
356 356 else:
357 357 q = q.order_by(order_map[order_by].desc())
358 358
359 359 return q
360 360
361 361 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
362 362 opened_by=None):
363 363 """
364 364 Count the number of pull requests for a specific repository.
365 365
366 366 :param repo_name: target or source repo
367 367 :param search_q: filter by text
368 368 :param source: boolean flag to specify if repo_name refers to source
369 369 :param statuses: list of pull request statuses
370 370 :param opened_by: author user of the pull request
371 371 :returns: int number of pull requests
372 372 """
373 373 q = self._prepare_get_all_query(
374 374 repo_name, search_q=search_q, source=source, statuses=statuses,
375 375 opened_by=opened_by)
376 376
377 377 return q.count()
378 378
379 379 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
380 380 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
381 381 """
382 382 Get all pull requests for a specific repository.
383 383
384 384 :param repo_name: target or source repo
385 385 :param search_q: filter by text
386 386 :param source: boolean flag to specify if repo_name refers to source
387 387 :param statuses: list of pull request statuses
388 388 :param opened_by: author user of the pull request
389 389 :param offset: pagination offset
390 390 :param length: length of returned list
391 391 :param order_by: order of the returned list
392 392 :param order_dir: 'asc' or 'desc' ordering direction
393 393 :returns: list of pull requests
394 394 """
395 395 q = self._prepare_get_all_query(
396 396 repo_name, search_q=search_q, source=source, statuses=statuses,
397 397 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
398 398
399 399 if length:
400 400 pull_requests = q.limit(length).offset(offset).all()
401 401 else:
402 402 pull_requests = q.all()
403 403
404 404 return pull_requests
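A brief, hedged sketch of how the paging pair above (count_all + get_all) is typically driven, e.g. from a data-grid view; the repository name, page size and search term are illustrative assumptions:

    model = PullRequestModel()
    # count first to size the pager, then fetch only the visible slice
    total = model.count_all('some-repo', search_q='bugfix', statuses=None)
    page = model.get_all(
        'some-repo', search_q='bugfix', statuses=None,
        offset=0, length=20, order_by='updated_on_raw', order_dir='desc')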
405 405
406 406 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
407 407 """
408 408 Count the number of pull requests for a specific repository that are
409 409 awaiting review.
410 410
411 411 :param repo_name: target or source repo
412 412 :param search_q: filter by text
413 413 :param statuses: list of pull request statuses
414 414 :returns: int number of pull requests
415 415 """
416 416 pull_requests = self.get_awaiting_review(
417 417 repo_name, search_q=search_q, statuses=statuses)
418 418
419 419 return len(pull_requests)
420 420
421 421 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
422 422 offset=0, length=None, order_by=None, order_dir='desc'):
423 423 """
424 424 Get all pull requests for a specific repository that are awaiting
425 425 review.
426 426
427 427 :param repo_name: target or source repo
428 428 :param search_q: filter by text
429 429 :param statuses: list of pull request statuses
430 430 :param offset: pagination offset
431 431 :param length: length of returned list
432 432 :param order_by: order of the returned list
433 433 :param order_dir: 'asc' or 'desc' ordering direction
434 434 :returns: list of pull requests
435 435 """
436 436 pull_requests = self.get_all(
437 437 repo_name, search_q=search_q, statuses=statuses,
438 438 order_by=order_by, order_dir=order_dir)
439 439
440 440 _filtered_pull_requests = []
441 441 for pr in pull_requests:
442 442 status = pr.calculated_review_status()
443 443 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
444 444 ChangesetStatus.STATUS_UNDER_REVIEW]:
445 445 _filtered_pull_requests.append(pr)
446 446 if length:
447 447 return _filtered_pull_requests[offset:offset+length]
448 448 else:
449 449 return _filtered_pull_requests
450 450
451 451 def _prepare_awaiting_my_review_review_query(
452 452 self, repo_name, user_id, search_q=None, statuses=None,
453 453 order_by=None, order_dir='desc'):
454 454
455 455 for_review_statuses = [
456 456 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
457 457 ]
458 458
459 459 pull_request_alias = aliased(PullRequest)
460 460 status_alias = aliased(ChangesetStatus)
461 461 reviewers_alias = aliased(PullRequestReviewers)
462 462 repo_alias = aliased(Repository)
463 463
464 464 last_ver_subq = Session()\
465 465 .query(func.min(ChangesetStatus.version)) \
466 466 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
467 467 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
468 468 .subquery()
469 469
470 470 q = Session().query(pull_request_alias) \
471 471 .options(lazyload(pull_request_alias.author)) \
472 472 .join(reviewers_alias,
473 473 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
474 474 .join(repo_alias,
475 475 repo_alias.repo_id == pull_request_alias.target_repo_id) \
476 476 .outerjoin(status_alias,
477 477 and_(status_alias.user_id == reviewers_alias.user_id,
478 478 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
479 479 .filter(or_(status_alias.version == null(),
480 480 status_alias.version == last_ver_subq)) \
481 481 .filter(reviewers_alias.user_id == user_id) \
482 482 .filter(repo_alias.repo_name == repo_name) \
483 483 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
484 484 .group_by(pull_request_alias)
485 485
486 486 # closed,opened
487 487 if statuses:
488 488 q = q.filter(pull_request_alias.status.in_(statuses))
489 489
490 490 if search_q:
491 491 like_expression = u'%{}%'.format(safe_str(search_q))
492 492 q = q.join(User, User.user_id == pull_request_alias.user_id)
493 493 q = q.filter(or_(
494 494 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
495 495 User.username.ilike(like_expression),
496 496 pull_request_alias.title.ilike(like_expression),
497 497 pull_request_alias.description.ilike(like_expression),
498 498 ))
499 499
500 500 order_map = {
501 501 'name_raw': pull_request_alias.pull_request_id,
502 502 'title': pull_request_alias.title,
503 503 'updated_on_raw': pull_request_alias.updated_on,
504 504 'target_repo': pull_request_alias.target_repo_id
505 505 }
506 506 if order_by and order_by in order_map:
507 507 if order_dir == 'asc':
508 508 q = q.order_by(order_map[order_by].asc())
509 509 else:
510 510 q = q.order_by(order_map[order_by].desc())
511 511
512 512 return q
513 513
514 514 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
515 515 """
516 516 Count the number of pull requests for a specific repository that are
517 517 awaiting review from a specific user.
518 518
519 519 :param repo_name: target or source repo
520 520 :param user_id: reviewer user of the pull request
521 521 :param search_q: filter by text
522 522 :param statuses: list of pull request statuses
523 523 :returns: int number of pull requests
524 524 """
525 525 q = self._prepare_awaiting_my_review_review_query(
526 526 repo_name, user_id, search_q=search_q, statuses=statuses)
527 527 return q.count()
528 528
529 529 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
530 530 offset=0, length=None, order_by=None, order_dir='desc'):
531 531 """
532 532 Get all pull requests for a specific repository that are awaiting
533 533 review from a specific user.
534 534
535 535 :param repo_name: target or source repo
536 536 :param user_id: reviewer user of the pull request
537 537 :param search_q: filter by text
538 538 :param statuses: list of pull request statuses
539 539 :param offset: pagination offset
540 540 :param length: length of returned list
541 541 :param order_by: order of the returned list
542 542 :param order_dir: 'asc' or 'desc' ordering direction
543 543 :returns: list of pull requests
544 544 """
545 545
546 546 q = self._prepare_awaiting_my_review_review_query(
547 547 repo_name, user_id, search_q=search_q, statuses=statuses,
548 548 order_by=order_by, order_dir=order_dir)
549 549
550 550 if length:
551 551 pull_requests = q.limit(length).offset(offset).all()
552 552 else:
553 553 pull_requests = q.all()
554 554
555 555 return pull_requests
556 556
557 557 def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
558 558 order_by=None, order_dir='desc'):
559 559 """
560 560         return a query of pull-requests that the user created, or was added to as a reviewer
561 561 """
562 562 q = PullRequest.query()
563 563 if user_id:
564 564
565 565 base_query = select(PullRequestReviewers)\
566 566 .where(PullRequestReviewers.user_id == user_id)\
567 567 .with_only_columns(PullRequestReviewers.pull_request_id)
568 568
569 569 user_filter = or_(
570 570 PullRequest.user_id == user_id,
571 571 PullRequest.pull_request_id.in_(base_query)
572 572 )
573 573 q = PullRequest.query().filter(user_filter)
574 574
575 575 # closed,opened
576 576 if statuses:
577 577 q = q.filter(PullRequest.status.in_(statuses))
578 578
579 579 if query:
580 580 like_expression = u'%{}%'.format(safe_str(query))
581 581 q = q.join(User, User.user_id == PullRequest.user_id)
582 582 q = q.filter(or_(
583 583 cast(PullRequest.pull_request_id, String).ilike(like_expression),
584 584 User.username.ilike(like_expression),
585 585 PullRequest.title.ilike(like_expression),
586 586 PullRequest.description.ilike(like_expression),
587 587 ))
588 588
589 589 order_map = {
590 590 'name_raw': PullRequest.pull_request_id,
591 591 'title': PullRequest.title,
592 592 'updated_on_raw': PullRequest.updated_on,
593 593 'target_repo': PullRequest.target_repo_id
594 594 }
595 595 if order_by and order_by in order_map:
596 596 if order_dir == 'asc':
597 597 q = q.order_by(order_map[order_by].asc())
598 598 else:
599 599 q = q.order_by(order_map[order_by].desc())
600 600
601 601 return q
602 602
603 603 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
604 604 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
605 605 return q.count()
606 606
607 607 def get_im_participating_in(
608 608 self, user_id=None, statuses=None, query='', offset=0,
609 609 length=None, order_by=None, order_dir='desc'):
610 610 """
611 611         Get all pull requests that I'm participating in as a reviewer, or that I have opened
612 612 """
613 613
614 614 q = self._prepare_im_participating_query(
615 615 user_id, statuses=statuses, query=query, order_by=order_by,
616 616 order_dir=order_dir)
617 617
618 618 if length:
619 619 pull_requests = q.limit(length).offset(offset).all()
620 620 else:
621 621 pull_requests = q.all()
622 622
623 623 return pull_requests
624 624
625 625 def _prepare_participating_in_for_review_query(
626 626 self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
627 627
628 628 for_review_statuses = [
629 629 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
630 630 ]
631 631
632 632 pull_request_alias = aliased(PullRequest)
633 633 status_alias = aliased(ChangesetStatus)
634 634 reviewers_alias = aliased(PullRequestReviewers)
635 635
636 636 last_ver_subq = Session()\
637 637 .query(func.min(ChangesetStatus.version)) \
638 638 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
639 639 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
640 640 .subquery()
641 641
642 642 q = Session().query(pull_request_alias) \
643 643 .options(lazyload(pull_request_alias.author)) \
644 644 .join(reviewers_alias,
645 645 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
646 646 .outerjoin(status_alias,
647 647 and_(status_alias.user_id == reviewers_alias.user_id,
648 648 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
649 649 .filter(or_(status_alias.version == null(),
650 650 status_alias.version == last_ver_subq)) \
651 651 .filter(reviewers_alias.user_id == user_id) \
652 652 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
653 653 .group_by(pull_request_alias)
654 654
655 655 # closed,opened
656 656 if statuses:
657 657 q = q.filter(pull_request_alias.status.in_(statuses))
658 658
659 659 if query:
660 660 like_expression = u'%{}%'.format(safe_str(query))
661 661 q = q.join(User, User.user_id == pull_request_alias.user_id)
662 662 q = q.filter(or_(
663 663 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
664 664 User.username.ilike(like_expression),
665 665 pull_request_alias.title.ilike(like_expression),
666 666 pull_request_alias.description.ilike(like_expression),
667 667 ))
668 668
669 669 order_map = {
670 670 'name_raw': pull_request_alias.pull_request_id,
671 671 'title': pull_request_alias.title,
672 672 'updated_on_raw': pull_request_alias.updated_on,
673 673 'target_repo': pull_request_alias.target_repo_id
674 674 }
675 675 if order_by and order_by in order_map:
676 676 if order_dir == 'asc':
677 677 q = q.order_by(order_map[order_by].asc())
678 678 else:
679 679 q = q.order_by(order_map[order_by].desc())
680 680
681 681 return q
682 682
683 683 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
684 684 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
685 685 return q.count()
686 686
687 687 def get_im_participating_in_for_review(
688 688 self, user_id, statuses=None, query='', offset=0,
689 689 length=None, order_by=None, order_dir='desc'):
690 690 """
691 691         Get all pull requests that need user approval or rejection
692 692 """
693 693
694 694 q = self._prepare_participating_in_for_review_query(
695 695 user_id, statuses=statuses, query=query, order_by=order_by,
696 696 order_dir=order_dir)
697 697
698 698 if length:
699 699 pull_requests = q.limit(length).offset(offset).all()
700 700 else:
701 701 pull_requests = q.all()
702 702
703 703 return pull_requests
704 704
705 705 def get_versions(self, pull_request):
706 706 """
707 707         returns versions of the pull request, sorted by version ID ascending
708 708 """
709 709 return PullRequestVersion.query()\
710 710 .filter(PullRequestVersion.pull_request == pull_request)\
711 711 .order_by(PullRequestVersion.pull_request_version_id.asc())\
712 712 .all()
713 713
714 714 def get_pr_version(self, pull_request_id, version=None):
715 715 at_version = None
716 716
717 717 if version and version == 'latest':
718 718 pull_request_ver = PullRequest.get(pull_request_id)
719 719 pull_request_obj = pull_request_ver
720 720 _org_pull_request_obj = pull_request_obj
721 721 at_version = 'latest'
722 722 elif version:
723 723 pull_request_ver = PullRequestVersion.get_or_404(version)
724 724 pull_request_obj = pull_request_ver
725 725 _org_pull_request_obj = pull_request_ver.pull_request
726 726 at_version = pull_request_ver.pull_request_version_id
727 727 else:
728 728 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
729 729 pull_request_id)
730 730
731 731 pull_request_display_obj = PullRequest.get_pr_display_object(
732 732 pull_request_obj, _org_pull_request_obj)
733 733
734 734 return _org_pull_request_obj, pull_request_obj, \
735 735 pull_request_display_obj, at_version
736 736
737 737 def pr_commits_versions(self, versions):
738 738 """
739 739         Maps the pull-request commits onto all known PR versions. This way we can obtain,
740 740         for each commit, the PR versions it was introduced in.
741 741 """
742 742 commit_versions = collections.defaultdict(list)
743 743 num_versions = [x.pull_request_version_id for x in versions]
744 744 for ver in versions:
745 745 for commit_id in ver.revisions:
746 746 ver_idx = ChangesetComment.get_index_from_version(
747 747 ver.pull_request_version_id, num_versions=num_versions)
748 748 commit_versions[commit_id].append(ver_idx)
749 749 return commit_versions
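To make the mapping above concrete, a small hedged illustration (version ids, indices and commit hashes are made up):

    # given two versions, v10 with revisions ['abc1'] and v11 with revisions ['abc1', 'def2'],
    # ChangesetComment.get_index_from_version() translates the raw version ids into display
    # indices, so the result maps each commit to every PR version index it appeared in, e.g.:
    #   {'abc1': [<idx of v10>, <idx of v11>], 'def2': [<idx of v11>]}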
750 750
751 751 def create(self, created_by, source_repo, source_ref, target_repo,
752 752 target_ref, revisions, reviewers, observers, title, description=None,
753 753 common_ancestor_id=None,
754 754 description_renderer=None,
755 755 reviewer_data=None, translator=None, auth_user=None):
756 756 translator = translator or get_current_request().translate
757 757
758 758 created_by_user = self._get_user(created_by)
759 759 auth_user = auth_user or created_by_user.AuthUser()
760 760 source_repo = self._get_repo(source_repo)
761 761 target_repo = self._get_repo(target_repo)
762 762
763 763 pull_request = PullRequest()
764 764 pull_request.source_repo = source_repo
765 765 pull_request.source_ref = source_ref
766 766 pull_request.target_repo = target_repo
767 767 pull_request.target_ref = target_ref
768 768 pull_request.revisions = revisions
769 769 pull_request.title = title
770 770 pull_request.description = description
771 771 pull_request.description_renderer = description_renderer
772 772 pull_request.author = created_by_user
773 773 pull_request.reviewer_data = reviewer_data
774 774 pull_request.pull_request_state = pull_request.STATE_CREATING
775 775 pull_request.common_ancestor_id = common_ancestor_id
776 776
777 777 Session().add(pull_request)
778 778 Session().flush()
779 779
780 780 reviewer_ids = set()
781 781 # members / reviewers
782 782 for reviewer_object in reviewers:
783 783 user_id, reasons, mandatory, role, rules = reviewer_object
784 784 user = self._get_user(user_id)
785 785
786 786 # skip duplicates
787 787 if user.user_id in reviewer_ids:
788 788 continue
789 789
790 790 reviewer_ids.add(user.user_id)
791 791
792 792 reviewer = PullRequestReviewers()
793 793 reviewer.user = user
794 794 reviewer.pull_request = pull_request
795 795 reviewer.reasons = reasons
796 796 reviewer.mandatory = mandatory
797 797 reviewer.role = role
798 798
799 799 # NOTE(marcink): pick only first rule for now
800 800 rule_id = list(rules)[0] if rules else None
801 801 rule = RepoReviewRule.get(rule_id) if rule_id else None
802 802 if rule:
803 803 review_group = rule.user_group_vote_rule(user_id)
804 804 # we check if this particular reviewer is member of a voting group
805 805 if review_group:
806 806 # NOTE(marcink):
807 807 # can be that user is member of more but we pick the first same,
808 808 # same as default reviewers algo
809 809 review_group = review_group[0]
810 810
811 811 rule_data = {
812 812 'rule_name':
813 813 rule.review_rule_name,
814 814 'rule_user_group_entry_id':
815 815 review_group.repo_review_rule_users_group_id,
816 816 'rule_user_group_name':
817 817 review_group.users_group.users_group_name,
818 818 'rule_user_group_members':
819 819 [x.user.username for x in review_group.users_group.members],
820 820 'rule_user_group_members_id':
821 821 [x.user.user_id for x in review_group.users_group.members],
822 822 }
823 823 # e.g {'vote_rule': -1, 'mandatory': True}
824 824 rule_data.update(review_group.rule_data())
825 825
826 826 reviewer.rule_data = rule_data
827 827
828 828 Session().add(reviewer)
829 829 Session().flush()
830 830
831 831 for observer_object in observers:
832 832 user_id, reasons, mandatory, role, rules = observer_object
833 833 user = self._get_user(user_id)
834 834
835 835 # skip duplicates from reviewers
836 836 if user.user_id in reviewer_ids:
837 837 continue
838 838
839 839 #reviewer_ids.add(user.user_id)
840 840
841 841 observer = PullRequestReviewers()
842 842 observer.user = user
843 843 observer.pull_request = pull_request
844 844 observer.reasons = reasons
845 845 observer.mandatory = mandatory
846 846 observer.role = role
847 847
848 848 # NOTE(marcink): pick only first rule for now
849 849 rule_id = list(rules)[0] if rules else None
850 850 rule = RepoReviewRule.get(rule_id) if rule_id else None
851 851 if rule:
852 852 # TODO(marcink): do we need this for observers ??
853 853 pass
854 854
855 855 Session().add(observer)
856 856 Session().flush()
857 857
858 858 # Set approval status to "Under Review" for all commits which are
859 859 # part of this pull request.
860 860 ChangesetStatusModel().set_status(
861 861 repo=target_repo,
862 862 status=ChangesetStatus.STATUS_UNDER_REVIEW,
863 863 user=created_by_user,
864 864 pull_request=pull_request
865 865 )
866 866         # we commit early at this point. The queries above take some row locks, so we
867 867         # need to commit and finish the transaction before the validate call below,
868 868         # which for large repos could take a long time and would otherwise result in
869 869         # long row locks
870 870 Session().commit()
871 871
872 872 # prepare workspace, and run initial merge simulation. Set state during that
873 873 # operation
874 874 pull_request = PullRequest.get(pull_request.pull_request_id)
875 875
876 876         # set state to merging for the merge simulation, and if it finishes, back to
877 877         # created, to mark that the simulation is working fine
878 878 with pull_request.set_state(PullRequest.STATE_MERGING,
879 879 final_state=PullRequest.STATE_CREATED) as state_obj:
880 880 MergeCheck.validate(
881 881 pull_request, auth_user=auth_user, translator=translator)
882 882
883 883 self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
884 884 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
885 885
886 886 creation_data = pull_request.get_api_data(with_merge_state=False)
887 887 self._log_audit_action(
888 888 'repo.pull_request.create', {'data': creation_data},
889 889 auth_user, pull_request)
890 890
891 891 return pull_request
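A hedged sketch of the reviewer/observer structures that create() unpacks above; the user ids, reasons and empty rule lists are illustrative assumptions, while the role strings match the defaults used by the pull request forms earlier in this changeset:

    # each entry is (user_id, reasons, mandatory, role, rules)
    reviewers = [
        (2, ['repository owner'], True, 'reviewer', []),
    ]
    observers = [
        (3, ['added manually'], False, 'observer', []),
    ]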
892 892
893 893 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
894 894 pull_request = self.__get_pull_request(pull_request)
895 895 target_scm = pull_request.target_repo.scm_instance()
896 896 if action == 'create':
897 897 trigger_hook = hooks_utils.trigger_create_pull_request_hook
898 898 elif action == 'merge':
899 899 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
900 900 elif action == 'close':
901 901 trigger_hook = hooks_utils.trigger_close_pull_request_hook
902 902 elif action == 'review_status_change':
903 903 trigger_hook = hooks_utils.trigger_review_pull_request_hook
904 904 elif action == 'update':
905 905 trigger_hook = hooks_utils.trigger_update_pull_request_hook
906 906 elif action == 'comment':
907 907 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
908 908 elif action == 'comment_edit':
909 909 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
910 910 else:
911 911 return
912 912
913 913 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
914 914 pull_request, action, trigger_hook)
915 915 trigger_hook(
916 916 username=user.username,
917 917 repo_name=pull_request.target_repo.repo_name,
918 918 repo_type=target_scm.alias,
919 919 pull_request=pull_request,
920 920 data=data)
921 921
922 922 def _get_commit_ids(self, pull_request):
923 923 """
924 924 Return the commit ids of the merged pull request.
925 925
926 926         This method does not yet deal correctly with the lack of autoupdates
927 927         nor with implicit target updates.
928 928         For example: if a commit in the source repo is already in the target, it
929 929         will be reported anyway.
930 930 """
931 931 merge_rev = pull_request.merge_rev
932 932 if merge_rev is None:
933 933 raise ValueError('This pull request was not merged yet')
934 934
935 935 commit_ids = list(pull_request.revisions)
936 936 if merge_rev not in commit_ids:
937 937 commit_ids.append(merge_rev)
938 938
939 939 return commit_ids
940 940
941 941 def merge_repo(self, pull_request, user, extras):
942 942 repo_type = pull_request.source_repo.repo_type
943 943 log.debug("Merging pull request %s", pull_request)
944 944
945 945 extras['user_agent'] = '{}/internal-merge'.format(repo_type)
946 946 merge_state = self._merge_pull_request(pull_request, user, extras)
947 947 if merge_state.executed:
948 948 log.debug("Merge was successful, updating the pull request comments.")
949 949 self._comment_and_close_pr(pull_request, user, merge_state)
950 950
951 951 self._log_audit_action(
952 952 'repo.pull_request.merge',
953 953 {'merge_state': merge_state.__dict__},
954 954 user, pull_request)
955 955
956 956 else:
957 957 log.warning("Merge failed, not updating the pull request.")
958 958 return merge_state
959 959
960 960 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
961 961 target_vcs = pull_request.target_repo.scm_instance()
962 962 source_vcs = pull_request.source_repo.scm_instance()
963 963
964 964 message = safe_str(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
965 965 pr_id=pull_request.pull_request_id,
966 966 pr_title=pull_request.title,
967 967 pr_desc=pull_request.description,
968 968 source_repo=source_vcs.name,
969 969 source_ref_name=pull_request.source_ref_parts.name,
970 970 target_repo=target_vcs.name,
971 971 target_ref_name=pull_request.target_ref_parts.name,
972 972 )
973 973
974 974 workspace_id = self._workspace_id(pull_request)
975 975 repo_id = pull_request.target_repo.repo_id
976 976 use_rebase = self._use_rebase_for_merging(pull_request)
977 977 close_branch = self._close_branch_before_merging(pull_request)
978 978 user_name = self._user_name_for_merging(pull_request, user)
979 979
980 980 target_ref = self._refresh_reference(
981 981 pull_request.target_ref_parts, target_vcs)
982 982
983 983 callback_daemon, extras = prepare_callback_daemon(
984 984 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
985 985 host=vcs_settings.HOOKS_HOST)
986 986
987 987 with callback_daemon:
988 988 # TODO: johbo: Implement a clean way to run a config_override
989 989 # for a single call.
990 990 target_vcs.config.set(
991 991 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
992 992
993 993 merge_state = target_vcs.merge(
994 994 repo_id, workspace_id, target_ref, source_vcs,
995 995 pull_request.source_ref_parts,
996 996 user_name=user_name, user_email=user.email,
997 997 message=message, use_rebase=use_rebase,
998 998 close_branch=close_branch)
999 999
1000 1000 return merge_state
1001 1001
1002 1002 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
1003 1003 pull_request.merge_rev = merge_state.merge_ref.commit_id
1004 1004 pull_request.updated_on = datetime.datetime.now()
1005 1005 close_msg = close_msg or 'Pull request merged and closed'
1006 1006
1007 1007 CommentsModel().create(
1008 1008 text=safe_str(close_msg),
1009 1009 repo=pull_request.target_repo.repo_id,
1010 1010 user=user.user_id,
1011 1011 pull_request=pull_request.pull_request_id,
1012 1012 f_path=None,
1013 1013 line_no=None,
1014 1014 closing_pr=True
1015 1015 )
1016 1016
1017 1017 Session().add(pull_request)
1018 1018 Session().flush()
1019 1019 # TODO: paris: replace invalidation with a less radical solution
1020 1020 ScmModel().mark_for_invalidation(
1021 1021 pull_request.target_repo.repo_name)
1022 1022 self.trigger_pull_request_hook(pull_request, user, 'merge')
1023 1023
1024 1024 def has_valid_update_type(self, pull_request):
1025 1025 source_ref_type = pull_request.source_ref_parts.type
1026 1026 return source_ref_type in self.REF_TYPES
1027 1027
1028 1028 def get_flow_commits(self, pull_request):
1029 1029
1030 1030 # source repo
1031 1031 source_ref_name = pull_request.source_ref_parts.name
1032 1032 source_ref_type = pull_request.source_ref_parts.type
1033 1033 source_ref_id = pull_request.source_ref_parts.commit_id
1034 1034 source_repo = pull_request.source_repo.scm_instance()
1035 1035
1036 1036 try:
1037 1037 if source_ref_type in self.REF_TYPES:
1038 1038 source_commit = source_repo.get_commit(
1039 1039 source_ref_name, reference_obj=pull_request.source_ref_parts)
1040 1040 else:
1041 1041 source_commit = source_repo.get_commit(source_ref_id)
1042 1042 except CommitDoesNotExistError:
1043 1043 raise SourceRefMissing()
1044 1044
1045 1045 # target repo
1046 1046 target_ref_name = pull_request.target_ref_parts.name
1047 1047 target_ref_type = pull_request.target_ref_parts.type
1048 1048 target_ref_id = pull_request.target_ref_parts.commit_id
1049 1049 target_repo = pull_request.target_repo.scm_instance()
1050 1050
1051 1051 try:
1052 1052 if target_ref_type in self.REF_TYPES:
1053 1053 target_commit = target_repo.get_commit(
1054 1054 target_ref_name, reference_obj=pull_request.target_ref_parts)
1055 1055 else:
1056 1056 target_commit = target_repo.get_commit(target_ref_id)
1057 1057 except CommitDoesNotExistError:
1058 1058 raise TargetRefMissing()
1059 1059
1060 1060 return source_commit, target_commit
1061 1061
1062 1062 def update_commits(self, pull_request, updating_user):
1063 1063 """
1064 1064 Get the updated list of commits for the pull request
1065 1065 and return the new pull request version and the list
1066 1066 of commits processed by this update action
1067 1067
1068 1068 updating_user is the User object that triggered the update
1069 1069 """
1070 1070 pull_request = self.__get_pull_request(pull_request)
1071 1071 source_ref_type = pull_request.source_ref_parts.type
1072 1072 source_ref_name = pull_request.source_ref_parts.name
1073 1073 source_ref_id = pull_request.source_ref_parts.commit_id
1074 1074
1075 1075 target_ref_type = pull_request.target_ref_parts.type
1076 1076 target_ref_name = pull_request.target_ref_parts.name
1077 1077 target_ref_id = pull_request.target_ref_parts.commit_id
1078 1078
1079 1079 if not self.has_valid_update_type(pull_request):
1080 1080 log.debug("Skipping update of pull request %s due to ref type: %s",
1081 1081 pull_request, source_ref_type)
1082 1082 return UpdateResponse(
1083 1083 executed=False,
1084 1084 reason=UpdateFailureReason.WRONG_REF_TYPE,
1085 1085 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1086 1086 source_changed=False, target_changed=False)
1087 1087
1088 1088 try:
1089 1089 source_commit, target_commit = self.get_flow_commits(pull_request)
1090 1090 except SourceRefMissing:
1091 1091 return UpdateResponse(
1092 1092 executed=False,
1093 1093 reason=UpdateFailureReason.MISSING_SOURCE_REF,
1094 1094 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1095 1095 source_changed=False, target_changed=False)
1096 1096 except TargetRefMissing:
1097 1097 return UpdateResponse(
1098 1098 executed=False,
1099 1099 reason=UpdateFailureReason.MISSING_TARGET_REF,
1100 1100 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1101 1101 source_changed=False, target_changed=False)
1102 1102
1103 1103 source_changed = source_ref_id != source_commit.raw_id
1104 1104 target_changed = target_ref_id != target_commit.raw_id
1105 1105
1106 1106 if not (source_changed or target_changed):
1107 1107 log.debug("Nothing changed in pull request %s", pull_request)
1108 1108 return UpdateResponse(
1109 1109 executed=False,
1110 1110 reason=UpdateFailureReason.NO_CHANGE,
1111 1111 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1112 1112 source_changed=source_changed, target_changed=target_changed)
1113 1113
1114 1114 change_in_found = 'target repo' if target_changed else 'source repo'
1115 1115 log.debug('Updating pull request because of change in %s detected',
1116 1116 change_in_found)
1117 1117
1118 1118 # Finally, an update is needed; in case of a source change we create
1119 1119 # a new version, otherwise we just update the existing pull request
1120 1120 if source_changed:
1121 1121 pull_request_version = self._create_version_from_snapshot(pull_request)
1122 1122 self._link_comments_to_version(pull_request_version)
1123 1123 else:
1124 1124 try:
1125 1125 ver = pull_request.versions[-1]
1126 1126 except IndexError:
1127 1127 ver = None
1128 1128
1129 1129 pull_request.pull_request_version_id = \
1130 1130 ver.pull_request_version_id if ver else None
1131 1131 pull_request_version = pull_request
1132 1132
1133 1133 source_repo = pull_request.source_repo.scm_instance()
1134 1134 target_repo = pull_request.target_repo.scm_instance()
1135 1135
1136 1136 # re-compute commit ids
1137 1137 old_commit_ids = pull_request.revisions
1138 1138 pre_load = ["author", "date", "message", "branch"]
1139 1139 commit_ranges = target_repo.compare(
1140 1140 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1141 1141 pre_load=pre_load)
1142 1142
1143 1143 target_ref = target_commit.raw_id
1144 1144 source_ref = source_commit.raw_id
1145 1145 ancestor_commit_id = target_repo.get_common_ancestor(
1146 1146 target_ref, source_ref, source_repo)
1147 1147
1148 1148 if not ancestor_commit_id:
1149 1149 raise ValueError(
1150 1150 'cannot calculate diff info without a common ancestor. '
1151 1151 'Make sure both repositories are related, and have a common forking commit.')
1152 1152
1153 1153 pull_request.common_ancestor_id = ancestor_commit_id
1154 1154
1155 1155 pull_request.source_ref = f'{source_ref_type}:{source_ref_name}:{source_commit.raw_id}'
1156 1156 pull_request.target_ref = f'{target_ref_type}:{target_ref_name}:{ancestor_commit_id}'
1157 1157
1158 1158 pull_request.revisions = [
1159 1159 commit.raw_id for commit in reversed(commit_ranges)]
1160 1160 pull_request.updated_on = datetime.datetime.now()
1161 1161 Session().add(pull_request)
1162 1162 new_commit_ids = pull_request.revisions
1163 1163
1164 1164 old_diff_data, new_diff_data = self._generate_update_diffs(
1165 1165 pull_request, pull_request_version)
1166 1166
1167 1167 # calculate commit and file changes
1168 1168 commit_changes = self._calculate_commit_id_changes(
1169 1169 old_commit_ids, new_commit_ids)
1170 1170 file_changes = self._calculate_file_changes(
1171 1171 old_diff_data, new_diff_data)
1172 1172
1173 1173 # set comments as outdated if DIFFS changed
1174 1174 CommentsModel().outdate_comments(
1175 1175 pull_request, old_diff_data=old_diff_data,
1176 1176 new_diff_data=new_diff_data)
1177 1177
1178 1178 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1179 1179 file_node_changes = (
1180 1180 file_changes.added or file_changes.modified or file_changes.removed)
1181 1181 pr_has_changes = valid_commit_changes or file_node_changes
1182 1182
1183 1183 # Add an automatic comment to the pull request, in case
1184 1184 # anything has changed
1185 1185 if pr_has_changes:
1186 1186 update_comment = CommentsModel().create(
1187 1187 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1188 1188 repo=pull_request.target_repo,
1189 1189 user=pull_request.author,
1190 1190 pull_request=pull_request,
1191 1191 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1192 1192
1193 1193 # Update status to "Under Review" for added commits
1194 1194 for commit_id in commit_changes.added:
1195 1195 ChangesetStatusModel().set_status(
1196 1196 repo=pull_request.source_repo,
1197 1197 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1198 1198 comment=update_comment,
1199 1199 user=pull_request.author,
1200 1200 pull_request=pull_request,
1201 1201 revision=commit_id)
1202 1202
1203 1203 # persist the changes made so far
1204 1204 Session().commit()
1205 1205
1206 1206 if pr_has_changes:
1207 1207 # send update email to users
1208 1208 try:
1209 1209 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1210 1210 ancestor_commit_id=ancestor_commit_id,
1211 1211 commit_changes=commit_changes,
1212 1212 file_changes=file_changes)
1213 1213 Session().commit()
1214 1214 except Exception:
1215 1215 log.exception('Failed to send email notification to users')
1216 1216 Session().rollback()
1217 1217
1218 1218 log.debug(
1219 1219 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1220 1220 'removed_ids: %s', pull_request.pull_request_id,
1221 1221 commit_changes.added, commit_changes.common, commit_changes.removed)
1222 1222 log.debug(
1223 1223 'Updated pull request with the following file changes: %s',
1224 1224 file_changes)
1225 1225
1226 1226 log.info(
1227 1227 "Updated pull request %s from commit %s to commit %s, "
1228 1228 "stored new version %s of this pull request.",
1229 1229 pull_request.pull_request_id, source_ref_id,
1230 1230 pull_request.source_ref_parts.commit_id,
1231 1231 pull_request_version.pull_request_version_id)
1232 1232
1233 1233 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1234 1234
1235 1235 return UpdateResponse(
1236 1236 executed=True, reason=UpdateFailureReason.NONE,
1237 1237 old=pull_request, new=pull_request_version,
1238 1238 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1239 1239 source_changed=source_changed, target_changed=target_changed)
1240 1240
1241 1241 def _create_version_from_snapshot(self, pull_request):
1242 1242 version = PullRequestVersion()
1243 1243 version.title = pull_request.title
1244 1244 version.description = pull_request.description
1245 1245 version.status = pull_request.status
1246 1246 version.pull_request_state = pull_request.pull_request_state
1247 1247 version.created_on = datetime.datetime.now()
1248 1248 version.updated_on = pull_request.updated_on
1249 1249 version.user_id = pull_request.user_id
1250 1250 version.source_repo = pull_request.source_repo
1251 1251 version.source_ref = pull_request.source_ref
1252 1252 version.target_repo = pull_request.target_repo
1253 1253 version.target_ref = pull_request.target_ref
1254 1254
1255 1255 version._last_merge_source_rev = pull_request._last_merge_source_rev
1256 1256 version._last_merge_target_rev = pull_request._last_merge_target_rev
1257 1257 version.last_merge_status = pull_request.last_merge_status
1258 1258 version.last_merge_metadata = pull_request.last_merge_metadata
1259 1259 version.shadow_merge_ref = pull_request.shadow_merge_ref
1260 1260 version.merge_rev = pull_request.merge_rev
1261 1261 version.reviewer_data = pull_request.reviewer_data
1262 1262
1263 1263 version.revisions = pull_request.revisions
1264 1264 version.common_ancestor_id = pull_request.common_ancestor_id
1265 1265 version.pull_request = pull_request
1266 1266 Session().add(version)
1267 1267 Session().flush()
1268 1268
1269 1269 return version
1270 1270
1271 1271 def _generate_update_diffs(self, pull_request, pull_request_version):
1272 1272
1273 1273 diff_context = (
1274 1274 self.DIFF_CONTEXT +
1275 1275 CommentsModel.needed_extra_diff_context())
1276 1276 hide_whitespace_changes = False
1277 1277 source_repo = pull_request_version.source_repo
1278 1278 source_ref_id = pull_request_version.source_ref_parts.commit_id
1279 1279 target_ref_id = pull_request_version.target_ref_parts.commit_id
1280 1280 old_diff = self._get_diff_from_pr_or_version(
1281 1281 source_repo, source_ref_id, target_ref_id,
1282 1282 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1283 1283
1284 1284 source_repo = pull_request.source_repo
1285 1285 source_ref_id = pull_request.source_ref_parts.commit_id
1286 1286 target_ref_id = pull_request.target_ref_parts.commit_id
1287 1287
1288 1288 new_diff = self._get_diff_from_pr_or_version(
1289 1289 source_repo, source_ref_id, target_ref_id,
1290 1290 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1291 1291
1292 1292 # NOTE: this was using diff_format='gitdiff'
1293 1293 old_diff_data = diffs.DiffProcessor(old_diff, diff_format='newdiff')
1294 1294 old_diff_data.prepare()
1295 1295 new_diff_data = diffs.DiffProcessor(new_diff, diff_format='newdiff')
1296 1296 new_diff_data.prepare()
1297 1297
1298 1298 return old_diff_data, new_diff_data
1299 1299
1300 1300 def _link_comments_to_version(self, pull_request_version):
1301 1301 """
1302 1302 Link all unlinked comments of this pull request to the given version.
1303 1303
1304 1304 :param pull_request_version: The `PullRequestVersion` to which
1305 1305 the comments shall be linked.
1306 1306
1307 1307 """
1308 1308 pull_request = pull_request_version.pull_request
1309 1309 comments = ChangesetComment.query()\
1310 1310 .filter(
1311 1311 # TODO: johbo: Should we query for the repo at all here?
1312 1312 # Pending decision on how comments of PRs are to be related
1313 1313 # to either the source repo, the target repo or no repo at all.
1314 1314 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1315 1315 ChangesetComment.pull_request == pull_request,
1316 1316 ChangesetComment.pull_request_version == null())\
1317 1317 .order_by(ChangesetComment.comment_id.asc())
1318 1318
1319 1319 # TODO: johbo: Find out why this breaks if it is done in a bulk
1320 1320 # operation.
1321 1321 for comment in comments:
1322 1322 comment.pull_request_version_id = (
1323 1323 pull_request_version.pull_request_version_id)
1324 1324 Session().add(comment)
1325 1325
1326 1326 def _calculate_commit_id_changes(self, old_ids, new_ids):
1327 1327 added = [x for x in new_ids if x not in old_ids]
1328 1328 common = [x for x in new_ids if x in old_ids]
1329 1329 removed = [x for x in old_ids if x not in new_ids]
1330 1330 total = new_ids
1331 1331 return ChangeTuple(added, common, removed, total)
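A minimal worked example of the added/common/removed split computed above, using hypothetical commit ids:

old_ids = ['aaa111', 'bbb222', 'ccc333']
new_ids = ['bbb222', 'ccc333', 'ddd444']

added = [x for x in new_ids if x not in old_ids]
common = [x for x in new_ids if x in old_ids]
removed = [x for x in old_ids if x not in new_ids]

assert added == ['ddd444']
assert common == ['bbb222', 'ccc333']
assert removed == ['aaa111']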
1332 1332
1333 1333 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1334 1334
1335 1335 old_files = OrderedDict()
1336 1336 for diff_data in old_diff_data.parsed_diff:
1337 1337 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1338 1338
1339 1339 added_files = []
1340 1340 modified_files = []
1341 1341 removed_files = []
1342 1342 for diff_data in new_diff_data.parsed_diff:
1343 1343 new_filename = diff_data['filename']
1344 1344 new_hash = md5_safe(diff_data['raw_diff'])
1345 1345
1346 1346 old_hash = old_files.get(new_filename)
1347 1347 if not old_hash:
1348 1348 # file is not present in the old diff, so we have to figure out the
1349 1349 # operation (ADD/REMOVE) from the parsed diff
1350 1350 operations_dict = diff_data['stats']['ops']
1351 1351 if diffs.DEL_FILENODE in operations_dict:
1352 1352 removed_files.append(new_filename)
1353 1353 else:
1354 1354 added_files.append(new_filename)
1355 1355 else:
1356 1356 if new_hash != old_hash:
1357 1357 modified_files.append(new_filename)
1358 1358 # now remove a file from old, since we have seen it already
1359 1359 del old_files[new_filename]
1360 1360
1361 1361 # removed files are those present in the old diff but not in the NEW one;
1362 1362 # since we delete old entries that also appear in the new diff, any
1363 1363 # left-overs must be the removed files
1364 1364 removed_files.extend(old_files.keys())
1365 1365
1366 1366 return FileChangeTuple(added_files, modified_files, removed_files)
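A simplified sketch of the same classification, with hypothetical file names and hashes; the real method additionally consults the parsed diff operations (DEL_FILENODE) before treating an unknown file as added:

old_files = {'setup.py': 'hash-1', 'README.rst': 'hash-2'}
new_files = {'setup.py': 'hash-1-changed', 'docs/index.rst': 'hash-3'}

added, modified, removed = [], [], []
for name, new_hash in new_files.items():
    old_hash = old_files.pop(name, None)
    if old_hash is None:
        added.append(name)
    elif new_hash != old_hash:
        modified.append(name)
# whatever is left in old_files never showed up in the new diff => removed
removed.extend(old_files)

assert (added, modified, removed) == (['docs/index.rst'], ['setup.py'], ['README.rst'])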
1367 1367
1368 1368 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1369 1369 """
1370 1370 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1371 1371 so it always looks the same regardless of which default
1372 1372 renderer the system is using.
1373 1373
1374 1374 :param ancestor_commit_id: ancestor raw_id
1375 1375 :param changes: changes named tuple
1376 1376 :param file_changes: file changes named tuple
1377 1377
1378 1378 """
1379 1379 new_status = ChangesetStatus.get_status_lbl(
1380 1380 ChangesetStatus.STATUS_UNDER_REVIEW)
1381 1381
1382 1382 changed_files = (
1383 1383 file_changes.added + file_changes.modified + file_changes.removed)
1384 1384
1385 1385 params = {
1386 1386 'under_review_label': new_status,
1387 1387 'added_commits': changes.added,
1388 1388 'removed_commits': changes.removed,
1389 1389 'changed_files': changed_files,
1390 1390 'added_files': file_changes.added,
1391 1391 'modified_files': file_changes.modified,
1392 1392 'removed_files': file_changes.removed,
1393 1393 'ancestor_commit_id': ancestor_commit_id
1394 1394 }
1395 1395 renderer = RstTemplateRenderer()
1396 1396 return renderer.render('pull_request_update.mako', **params)
1397 1397
1398 1398 def edit(self, pull_request, title, description, description_renderer, user):
1399 1399 pull_request = self.__get_pull_request(pull_request)
1400 1400 old_data = pull_request.get_api_data(with_merge_state=False)
1401 1401 if pull_request.is_closed():
1402 1402 raise ValueError('This pull request is closed')
1403 1403 if title:
1404 1404 pull_request.title = title
1405 1405 pull_request.description = description
1406 1406 pull_request.updated_on = datetime.datetime.now()
1407 1407 pull_request.description_renderer = description_renderer
1408 1408 Session().add(pull_request)
1409 1409 self._log_audit_action(
1410 1410 'repo.pull_request.edit', {'old_data': old_data},
1411 1411 user, pull_request)
1412 1412
1413 1413 def update_reviewers(self, pull_request, reviewer_data, user):
1414 1414 """
1415 1415 Update the reviewers in the pull request
1416 1416
1417 1417 :param pull_request: the pr to update
1418 1418 :param reviewer_data: list of tuples
1419 1419 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1420 1420 :param user: current user who triggers this action
1421 1421 """
1422 1422
1423 1423 pull_request = self.__get_pull_request(pull_request)
1424 1424 if pull_request.is_closed():
1425 1425 raise ValueError('This pull request is closed')
1426 1426
1427 1427 reviewers = {}
1428 1428 for user_id, reasons, mandatory, role, rules in reviewer_data:
1429 1429 if isinstance(user_id, (int, str)):
1430 1430 user_id = self._get_user(user_id).user_id
1431 1431 reviewers[user_id] = {
1432 1432 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1433 1433
1434 1434 reviewers_ids = set(reviewers.keys())
1435 1435 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1436 1436 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1437 1437
1438 1438 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1439 1439
1440 1440 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1441 1441 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1442 1442
1443 1443 log.debug("Adding %s reviewers", ids_to_add)
1444 1444 log.debug("Removing %s reviewers", ids_to_remove)
1445 1445 changed = False
1446 1446 added_audit_reviewers = []
1447 1447 removed_audit_reviewers = []
1448 1448
1449 1449 for uid in ids_to_add:
1450 1450 changed = True
1451 1451 _usr = self._get_user(uid)
1452 1452 reviewer = PullRequestReviewers()
1453 1453 reviewer.user = _usr
1454 1454 reviewer.pull_request = pull_request
1455 1455 reviewer.reasons = reviewers[uid]['reasons']
1456 1456 # NOTE(marcink): mandatory shouldn't be changed now
1457 1457 # reviewer.mandatory = reviewers[uid]['reasons']
1458 1458 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1459 1459 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1460 1460 Session().add(reviewer)
1461 1461 added_audit_reviewers.append(reviewer.get_dict())
1462 1462
1463 1463 for uid in ids_to_remove:
1464 1464 changed = True
1465 1465 # NOTE(marcink): we fetch "ALL" reviewer objects using .all().
1466 1466 # This is an edge case that handles a previous state of having the same reviewer twice.
1467 1467 # this CAN happen due to the lack of DB checks
1468 1468 reviewers = PullRequestReviewers.query()\
1469 1469 .filter(PullRequestReviewers.user_id == uid,
1470 1470 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1471 1471 PullRequestReviewers.pull_request == pull_request)\
1472 1472 .all()
1473 1473
1474 1474 for obj in reviewers:
1475 1475 removed_audit_reviewers.append(obj.get_dict())
1476 1476 Session().delete(obj)
1477 1477
1478 1478 if changed:
1479 1479 Session().expire_all()
1480 1480 pull_request.updated_on = datetime.datetime.now()
1481 1481 Session().add(pull_request)
1482 1482
1483 1483 # finally store audit logs
1484 1484 for user_data in added_audit_reviewers:
1485 1485 self._log_audit_action(
1486 1486 'repo.pull_request.reviewer.add', {'data': user_data},
1487 1487 user, pull_request)
1488 1488 for user_data in removed_audit_reviewers:
1489 1489 self._log_audit_action(
1490 1490 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1491 1491 user, pull_request)
1492 1492
1493 1493 self.notify_reviewers(pull_request, ids_to_add, user)
1494 1494 return ids_to_add, ids_to_remove
1495 1495
1496 1496 def update_observers(self, pull_request, observer_data, user):
1497 1497 """
1498 1498 Update the observers in the pull request
1499 1499
1500 1500 :param pull_request: the pr to update
1501 1501 :param observer_data: list of tuples
1502 1502 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1503 1503 :param user: current user who triggers this action
1504 1504 """
1505 1505 pull_request = self.__get_pull_request(pull_request)
1506 1506 if pull_request.is_closed():
1507 1507 raise ValueError('This pull request is closed')
1508 1508
1509 1509 observers = {}
1510 1510 for user_id, reasons, mandatory, role, rules in observer_data:
1511 1511 if isinstance(user_id, (int, str)):
1512 1512 user_id = self._get_user(user_id).user_id
1513 1513 observers[user_id] = {
1514 1514 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1515 1515
1516 1516 observers_ids = set(observers.keys())
1517 1517 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1518 1518 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1519 1519
1520 1520 current_observers_ids = set([x.user.user_id for x in current_observers])
1521 1521
1522 1522 ids_to_add = observers_ids.difference(current_observers_ids)
1523 1523 ids_to_remove = current_observers_ids.difference(observers_ids)
1524 1524
1525 1525 log.debug("Adding %s observer", ids_to_add)
1526 1526 log.debug("Removing %s observer", ids_to_remove)
1527 1527 changed = False
1528 1528 added_audit_observers = []
1529 1529 removed_audit_observers = []
1530 1530
1531 1531 for uid in ids_to_add:
1532 1532 changed = True
1533 1533 _usr = self._get_user(uid)
1534 1534 observer = PullRequestReviewers()
1535 1535 observer.user = _usr
1536 1536 observer.pull_request = pull_request
1537 1537 observer.reasons = observers[uid]['reasons']
1538 1538 # NOTE(marcink): mandatory shouldn't be changed now
1539 1539 # observer.mandatory = observer[uid]['reasons']
1540 1540
1541 1541 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1542 1542 observer.role = PullRequestReviewers.ROLE_OBSERVER
1543 1543 Session().add(observer)
1544 1544 added_audit_observers.append(observer.get_dict())
1545 1545
1546 1546 for uid in ids_to_remove:
1547 1547 changed = True
1548 1548 # NOTE(marcink): we fetch "ALL" observer objects using .all().
1549 1549 # This is an edge case that handles a previous state of having the same observer twice.
1550 1550 # this CAN happen due to the lack of DB checks
1551 1551 observers = PullRequestReviewers.query()\
1552 1552 .filter(PullRequestReviewers.user_id == uid,
1553 1553 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1554 1554 PullRequestReviewers.pull_request == pull_request)\
1555 1555 .all()
1556 1556
1557 1557 for obj in observers:
1558 1558 removed_audit_observers.append(obj.get_dict())
1559 1559 Session().delete(obj)
1560 1560
1561 1561 if changed:
1562 1562 Session().expire_all()
1563 1563 pull_request.updated_on = datetime.datetime.now()
1564 1564 Session().add(pull_request)
1565 1565
1566 1566 # finally store audit logs
1567 1567 for user_data in added_audit_observers:
1568 1568 self._log_audit_action(
1569 1569 'repo.pull_request.observer.add', {'data': user_data},
1570 1570 user, pull_request)
1571 1571 for user_data in removed_audit_observers:
1572 1572 self._log_audit_action(
1573 1573 'repo.pull_request.observer.delete', {'old_data': user_data},
1574 1574 user, pull_request)
1575 1575
1576 1576 self.notify_observers(pull_request, ids_to_add, user)
1577 1577 return ids_to_add, ids_to_remove
1578 1578
1579 1579 def get_url(self, pull_request, request=None, permalink=False):
1580 1580 if not request:
1581 1581 request = get_current_request()
1582 1582
1583 1583 if permalink:
1584 1584 return request.route_url(
1585 1585 'pull_requests_global',
1586 1586 pull_request_id=pull_request.pull_request_id,)
1587 1587 else:
1588 1588 return request.route_url('pullrequest_show',
1589 1589 repo_name=safe_str(pull_request.target_repo.repo_name),
1590 1590 pull_request_id=pull_request.pull_request_id,)
1591 1591
1592 1592 def get_shadow_clone_url(self, pull_request, request=None):
1593 1593 """
1594 1594 Returns a qualified url pointing to the shadow repository. If this pull
1595 1595 request is closed, there is no shadow repository and ``None`` will be
1596 1596 returned.
1597 1597 """
1598 1598 if pull_request.is_closed():
1599 1599 return None
1600 1600 else:
1601 1601 pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
1602 1602 return safe_str('{pr_url}/repository'.format(pr_url=pr_url))
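For illustration only (hypothetical host, repository and pull request id): the shadow clone url is simply the pull request url with a '/repository' suffix, while closed pull requests return None because their shadow repository no longer exists:

pr_url = 'https://code.example.com/some-repo/pull-request/42'
shadow_url = '{pr_url}/repository'.format(pr_url=pr_url)
assert shadow_url == 'https://code.example.com/some-repo/pull-request/42/repository'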
1603 1603
1604 1604 def _notify_reviewers(self, pull_request, user_ids, role, user):
1605 1605 # notification to reviewers/observers
1606 1606 if not user_ids:
1607 1607 return
1608 1608
1609 1609 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1610 1610
1611 1611 pull_request_obj = pull_request
1612 1612 # get the current participants of this pull request
1613 1613 recipients = user_ids
1614 1614 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1615 1615
1616 1616 pr_source_repo = pull_request_obj.source_repo
1617 1617 pr_target_repo = pull_request_obj.target_repo
1618 1618
1619 1619 pr_url = h.route_url('pullrequest_show',
1620 1620 repo_name=pr_target_repo.repo_name,
1621 1621 pull_request_id=pull_request_obj.pull_request_id,)
1622 1622
1623 1623 # set some variables for email notification
1624 1624 pr_target_repo_url = h.route_url(
1625 1625 'repo_summary', repo_name=pr_target_repo.repo_name)
1626 1626
1627 1627 pr_source_repo_url = h.route_url(
1628 1628 'repo_summary', repo_name=pr_source_repo.repo_name)
1629 1629
1630 1630 # pull request specifics
1631 1631 pull_request_commits = [
1632 1632 (x.raw_id, x.message)
1633 1633 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1634 1634
1635 1635 current_rhodecode_user = user
1636 1636 kwargs = {
1637 1637 'user': current_rhodecode_user,
1638 1638 'pull_request_author': pull_request.author,
1639 1639 'pull_request': pull_request_obj,
1640 1640 'pull_request_commits': pull_request_commits,
1641 1641
1642 1642 'pull_request_target_repo': pr_target_repo,
1643 1643 'pull_request_target_repo_url': pr_target_repo_url,
1644 1644
1645 1645 'pull_request_source_repo': pr_source_repo,
1646 1646 'pull_request_source_repo_url': pr_source_repo_url,
1647 1647
1648 1648 'pull_request_url': pr_url,
1649 1649 'thread_ids': [pr_url],
1650 1650 'user_role': role
1651 1651 }
1652 1652
1653 1653 # create notification objects, and emails
1654 1654 NotificationModel().create(
1655 1655 created_by=current_rhodecode_user,
1656 1656 notification_subject='', # Filled in based on the notification_type
1657 1657 notification_body='', # Filled in based on the notification_type
1658 1658 notification_type=notification_type,
1659 1659 recipients=recipients,
1660 1660 email_kwargs=kwargs,
1661 1661 )
1662 1662
1663 1663 def notify_reviewers(self, pull_request, reviewers_ids, user):
1664 1664 return self._notify_reviewers(pull_request, reviewers_ids,
1665 1665 PullRequestReviewers.ROLE_REVIEWER, user)
1666 1666
1667 1667 def notify_observers(self, pull_request, observers_ids, user):
1668 1668 return self._notify_reviewers(pull_request, observers_ids,
1669 1669 PullRequestReviewers.ROLE_OBSERVER, user)
1670 1670
1671 1671 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1672 1672 commit_changes, file_changes):
1673 1673
1674 1674 updating_user_id = updating_user.user_id
1675 1675 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1676 1676 # NOTE(marcink): send notification to all other users except the
1677 1677 # person who updated the PR
1678 1678 recipients = reviewers.difference(set([updating_user_id]))
1679 1679
1680 1680 log.debug('Notify following recipients about pull-request update %s', recipients)
1681 1681
1682 1682 pull_request_obj = pull_request
1683 1683
1684 1684 # send email about the update
1685 1685 changed_files = (
1686 1686 file_changes.added + file_changes.modified + file_changes.removed)
1687 1687
1688 1688 pr_source_repo = pull_request_obj.source_repo
1689 1689 pr_target_repo = pull_request_obj.target_repo
1690 1690
1691 1691 pr_url = h.route_url('pullrequest_show',
1692 1692 repo_name=pr_target_repo.repo_name,
1693 1693 pull_request_id=pull_request_obj.pull_request_id,)
1694 1694
1695 1695 # set some variables for email notification
1696 1696 pr_target_repo_url = h.route_url(
1697 1697 'repo_summary', repo_name=pr_target_repo.repo_name)
1698 1698
1699 1699 pr_source_repo_url = h.route_url(
1700 1700 'repo_summary', repo_name=pr_source_repo.repo_name)
1701 1701
1702 1702 email_kwargs = {
1703 1703 'date': datetime.datetime.now(),
1704 1704 'updating_user': updating_user,
1705 1705
1706 1706 'pull_request': pull_request_obj,
1707 1707
1708 1708 'pull_request_target_repo': pr_target_repo,
1709 1709 'pull_request_target_repo_url': pr_target_repo_url,
1710 1710
1711 1711 'pull_request_source_repo': pr_source_repo,
1712 1712 'pull_request_source_repo_url': pr_source_repo_url,
1713 1713
1714 1714 'pull_request_url': pr_url,
1715 1715
1716 1716 'ancestor_commit_id': ancestor_commit_id,
1717 1717 'added_commits': commit_changes.added,
1718 1718 'removed_commits': commit_changes.removed,
1719 1719 'changed_files': changed_files,
1720 1720 'added_files': file_changes.added,
1721 1721 'modified_files': file_changes.modified,
1722 1722 'removed_files': file_changes.removed,
1723 1723 'thread_ids': [pr_url],
1724 1724 }
1725 1725
1726 1726 # create notification objects, and emails
1727 1727 NotificationModel().create(
1728 1728 created_by=updating_user,
1729 1729 notification_subject='', # Filled in based on the notification_type
1730 1730 notification_body='', # Filled in based on the notification_type
1731 1731 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1732 1732 recipients=recipients,
1733 1733 email_kwargs=email_kwargs,
1734 1734 )
1735 1735
1736 1736 def delete(self, pull_request, user=None):
1737 1737 if not user:
1738 1738 user = getattr(get_current_rhodecode_user(), 'username', None)
1739 1739
1740 1740 pull_request = self.__get_pull_request(pull_request)
1741 1741 old_data = pull_request.get_api_data(with_merge_state=False)
1742 1742 self._cleanup_merge_workspace(pull_request)
1743 1743 self._log_audit_action(
1744 1744 'repo.pull_request.delete', {'old_data': old_data},
1745 1745 user, pull_request)
1746 1746 Session().delete(pull_request)
1747 1747
1748 1748 def close_pull_request(self, pull_request, user):
1749 1749 pull_request = self.__get_pull_request(pull_request)
1750 1750 self._cleanup_merge_workspace(pull_request)
1751 1751 pull_request.status = PullRequest.STATUS_CLOSED
1752 1752 pull_request.updated_on = datetime.datetime.now()
1753 1753 Session().add(pull_request)
1754 1754 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1755 1755
1756 1756 pr_data = pull_request.get_api_data(with_merge_state=False)
1757 1757 self._log_audit_action(
1758 1758 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1759 1759
1760 1760 def close_pull_request_with_comment(
1761 1761 self, pull_request, user, repo, message=None, auth_user=None):
1762 1762
1763 1763 pull_request_review_status = pull_request.calculated_review_status()
1764 1764
1765 1765 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1766 1766 # approved only if we have voting consent
1767 1767 status = ChangesetStatus.STATUS_APPROVED
1768 1768 else:
1769 1769 status = ChangesetStatus.STATUS_REJECTED
1770 1770 status_lbl = ChangesetStatus.get_status_lbl(status)
1771 1771
1772 1772 default_message = (
1773 1773 'Closing with status change {transition_icon} {status}.'
1774 1774 ).format(transition_icon='>', status=status_lbl)
1775 1775 text = message or default_message
1776 1776
1777 1777 # create a comment, and link it to new status
1778 1778 comment = CommentsModel().create(
1779 1779 text=text,
1780 1780 repo=repo.repo_id,
1781 1781 user=user.user_id,
1782 1782 pull_request=pull_request.pull_request_id,
1783 1783 status_change=status_lbl,
1784 1784 status_change_type=status,
1785 1785 closing_pr=True,
1786 1786 auth_user=auth_user,
1787 1787 )
1788 1788
1789 1789 # calculate old status before we change it
1790 1790 old_calculated_status = pull_request.calculated_review_status()
1791 1791 ChangesetStatusModel().set_status(
1792 1792 repo.repo_id,
1793 1793 status,
1794 1794 user.user_id,
1795 1795 comment=comment,
1796 1796 pull_request=pull_request.pull_request_id
1797 1797 )
1798 1798
1799 1799 Session().flush()
1800 1800
1801 1801 self.trigger_pull_request_hook(pull_request, user, 'comment',
1802 1802 data={'comment': comment})
1803 1803
1804 1804 # we now calculate the status of the pull request again, and based on that
1805 1805 # calculation trigger a status change. This might happen in cases
1806 1806 # where a non-reviewer admin closes a pr, which means their vote doesn't
1807 1807 # change the status, while if they are a reviewer this might change it.
1808 1808 calculated_status = pull_request.calculated_review_status()
1809 1809 if old_calculated_status != calculated_status:
1810 1810 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1811 1811 data={'status': calculated_status})
1812 1812
1813 1813 # finally close the PR
1814 1814 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1815 1815
1816 1816 return comment, status
1817 1817
1818 1818 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1819 1819 _ = translator or get_current_request().translate
1820 1820
1821 1821 if not self._is_merge_enabled(pull_request):
1822 1822 return None, False, _('Server-side pull request merging is disabled.')
1823 1823
1824 1824 if pull_request.is_closed():
1825 1825 return None, False, _('This pull request is closed.')
1826 1826
1827 1827 merge_possible, msg = self._check_repo_requirements(
1828 1828 target=pull_request.target_repo, source=pull_request.source_repo,
1829 1829 translator=_)
1830 1830 if not merge_possible:
1831 1831 return None, merge_possible, msg
1832 1832
1833 1833 try:
1834 1834 merge_response = self._try_merge(
1835 1835 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1836 1836 log.debug("Merge response: %s", merge_response)
1837 1837 return merge_response, merge_response.possible, merge_response.merge_status_message
1838 1838 except NotImplementedError:
1839 1839 return None, False, _('Pull request merging is not supported.')
1840 1840
1841 1841 def _check_repo_requirements(self, target, source, translator):
1842 1842 """
1843 1843 Check if `target` and `source` have compatible requirements.
1844 1844
1845 1845 Currently this is just checking for largefiles.
1846 1846 """
1847 1847 _ = translator
1848 1848 target_has_largefiles = self._has_largefiles(target)
1849 1849 source_has_largefiles = self._has_largefiles(source)
1850 1850 merge_possible = True
1851 1851 message = u''
1852 1852
1853 1853 if target_has_largefiles != source_has_largefiles:
1854 1854 merge_possible = False
1855 1855 if source_has_largefiles:
1856 1856 message = _(
1857 1857 'Target repository large files support is disabled.')
1858 1858 else:
1859 1859 message = _(
1860 1860 'Source repository large files support is disabled.')
1861 1861
1862 1862 return merge_possible, message
1863 1863
1864 1864 def _has_largefiles(self, repo):
1865 1865 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1866 1866 'extensions', 'largefiles')
1867 1867 return largefiles_ui and largefiles_ui[0].active
1868 1868
1869 1869 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1870 1870 """
1871 1871 Try to merge the pull request and return the merge status.
1872 1872 """
1873 1873 log.debug(
1874 1874 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1875 1875 pull_request.pull_request_id, force_shadow_repo_refresh)
1876 1876 target_vcs = pull_request.target_repo.scm_instance()
1877 1877 # Refresh the target reference.
1878 1878 try:
1879 1879 target_ref = self._refresh_reference(
1880 1880 pull_request.target_ref_parts, target_vcs)
1881 1881 except CommitDoesNotExistError:
1882 1882 merge_state = MergeResponse(
1883 1883 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1884 1884 metadata={'target_ref': pull_request.target_ref_parts})
1885 1885 return merge_state
1886 1886
1887 1887 target_locked = pull_request.target_repo.locked
1888 1888 if target_locked and target_locked[0]:
1889 1889 locked_by = 'user:{}'.format(target_locked[0])
1890 1890 log.debug("The target repository is locked by %s.", locked_by)
1891 1891 merge_state = MergeResponse(
1892 1892 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1893 1893 metadata={'locked_by': locked_by})
1894 1894 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1895 1895 pull_request, target_ref):
1896 1896 log.debug("Refreshing the merge status of the repository.")
1897 1897 merge_state = self._refresh_merge_state(
1898 1898 pull_request, target_vcs, target_ref)
1899 1899 else:
1900 1900 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1901 1901 metadata = {
1902 1902 'unresolved_files': '',
1903 1903 'target_ref': pull_request.target_ref_parts,
1904 1904 'source_ref': pull_request.source_ref_parts,
1905 1905 }
1906 1906 if pull_request.last_merge_metadata:
1907 1907 metadata.update(pull_request.last_merge_metadata_parsed)
1908 1908
1909 1909 if not possible and target_ref.type == 'branch':
1910 1910 # NOTE(marcink): case for mercurial multiple heads on branch
1911 1911 heads = target_vcs._heads(target_ref.name)
1912 1912 if len(heads) != 1:
1913 1913 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1914 1914 metadata.update({
1915 1915 'heads': heads
1916 1916 })
1917 1917
1918 1918 merge_state = MergeResponse(
1919 1919 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1920 1920
1921 1921 return merge_state
1922 1922
1923 1923 def _refresh_reference(self, reference, vcs_repository):
1924 1924 if reference.type in self.UPDATABLE_REF_TYPES:
1925 1925 name_or_id = reference.name
1926 1926 else:
1927 1927 name_or_id = reference.commit_id
1928 1928
1929 1929 refreshed_commit = vcs_repository.get_commit(name_or_id)
1930 1930 refreshed_reference = Reference(
1931 1931 reference.type, reference.name, refreshed_commit.raw_id)
1932 1932 return refreshed_reference
1933 1933
1934 1934 def _needs_merge_state_refresh(self, pull_request, target_reference):
1935 1935 return not(
1936 1936 pull_request.revisions and
1937 1937 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1938 1938 target_reference.commit_id == pull_request._last_merge_target_rev)
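A standalone sketch of the cached-state check above: a refresh is required unless the newest source revision and the target commit both still match what was recorded by the previous merge simulation (values below are hypothetical):

def needs_refresh(revisions, last_source_rev, target_commit_id, last_target_rev):
    return not (
        revisions and
        revisions[0] == last_source_rev and
        target_commit_id == last_target_rev)

assert needs_refresh(['abc'], 'abc', 'def', 'def') is False   # nothing moved
assert needs_refresh(['abc'], 'abc', 'def', 'old') is True    # target moved
assert needs_refresh([], None, 'def', 'def') is True          # no revisions yet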
1939 1939
1940 1940 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1941 1941 workspace_id = self._workspace_id(pull_request)
1942 1942 source_vcs = pull_request.source_repo.scm_instance()
1943 1943 repo_id = pull_request.target_repo.repo_id
1944 1944 use_rebase = self._use_rebase_for_merging(pull_request)
1945 1945 close_branch = self._close_branch_before_merging(pull_request)
1946 1946 merge_state = target_vcs.merge(
1947 1947 repo_id, workspace_id,
1948 1948 target_reference, source_vcs, pull_request.source_ref_parts,
1949 1949 dry_run=True, use_rebase=use_rebase,
1950 1950 close_branch=close_branch)
1951 1951
1952 1952 # Do not store the response if there was an unknown error.
1953 1953 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1954 1954 pull_request._last_merge_source_rev = \
1955 1955 pull_request.source_ref_parts.commit_id
1956 1956 pull_request._last_merge_target_rev = target_reference.commit_id
1957 1957 pull_request.last_merge_status = merge_state.failure_reason
1958 1958 pull_request.last_merge_metadata = merge_state.metadata
1959 1959
1960 1960 pull_request.shadow_merge_ref = merge_state.merge_ref
1961 1961 Session().add(pull_request)
1962 1962 Session().commit()
1963 1963
1964 1964 return merge_state
1965 1965
1966 1966 def _workspace_id(self, pull_request):
1967 1967 workspace_id = 'pr-%s' % pull_request.pull_request_id
1968 1968 return workspace_id
1969 1969
1970 1970 def generate_repo_data(self, repo, commit_id=None, branch=None,
1971 1971 bookmark=None, translator=None):
1972 1972 from rhodecode.model.repo import RepoModel
1973 1973
1974 1974 all_refs, selected_ref = \
1975 1975 self._get_repo_pullrequest_sources(
1976 1976 repo.scm_instance(), commit_id=commit_id,
1977 1977 branch=branch, bookmark=bookmark, translator=translator)
1978 1978
1979 1979 refs_select2 = []
1980 1980 for element in all_refs:
1981 1981 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1982 1982 refs_select2.append({'text': element[1], 'children': children})
1983 1983
1984 1984 return {
1985 1985 'user': {
1986 1986 'user_id': repo.user.user_id,
1987 1987 'username': repo.user.username,
1988 1988 'firstname': repo.user.first_name,
1989 1989 'lastname': repo.user.last_name,
1990 1990 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1991 1991 },
1992 1992 'name': repo.repo_name,
1993 1993 'link': RepoModel().get_url(repo),
1994 1994 'description': h.chop_at_smart(repo.description_safe, '\n'),
1995 1995 'refs': {
1996 1996 'all_refs': all_refs,
1997 1997 'selected_ref': selected_ref,
1998 1998 'select2_refs': refs_select2
1999 1999 }
2000 2000 }
2001 2001
2002 2002 def generate_pullrequest_title(self, source, source_ref, target):
2003 2003 return u'{source}#{at_ref} to {target}'.format(
2004 2004 source=source,
2005 2005 at_ref=source_ref,
2006 2006 target=target,
2007 2007 )
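For example (hypothetical repository and branch names), the default title generated above looks like this:

title = u'{source}#{at_ref} to {target}'.format(
    source='joe/rhodecode-fork', at_ref='feature-x', target='rhodecode')
assert title == u'joe/rhodecode-fork#feature-x to rhodecode'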
2008 2008
2009 2009 def _cleanup_merge_workspace(self, pull_request):
2010 2010 # Merging related cleanup
2011 2011 repo_id = pull_request.target_repo.repo_id
2012 2012 target_scm = pull_request.target_repo.scm_instance()
2013 2013 workspace_id = self._workspace_id(pull_request)
2014 2014
2015 2015 try:
2016 2016 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2017 2017 except NotImplementedError:
2018 2018 pass
2019 2019
2020 2020 def _get_repo_pullrequest_sources(
2021 2021 self, repo, commit_id=None, branch=None, bookmark=None,
2022 2022 translator=None):
2023 2023 """
2024 2024 Return a structure with the repo's interesting commits, suitable for
2025 2025 the selectors in the pullrequest controller
2026 2026
2027 2027 :param commit_id: a commit that must be in the list somehow
2028 2028 and selected by default
2029 2029 :param branch: a branch that must be in the list and selected
2030 2030 by default - even if closed
2031 2031 :param bookmark: a bookmark that must be in the list and selected
2032 2032 """
2033 2033 _ = translator or get_current_request().translate
2034 2034
2035 2035 commit_id = safe_str(commit_id) if commit_id else None
2036 2036 branch = safe_str(branch) if branch else None
2037 2037 bookmark = safe_str(bookmark) if bookmark else None
2038 2038
2039 2039 selected = None
2040 2040
2041 2041 # order matters: first source that has commit_id in it will be selected
2042 2042 sources = []
2043 2043 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
2044 2044 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
2045 2045
2046 2046 if commit_id:
2047 2047 ref_commit = (h.short_id(commit_id), commit_id)
2048 2048 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
2049 2049
2050 2050 sources.append(
2051 2051 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
2052 2052 )
2053 2053
2054 2054 groups = []
2055 2055
2056 2056 for group_key, ref_list, group_name, match in sources:
2057 2057 group_refs = []
2058 2058 for ref_name, ref_id in ref_list:
2059 2059 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
2060 2060 group_refs.append((ref_key, ref_name))
2061 2061
2062 2062 if not selected:
2063 2063 if set([commit_id, match]) & set([ref_id, ref_name]):
2064 2064 selected = ref_key
2065 2065
2066 2066 if group_refs:
2067 2067 groups.append((group_refs, group_name))
2068 2068
2069 2069 if not selected:
2070 2070 ref = commit_id or branch or bookmark
2071 2071 if ref:
2072 2072 raise CommitDoesNotExistError(
2073 2073 u'No commit refs could be found matching: {}'.format(ref))
2074 2074 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2075 2075 selected = u'branch:{}:{}'.format(
2076 2076 safe_str(repo.DEFAULT_BRANCH_NAME),
2077 2077 safe_str(repo.branches[repo.DEFAULT_BRANCH_NAME])
2078 2078 )
2079 2079 elif repo.commit_ids:
2080 2080 # make the user select in this case
2081 2081 selected = None
2082 2082 else:
2083 2083 raise EmptyRepositoryError()
2084 2084 return groups, selected
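Every entry in the returned groups (and the `selected` value) is keyed as '<ref type>:<name>:<commit id>'; a hypothetical default-branch selection would therefore look like:

ref_key = u'{}:{}:{}'.format('branch', 'default', 'deadbeefcafe')
assert ref_key == u'branch:default:deadbeefcafe'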
2085 2085
2086 2086 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2087 2087 hide_whitespace_changes, diff_context):
2088 2088
2089 2089 return self._get_diff_from_pr_or_version(
2090 2090 source_repo, source_ref_id, target_ref_id,
2091 2091 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2092 2092
2093 2093 def _get_diff_from_pr_or_version(
2094 2094 self, source_repo, source_ref_id, target_ref_id,
2095 2095 hide_whitespace_changes, diff_context):
2096 2096
2097 2097 target_commit = source_repo.get_commit(
2098 2098 commit_id=safe_str(target_ref_id))
2099 2099 source_commit = source_repo.get_commit(
2100 2100 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
2101 2101 if isinstance(source_repo, Repository):
2102 2102 vcs_repo = source_repo.scm_instance()
2103 2103 else:
2104 2104 vcs_repo = source_repo
2105 2105
2106 2106 # TODO: johbo: In the context of an update, we cannot reach
2107 2107 # the old commit anymore with our normal mechanisms. It needs
2108 2108 # some sort of special support in the vcs layer to avoid this
2109 2109 # workaround.
2110 2110 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
2111 2111 vcs_repo.alias == 'git'):
2112 2112 source_commit.raw_id = safe_str(source_ref_id)
2113 2113
2114 2114 log.debug('calculating diff between '
2115 2115 'source_ref:%s and target_ref:%s for repo `%s`',
2116 2116 target_ref_id, source_ref_id,
2117 2117 safe_str(vcs_repo.path))
2118 2118
2119 2119 vcs_diff = vcs_repo.get_diff(
2120 2120 commit1=target_commit, commit2=source_commit,
2121 2121 ignore_whitespace=hide_whitespace_changes, context=diff_context)
2122 2122 return vcs_diff
2123 2123
2124 2124 def _is_merge_enabled(self, pull_request):
2125 2125 return self._get_general_setting(
2126 2126 pull_request, 'rhodecode_pr_merge_enabled')
2127 2127
2128 def is_automatic_merge_enabled(self, pull_request):
2129 return self._get_general_setting(
2130 pull_request, 'rhodecode_auto_merge_enabled')
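A hedged sketch (not part of this changeset) of how a caller such as the EE auto-merge task could gate on the new `rhodecode_auto_merge_enabled` setting; the helper name and overall flow below are assumptions, while `is_automatic_merge_enabled`, `calculated_review_status` and `merge_repo` are the methods shown in this module:

def maybe_auto_merge(pull_request, user, extras):
    model = PullRequestModel()
    if not model.is_automatic_merge_enabled(pull_request):
        return None  # auto merge disabled for the target repository
    if pull_request.calculated_review_status() != ChangesetStatus.STATUS_APPROVED:
        return None  # only merge automatically once reviewers have approved
    return model.merge_repo(pull_request, user, extras=extras)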
2131
2128 2132 def _use_rebase_for_merging(self, pull_request):
2129 2133 repo_type = pull_request.target_repo.repo_type
2130 2134 if repo_type == 'hg':
2131 2135 return self._get_general_setting(
2132 2136 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2133 2137 elif repo_type == 'git':
2134 2138 return self._get_general_setting(
2135 2139 pull_request, 'rhodecode_git_use_rebase_for_merging')
2136 2140
2137 2141 return False
2138 2142
2139 2143 def _user_name_for_merging(self, pull_request, user):
2140 2144 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2141 2145 if env_user_name_attr and hasattr(user, env_user_name_attr):
2142 2146 user_name_attr = env_user_name_attr
2143 2147 else:
2144 2148 user_name_attr = 'short_contact'
2145 2149
2146 2150 user_name = getattr(user, user_name_attr)
2147 2151 return user_name
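Illustratively, pointing RC_MERGE_USER_NAME_ATTR at another user attribute (here a hypothetical 'username') changes which value ends up as the merge author name, with short_contact remaining the fallback:

import os

class DemoUser:
    username = 'jdoe'
    short_contact = 'John Doe <jdoe@example.com>'

os.environ['RC_MERGE_USER_NAME_ATTR'] = 'username'
attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
user_name_attr = attr if attr and hasattr(DemoUser, attr) else 'short_contact'
assert getattr(DemoUser, user_name_attr) == 'jdoe'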
2148 2152
2149 2153 def _close_branch_before_merging(self, pull_request):
2150 2154 repo_type = pull_request.target_repo.repo_type
2151 2155 if repo_type == 'hg':
2152 2156 return self._get_general_setting(
2153 2157 pull_request, 'rhodecode_hg_close_branch_before_merging')
2154 2158 elif repo_type == 'git':
2155 2159 return self._get_general_setting(
2156 2160 pull_request, 'rhodecode_git_close_branch_before_merging')
2157 2161
2158 2162 return False
2159 2163
2160 2164 def _get_general_setting(self, pull_request, settings_key, default=False):
2161 2165 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2162 2166 settings = settings_model.get_general_settings()
2163 2167 return settings.get(settings_key, default)
2164 2168
2165 2169 def _log_audit_action(self, action, action_data, user, pull_request):
2166 2170 audit_logger.store(
2167 2171 action=action,
2168 2172 action_data=action_data,
2169 2173 user=user,
2170 2174 repo=pull_request.target_repo)
2171 2175
2172 2176 def get_reviewer_functions(self):
2173 2177 """
2173 2177 Fetches functions for validating and fetching default reviewers.
2174 2178 If available we use the EE package, else we fall back to the CE
2175 2179 package functions
2177 2181 """
2178 2182 try:
2179 2183 from rc_reviewers.utils import get_default_reviewers_data
2180 2184 from rc_reviewers.utils import validate_default_reviewers
2181 2185 from rc_reviewers.utils import validate_observers
2182 2186 except ImportError:
2183 2187 from rhodecode.apps.repository.utils import get_default_reviewers_data
2184 2188 from rhodecode.apps.repository.utils import validate_default_reviewers
2185 2189 from rhodecode.apps.repository.utils import validate_observers
2186 2190
2187 2191 return get_default_reviewers_data, validate_default_reviewers, validate_observers
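Callers simply unpack the returned triple and stay agnostic about whether the EE or CE implementation was imported; a minimal usage sketch:

get_reviewers_data, validate_reviewers, validate_observers = \
    PullRequestModel().get_reviewer_functions()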
2188 2192
2189 2193
2190 2194 class MergeCheck(object):
2191 2195 """
2192 2196 Perform merge checks and return a check object which stores information
2193 2197 about merge errors and merge conditions
2194 2198 """
2195 2199 TODO_CHECK = 'todo'
2196 2200 PERM_CHECK = 'perm'
2197 2201 REVIEW_CHECK = 'review'
2198 2202 MERGE_CHECK = 'merge'
2199 2203 WIP_CHECK = 'wip'
2200 2204
2201 2205 def __init__(self):
2202 2206 self.review_status = None
2203 2207 self.merge_possible = None
2204 2208 self.merge_msg = ''
2205 2209 self.merge_response = None
2206 2210 self.failed = None
2207 2211 self.errors = []
2208 2212 self.error_details = OrderedDict()
2209 2213 self.source_commit = AttributeDict()
2210 2214 self.target_commit = AttributeDict()
2211 2215 self.reviewers_count = 0
2212 2216 self.observers_count = 0
2213 2217
2214 2218 def __repr__(self):
2215 2219 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2216 2220 self.merge_possible, self.failed, self.errors)
2217 2221
2218 2222 def push_error(self, error_type, message, error_key, details):
2219 2223 self.failed = True
2220 2224 self.errors.append([error_type, message])
2221 2225 self.error_details[error_key] = dict(
2222 2226 details=details,
2223 2227 error_type=error_type,
2224 2228 message=message
2225 2229 )
2226 2230
2227 2231 @classmethod
2228 2232 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2229 2233 force_shadow_repo_refresh=False):
2230 2234 _ = translator
2231 2235 merge_check = cls()
2232 2236
2233 2237 # title has WIP:
2234 2238 if pull_request.work_in_progress:
2235 2239 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2236 2240
2237 2241 msg = _('WIP marker in title prevents an accidental merge.')
2238 2242 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2239 2243 if fail_early:
2240 2244 return merge_check
2241 2245
2242 2246 # permissions to merge
2243 2247 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2244 2248 if not user_allowed_to_merge:
2245 2249 log.debug("MergeCheck: cannot merge, approval is pending.")
2246 2250
2247 2251 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2248 2252 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2249 2253 if fail_early:
2250 2254 return merge_check
2251 2255
2252 2256 # permission to merge into the target branch
2253 2257 target_commit_id = pull_request.target_ref_parts.commit_id
2254 2258 if pull_request.target_ref_parts.type == 'branch':
2255 2259 branch_name = pull_request.target_ref_parts.name
2256 2260 else:
2257 2261 # for mercurial we can always figure out the branch from the commit
2258 2262 # in case of bookmark
2259 2263 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2260 2264 branch_name = target_commit.branch
2261 2265
2262 2266 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2263 2267 pull_request.target_repo.repo_name, branch_name)
2264 2268 if branch_perm and branch_perm == 'branch.none':
2265 2269 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2266 2270 branch_name, rule)
2267 2271 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2268 2272 if fail_early:
2269 2273 return merge_check
2270 2274
2271 2275 # review status, must be always present
2272 2276 review_status = pull_request.calculated_review_status()
2273 2277 merge_check.review_status = review_status
2274 2278 merge_check.reviewers_count = pull_request.reviewers_count
2275 2279 merge_check.observers_count = pull_request.observers_count
2276 2280
2277 2281 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2278 2282 if not status_approved and merge_check.reviewers_count:
2279 2283 log.debug("MergeCheck: cannot merge, approval is pending.")
2280 2284 msg = _('Pull request reviewer approval is pending.')
2281 2285
2282 2286 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2283 2287
2284 2288 if fail_early:
2285 2289 return merge_check
2286 2290
2287 2291 # left over TODOs
2288 2292 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2289 2293 if todos:
2290 2294 log.debug("MergeCheck: cannot merge, {} "
2291 2295 "unresolved TODOs left.".format(len(todos)))
2292 2296
2293 2297 if len(todos) == 1:
2294 2298 msg = _('Cannot merge, {} TODO still not resolved.').format(
2295 2299 len(todos))
2296 2300 else:
2297 2301 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2298 2302 len(todos))
2299 2303
2300 2304 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2301 2305
2302 2306 if fail_early:
2303 2307 return merge_check
2304 2308
2305 2309 # merge possible, here is the filesystem simulation + shadow repo
2306 2310 merge_response, merge_status, msg = PullRequestModel().merge_status(
2307 2311 pull_request, translator=translator,
2308 2312 force_shadow_repo_refresh=force_shadow_repo_refresh)
2309 2313
2310 2314 merge_check.merge_possible = merge_status
2311 2315 merge_check.merge_msg = msg
2312 2316 merge_check.merge_response = merge_response
2313 2317
2314 2318 source_ref_id = pull_request.source_ref_parts.commit_id
2315 2319 target_ref_id = pull_request.target_ref_parts.commit_id
2316 2320
2317 2321 try:
2318 2322 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2319 2323 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2320 2324 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2321 2325 merge_check.source_commit.current_raw_id = source_commit.raw_id
2322 2326 merge_check.source_commit.previous_raw_id = source_ref_id
2323 2327
2324 2328 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2325 2329 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2326 2330 merge_check.target_commit.current_raw_id = target_commit.raw_id
2327 2331 merge_check.target_commit.previous_raw_id = target_ref_id
2328 2332 except (SourceRefMissing, TargetRefMissing):
2329 2333 pass
2330 2334
2331 2335 if not merge_status:
2332 2336 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2333 2337 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2334 2338
2335 2339 if fail_early:
2336 2340 return merge_check
2337 2341
2338 2342 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2339 2343 return merge_check
2340 2344
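A minimal usage sketch of the validation flow above (illustrative, not part of this changeset); `pull_request` and `auth_user` are assumed to be an existing pull request and the acting user, and the identity lambda stands in for a real translator:

    check = MergeCheck.validate(
        pull_request, auth_user, translator=lambda s: s, fail_early=True)
    if check.merge_possible and not check.failed:
        pass  # all checks passed, safe to hand off to the actual merge call
    else:
        log.debug('Merge blocked: %s', check.merge_msg)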
2341 2345 @classmethod
2342 2346 def get_merge_conditions(cls, pull_request, translator):
2343 2347 _ = translator
2344 2348 merge_details = {}
2345 2349
2346 2350 model = PullRequestModel()
2347 2351 use_rebase = model._use_rebase_for_merging(pull_request)
2348 2352
2349 2353 if use_rebase:
2350 2354 merge_details['merge_strategy'] = dict(
2351 2355 details={},
2352 2356 message=_('Merge strategy: rebase')
2353 2357 )
2354 2358 else:
2355 2359 merge_details['merge_strategy'] = dict(
2356 2360 details={},
2357 2361 message=_('Merge strategy: explicit merge commit')
2358 2362 )
2359 2363
2360 2364 close_branch = model._close_branch_before_merging(pull_request)
2361 2365 if close_branch:
2362 2366 repo_type = pull_request.target_repo.repo_type
2363 2367 close_msg = ''
2364 2368 if repo_type == 'hg':
2365 2369 close_msg = _('Source branch will be closed before the merge.')
2366 2370 elif repo_type == 'git':
2367 2371 close_msg = _('Source branch will be deleted after the merge.')
2368 2372
2369 2373 merge_details['close_branch'] = dict(
2370 2374 details={},
2371 2375 message=close_msg
2372 2376 )
2373 2377
2374 2378 return merge_details
2375 2379
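For illustration, the structure returned above for a Mercurial repository that is configured to rebase and to close the source branch would look roughly like this (messages taken from the branches above):

    {
        'merge_strategy': {'details': {}, 'message': 'Merge strategy: rebase'},
        'close_branch': {'details': {},
                         'message': 'Source branch will be closed before the merge.'},
    }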
2376 2380
2377 2381 @dataclasses.dataclass
2378 2382 class ChangeTuple:
2379 2383 added: list
2380 2384 common: list
2381 2385 removed: list
2382 2386 total: list
2383 2387
2384 2388
2385 2389 @dataclasses.dataclass
2386 2390 class FileChangeTuple:
2387 2391 added: list
2388 2392 modified: list
2389 2393 removed: list
@@ -1,888 +1,889
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import re
21 21 import logging
22 22 import time
23 23 import functools
24 24 from collections import namedtuple
25 25
26 26 from pyramid.threadlocal import get_current_request
27 27
28 28 from rhodecode.lib import rc_cache
29 29 from rhodecode.lib.hash_utils import sha1_safe
30 30 from rhodecode.lib.html_filters import sanitize_html
31 31 from rhodecode.lib.utils2 import (
32 32 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
33 33 from rhodecode.lib.vcs.backends import base
34 34 from rhodecode.lib.statsd_client import StatsdClient
35 35 from rhodecode.model import BaseModel
36 36 from rhodecode.model.db import (
37 37 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting)
38 38 from rhodecode.model.meta import Session
39 39
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43
44 44 UiSetting = namedtuple(
45 45 'UiSetting', ['section', 'key', 'value', 'active'])
46 46
47 47 SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
48 48
49 49
50 50 class SettingNotFound(Exception):
51 51 def __init__(self, setting_id):
52 52 msg = f'Setting `{setting_id}` is not found'
53 53 super().__init__(msg)
54 54
55 55
56 56 class SettingsModel(BaseModel):
57 57 BUILTIN_HOOKS = (
58 58 RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
59 59 RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
60 60 RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
61 61 RhodeCodeUi.HOOK_PUSH_KEY,)
62 62 HOOKS_SECTION = 'hooks'
63 63
64 64 def __init__(self, sa=None, repo=None):
65 65 self.repo = repo
66 66 self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
67 67 self.SettingsDbModel = (
68 68 RepoRhodeCodeSetting if repo else RhodeCodeSetting)
69 69 super().__init__(sa)
70 70
71 71 def get_keyname(self, key_name, prefix='rhodecode_'):
72 72 return f'{prefix}{key_name}'
73 73
74 74 def get_ui_by_key(self, key):
75 75 q = self.UiDbModel.query()
76 76 q = q.filter(self.UiDbModel.ui_key == key)
77 77 q = self._filter_by_repo(RepoRhodeCodeUi, q)
78 78 return q.scalar()
79 79
80 80 def get_ui_by_section(self, section):
81 81 q = self.UiDbModel.query()
82 82 q = q.filter(self.UiDbModel.ui_section == section)
83 83 q = self._filter_by_repo(RepoRhodeCodeUi, q)
84 84 return q.all()
85 85
86 86 def get_ui_by_section_and_key(self, section, key):
87 87 q = self.UiDbModel.query()
88 88 q = q.filter(self.UiDbModel.ui_section == section)
89 89 q = q.filter(self.UiDbModel.ui_key == key)
90 90 q = self._filter_by_repo(RepoRhodeCodeUi, q)
91 91 return q.scalar()
92 92
93 93 def get_ui(self, section=None, key=None):
94 94 q = self.UiDbModel.query()
95 95 q = self._filter_by_repo(RepoRhodeCodeUi, q)
96 96
97 97 if section:
98 98 q = q.filter(self.UiDbModel.ui_section == section)
99 99 if key:
100 100 q = q.filter(self.UiDbModel.ui_key == key)
101 101
102 102 # TODO: mikhail: add caching
103 103 result = [
104 104 UiSetting(
105 105 section=safe_str(r.ui_section), key=safe_str(r.ui_key),
106 106 value=safe_str(r.ui_value), active=r.ui_active
107 107 )
108 108 for r in q.all()
109 109 ]
110 110 return result
111 111
112 112 def get_builtin_hooks(self):
113 113 q = self.UiDbModel.query()
114 114 q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
115 115 return self._get_hooks(q)
116 116
117 117 def get_custom_hooks(self):
118 118 q = self.UiDbModel.query()
119 119 q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
120 120 return self._get_hooks(q)
121 121
122 122 def create_ui_section_value(self, section, val, key=None, active=True):
123 123 new_ui = self.UiDbModel()
124 124 new_ui.ui_section = section
125 125 new_ui.ui_value = val
126 126 new_ui.ui_active = active
127 127
128 128 repository_id = ''
129 129 if self.repo:
130 130 repo = self._get_repo(self.repo)
131 131 repository_id = repo.repo_id
132 132 new_ui.repository_id = repository_id
133 133
134 134 if not key:
 135 135                 # keys are unique, so they need extra info appended to stay unique
136 136 if self.repo:
137 137 key = sha1_safe(f'{section}{val}{repository_id}')
138 138 else:
139 139 key = sha1_safe(f'{section}{val}')
140 140
141 141 new_ui.ui_key = key
142 142
143 143 Session().add(new_ui)
144 144 return new_ui
145 145
146 146 def create_or_update_hook(self, key, value):
147 147 ui = (
148 148 self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
149 149 self.UiDbModel())
150 150 ui.ui_section = self.HOOKS_SECTION
151 151 ui.ui_active = True
152 152 ui.ui_key = key
153 153 ui.ui_value = value
154 154
155 155 if self.repo:
156 156 repo = self._get_repo(self.repo)
157 157 repository_id = repo.repo_id
158 158 ui.repository_id = repository_id
159 159
160 160 Session().add(ui)
161 161 return ui
162 162
163 163 def delete_ui(self, id_):
164 164 ui = self.UiDbModel.get(id_)
165 165 if not ui:
166 166 raise SettingNotFound(id_)
167 167 Session().delete(ui)
168 168
169 169 def get_setting_by_name(self, name):
170 170 q = self._get_settings_query()
171 171 q = q.filter(self.SettingsDbModel.app_settings_name == name)
172 172 return q.scalar()
173 173
174 174 def create_or_update_setting(
175 175 self, name, val: Optional | str = Optional(''), type_: Optional | str = Optional('unicode')):
176 176 """
 177 177         Creates or updates a RhodeCode setting. If an update is triggered, only
 178 178         parameters that are explicitly set are updated; any parameter left as an
 179 179         Optional instance is skipped.
180 180
181 181 :param name:
182 182 :param val:
183 183 :param type_:
184 184 :return:
185 185 """
186 186
187 187 res = self.get_setting_by_name(name)
188 188 repo = self._get_repo(self.repo) if self.repo else None
189 189
190 190 if not res:
191 191 val = Optional.extract(val)
192 192 type_ = Optional.extract(type_)
193 193
194 194 args = (
195 195 (repo.repo_id, name, val, type_)
196 196 if repo else (name, val, type_))
197 197 res = self.SettingsDbModel(*args)
198 198
199 199 else:
200 200 if self.repo:
201 201 res.repository_id = repo.repo_id
202 202
203 203 res.app_settings_name = name
204 204 if not isinstance(type_, Optional):
205 205 # update if set
206 206 res.app_settings_type = type_
207 207 if not isinstance(val, Optional):
208 208 # update if set
209 209 res.app_settings_value = val
210 210
211 211 Session().add(res)
212 212 return res
213 213
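A short sketch of the Optional semantics described above (illustrative only; the `example_vcs` setting name is made up): an explicit argument updates that column, while leaving the default Optional wrapper keeps the stored one.

    model = SettingsModel()
    # creates the setting with value 'darcs' and the default 'unicode' type
    model.create_or_update_setting('example_vcs', 'darcs')
    # updates only the value; the stored type is left untouched
    model.create_or_update_setting('example_vcs', 'git')
    # updates value and type explicitly
    model.create_or_update_setting('pr_merge_enabled', True, 'bool')
    Session().commit()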
214 214 def get_cache_region(self):
215 215 repo = self._get_repo(self.repo) if self.repo else None
216 216 cache_key = f"repo.v1.{repo.repo_id}" if repo else "repo.v1.ALL"
217 217 cache_namespace_uid = f'cache_settings.{cache_key}'
218 218 region = rc_cache.get_or_create_region('cache_general', cache_namespace_uid)
219 219 return region, cache_namespace_uid
220 220
221 221 def invalidate_settings_cache(self, hard=False):
222 222 region, namespace_key = self.get_cache_region()
223 223 log.debug('Invalidation cache [%s] region %s for cache_key: %s',
224 224 'invalidate_settings_cache', region, namespace_key)
225 225
226 226 # we use hard cleanup if invalidation is sent
227 227 rc_cache.clear_cache_namespace(region, namespace_key, method=rc_cache.CLEAR_DELETE)
228 228
229 229 def get_cache_call_method(self, cache=True):
230 230 region, cache_key = self.get_cache_region()
231 231
232 232 @region.conditional_cache_on_arguments(condition=cache)
233 233 def _get_all_settings(name, key):
234 234 q = self._get_settings_query()
235 235 if not q:
236 236 raise Exception('Could not get application settings !')
237 237
238 238 settings = {
239 239 self.get_keyname(res.app_settings_name): res.app_settings_value
240 240 for res in q
241 241 }
242 242 return settings
243 243 return _get_all_settings
244 244
245 245 def get_all_settings(self, cache=False, from_request=True):
246 246 # defines if we use GLOBAL, or PER_REPO
247 247 repo = self._get_repo(self.repo) if self.repo else None
248 248
249 249 # initially try the request context; this is the fastest
250 250 # we only fetch global config, NOT for repo-specific
251 251 if from_request and not repo:
252 252 request = get_current_request()
253 253
254 254 if request and hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
255 255 rc_config = request.call_context.rc_config
256 256 if rc_config:
257 257 return rc_config
258 258
259 259 _region, cache_key = self.get_cache_region()
260 260 _get_all_settings = self.get_cache_call_method(cache=cache)
261 261
262 262 start = time.time()
263 263 result = _get_all_settings('rhodecode_settings', cache_key)
264 264 compute_time = time.time() - start
265 265 log.debug('cached method:%s took %.4fs', _get_all_settings.__name__, compute_time)
266 266
267 267 statsd = StatsdClient.statsd
268 268 if statsd:
269 269 elapsed_time_ms = round(1000.0 * compute_time) # use ms only
270 270 statsd.timing("rhodecode_settings_timing.histogram", elapsed_time_ms,
271 271 use_decimals=False)
272 272
273 273 log.debug('Fetching app settings for key: %s took: %.4fs: cache: %s', cache_key, compute_time, cache)
274 274
275 275 return result
276 276
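An illustrative sketch of how the cached accessor above is typically consumed; the key follows the `rhodecode_` prefix convention from `get_keyname`:

    settings = SettingsModel().get_all_settings(cache=True)
    pr_merge_enabled = str2bool(settings.get('rhodecode_pr_merge_enabled'))
    # force a hard refresh of the per-scope cache namespace
    SettingsModel().invalidate_settings_cache()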
277 277 def get_auth_settings(self):
278 278 q = self._get_settings_query()
279 279 q = q.filter(
280 280 self.SettingsDbModel.app_settings_name.startswith('auth_'))
281 281 rows = q.all()
282 282 auth_settings = {
283 283 row.app_settings_name: row.app_settings_value for row in rows}
284 284 return auth_settings
285 285
286 286 def get_auth_plugins(self):
287 287 auth_plugins = self.get_setting_by_name("auth_plugins")
288 288 return auth_plugins.app_settings_value
289 289
290 290 def get_default_repo_settings(self, strip_prefix=False):
291 291 q = self._get_settings_query()
292 292 q = q.filter(
293 293 self.SettingsDbModel.app_settings_name.startswith('default_'))
294 294 rows = q.all()
295 295
296 296 result = {}
297 297 for row in rows:
298 298 key = row.app_settings_name
299 299 if strip_prefix:
300 300 key = remove_prefix(key, prefix='default_')
301 301 result.update({key: row.app_settings_value})
302 302 return result
303 303
304 304 def get_repo(self):
305 305 repo = self._get_repo(self.repo)
306 306 if not repo:
307 307 raise Exception(
308 308 f'Repository `{self.repo}` cannot be found inside the database')
309 309 return repo
310 310
311 311 def _filter_by_repo(self, model, query):
312 312 if self.repo:
313 313 repo = self.get_repo()
314 314 query = query.filter(model.repository_id == repo.repo_id)
315 315 return query
316 316
317 317 def _get_hooks(self, query):
318 318 query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
319 319 query = self._filter_by_repo(RepoRhodeCodeUi, query)
320 320 return query.all()
321 321
322 322 def _get_settings_query(self):
323 323 q = self.SettingsDbModel.query()
324 324 return self._filter_by_repo(RepoRhodeCodeSetting, q)
325 325
326 326 def list_enabled_social_plugins(self, settings):
327 327 enabled = []
328 328 for plug in SOCIAL_PLUGINS_LIST:
329 329 if str2bool(settings.get(f'rhodecode_auth_{plug}_enabled')):
330 330 enabled.append(plug)
331 331 return enabled
332 332
333 333
334 334 def assert_repo_settings(func):
335 335 @functools.wraps(func)
336 336 def _wrapper(self, *args, **kwargs):
337 337 if not self.repo_settings:
338 338 raise Exception('Repository is not specified')
339 339 return func(self, *args, **kwargs)
340 340 return _wrapper
341 341
342 342
343 343 class IssueTrackerSettingsModel(object):
344 344 INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
345 345 SETTINGS_PREFIX = 'issuetracker_'
346 346
347 347 def __init__(self, sa=None, repo=None):
348 348 self.global_settings = SettingsModel(sa=sa)
349 349 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
350 350
351 351 @property
352 352 def inherit_global_settings(self):
353 353 if not self.repo_settings:
354 354 return True
355 355 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
356 356 return setting.app_settings_value if setting else True
357 357
358 358 @inherit_global_settings.setter
359 359 def inherit_global_settings(self, value):
360 360 if self.repo_settings:
361 361 settings = self.repo_settings.create_or_update_setting(
362 362 self.INHERIT_SETTINGS, value, type_='bool')
363 363 Session().add(settings)
364 364
365 365 def _get_keyname(self, key, uid, prefix='rhodecode_'):
366 366 return f'{prefix}{self.SETTINGS_PREFIX}{key}_{uid}'
367 367
368 368 def _make_dict_for_settings(self, qs):
369 369 prefix_match = self._get_keyname('pat', '',)
370 370
371 371 issuetracker_entries = {}
372 372 # create keys
373 373 for k, v in qs.items():
374 374 if k.startswith(prefix_match):
375 375 uid = k[len(prefix_match):]
376 376 issuetracker_entries[uid] = None
377 377
378 378 def url_cleaner(input_str):
379 379 input_str = input_str.replace('"', '').replace("'", '')
380 380 input_str = sanitize_html(input_str, strip=True)
381 381 return input_str
382 382
383 383 # populate
384 384 for uid in issuetracker_entries:
385 385 url_data = qs.get(self._get_keyname('url', uid))
386 386
387 387 pat = qs.get(self._get_keyname('pat', uid))
388 388 try:
389 389 pat_compiled = re.compile(r'%s' % pat)
390 390 except re.error:
391 391 pat_compiled = None
392 392
393 393 issuetracker_entries[uid] = AttributeDict({
394 394 'pat': pat,
395 395 'pat_compiled': pat_compiled,
396 396 'url': url_cleaner(
397 397 qs.get(self._get_keyname('url', uid)) or ''),
398 398 'pref': sanitize_html(
399 399 qs.get(self._get_keyname('pref', uid)) or ''),
400 400 'desc': qs.get(
401 401 self._get_keyname('desc', uid)),
402 402 })
403 403
404 404 return issuetracker_entries
405 405
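For illustration, two flat settings rows sharing a uid collapse into a single tracker entry (the `example` uid, pattern and URL below are made up):

    qs = {
        'rhodecode_issuetracker_pat_example': r'(?:^|\s)#(\d+)',
        'rhodecode_issuetracker_url_example': 'https://bugs.example.com/{id}',
    }
    entries = IssueTrackerSettingsModel()._make_dict_for_settings(qs)
    # entries['example'].pat_compiled is the compiled regex,
    # entries['example'].url has quotes stripped and HTML sanitized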
406 406 def get_global_settings(self, cache=False):
407 407 """
408 408 Returns list of global issue tracker settings
409 409 """
410 410 defaults = self.global_settings.get_all_settings(cache=cache)
411 411 settings = self._make_dict_for_settings(defaults)
412 412 return settings
413 413
414 414 def get_repo_settings(self, cache=False):
415 415 """
 416 416         Returns per-repository issue tracker settings as a dict of entries keyed by uid
417 417 """
418 418 if not self.repo_settings:
419 419 raise Exception('Repository is not specified')
420 420 all_settings = self.repo_settings.get_all_settings(cache=cache)
421 421 settings = self._make_dict_for_settings(all_settings)
422 422 return settings
423 423
424 424 def get_settings(self, cache=False):
425 425 if self.inherit_global_settings:
426 426 return self.get_global_settings(cache=cache)
427 427 else:
428 428 return self.get_repo_settings(cache=cache)
429 429
430 430 def delete_entries(self, uid):
431 431 if self.repo_settings:
432 432 all_patterns = self.get_repo_settings()
433 433 settings_model = self.repo_settings
434 434 else:
435 435 all_patterns = self.get_global_settings()
436 436 settings_model = self.global_settings
437 437 entries = all_patterns.get(uid, [])
438 438
439 439 for del_key in entries:
440 440 setting_name = self._get_keyname(del_key, uid, prefix='')
441 441 entry = settings_model.get_setting_by_name(setting_name)
442 442 if entry:
443 443 Session().delete(entry)
444 444
445 445 Session().commit()
446 446
447 447 def create_or_update_setting(
448 448 self, name, val=Optional(''), type_=Optional('unicode')):
449 449 if self.repo_settings:
450 450 setting = self.repo_settings.create_or_update_setting(
451 451 name, val, type_)
452 452 else:
453 453 setting = self.global_settings.create_or_update_setting(
454 454 name, val, type_)
455 455 return setting
456 456
457 457
458 458 class VcsSettingsModel(object):
459 459
460 460 INHERIT_SETTINGS = 'inherit_vcs_settings'
461 461 GENERAL_SETTINGS = (
462 462 'use_outdated_comments',
463 463 'pr_merge_enabled',
464 'auto_merge_enabled',
464 465 'hg_use_rebase_for_merging',
465 466 'hg_close_branch_before_merging',
466 467 'git_use_rebase_for_merging',
467 468 'git_close_branch_before_merging',
468 469 'diff_cache',
469 470 )
470 471
471 472 HOOKS_SETTINGS = (
472 473 ('hooks', 'changegroup.repo_size'),
473 474 ('hooks', 'changegroup.push_logger'),
474 475 ('hooks', 'outgoing.pull_logger'),
475 476 )
476 477 HG_SETTINGS = (
477 478 ('extensions', 'largefiles'),
478 479 ('phases', 'publish'),
479 480 ('extensions', 'evolve'),
480 481 ('extensions', 'topic'),
481 482 ('experimental', 'evolution'),
482 483 ('experimental', 'evolution.exchange'),
483 484 )
484 485 GIT_SETTINGS = (
485 486 ('vcs_git_lfs', 'enabled'),
486 487 )
487 488 GLOBAL_HG_SETTINGS = (
488 489 ('extensions', 'largefiles'),
489 490 ('phases', 'publish'),
490 491 ('extensions', 'evolve'),
491 492 ('extensions', 'topic'),
492 493 ('experimental', 'evolution'),
493 494 ('experimental', 'evolution.exchange'),
494 495 )
495 496
496 497 GLOBAL_GIT_SETTINGS = (
497 498 ('vcs_git_lfs', 'enabled'),
498 499 )
499 500
500 501 SVN_BRANCH_SECTION = 'vcs_svn_branch'
501 502 SVN_TAG_SECTION = 'vcs_svn_tag'
502 503 PATH_SETTING = ('paths', '/')
503 504
504 505 def __init__(self, sa=None, repo=None):
505 506 self.global_settings = SettingsModel(sa=sa)
506 507 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
507 508 self._ui_settings = (
508 509 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
509 510 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
510 511
511 512 @property
512 513 @assert_repo_settings
513 514 def inherit_global_settings(self):
514 515 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
515 516 return setting.app_settings_value if setting else True
516 517
517 518 @inherit_global_settings.setter
518 519 @assert_repo_settings
519 520 def inherit_global_settings(self, value):
520 521 self.repo_settings.create_or_update_setting(
521 522 self.INHERIT_SETTINGS, value, type_='bool')
522 523
523 524 def get_keyname(self, key_name, prefix='rhodecode_'):
524 525 return f'{prefix}{key_name}'
525 526
526 527 def get_global_svn_branch_patterns(self):
527 528 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
528 529
529 530 @assert_repo_settings
530 531 def get_repo_svn_branch_patterns(self):
531 532 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
532 533
533 534 def get_global_svn_tag_patterns(self):
534 535 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
535 536
536 537 @assert_repo_settings
537 538 def get_repo_svn_tag_patterns(self):
538 539 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
539 540
540 541 def get_global_settings(self):
541 542 return self._collect_all_settings(global_=True)
542 543
543 544 @assert_repo_settings
544 545 def get_repo_settings(self):
545 546 return self._collect_all_settings(global_=False)
546 547
547 548 @assert_repo_settings
548 549 def get_repo_settings_inherited(self):
549 550 global_settings = self.get_global_settings()
550 551 global_settings.update(self.get_repo_settings())
551 552 return global_settings
552 553
553 554 @assert_repo_settings
554 555 def create_or_update_repo_settings(
555 556 self, data, inherit_global_settings=False):
556 557 from rhodecode.model.scm import ScmModel
557 558
558 559 self.inherit_global_settings = inherit_global_settings
559 560
560 561 repo = self.repo_settings.get_repo()
561 562 if not inherit_global_settings:
562 563 if repo.repo_type == 'svn':
563 564 self.create_repo_svn_settings(data)
564 565 else:
565 566 self.create_or_update_repo_hook_settings(data)
566 567 self.create_or_update_repo_pr_settings(data)
567 568
568 569 if repo.repo_type == 'hg':
569 570 self.create_or_update_repo_hg_settings(data)
570 571
571 572 if repo.repo_type == 'git':
572 573 self.create_or_update_repo_git_settings(data)
573 574
574 575 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
575 576
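A hedged sketch of the form data expected by `create_or_update_repo_settings` for a git repository with inheritance disabled; every GENERAL_SETTINGS key must be present with the `rhodecode_` prefix, and the repo name and values are arbitrary examples:

    data = {
        'hooks_changegroup_repo_size': True,
        'hooks_changegroup_push_logger': True,
        'hooks_outgoing_pull_logger': True,
        'rhodecode_pr_merge_enabled': True,
        'rhodecode_auto_merge_enabled': False,
        'rhodecode_use_outdated_comments': True,
        'rhodecode_hg_use_rebase_for_merging': False,
        'rhodecode_hg_close_branch_before_merging': False,
        'rhodecode_git_use_rebase_for_merging': False,
        'rhodecode_git_close_branch_before_merging': False,
        'rhodecode_diff_cache': True,
        'vcs_git_lfs_enabled': True,
    }
    VcsSettingsModel(repo='some/repo').create_or_update_repo_settings(
        data, inherit_global_settings=False)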
576 577 @assert_repo_settings
577 578 def create_or_update_repo_hook_settings(self, data):
578 579 for section, key in self.HOOKS_SETTINGS:
579 580 data_key = self._get_form_ui_key(section, key)
580 581 if data_key not in data:
581 582 raise ValueError(
582 583 f'The given data does not contain {data_key} key')
583 584
584 585 active = data.get(data_key)
585 586 repo_setting = self.repo_settings.get_ui_by_section_and_key(
586 587 section, key)
587 588 if not repo_setting:
588 589 global_setting = self.global_settings.\
589 590 get_ui_by_section_and_key(section, key)
590 591 self.repo_settings.create_ui_section_value(
591 592 section, global_setting.ui_value, key=key, active=active)
592 593 else:
593 594 repo_setting.ui_active = active
594 595 Session().add(repo_setting)
595 596
596 597 def update_global_hook_settings(self, data):
597 598 for section, key in self.HOOKS_SETTINGS:
598 599 data_key = self._get_form_ui_key(section, key)
599 600 if data_key not in data:
600 601 raise ValueError(
601 602 f'The given data does not contain {data_key} key')
602 603 active = data.get(data_key)
603 604 repo_setting = self.global_settings.get_ui_by_section_and_key(
604 605 section, key)
605 606 repo_setting.ui_active = active
606 607 Session().add(repo_setting)
607 608
608 609 @assert_repo_settings
609 610 def create_or_update_repo_pr_settings(self, data):
610 611 return self._create_or_update_general_settings(
611 612 self.repo_settings, data)
612 613
613 614 def create_or_update_global_pr_settings(self, data):
614 615 return self._create_or_update_general_settings(
615 616 self.global_settings, data)
616 617
617 618 @assert_repo_settings
618 619 def create_repo_svn_settings(self, data):
619 620 return self._create_svn_settings(self.repo_settings, data)
620 621
621 622 def _set_evolution(self, settings, is_enabled):
622 623 if is_enabled:
623 624 # if evolve is active set evolution=all
624 625
625 626 self._create_or_update_ui(
626 627 settings, *('experimental', 'evolution'), value='all',
627 628 active=True)
628 629 self._create_or_update_ui(
629 630 settings, *('experimental', 'evolution.exchange'), value='yes',
630 631 active=True)
631 632 # if evolve is active set topics server support
632 633 self._create_or_update_ui(
633 634 settings, *('extensions', 'topic'), value='',
634 635 active=True)
635 636
636 637 else:
637 638 self._create_or_update_ui(
638 639 settings, *('experimental', 'evolution'), value='',
639 640 active=False)
640 641 self._create_or_update_ui(
641 642 settings, *('experimental', 'evolution.exchange'), value='no',
642 643 active=False)
643 644 self._create_or_update_ui(
644 645 settings, *('extensions', 'topic'), value='',
645 646 active=False)
646 647
647 648 @assert_repo_settings
648 649 def create_or_update_repo_hg_settings(self, data):
649 650 largefiles, phases, evolve = \
650 651 self.HG_SETTINGS[:3]
651 652 largefiles_key, phases_key, evolve_key = \
652 653 self._get_settings_keys(self.HG_SETTINGS[:3], data)
653 654
654 655 self._create_or_update_ui(
655 656 self.repo_settings, *largefiles, value='',
656 657 active=data[largefiles_key])
657 658 self._create_or_update_ui(
658 659 self.repo_settings, *evolve, value='',
659 660 active=data[evolve_key])
660 661 self._set_evolution(self.repo_settings, is_enabled=data[evolve_key])
661 662
662 663 self._create_or_update_ui(
663 664 self.repo_settings, *phases, value=safe_str(data[phases_key]))
664 665
665 666 def create_or_update_global_hg_settings(self, data):
666 667 opts_len = 3
667 668 largefiles, phases, evolve \
668 669 = self.GLOBAL_HG_SETTINGS[:opts_len]
669 670 largefiles_key, phases_key, evolve_key \
670 671 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:opts_len], data)
671 672
672 673 self._create_or_update_ui(
673 674 self.global_settings, *largefiles, value='',
674 675 active=data[largefiles_key])
675 676 self._create_or_update_ui(
676 677 self.global_settings, *phases, value=safe_str(data[phases_key]))
677 678 self._create_or_update_ui(
678 679 self.global_settings, *evolve, value='',
679 680 active=data[evolve_key])
680 681 self._set_evolution(self.global_settings, is_enabled=data[evolve_key])
681 682
682 683 def create_or_update_repo_git_settings(self, data):
 683 684         # NOTE(marcink): comma makes unpack work properly
684 685 lfs_enabled, \
685 686 = self.GIT_SETTINGS
686 687
687 688 lfs_enabled_key, \
688 689 = self._get_settings_keys(self.GIT_SETTINGS, data)
689 690
690 691 self._create_or_update_ui(
691 692 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
692 693 active=data[lfs_enabled_key])
693 694
694 695 def create_or_update_global_git_settings(self, data):
695 696 lfs_enabled = self.GLOBAL_GIT_SETTINGS[0]
696 697 lfs_enabled_key = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)[0]
697 698
698 699 self._create_or_update_ui(
699 700 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
700 701 active=data[lfs_enabled_key])
701 702
702 703 def create_or_update_global_svn_settings(self, data):
703 704 # branch/tags patterns
704 705 self._create_svn_settings(self.global_settings, data)
705 706
706 707 @assert_repo_settings
707 708 def delete_repo_svn_pattern(self, id_):
708 709 ui = self.repo_settings.UiDbModel.get(id_)
709 710 if ui and ui.repository.repo_name == self.repo_settings.repo:
710 711 # only delete if it's the same repo as initialized settings
711 712 self.repo_settings.delete_ui(id_)
712 713 else:
 713 714             # raise an error as if we could not find this option
714 715 self.repo_settings.delete_ui(-1)
715 716
716 717 def delete_global_svn_pattern(self, id_):
717 718 self.global_settings.delete_ui(id_)
718 719
719 720 @assert_repo_settings
720 721 def get_repo_ui_settings(self, section=None, key=None):
721 722 global_uis = self.global_settings.get_ui(section, key)
722 723 repo_uis = self.repo_settings.get_ui(section, key)
723 724
724 725 filtered_repo_uis = self._filter_ui_settings(repo_uis)
725 726 filtered_repo_uis_keys = [
726 727 (s.section, s.key) for s in filtered_repo_uis]
727 728
728 729 def _is_global_ui_filtered(ui):
729 730 return (
730 731 (ui.section, ui.key) in filtered_repo_uis_keys
731 732 or ui.section in self._svn_sections)
732 733
733 734 filtered_global_uis = [
734 735 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
735 736
736 737 return filtered_global_uis + filtered_repo_uis
737 738
738 739 def get_global_ui_settings(self, section=None, key=None):
739 740 return self.global_settings.get_ui(section, key)
740 741
741 742 def get_ui_settings_as_config_obj(self, section=None, key=None):
742 743 config = base.Config()
743 744
744 745 ui_settings = self.get_ui_settings(section=section, key=key)
745 746
746 747 for entry in ui_settings:
747 748 config.set(entry.section, entry.key, entry.value)
748 749
749 750 return config
750 751
751 752 def get_ui_settings(self, section=None, key=None):
752 753 if not self.repo_settings or self.inherit_global_settings:
753 754 return self.get_global_ui_settings(section, key)
754 755 else:
755 756 return self.get_repo_ui_settings(section, key)
756 757
757 758 def get_svn_patterns(self, section=None):
758 759 if not self.repo_settings:
759 760 return self.get_global_ui_settings(section)
760 761 else:
761 762 return self.get_repo_ui_settings(section)
762 763
763 764 @assert_repo_settings
764 765 def get_repo_general_settings(self):
765 766 global_settings = self.global_settings.get_all_settings()
766 767 repo_settings = self.repo_settings.get_all_settings()
767 768 filtered_repo_settings = self._filter_general_settings(repo_settings)
768 769 global_settings.update(filtered_repo_settings)
769 770 return global_settings
770 771
771 772 def get_global_general_settings(self):
772 773 return self.global_settings.get_all_settings()
773 774
774 775 def get_general_settings(self):
775 776 if not self.repo_settings or self.inherit_global_settings:
776 777 return self.get_global_general_settings()
777 778 else:
778 779 return self.get_repo_general_settings()
779 780
780 781 def _filter_ui_settings(self, settings):
781 782 filtered_settings = [
782 783 s for s in settings if self._should_keep_setting(s)]
783 784 return filtered_settings
784 785
785 786 def _should_keep_setting(self, setting):
786 787 keep = (
787 788 (setting.section, setting.key) in self._ui_settings or
788 789 setting.section in self._svn_sections)
789 790 return keep
790 791
791 792 def _filter_general_settings(self, settings):
792 793 keys = [self.get_keyname(key) for key in self.GENERAL_SETTINGS]
793 794 return {
794 795 k: settings[k]
795 796 for k in settings if k in keys}
796 797
797 798 def _collect_all_settings(self, global_=False):
798 799 settings = self.global_settings if global_ else self.repo_settings
799 800 result = {}
800 801
801 802 for section, key in self._ui_settings:
802 803 ui = settings.get_ui_by_section_and_key(section, key)
803 804 result_key = self._get_form_ui_key(section, key)
804 805
805 806 if ui:
806 807 if section in ('hooks', 'extensions'):
807 808 result[result_key] = ui.ui_active
808 809 elif result_key in ['vcs_git_lfs_enabled']:
809 810 result[result_key] = ui.ui_active
810 811 else:
811 812 result[result_key] = ui.ui_value
812 813
813 814 for name in self.GENERAL_SETTINGS:
814 815 setting = settings.get_setting_by_name(name)
815 816 if setting:
816 817 result_key = self.get_keyname(name)
817 818 result[result_key] = setting.app_settings_value
818 819
819 820 return result
820 821
821 822 def _get_form_ui_key(self, section, key):
822 823 return '{section}_{key}'.format(
823 824 section=section, key=key.replace('.', '_'))
824 825
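For illustration, the helper above maps a (section, key) pair to the flat form key used by the create_or_update_* methods:

    VcsSettingsModel()._get_form_ui_key('hooks', 'changegroup.repo_size')
    # -> 'hooks_changegroup_repo_size'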
825 826 def _create_or_update_ui(
826 827 self, settings, section, key, value=None, active=None):
827 828 ui = settings.get_ui_by_section_and_key(section, key)
828 829 if not ui:
829 830 active = True if active is None else active
830 831 settings.create_ui_section_value(
831 832 section, value, key=key, active=active)
832 833 else:
833 834 if active is not None:
834 835 ui.ui_active = active
835 836 if value is not None:
836 837 ui.ui_value = value
837 838 Session().add(ui)
838 839
839 840 def _create_svn_settings(self, settings, data):
840 841 svn_settings = {
841 842 'new_svn_branch': self.SVN_BRANCH_SECTION,
842 843 'new_svn_tag': self.SVN_TAG_SECTION
843 844 }
844 845 for key in svn_settings:
845 846 if data.get(key):
846 847 settings.create_ui_section_value(svn_settings[key], data[key])
847 848
848 849 def _create_or_update_general_settings(self, settings, data):
849 850 for name in self.GENERAL_SETTINGS:
850 851 data_key = self.get_keyname(name)
851 852 if data_key not in data:
852 853 raise ValueError(
853 854 f'The given data does not contain {data_key} key')
854 855 setting = settings.create_or_update_setting(
855 856 name, data[data_key], 'bool')
856 857 Session().add(setting)
857 858
858 859 def _get_settings_keys(self, settings, data):
859 860 data_keys = [self._get_form_ui_key(*s) for s in settings]
860 861 for data_key in data_keys:
861 862 if data_key not in data:
862 863 raise ValueError(
863 864 f'The given data does not contain {data_key} key')
864 865 return data_keys
865 866
866 867 def create_largeobjects_dirs_if_needed(self, repo_store_path):
867 868 """
868 869 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
 869 870         creates the largefiles and LFS store directories if they are missing.
870 871 """
871 872
872 873 from rhodecode.lib.vcs.backends.hg import largefiles_store
873 874 from rhodecode.lib.vcs.backends.git import lfs_store
874 875
875 876 paths = [
876 877 largefiles_store(repo_store_path),
877 878 lfs_store(repo_store_path)]
878 879
879 880 for path in paths:
880 881 if os.path.isdir(path):
881 882 continue
882 883 if os.path.isfile(path):
883 884 continue
 884 885             # neither a file nor a dir, try to create it
885 886 try:
886 887 os.makedirs(path)
887 888 except Exception:
888 889 log.warning('Failed to create largefiles dir:%s', path)
@@ -1,422 +1,450
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18 import io
19 19 import shlex
20 20
21 21 import math
22 22 import re
23 23 import os
24 24 import datetime
25 25 import logging
26 26 import queue
27 27 import subprocess
28 28
29 29
30 30 from dateutil.parser import parse
31 31 from pyramid.interfaces import IRoutesMapper
32 32 from pyramid.settings import asbool
33 33 from pyramid.path import AssetResolver
34 34 from threading import Thread
35 35
36 36 from rhodecode.config.jsroutes import generate_jsroutes_content
37 37 from rhodecode.lib.base import get_auth_user
38 38 from rhodecode.lib.celerylib.loader import set_celery_conf
39 39
40 40 import rhodecode
41 41
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
46 46 def add_renderer_globals(event):
47 47 from rhodecode.lib import helpers
48 48
49 49 # TODO: When executed in pyramid view context the request is not available
50 50 # in the event. Find a better solution to get the request.
51 51 from pyramid.threadlocal import get_current_request
52 52 request = event['request'] or get_current_request()
53 53
54 54 # Add Pyramid translation as '_' to context
55 55 event['_'] = request.translate
56 56 event['_ungettext'] = request.plularize
57 57 event['h'] = helpers
58 58
59 59
60 def auto_merge_pr_if_needed(event):
61 from rhodecode.model.db import PullRequest
62 from rhodecode.model.pull_request import (
63 PullRequestModel, ChangesetStatus, MergeCheck
64 )
65
66 pr_event_data = event.as_dict()['pullrequest']
67 pull_request = PullRequest.get(pr_event_data['pull_request_id'])
68 calculated_status = pr_event_data['status']
69 if (calculated_status == ChangesetStatus.STATUS_APPROVED
70 and PullRequestModel().is_automatic_merge_enabled(pull_request)):
71 user = pull_request.author.AuthUser()
72
73 merge_check = MergeCheck.validate(
74 pull_request, user, translator=lambda x: x, fail_early=True
75 )
76 if merge_check.merge_possible:
77 from rhodecode.lib.base import vcs_operation_context
78 extras = vcs_operation_context(
79 event.request.environ, repo_name=pull_request.target_repo.repo_name,
80 username=user.username, action='push',
81 scm=pull_request.target_repo.repo_type)
82 from rc_ee.lib.celerylib.tasks import auto_merge_repo
83 auto_merge_repo.apply_async(
84 args=(pull_request.pull_request_id, extras)
85 )
86
87
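A hedged wiring sketch for the new subscriber above (not part of this changeset): it would be registered against the pull request review event during Pyramid configuration; the exact event class name is an assumption here.

    from rhodecode import events

    # inside the application setup code that owns the Pyramid `config` object
    config.add_subscriber(auto_merge_pr_if_needed, events.PullRequestReviewEvent)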
60 88 def set_user_lang(event):
61 89 request = event.request
62 90 cur_user = getattr(request, 'user', None)
63 91
64 92 if cur_user:
65 93 user_lang = cur_user.get_instance().user_data.get('language')
66 94 if user_lang:
67 95 log.debug('lang: setting current user:%s language to: %s', cur_user, user_lang)
68 96 event.request._LOCALE_ = user_lang
69 97
70 98
71 99 def update_celery_conf(event):
72 100 log.debug('Setting celery config from new request')
73 101 set_celery_conf(request=event.request, registry=event.request.registry)
74 102
75 103
76 104 def add_request_user_context(event):
77 105 """
78 106 Adds auth user into request context
79 107 """
80 108
81 109 request = event.request
82 110 # access req_id as soon as possible
83 111 req_id = request.req_id
84 112
85 113 if hasattr(request, 'vcs_call'):
86 114 # skip vcs calls
87 115 return
88 116
89 117 if hasattr(request, 'rpc_method'):
90 118 # skip api calls
91 119 return
92 120
93 121 auth_user, auth_token = get_auth_user(request)
94 122 request.user = auth_user
95 123 request.user_auth_token = auth_token
96 124 request.environ['rc_auth_user'] = auth_user
97 125 request.environ['rc_auth_user_id'] = str(auth_user.user_id)
98 126 request.environ['rc_req_id'] = req_id
99 127
100 128
101 129 def reset_log_bucket(event):
102 130 """
103 131 reset the log bucket on new request
104 132 """
105 133 request = event.request
106 134 request.req_id_records_init()
107 135
108 136
109 137 def scan_repositories_if_enabled(event):
110 138 """
111 139 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
112 140 does a repository scan if enabled in the settings.
113 141 """
114 142
115 143 settings = event.app.registry.settings
116 144 vcs_server_enabled = settings['vcs.server.enable']
117 145 import_on_startup = settings['startup.import_repos']
118 146
119 147 if vcs_server_enabled and import_on_startup:
120 148 from rhodecode.model.scm import ScmModel
121 149 from rhodecode.lib.utils import repo2db_mapper
122 150 scm = ScmModel()
123 151 repositories = scm.repo_scan(scm.repos_path)
124 152 repo2db_mapper(repositories, remove_obsolete=False)
125 153
126 154
127 155 def write_metadata_if_needed(event):
128 156 """
129 157 Writes upgrade metadata
130 158 """
131 159 import rhodecode
132 160 from rhodecode.lib import system_info
133 161 from rhodecode.lib import ext_json
134 162
135 163 fname = '.rcmetadata.json'
136 164 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
137 165 metadata_destination = os.path.join(ini_loc, fname)
138 166
139 167 def get_update_age():
140 168 now = datetime.datetime.utcnow()
141 169
142 170 with open(metadata_destination, 'rb') as f:
143 171 data = ext_json.json.loads(f.read())
144 172 if 'created_on' in data:
145 173 update_date = parse(data['created_on'])
146 174 diff = now - update_date
147 175 return diff.total_seconds() / 60.0
148 176
149 177 return 0
150 178
151 179 def write():
152 180 configuration = system_info.SysInfo(
153 181 system_info.rhodecode_config)()['value']
154 182 license_token = configuration['config']['license_token']
155 183
156 184 setup = dict(
157 185 workers=configuration['config']['server:main'].get(
158 186 'workers', '?'),
159 187 worker_type=configuration['config']['server:main'].get(
160 188 'worker_class', 'sync'),
161 189 )
162 190 dbinfo = system_info.SysInfo(system_info.database_info)()['value']
163 191 del dbinfo['url']
164 192
165 193 metadata = dict(
166 194 desc='upgrade metadata info',
167 195 license_token=license_token,
168 196 created_on=datetime.datetime.utcnow().isoformat(),
169 197 usage=system_info.SysInfo(system_info.usage_info)()['value'],
170 198 platform=system_info.SysInfo(system_info.platform_type)()['value'],
171 199 database=dbinfo,
172 200 cpu=system_info.SysInfo(system_info.cpu)()['value'],
173 201 memory=system_info.SysInfo(system_info.memory)()['value'],
174 202 setup=setup
175 203 )
176 204
177 205 with open(metadata_destination, 'wb') as f:
178 206 f.write(ext_json.json.dumps(metadata))
179 207
180 208 settings = event.app.registry.settings
181 209 if settings.get('metadata.skip'):
182 210 return
183 211
 184 212     # only write this every 24h; worker restarts caused unwanted delays
185 213 try:
186 214 age_in_min = get_update_age()
187 215 except Exception:
188 216 age_in_min = 0
189 217
190 218 if age_in_min > 60 * 60 * 24:
191 219 return
192 220
193 221 try:
194 222 write()
195 223 except Exception:
196 224 pass
197 225
198 226
199 227 def write_usage_data(event):
200 228 import rhodecode
201 229 from rhodecode.lib import system_info
202 230 from rhodecode.lib import ext_json
203 231
204 232 settings = event.app.registry.settings
205 233 instance_tag = settings.get('metadata.write_usage_tag')
206 234 if not settings.get('metadata.write_usage'):
207 235 return
208 236
209 237 def get_update_age(dest_file):
210 238 now = datetime.datetime.now(datetime.UTC)
211 239
212 240 with open(dest_file, 'rb') as f:
213 241 data = ext_json.json.loads(f.read())
214 242 if 'created_on' in data:
215 243 update_date = parse(data['created_on'])
216 244 diff = now - update_date
217 245 return math.ceil(diff.total_seconds() / 60.0)
218 246
219 247 return 0
220 248
221 249 utc_date = datetime.datetime.now(datetime.UTC)
222 250 hour_quarter = int(math.ceil((utc_date.hour + utc_date.minute/60.0) / 6.))
223 251 fname = f'.rc_usage_{utc_date.year}{utc_date.month:02d}{utc_date.day:02d}_{hour_quarter}.json'
224 252 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
225 253
226 254 usage_dir = os.path.join(ini_loc, '.rcusage')
227 255 if not os.path.isdir(usage_dir):
228 256 os.makedirs(usage_dir)
229 257 usage_metadata_destination = os.path.join(usage_dir, fname)
230 258
231 259 try:
232 260 age_in_min = get_update_age(usage_metadata_destination)
233 261 except Exception:
234 262 age_in_min = 0
235 263
236 264 # write every 6th hour
237 265 if age_in_min and age_in_min < 60 * 6:
238 266 log.debug('Usage file created %s minutes ago, skipping (threshold: %s minutes)...',
239 267 age_in_min, 60 * 6)
240 268 return
241 269
242 270 def write(dest_file):
243 271 configuration = system_info.SysInfo(system_info.rhodecode_config)()['value']
244 272 license_token = configuration['config']['license_token']
245 273
246 274 metadata = dict(
247 275 desc='Usage data',
248 276 instance_tag=instance_tag,
249 277 license_token=license_token,
250 278 created_on=datetime.datetime.utcnow().isoformat(),
251 279 usage=system_info.SysInfo(system_info.usage_info)()['value'],
252 280 )
253 281
254 282 with open(dest_file, 'wb') as f:
255 283 f.write(ext_json.formatted_json(metadata))
256 284
257 285 try:
258 286 log.debug('Writing usage file at: %s', usage_metadata_destination)
259 287 write(usage_metadata_destination)
260 288 except Exception:
261 289 pass
262 290
263 291
264 292 def write_js_routes_if_enabled(event):
265 293 registry = event.app.registry
266 294
267 295 mapper = registry.queryUtility(IRoutesMapper)
268 296 _argument_prog = re.compile(r'\{(.*?)\}|:\((.*)\)')
269 297
270 298 def _extract_route_information(route):
271 299 """
272 300 Convert a route into tuple(name, path, args), eg:
273 301 ('show_user', '/profile/%(username)s', ['username'])
274 302 """
275 303
276 304 route_path = route.pattern
277 305 pattern = route.pattern
278 306
279 307 def replace(matchobj):
280 308 if matchobj.group(1):
281 309 return "%%(%s)s" % matchobj.group(1).split(':')[0]
282 310 else:
283 311 return "%%(%s)s" % matchobj.group(2)
284 312
285 313 route_path = _argument_prog.sub(replace, route_path)
286 314
287 315 if not route_path.startswith('/'):
288 316 route_path = f'/{route_path}'
289 317
290 318 return (
291 319 route.name,
292 320 route_path,
293 321 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
294 322 for arg in _argument_prog.findall(pattern)]
295 323 )
296 324
297 325 def get_routes():
298 326 # pyramid routes
299 327 for route in mapper.get_routes():
300 328 if not route.name.startswith('__'):
301 329 yield _extract_route_information(route)
302 330
303 331 if asbool(registry.settings.get('generate_js_files', 'false')):
304 332 static_path = AssetResolver().resolve('rhodecode:public').abspath()
305 333 jsroutes = get_routes()
306 334 jsroutes_file_content = generate_jsroutes_content(jsroutes)
307 335 jsroutes_file_path = os.path.join(
308 336 static_path, 'js', 'rhodecode', 'routes.js')
309 337
310 338 try:
311 339 with open(jsroutes_file_path, 'w', encoding='utf-8') as f:
312 340 f.write(jsroutes_file_content)
313 341 log.debug('generated JS files in %s', jsroutes_file_path)
314 342 except Exception:
315 343 log.exception('Failed to write routes.js into %s', jsroutes_file_path)
316 344
317 345
318 346 def import_license_if_present(event):
319 347 """
320 348 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
 321 349     imports a license key based on the presence of the file.
322 350 """
323 351 settings = event.app.registry.settings
324 352
325 353 rhodecode_edition_id = settings.get('rhodecode.edition_id')
326 354 license_file_path = settings.get('license.import_path')
327 355 force = settings.get('license.import_path_mode') == 'force'
328 356
329 357 if license_file_path and rhodecode_edition_id == 'EE':
330 358 log.debug('license.import_path= is set importing license from %s', license_file_path)
331 359 from rhodecode.model.meta import Session
332 360 from rhodecode.model.license import apply_license_from_file
333 361 try:
334 362 apply_license_from_file(license_file_path, force=force)
335 363 Session().commit()
336 364 except OSError:
337 365 log.exception('Failed to import license from %s, make sure this file exists', license_file_path)
338 366
339 367
340 368 class Subscriber(object):
341 369 """
342 370 Base class for subscribers to the pyramid event system.
343 371 """
344 372 def __call__(self, event):
345 373 self.run(event)
346 374
347 375 def run(self, event):
348 376 raise NotImplementedError('Subclass has to implement this.')
349 377
350 378
351 379 class AsyncSubscriber(Subscriber):
352 380 """
353 381 Subscriber that handles the execution of events in a separate task to not
354 382 block the execution of the code which triggers the event. It puts the
 355 383     received events into a queue from which the worker thread takes them in
356 384 order.
357 385 """
358 386 def __init__(self):
359 387 self._stop = False
360 388 self._eventq = queue.Queue()
361 389 self._worker = self.create_worker()
362 390 self._worker.start()
363 391
364 392 def __call__(self, event):
365 393 self._eventq.put(event)
366 394
367 395 def create_worker(self):
368 396 worker = Thread(target=self.do_work)
369 397 worker.daemon = True
370 398 return worker
371 399
372 400 def stop_worker(self):
373 401 self._stop = False
374 402 self._eventq.put(None)
375 403 self._worker.join()
376 404
377 405 def do_work(self):
378 406 while not self._stop:
379 407 event = self._eventq.get()
380 408 if event is not None:
381 409 self.run(event)
382 410
383 411
384 412 class AsyncSubprocessSubscriber(AsyncSubscriber):
385 413 """
386 414 Subscriber that uses the subprocess module to execute a command if an
387 415 event is received. Events are handled asynchronously::
388 416
389 417 subscriber = AsyncSubprocessSubscriber('ls -la', timeout=10)
390 418 subscriber(dummyEvent) # running __call__(event)
391 419
392 420 """
393 421
394 422 def __init__(self, cmd, timeout=None):
395 423 if not isinstance(cmd, (list, tuple)):
396 424 cmd = shlex.split(cmd)
397 425 super().__init__()
398 426 self._cmd = cmd
399 427 self._timeout = timeout
400 428
401 429 def run(self, event):
402 430 cmd = self._cmd
403 431 timeout = self._timeout
404 432 log.debug('Executing command %s.', cmd)
405 433
406 434 try:
407 435 output = subprocess.check_output(
408 436 cmd, timeout=timeout, stderr=subprocess.STDOUT)
409 437 log.debug('Command finished %s', cmd)
410 438 if output:
411 439 log.debug('Command output: %s', output)
412 440 except subprocess.TimeoutExpired as e:
413 441 log.exception('Timeout while executing command.')
414 442 if e.output:
415 443 log.error('Command output: %s', e.output)
416 444 except subprocess.CalledProcessError as e:
417 445 log.exception('Error while executing command.')
418 446 if e.output:
419 447 log.error('Command output: %s', e.output)
420 448 except Exception:
421 449 log.exception(
422 450 'Exception while executing command %s.', cmd)
@@ -1,308 +1,325
1 1 ## snippet for displaying vcs settings
2 2 ## usage:
3 3 ## <%namespace name="vcss" file="/base/vcssettings.mako"/>
4 4 ## ${vcss.vcs_settings_fields()}
5 5
6 6 <%def name="vcs_settings_fields(suffix='', svn_branch_patterns=None, svn_tag_patterns=None, repo_type=None, display_globals=False, **kwargs)">
7 7 % if display_globals:
8 8
9 9 % endif
10 10
11 11 % if display_globals or repo_type in ['git', 'hg']:
12 12 <div class="panel panel-default">
13 13 <div class="panel-heading" id="vcs-hooks-options">
14 14 <h3 class="panel-title">${_('Internal Hooks')}<a class="permalink" href="#vcs-hooks-options"> ΒΆ</a></h3>
15 15 </div>
16 16 <div class="panel-body">
17 17 <div class="field">
18 18 <div class="checkbox">
19 19 ${h.checkbox('hooks_changegroup_repo_size' + suffix, 'True', **kwargs)}
20 20 <label for="hooks_changegroup_repo_size${suffix}">${_('Show repository size after push')}</label>
21 21 </div>
22 22
23 23 <div class="label">
24 24 <span class="help-block">${_('Trigger a hook that calculates repository size after each push.')}</span>
25 25 </div>
26 26 <div class="checkbox">
27 27 ${h.checkbox('hooks_changegroup_push_logger' + suffix, 'True', **kwargs)}
28 28 <label for="hooks_changegroup_push_logger${suffix}">${_('Execute pre/post push hooks')}</label>
29 29 </div>
30 30 <div class="label">
 31 31                         <span class="help-block">${_('Execute built-in pre/post push hooks. This also executes rcextensions hooks.')}</span>
32 32 </div>
33 33 <div class="checkbox">
34 34 ${h.checkbox('hooks_outgoing_pull_logger' + suffix, 'True', **kwargs)}
35 35 <label for="hooks_outgoing_pull_logger${suffix}">${_('Execute pre/post pull hooks')}</label>
36 36 </div>
37 37 <div class="label">
 38 38                         <span class="help-block">${_('Execute built-in pre/post pull hooks. This also executes rcextensions hooks.')}</span>
39 39 </div>
40 40 </div>
41 41 </div>
42 42 </div>
43 43 % endif
44 44
45 45 % if display_globals or repo_type in ['hg']:
46 46 <div class="panel panel-default">
47 47 <div class="panel-heading" id="vcs-hg-options">
48 48 <h3 class="panel-title">${_('Mercurial Settings')}<a class="permalink" href="#vcs-hg-options"> ΒΆ</a></h3>
49 49 </div>
50 50 <div class="panel-body">
51 51 <div class="checkbox">
52 52 ${h.checkbox('extensions_largefiles' + suffix, 'True', **kwargs)}
53 53 <label for="extensions_largefiles${suffix}">${_('Enable largefiles extension')}</label>
54 54 </div>
55 55 <div class="label">
56 56 % if display_globals:
57 57 <span class="help-block">${_('Enable Largefiles extensions for all repositories.')}</span>
58 58 % else:
59 59 <span class="help-block">${_('Enable Largefiles extensions for this repository.')}</span>
60 60 % endif
61 61 </div>
62 62
63 63 <div class="checkbox">
64 64 ${h.checkbox('phases_publish' + suffix, 'True', **kwargs)}
65 65 <label for="phases_publish${suffix}">${_('Set repositories as publishing') if display_globals else _('Set repository as publishing')}</label>
66 66 </div>
67 67 <div class="label">
68 68 <span class="help-block">${_('When this is enabled all commits in the repository are seen as public commits by clients.')}</span>
69 69 </div>
70 70
71 71 <div class="checkbox">
72 72 ${h.checkbox('extensions_evolve' + suffix, 'True', **kwargs)}
73 73 <label for="extensions_evolve${suffix}">${_('Enable Evolve and Topic extension')}</label>
74 74 </div>
75 75 <div class="label">
76 76 % if display_globals:
77 77 <span class="help-block">${_('Enable Evolve and Topic extensions for all repositories.')}</span>
78 78 % else:
79 79 <span class="help-block">${_('Enable Evolve and Topic extensions for this repository.')}</span>
80 80 % endif
81 81 </div>
82 82
83 83 </div>
84 84 </div>
85 85 % endif
86 86
87 87 % if display_globals or repo_type in ['git']:
88 88 <div class="panel panel-default">
89 89 <div class="panel-heading" id="vcs-git-options">
90 90 <h3 class="panel-title">${_('Git Settings')}<a class="permalink" href="#vcs-git-options"> ΒΆ</a></h3>
91 91 </div>
92 92 <div class="panel-body">
93 93 <div class="checkbox">
94 94 ${h.checkbox('vcs_git_lfs_enabled' + suffix, 'True', **kwargs)}
95 95 <label for="vcs_git_lfs_enabled${suffix}">${_('Enable lfs extension')}</label>
96 96 </div>
97 97 <div class="label">
98 98 % if display_globals:
99 99 <span class="help-block">${_('Enable lfs extensions for all repositories.')}</span>
100 100 % else:
101 101 <span class="help-block">${_('Enable lfs extensions for this repository.')}</span>
102 102 % endif
103 103 </div>
104 104 </div>
105 105 </div>
106 106 % endif
107 107
108 108 % if display_globals or repo_type in ['svn']:
109 109 <div class="panel panel-default">
110 110 <div class="panel-heading" id="vcs-svn-options">
111 111 <h3 class="panel-title">${_('Subversion Settings')}<a class="permalink" href="#vcs-svn-options"> ΒΆ</a></h3>
112 112 </div>
113 113 <div class="panel-body">
114 114 % if display_globals:
115 115 <div class="field">
116 116 <div class="content" >
117 117 <label>${_('mod_dav config')}</label><br/>
118 118 <code>path: ${c.svn_config_path}</code>
119 119 </div>
120 120 <br/>
121 121
122 122 <div>
123 123
124 124 % if c.svn_generate_config:
125 125 <span class="buttons">
126 126 <button class="btn btn-primary" id="vcs_svn_generate_cfg">${_('Re-generate Apache Config')}</button>
127 127 </span>
128 128 % endif
129 129 </div>
130 130 </div>
131 131 % endif
132 132
133 133 <div class="field">
134 134 <div class="content" >
135 135 <label>${_('Repository patterns')}</label><br/>
136 136 </div>
137 137 </div>
138 138 <div class="label">
 139 139                 <span class="help-block">${_('Patterns for identifying SVN branches and tags. For recursive search, use "*", e.g. "/branches/*".')}</span>
140 140 </div>
141 141
142 142 <div class="field branch_patterns">
143 143 <div class="input" >
144 144 <label>${_('Branches')}:</label><br/>
145 145 </div>
146 146 % if svn_branch_patterns:
147 147 % for branch in svn_branch_patterns:
148 148 <div class="input adjacent" id="${'id%s' % branch.ui_id}">
149 149 ${h.hidden('branch_ui_key' + suffix, branch.ui_key)}
150 150 ${h.text('branch_value_%d' % branch.ui_id + suffix, branch.ui_value, size=59, readonly="readonly", class_='disabled')}
151 151 % if kwargs.get('disabled') != 'disabled':
152 152 <span class="btn btn-x" onclick="ajaxDeletePattern(${branch.ui_id},'${'id%s' % branch.ui_id}')">
153 153 ${_('Delete')}
154 154 </span>
155 155 % endif
156 156 </div>
157 157 % endfor
158 158 %endif
159 159 </div>
160 160 % if kwargs.get('disabled') != 'disabled':
161 161 <div class="field branch_patterns">
162 162 <div class="input" >
163 163 ${h.text('new_svn_branch',size=59,placeholder='New branch pattern')}
164 164 </div>
165 165 </div>
166 166 % endif
167 167 <div class="field tag_patterns">
168 168 <div class="input" >
169 169 <label>${_('Tags')}:</label><br/>
170 170 </div>
171 171 % if svn_tag_patterns:
172 172 % for tag in svn_tag_patterns:
173 173 <div class="input" id="${'id%s' % tag.ui_id + suffix}">
174 174 ${h.hidden('tag_ui_key' + suffix, tag.ui_key)}
175 175 ${h.text('tag_ui_value_new_%d' % tag.ui_id + suffix, tag.ui_value, size=59, readonly="readonly", class_='disabled tag_input')}
176 176 % if kwargs.get('disabled') != 'disabled':
177 177 <span class="btn btn-x" onclick="ajaxDeletePattern(${tag.ui_id},'${'id%s' % tag.ui_id}')">
178 178 ${_('Delete')}
179 179 </span>
180 180 %endif
181 181 </div>
182 182 % endfor
183 183 % endif
184 184 </div>
185 185 % if kwargs.get('disabled') != 'disabled':
186 186 <div class="field tag_patterns">
187 187 <div class="input" >
188 188 ${h.text('new_svn_tag' + suffix, size=59, placeholder='New tag pattern')}
189 189 </div>
190 190 </div>
191 191 %endif
192 192 </div>
193 193 </div>
194 194 % else:
195 195 ${h.hidden('new_svn_branch' + suffix, '')}
196 196 ${h.hidden('new_svn_tag' + suffix, '')}
197 197 % endif
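For reference, a minimal Python sketch (not part of this changeset) of how the branch and tag pattern inputs above end up in the settings model exercised by the tests further down. The repository name and the '/tags/*' value are illustrative assumptions; only VcsSettingsModel, create_repo_svn_settings and the new_svn_branch/new_svn_tag keys come from this changeset.

from rhodecode.model.meta import Session
from rhodecode.model.settings import VcsSettingsModel

# Hypothetical repository name; the patterns follow the help text above.
model = VcsSettingsModel(repo='my-svn-repo')
model.create_repo_svn_settings({
    'new_svn_branch': '/branches/*',   # recursive branch pattern, as in the help text
    'new_svn_tag': '/tags/*',          # assumed analogous tag pattern
})
Session().commit()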
198 198
199 199
200 200 % if display_globals or repo_type in ['hg', 'git']:
201 201 <div class="panel panel-default">
202 202 <div class="panel-heading" id="vcs-pull-requests-options">
203 203 <h3 class="panel-title">${_('Pull Request Settings')}<a class="permalink" href="#vcs-pull-requests-options"> ¶</a></h3>
204 204 </div>
205 205 <div class="panel-body">
206 206 <div class="checkbox">
207 207 ${h.checkbox('rhodecode_pr_merge_enabled' + suffix, 'True', **kwargs)}
208 208 <label for="rhodecode_pr_merge_enabled${suffix}">${_('Enable server-side merge for pull requests')}</label>
209 209 </div>
210 210 <div class="label">
211 211 <span class="help-block">${_('Note: when this feature is enabled, it only runs hooks defined in the rcextension package. Custom hooks added on the Admin -> Settings -> Hooks page will not be run when pull requests are automatically merged from the web interface.')}</span>
212 212 </div>
213 %if c.rhodecode_edition_id != 'EE':
214 <div class="checkbox">
215 <input type="checkbox" id="rhodecode_auto_merge_enabled${suffix}" disabled>
216 <label for="rhodecode_auto_merge_enabled${suffix}">${_('Enable automatic merge for approved pull requests')}</label>
217 </div>
218 <div class="label">
219 <span class="help-block">${_('This feature is available in RhodeCode EE edition only. Contact {sales_email} to obtain a trial license.').format(sales_email='<a href="mailto:sales@rhodecode.com">sales@rhodecode.com</a>')|n}</span>
220 </div>
221 %else:
222 <div class="checkbox">
223 ${h.checkbox('rhodecode_auto_merge_enabled' + suffix, 'True', **kwargs)}
224 <label for="rhodecode_auto_merge_enabled${suffix}">${_('Enable automatic merge for approved pull requests')}</label>
225 </div>
226 <div class="label">
227 <span class="help-block">${_('When this is enabled, the pull request will be merged once it has at least one reviewer and is approved.')}</span>
228 </div>
229 %endif
213 230 <div class="checkbox">
214 231 ${h.checkbox('rhodecode_use_outdated_comments' + suffix, 'True', **kwargs)}
215 232 <label for="rhodecode_use_outdated_comments${suffix}">${_('Invalidate and relocate inline comments during update')}</label>
216 233 </div>
217 234 <div class="label">
218 235 <span class="help-block">${_('During the update of a pull request, the position of inline comments will be updated and outdated inline comments will be hidden.')}</span>
219 236 </div>
220 237 </div>
221 238 </div>
222 239 % endif
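The help text above describes the intended behaviour of the new auto merge option; the following is a minimal, illustrative Python sketch of how a caller might consult the rhodecode_auto_merge_enabled flag once a pull request is approved. Only VcsSettingsModel and get_general_settings() appear in this changeset; the pull_request attributes and the merge_approved_pull_request helper are hypothetical.

from rhodecode.model.settings import VcsSettingsModel

def merge_approved_pull_request(pull_request):
    # Hypothetical helper: merge automatically when the repo-level setting
    # is enabled, the PR has at least one reviewer, and it is approved.
    settings = VcsSettingsModel(repo=pull_request.target_repo.repo_name)
    general = settings.get_general_settings()
    if not general.get('rhodecode_auto_merge_enabled'):
        return False
    if pull_request.reviewers and pull_request.is_approved():
        pull_request.merge()  # hypothetical merge entry point
        return True
    return False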
223 240
224 241 % if display_globals or repo_type in ['hg', 'git', 'svn']:
225 242 <div class="panel panel-default">
226 243 <div class="panel-heading" id="vcs-pull-requests-options">
227 244 <h3 class="panel-title">${_('Diff cache')}<a class="permalink" href="#vcs-pull-requests-options"> ¶</a></h3>
228 245 </div>
229 246 <div class="panel-body">
230 247 <div class="checkbox">
231 248 ${h.checkbox('rhodecode_diff_cache' + suffix, 'True', **kwargs)}
232 249 <label for="rhodecode_diff_cache${suffix}">${_('Enable caching of diffs for pull requests and commits')}</label>
233 250 </div>
234 251 </div>
235 252 </div>
236 253 % endif
237 254
238 255 % if display_globals or repo_type in ['hg',]:
239 256 <div class="panel panel-default">
240 257 <div class="panel-heading" id="vcs-hg-pull-requests-options">
241 258 <h3 class="panel-title">${_('Mercurial Pull Request Settings')}<a class="permalink" href="#vcs-hg-pull-requests-options"> ¶</a></h3>
242 259 </div>
243 260 <div class="panel-body">
244 261 ## Specific HG settings
245 262 <div class="checkbox">
246 263 ${h.checkbox('rhodecode_hg_use_rebase_for_merging' + suffix, 'True', **kwargs)}
247 264 <label for="rhodecode_hg_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
248 265 </div>
249 266 <div class="label">
250 267 <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
251 268 </div>
252 269
253 270 <div class="checkbox">
254 271 ${h.checkbox('rhodecode_hg_close_branch_before_merging' + suffix, 'True', **kwargs)}
255 272 <label for="rhodecode_hg_close_branch_before_merging${suffix}">${_('Close branch before merging it')}</label>
256 273 </div>
257 274 <div class="label">
258 275 <span class="help-block">${_('Close branch before merging it into the destination branch. No effect when the rebase strategy is used.')}</span>
259 276 </div>
260 277
261 278
262 279 </div>
263 280 </div>
264 281 % endif
265 282
266 283 % if display_globals or repo_type in ['git']:
267 284 <div class="panel panel-default">
268 285 <div class="panel-heading" id="vcs-git-pull-requests-options">
269 286 <h3 class="panel-title">${_('Git Pull Request Settings')}<a class="permalink" href="#vcs-git-pull-requests-options"> ¶</a></h3>
270 287 </div>
271 288 <div class="panel-body">
272 289 ## <div class="checkbox">
273 290 ## ${h.checkbox('rhodecode_git_use_rebase_for_merging' + suffix, 'True', **kwargs)}
274 291 ## <label for="rhodecode_git_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
275 292 ## </div>
276 293 ## <div class="label">
277 294 ## <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
278 295 ## </div>
279 296
280 297 <div class="checkbox">
281 298 ${h.checkbox('rhodecode_git_close_branch_before_merging' + suffix, 'True', **kwargs)}
282 299 <label for="rhodecode_git_close_branch_before_merging${suffix}">${_('Delete branch after merging it')}</label>
283 300 </div>
284 301 <div class="label">
285 302 <span class="help-block">${_('Delete the branch after merging it into the destination branch.')}</span>
286 303 </div>
287 304 </div>
288 305 </div>
289 306 % endif
290 307
291 308 <script type="text/javascript">
292 309
293 310 $(document).ready(function() {
294 311 /* On click handler for the `Generate Apache Config` button. It sends a
295 312 POST request to trigger the (re)generation of the mod_dav_svn config. */
296 313 $('#vcs_svn_generate_cfg').on('click', function(event) {
297 314 event.preventDefault();
298 315 var url = "${h.route_path('admin_settings_vcs_svn_generate_cfg')}";
299 316 var jqxhr = $.post(url, {'csrf_token': CSRF_TOKEN});
300 317 jqxhr.done(function(data) {
301 318 $.Topic('/notifications').publish(data);
302 319 });
303 320 });
304 321 });
305 322
306 323 </script>
307 324 </%def>
308 325
@@ -1,1097 +1,1099
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import mock
21 21 import pytest
22 22
23 23 from rhodecode.lib.utils2 import str2bool
24 24 from rhodecode.model.meta import Session
25 25 from rhodecode.model.settings import VcsSettingsModel, UiSetting
26 26
27 27
28 28 HOOKS_FORM_DATA = {
29 29 'hooks_changegroup_repo_size': True,
30 30 'hooks_changegroup_push_logger': True,
31 31 'hooks_outgoing_pull_logger': True
32 32 }
33 33
34 34 SVN_FORM_DATA = {
35 35 'new_svn_branch': 'test-branch',
36 36 'new_svn_tag': 'test-tag'
37 37 }
38 38
39 39 GENERAL_FORM_DATA = {
40 40 'rhodecode_pr_merge_enabled': True,
41 'rhodecode_auto_merge_enabled': True,
41 42 'rhodecode_use_outdated_comments': True,
42 43 'rhodecode_hg_use_rebase_for_merging': True,
43 44 'rhodecode_hg_close_branch_before_merging': True,
44 45 'rhodecode_git_use_rebase_for_merging': True,
45 46 'rhodecode_git_close_branch_before_merging': True,
46 47 'rhodecode_diff_cache': True,
47 48 }
48 49
49 50
50 51 class TestInheritGlobalSettingsProperty(object):
51 52 def test_get_raises_exception_when_repository_not_specified(self):
52 53 model = VcsSettingsModel()
53 54 with pytest.raises(Exception) as exc_info:
54 55 model.inherit_global_settings
55 56 assert str(exc_info.value) == 'Repository is not specified'
56 57
57 58 def test_true_is_returned_when_value_is_not_found(self, repo_stub):
58 59 model = VcsSettingsModel(repo=repo_stub.repo_name)
59 60 assert model.inherit_global_settings is True
60 61
61 62 def test_value_is_returned(self, repo_stub, settings_util):
62 63 model = VcsSettingsModel(repo=repo_stub.repo_name)
63 64 settings_util.create_repo_rhodecode_setting(
64 65 repo_stub, VcsSettingsModel.INHERIT_SETTINGS, False, 'bool')
65 66 assert model.inherit_global_settings is False
66 67
67 68 def test_value_is_set(self, repo_stub):
68 69 model = VcsSettingsModel(repo=repo_stub.repo_name)
69 70 model.inherit_global_settings = False
70 71 setting = model.repo_settings.get_setting_by_name(
71 72 VcsSettingsModel.INHERIT_SETTINGS)
72 73 try:
73 74 assert setting.app_settings_type == 'bool'
74 75 assert setting.app_settings_value is False
75 76 finally:
76 77 Session().delete(setting)
77 78 Session().commit()
78 79
79 80 def test_set_raises_exception_when_repository_not_specified(self):
80 81 model = VcsSettingsModel()
81 82 with pytest.raises(Exception) as exc_info:
82 83 model.inherit_global_settings = False
83 84 assert str(exc_info.value) == 'Repository is not specified'
84 85
85 86
86 87 class TestVcsSettingsModel(object):
87 88 def test_global_svn_branch_patterns(self):
88 89 model = VcsSettingsModel()
89 90 expected_result = {'test': 'test'}
90 91 with mock.patch.object(model, 'global_settings') as settings_mock:
91 92 get_settings = settings_mock.get_ui_by_section
92 93 get_settings.return_value = expected_result
93 94 settings_mock.return_value = expected_result
94 95 result = model.get_global_svn_branch_patterns()
95 96
96 97 get_settings.assert_called_once_with(model.SVN_BRANCH_SECTION)
97 98 assert expected_result == result
98 99
99 100 def test_repo_svn_branch_patterns(self):
100 101 model = VcsSettingsModel()
101 102 expected_result = {'test': 'test'}
102 103 with mock.patch.object(model, 'repo_settings') as settings_mock:
103 104 get_settings = settings_mock.get_ui_by_section
104 105 get_settings.return_value = expected_result
105 106 settings_mock.return_value = expected_result
106 107 result = model.get_repo_svn_branch_patterns()
107 108
108 109 get_settings.assert_called_once_with(model.SVN_BRANCH_SECTION)
109 110 assert expected_result == result
110 111
111 112 def test_repo_svn_branch_patterns_raises_exception_when_repo_is_not_set(
112 113 self):
113 114 model = VcsSettingsModel()
114 115 with pytest.raises(Exception) as exc_info:
115 116 model.get_repo_svn_branch_patterns()
116 117 assert str(exc_info.value) == 'Repository is not specified'
117 118
118 119 def test_global_svn_tag_patterns(self):
119 120 model = VcsSettingsModel()
120 121 expected_result = {'test': 'test'}
121 122 with mock.patch.object(model, 'global_settings') as settings_mock:
122 123 get_settings = settings_mock.get_ui_by_section
123 124 get_settings.return_value = expected_result
124 125 settings_mock.return_value = expected_result
125 126 result = model.get_global_svn_tag_patterns()
126 127
127 128 get_settings.assert_called_once_with(model.SVN_TAG_SECTION)
128 129 assert expected_result == result
129 130
130 131 def test_repo_svn_tag_patterns(self):
131 132 model = VcsSettingsModel()
132 133 expected_result = {'test': 'test'}
133 134 with mock.patch.object(model, 'repo_settings') as settings_mock:
134 135 get_settings = settings_mock.get_ui_by_section
135 136 get_settings.return_value = expected_result
136 137 settings_mock.return_value = expected_result
137 138 result = model.get_repo_svn_tag_patterns()
138 139
139 140 get_settings.assert_called_once_with(model.SVN_TAG_SECTION)
140 141 assert expected_result == result
141 142
142 143 def test_repo_svn_tag_patterns_raises_exception_when_repo_is_not_set(self):
143 144 model = VcsSettingsModel()
144 145 with pytest.raises(Exception) as exc_info:
145 146 model.get_repo_svn_tag_patterns()
146 147 assert str(exc_info.value) == 'Repository is not specified'
147 148
148 149 def test_get_global_settings(self):
149 150 expected_result = {'test': 'test'}
150 151 model = VcsSettingsModel()
151 152 with mock.patch.object(model, '_collect_all_settings') as collect_mock:
152 153 collect_mock.return_value = expected_result
153 154 result = model.get_global_settings()
154 155
155 156 collect_mock.assert_called_once_with(global_=True)
156 157 assert result == expected_result
157 158
158 159 def test_get_repo_settings(self, repo_stub):
159 160 model = VcsSettingsModel(repo=repo_stub.repo_name)
160 161 expected_result = {'test': 'test'}
161 162 with mock.patch.object(model, '_collect_all_settings') as collect_mock:
162 163 collect_mock.return_value = expected_result
163 164 result = model.get_repo_settings()
164 165
165 166 collect_mock.assert_called_once_with(global_=False)
166 167 assert result == expected_result
167 168
168 169 @pytest.mark.parametrize('settings, global_', [
169 170 ('global_settings', True),
170 171 ('repo_settings', False)
171 172 ])
172 173 def test_collect_all_settings(self, settings, global_):
173 174 model = VcsSettingsModel()
174 175 result_mock = self._mock_result()
175 176
176 177 settings_patch = mock.patch.object(model, settings)
177 178 with settings_patch as settings_mock:
178 179 settings_mock.get_ui_by_section_and_key.return_value = result_mock
179 180 settings_mock.get_setting_by_name.return_value = result_mock
180 181 result = model._collect_all_settings(global_=global_)
181 182
182 183 ui_settings = model.HG_SETTINGS + model.GIT_SETTINGS + model.HOOKS_SETTINGS
183 184 self._assert_get_settings_calls(
184 185 settings_mock, ui_settings, model.GENERAL_SETTINGS)
185 186 self._assert_collect_all_settings_result(
186 187 ui_settings, model.GENERAL_SETTINGS, result)
187 188
188 189 @pytest.mark.parametrize('settings, global_', [
189 190 ('global_settings', True),
190 191 ('repo_settings', False)
191 192 ])
192 193 def test_collect_all_settings_without_empty_value(self, settings, global_):
193 194 model = VcsSettingsModel()
194 195
195 196 settings_patch = mock.patch.object(model, settings)
196 197 with settings_patch as settings_mock:
197 198 settings_mock.get_ui_by_section_and_key.return_value = None
198 199 settings_mock.get_setting_by_name.return_value = None
199 200 result = model._collect_all_settings(global_=global_)
200 201
201 202 assert result == {}
202 203
203 204 def _mock_result(self):
204 205 result_mock = mock.Mock()
205 206 result_mock.ui_value = 'ui_value'
206 207 result_mock.ui_active = True
207 208 result_mock.app_settings_value = 'setting_value'
208 209 return result_mock
209 210
210 211 def _assert_get_settings_calls(
211 212 self, settings_mock, ui_settings, general_settings):
212 213 assert (
213 214 settings_mock.get_ui_by_section_and_key.call_count ==
214 215 len(ui_settings))
215 216 assert (
216 217 settings_mock.get_setting_by_name.call_count ==
217 218 len(general_settings))
218 219
219 220 for section, key in ui_settings:
220 221 expected_call = mock.call(section, key)
221 222 assert (
222 223 expected_call in
223 224 settings_mock.get_ui_by_section_and_key.call_args_list)
224 225
225 226 for name in general_settings:
226 227 expected_call = mock.call(name)
227 228 assert (
228 229 expected_call in
229 230 settings_mock.get_setting_by_name.call_args_list)
230 231
231 232 def _assert_collect_all_settings_result(
232 233 self, ui_settings, general_settings, result):
233 234 expected_result = {}
234 235 for section, key in ui_settings:
235 236 key = '{}_{}'.format(section, key.replace('.', '_'))
236 237
237 238 if section in ('extensions', 'hooks'):
238 239 value = True
239 240 elif key in ['vcs_git_lfs_enabled']:
240 241 value = True
241 242 else:
242 243 value = 'ui_value'
243 244 expected_result[key] = value
244 245
245 246 for name in general_settings:
246 247 key = 'rhodecode_' + name
247 248 expected_result[key] = 'setting_value'
248 249
249 250 assert expected_result == result
250 251
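As a concrete illustration of the key naming convention used in the helper above (the example values are chosen by the editor, not taken from the changeset):

# ui settings become '<section>_<key>' with dots replaced by underscores
assert '{}_{}'.format('vcs_git_lfs', 'enabled'.replace('.', '_')) == 'vcs_git_lfs_enabled'
# general settings are prefixed with 'rhodecode_'
assert 'rhodecode_' + 'auto_merge_enabled' == 'rhodecode_auto_merge_enabled'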
251 252
252 253 class TestCreateOrUpdateRepoHookSettings(object):
253 254 def test_create_when_no_repo_object_found(self, repo_stub):
254 255 model = VcsSettingsModel(repo=repo_stub.repo_name)
255 256
256 257 self._create_settings(model, HOOKS_FORM_DATA)
257 258
258 259 cleanup = []
259 260 try:
260 261 for section, key in model.HOOKS_SETTINGS:
261 262 ui = model.repo_settings.get_ui_by_section_and_key(
262 263 section, key)
263 264 assert ui.ui_active is True
264 265 cleanup.append(ui)
265 266 finally:
266 267 for ui in cleanup:
267 268 Session().delete(ui)
268 269 Session().commit()
269 270
270 271 def test_create_raises_exception_when_data_incomplete(self, repo_stub):
271 272 model = VcsSettingsModel(repo=repo_stub.repo_name)
272 273
273 274 deleted_key = 'hooks_changegroup_repo_size'
274 275 data = HOOKS_FORM_DATA.copy()
275 276 data.pop(deleted_key)
276 277
277 278 with pytest.raises(ValueError) as exc_info:
278 279 model.create_or_update_repo_hook_settings(data)
279 280 Session().commit()
280 281
281 282 msg = 'The given data does not contain {} key'.format(deleted_key)
282 283 assert str(exc_info.value) == msg
283 284
284 285 def test_update_when_repo_object_found(self, repo_stub, settings_util):
285 286 model = VcsSettingsModel(repo=repo_stub.repo_name)
286 287 for section, key in model.HOOKS_SETTINGS:
287 288 settings_util.create_repo_rhodecode_ui(
288 289 repo_stub, section, None, key=key, active=False)
289 290 model.create_or_update_repo_hook_settings(HOOKS_FORM_DATA)
290 291 Session().commit()
291 292
292 293 for section, key in model.HOOKS_SETTINGS:
293 294 ui = model.repo_settings.get_ui_by_section_and_key(section, key)
294 295 assert ui.ui_active is True
295 296
296 297 def _create_settings(self, model, data):
297 298 global_patch = mock.patch.object(model, 'global_settings')
298 299 global_setting = mock.Mock()
299 300 global_setting.ui_value = 'Test value'
300 301 with global_patch as global_mock:
301 302 global_mock.get_ui_by_section_and_key.return_value = global_setting
302 303 model.create_or_update_repo_hook_settings(HOOKS_FORM_DATA)
303 304 Session().commit()
304 305
305 306
306 307 class TestUpdateGlobalHookSettings(object):
307 308 def test_update_raises_exception_when_data_incomplete(self):
308 309 model = VcsSettingsModel()
309 310
310 311 deleted_key = 'hooks_changegroup_repo_size'
311 312 data = HOOKS_FORM_DATA.copy()
312 313 data.pop(deleted_key)
313 314
314 315 with pytest.raises(ValueError) as exc_info:
315 316 model.update_global_hook_settings(data)
316 317 Session().commit()
317 318
318 319 msg = 'The given data does not contain {} key'.format(deleted_key)
319 320 assert str(exc_info.value) == msg
320 321
321 322 def test_update_global_hook_settings(self, settings_util):
322 323 model = VcsSettingsModel()
323 324 setting_mock = mock.MagicMock()
324 325 setting_mock.ui_active = False
325 326 get_settings_patcher = mock.patch.object(
326 327 model.global_settings, 'get_ui_by_section_and_key',
327 328 return_value=setting_mock)
328 329 session_patcher = mock.patch('rhodecode.model.settings.Session')
329 330 with get_settings_patcher as get_settings_mock, session_patcher:
330 331 model.update_global_hook_settings(HOOKS_FORM_DATA)
331 332 Session().commit()
332 333
333 334 assert setting_mock.ui_active is True
334 335 assert get_settings_mock.call_count == 3
335 336
336 337
337 338 class TestCreateOrUpdateRepoGeneralSettings(object):
338 339 def test_calls_create_or_update_general_settings(self, repo_stub):
339 340 model = VcsSettingsModel(repo=repo_stub.repo_name)
340 341 create_patch = mock.patch.object(
341 342 model, '_create_or_update_general_settings')
342 343 with create_patch as create_mock:
343 344 model.create_or_update_repo_pr_settings(GENERAL_FORM_DATA)
344 345 Session().commit()
345 346
346 347 create_mock.assert_called_once_with(
347 348 model.repo_settings, GENERAL_FORM_DATA)
348 349
349 350 def test_raises_exception_when_repository_is_not_specified(self):
350 351 model = VcsSettingsModel()
351 352 with pytest.raises(Exception) as exc_info:
352 353 model.create_or_update_repo_pr_settings(GENERAL_FORM_DATA)
353 354 assert str(exc_info.value) == 'Repository is not specified'
354 355
355 356
356 357 class TestCreateOrUpdateGlobalGeneralSettings(object):
357 358 def test_calls_create_or_update_general_settings(self):
358 359 model = VcsSettingsModel()
359 360 create_patch = mock.patch.object(
360 361 model, '_create_or_update_general_settings')
361 362 with create_patch as create_mock:
362 363 model.create_or_update_global_pr_settings(GENERAL_FORM_DATA)
363 364 create_mock.assert_called_once_with(
364 365 model.global_settings, GENERAL_FORM_DATA)
365 366
366 367
367 368 class TestCreateOrUpdateGeneralSettings(object):
368 369 def test_create_when_no_repo_settings_found(self, repo_stub):
369 370 model = VcsSettingsModel(repo=repo_stub.repo_name)
370 371 model._create_or_update_general_settings(
371 372 model.repo_settings, GENERAL_FORM_DATA)
372 373
373 374 cleanup = []
374 375 try:
375 376 for name in model.GENERAL_SETTINGS:
376 377 setting = model.repo_settings.get_setting_by_name(name)
377 378 assert setting.app_settings_value is True
378 379 cleanup.append(setting)
379 380 finally:
380 381 for setting in cleanup:
381 382 Session().delete(setting)
382 383 Session().commit()
383 384
384 385 def test_create_raises_exception_when_data_incomplete(self, repo_stub):
385 386 model = VcsSettingsModel(repo=repo_stub.repo_name)
386 387
387 388 deleted_key = 'rhodecode_pr_merge_enabled'
388 389 data = GENERAL_FORM_DATA.copy()
389 390 data.pop(deleted_key)
390 391
391 392 with pytest.raises(ValueError) as exc_info:
392 393 model._create_or_update_general_settings(model.repo_settings, data)
393 394 Session().commit()
394 395
395 396 msg = 'The given data does not contain {} key'.format(deleted_key)
396 397 assert str(exc_info.value) == msg
397 398
398 399 def test_update_when_repo_setting_found(self, repo_stub, settings_util):
399 400 model = VcsSettingsModel(repo=repo_stub.repo_name)
400 401 for name in model.GENERAL_SETTINGS:
401 402 settings_util.create_repo_rhodecode_setting(
402 403 repo_stub, name, False, 'bool')
403 404
404 405 model._create_or_update_general_settings(
405 406 model.repo_settings, GENERAL_FORM_DATA)
406 407 Session().commit()
407 408
408 409 for name in model.GENERAL_SETTINGS:
409 410 setting = model.repo_settings.get_setting_by_name(name)
410 411 assert setting.app_settings_value is True
411 412
412 413
413 414 class TestCreateRepoSvnSettings(object):
414 415 def test_calls_create_svn_settings(self, repo_stub):
415 416 model = VcsSettingsModel(repo=repo_stub.repo_name)
416 417 with mock.patch.object(model, '_create_svn_settings') as create_mock:
417 418 model.create_repo_svn_settings(SVN_FORM_DATA)
418 419 Session().commit()
419 420
420 421 create_mock.assert_called_once_with(model.repo_settings, SVN_FORM_DATA)
421 422
422 423 def test_raises_exception_when_repository_is_not_specified(self):
423 424 model = VcsSettingsModel()
424 425 with pytest.raises(Exception) as exc_info:
425 426 model.create_repo_svn_settings(SVN_FORM_DATA)
426 427 Session().commit()
427 428
428 429 assert str(exc_info.value) == 'Repository is not specified'
429 430
430 431
431 432 class TestCreateSvnSettings(object):
432 433 def test_create(self, repo_stub):
433 434 model = VcsSettingsModel(repo=repo_stub.repo_name)
434 435 model._create_svn_settings(model.repo_settings, SVN_FORM_DATA)
435 436 Session().commit()
436 437
437 438 branch_ui = model.repo_settings.get_ui_by_section(
438 439 model.SVN_BRANCH_SECTION)
439 440 tag_ui = model.repo_settings.get_ui_by_section(
440 441 model.SVN_TAG_SECTION)
441 442
442 443 try:
443 444 assert len(branch_ui) == 1
444 445 assert len(tag_ui) == 1
445 446 finally:
446 447 Session().delete(branch_ui[0])
447 448 Session().delete(tag_ui[0])
448 449 Session().commit()
449 450
450 451 def test_create_tag(self, repo_stub):
451 452 model = VcsSettingsModel(repo=repo_stub.repo_name)
452 453 data = SVN_FORM_DATA.copy()
453 454 data.pop('new_svn_branch')
454 455 model._create_svn_settings(model.repo_settings, data)
455 456 Session().commit()
456 457
457 458 branch_ui = model.repo_settings.get_ui_by_section(
458 459 model.SVN_BRANCH_SECTION)
459 460 tag_ui = model.repo_settings.get_ui_by_section(
460 461 model.SVN_TAG_SECTION)
461 462
462 463 try:
463 464 assert len(branch_ui) == 0
464 465 assert len(tag_ui) == 1
465 466 finally:
466 467 Session().delete(tag_ui[0])
467 468 Session().commit()
468 469
469 470 def test_create_nothing_when_no_svn_settings_specified(self, repo_stub):
470 471 model = VcsSettingsModel(repo=repo_stub.repo_name)
471 472 model._create_svn_settings(model.repo_settings, {})
472 473 Session().commit()
473 474
474 475 branch_ui = model.repo_settings.get_ui_by_section(
475 476 model.SVN_BRANCH_SECTION)
476 477 tag_ui = model.repo_settings.get_ui_by_section(
477 478 model.SVN_TAG_SECTION)
478 479
479 480 assert len(branch_ui) == 0
480 481 assert len(tag_ui) == 0
481 482
482 483 def test_create_nothing_when_empty_settings_specified(self, repo_stub):
483 484 model = VcsSettingsModel(repo=repo_stub.repo_name)
484 485 data = {
485 486 'new_svn_branch': '',
486 487 'new_svn_tag': ''
487 488 }
488 489 model._create_svn_settings(model.repo_settings, data)
489 490 Session().commit()
490 491
491 492 branch_ui = model.repo_settings.get_ui_by_section(
492 493 model.SVN_BRANCH_SECTION)
493 494 tag_ui = model.repo_settings.get_ui_by_section(
494 495 model.SVN_TAG_SECTION)
495 496
496 497 assert len(branch_ui) == 0
497 498 assert len(tag_ui) == 0
498 499
499 500
500 501 class TestCreateOrUpdateUi(object):
501 502 def test_create(self, repo_stub):
502 503 model = VcsSettingsModel(repo=repo_stub.repo_name)
503 504 model._create_or_update_ui(
504 505 model.repo_settings, 'test-section', 'test-key', active=False,
505 506 value='False')
506 507 Session().commit()
507 508
508 509 created_ui = model.repo_settings.get_ui_by_section_and_key(
509 510 'test-section', 'test-key')
510 511
511 512 try:
512 513 assert created_ui.ui_active is False
513 514 assert str2bool(created_ui.ui_value) is False
514 515 finally:
515 516 Session().delete(created_ui)
516 517 Session().commit()
517 518
518 519 def test_update(self, repo_stub, settings_util):
519 520 model = VcsSettingsModel(repo=repo_stub.repo_name)
520 521 # we only care about the first 3 settings here
521 522 largefiles, phases, evolve = model.HG_SETTINGS[:3]
522 523
523 524 section = 'test-section'
524 525 key = 'test-key'
525 526 settings_util.create_repo_rhodecode_ui(
526 527 repo_stub, section, 'True', key=key, active=True)
527 528
528 529 model._create_or_update_ui(
529 530 model.repo_settings, section, key, active=False, value='False')
530 531 Session().commit()
531 532
532 533 created_ui = model.repo_settings.get_ui_by_section_and_key(
533 534 section, key)
534 535 assert created_ui.ui_active is False
535 536 assert str2bool(created_ui.ui_value) is False
536 537
537 538
538 539 class TestCreateOrUpdateRepoHgSettings(object):
539 540 FORM_DATA = {
540 541 'extensions_largefiles': False,
541 542 'extensions_evolve': False,
542 543 'phases_publish': False
543 544 }
544 545
545 546 def test_creates_repo_hg_settings_when_data_is_correct(self, repo_stub):
546 547 model = VcsSettingsModel(repo=repo_stub.repo_name)
547 548 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
548 549 model.create_or_update_repo_hg_settings(self.FORM_DATA)
549 550 expected_calls = [
550 551 mock.call(model.repo_settings, 'extensions', 'largefiles', active=False, value=''),
551 552 mock.call(model.repo_settings, 'extensions', 'evolve', active=False, value=''),
552 553 mock.call(model.repo_settings, 'experimental', 'evolution', active=False, value=''),
553 554 mock.call(model.repo_settings, 'experimental', 'evolution.exchange', active=False, value='no'),
554 555 mock.call(model.repo_settings, 'extensions', 'topic', active=False, value=''),
555 556 mock.call(model.repo_settings, 'phases', 'publish', value='False'),
556 557 ]
557 558 assert expected_calls == create_mock.call_args_list
558 559
559 560 @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys())
560 561 def test_key_is_not_found(self, repo_stub, field_to_remove):
561 562 model = VcsSettingsModel(repo=repo_stub.repo_name)
562 563 data = self.FORM_DATA.copy()
563 564 data.pop(field_to_remove)
564 565 with pytest.raises(ValueError) as exc_info:
565 566 model.create_or_update_repo_hg_settings(data)
566 567 Session().commit()
567 568
568 569 expected_message = 'The given data does not contain {} key'.format(
569 570 field_to_remove)
570 571 assert str(exc_info.value) == expected_message
571 572
572 573 def test_create_raises_exception_when_repository_not_specified(self):
573 574 model = VcsSettingsModel()
574 575 with pytest.raises(Exception) as exc_info:
575 576 model.create_or_update_repo_hg_settings(self.FORM_DATA)
576 577 Session().commit()
577 578
578 579 assert str(exc_info.value) == 'Repository is not specified'
579 580
580 581
581 582 class TestCreateOrUpdateGlobalHgSettings(object):
582 583 FORM_DATA = {
583 584 'extensions_largefiles': False,
584 585 'phases_publish': False,
585 586 'extensions_evolve': False
586 587 }
587 588
588 589 def test_creates_repo_hg_settings_when_data_is_correct(self):
589 590 model = VcsSettingsModel()
590 591 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
591 592 model.create_or_update_global_hg_settings(self.FORM_DATA)
592 593 Session().commit()
593 594
594 595 expected_calls = [
595 596 mock.call(model.global_settings, 'extensions', 'largefiles', active=False, value=''),
596 597 mock.call(model.global_settings, 'phases', 'publish', value='False'),
597 598 mock.call(model.global_settings, 'extensions', 'evolve', active=False, value=''),
598 599 mock.call(model.global_settings, 'experimental', 'evolution', active=False, value=''),
599 600 mock.call(model.global_settings, 'experimental', 'evolution.exchange', active=False, value='no'),
600 601 mock.call(model.global_settings, 'extensions', 'topic', active=False, value=''),
601 602 ]
602 603
603 604 assert expected_calls == create_mock.call_args_list
604 605
605 606 @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys())
606 607 def test_key_is_not_found(self, repo_stub, field_to_remove):
607 608 model = VcsSettingsModel(repo=repo_stub.repo_name)
608 609 data = self.FORM_DATA.copy()
609 610 data.pop(field_to_remove)
610 611 with pytest.raises(Exception) as exc_info:
611 612 model.create_or_update_global_hg_settings(data)
612 613 Session().commit()
613 614
614 615 expected_message = 'The given data does not contain {} key'.format(
615 616 field_to_remove)
616 617 assert str(exc_info.value) == expected_message
617 618
618 619
619 620 class TestCreateOrUpdateGlobalGitSettings(object):
620 621 FORM_DATA = {
621 622 'vcs_git_lfs_enabled': False,
622 623 }
623 624
624 625 def test_creates_repo_hg_settings_when_data_is_correct(self):
625 626 model = VcsSettingsModel()
626 627 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
627 628 model.create_or_update_global_git_settings(self.FORM_DATA)
628 629 Session().commit()
629 630
630 631 expected_calls = [
631 632 mock.call(model.global_settings, 'vcs_git_lfs', 'enabled', active=False, value=False),
632 633 ]
633 634 assert expected_calls == create_mock.call_args_list
634 635
635 636
636 637 class TestDeleteRepoSvnPattern(object):
637 638 def test_success_when_repo_is_set(self, backend_svn, settings_util):
638 639 repo = backend_svn.create_repo()
639 640 repo_name = repo.repo_name
640 641
641 642 model = VcsSettingsModel(repo=repo_name)
642 643 entry = settings_util.create_repo_rhodecode_ui(
643 644 repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'svn-branch')
644 645 Session().commit()
645 646
646 647 model.delete_repo_svn_pattern(entry.ui_id)
647 648
648 649 def test_fail_when_delete_id_from_other_repo(self, backend_svn):
649 650 repo_name = backend_svn.repo_name
650 651 model = VcsSettingsModel(repo=repo_name)
651 652 delete_ui_patch = mock.patch.object(model.repo_settings, 'delete_ui')
652 653 with delete_ui_patch as delete_ui_mock:
653 654 model.delete_repo_svn_pattern(123)
654 655 Session().commit()
655 656
656 657 delete_ui_mock.assert_called_once_with(-1)
657 658
658 659 def test_raises_exception_when_repository_is_not_specified(self):
659 660 model = VcsSettingsModel()
660 661 with pytest.raises(Exception) as exc_info:
661 662 model.delete_repo_svn_pattern(123)
662 663 assert str(exc_info.value) == 'Repository is not specified'
663 664
664 665
665 666 class TestDeleteGlobalSvnPattern(object):
666 667 def test_delete_global_svn_pattern_calls_delete_ui(self):
667 668 model = VcsSettingsModel()
668 669 delete_ui_patch = mock.patch.object(model.global_settings, 'delete_ui')
669 670 with delete_ui_patch as delete_ui_mock:
670 671 model.delete_global_svn_pattern(123)
671 672 delete_ui_mock.assert_called_once_with(123)
672 673
673 674
674 675 class TestFilterUiSettings(object):
675 676 def test_settings_are_filtered(self):
676 677 model = VcsSettingsModel()
677 678 repo_settings = [
678 679 UiSetting('extensions', 'largefiles', '', True),
679 680 UiSetting('phases', 'publish', 'True', True),
680 681 UiSetting('hooks', 'changegroup.repo_size', 'hook', True),
681 682 UiSetting('hooks', 'changegroup.push_logger', 'hook', True),
682 683 UiSetting('hooks', 'outgoing.pull_logger', 'hook', True),
683 684 UiSetting(
684 685 'vcs_svn_branch', '84223c972204fa545ca1b22dac7bef5b68d7442d',
685 686 'test_branch', True),
686 687 UiSetting(
687 688 'vcs_svn_tag', '84229c972204fa545ca1b22dac7bef5b68d7442d',
688 689 'test_tag', True),
689 690 ]
690 691 non_repo_settings = [
691 692 UiSetting('largefiles', 'usercache', '/example/largefiles-store', True),
692 693 UiSetting('test', 'outgoing.pull_logger', 'hook', True),
693 694 UiSetting('hooks', 'test2', 'hook', True),
694 695 UiSetting(
695 696 'vcs_svn_repo', '84229c972204fa545ca1b22dac7bef5b68d7442d',
696 697 'test_tag', True),
697 698 ]
698 699 settings = repo_settings + non_repo_settings
699 700 filtered_settings = model._filter_ui_settings(settings)
700 701 assert sorted(filtered_settings) == sorted(repo_settings)
701 702
702 703
703 704 class TestFilterGeneralSettings(object):
704 705 def test_settings_are_filtered(self):
705 706 model = VcsSettingsModel()
706 707 settings = {
707 708 'rhodecode_abcde': 'value1',
708 709 'rhodecode_vwxyz': 'value2',
709 710 }
710 711 general_settings = {
711 712 'rhodecode_{}'.format(key): 'value'
712 713 for key in VcsSettingsModel.GENERAL_SETTINGS
713 714 }
714 715 settings.update(general_settings)
715 716
716 717 filtered_settings = model._filter_general_settings(settings)
717 718 assert sorted(filtered_settings) == sorted(general_settings)
718 719
719 720
720 721 class TestGetRepoUiSettings(object):
721 722 def test_global_uis_are_returned_when_no_repo_uis_found(
722 723 self, repo_stub):
723 724 model = VcsSettingsModel(repo=repo_stub.repo_name)
724 725 result = model.get_repo_ui_settings()
725 726 svn_sections = (
726 727 VcsSettingsModel.SVN_TAG_SECTION,
727 728 VcsSettingsModel.SVN_BRANCH_SECTION)
728 729 expected_result = [
729 730 s for s in model.global_settings.get_ui()
730 731 if s.section not in svn_sections]
731 732 assert sorted(result) == sorted(expected_result)
732 733
733 734 def test_repo_uis_are_overriding_global_uis(
734 735 self, repo_stub, settings_util):
735 736 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
736 737 settings_util.create_repo_rhodecode_ui(
737 738 repo_stub, section, 'repo', key=key, active=False)
738 739 model = VcsSettingsModel(repo=repo_stub.repo_name)
739 740 result = model.get_repo_ui_settings()
740 741 for setting in result:
741 742 locator = (setting.section, setting.key)
742 743 if locator in VcsSettingsModel.HOOKS_SETTINGS:
743 744 assert setting.value == 'repo'
744 745
745 746 assert setting.active is False
746 747
747 748 def test_global_svn_patterns_are_not_in_list(
748 749 self, repo_stub, settings_util):
749 750 svn_sections = (
750 751 VcsSettingsModel.SVN_TAG_SECTION,
751 752 VcsSettingsModel.SVN_BRANCH_SECTION)
752 753 for section in svn_sections:
753 754 settings_util.create_rhodecode_ui(
754 755 section, 'repo', key='deadbeef' + section, active=False)
755 756 Session().commit()
756 757
757 758 model = VcsSettingsModel(repo=repo_stub.repo_name)
758 759 result = model.get_repo_ui_settings()
759 760 for setting in result:
760 761 assert setting.section not in svn_sections
761 762
762 763 def test_repo_uis_filtered_by_section_are_returned(
763 764 self, repo_stub, settings_util):
764 765 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
765 766 settings_util.create_repo_rhodecode_ui(
766 767 repo_stub, section, 'repo', key=key, active=False)
767 768 model = VcsSettingsModel(repo=repo_stub.repo_name)
768 769 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
769 770 result = model.get_repo_ui_settings(section=section)
770 771 for setting in result:
771 772 assert setting.section == section
772 773
773 774 def test_repo_uis_filtered_by_key_are_returned(
774 775 self, repo_stub, settings_util):
775 776 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
776 777 settings_util.create_repo_rhodecode_ui(
777 778 repo_stub, section, 'repo', key=key, active=False)
778 779 model = VcsSettingsModel(repo=repo_stub.repo_name)
779 780 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
780 781 result = model.get_repo_ui_settings(key=key)
781 782 for setting in result:
782 783 assert setting.key == key
783 784
784 785 def test_raises_exception_when_repository_is_not_specified(self):
785 786 model = VcsSettingsModel()
786 787 with pytest.raises(Exception) as exc_info:
787 788 model.get_repo_ui_settings()
788 789 assert str(exc_info.value) == 'Repository is not specified'
789 790
790 791
791 792 class TestGetRepoGeneralSettings(object):
792 793 def test_global_settings_are_returned_when_no_repo_settings_found(
793 794 self, repo_stub):
794 795 model = VcsSettingsModel(repo=repo_stub.repo_name)
795 796 result = model.get_repo_general_settings()
796 797 expected_result = model.global_settings.get_all_settings()
797 798 assert sorted(result) == sorted(expected_result)
798 799
799 800 def test_repo_uis_are_overriding_global_uis(
800 801 self, repo_stub, settings_util):
801 802 for key in VcsSettingsModel.GENERAL_SETTINGS:
802 803 settings_util.create_repo_rhodecode_setting(
803 804 repo_stub, key, 'abcde', type_='unicode')
804 805 Session().commit()
805 806
806 807 model = VcsSettingsModel(repo=repo_stub.repo_name)
807 808 result = model.get_repo_ui_settings()
808 809 for key in result:
809 810 if key in VcsSettingsModel.GENERAL_SETTINGS:
810 811 assert result[key] == 'abcde'
811 812
812 813 def test_raises_exception_when_repository_is_not_specified(self):
813 814 model = VcsSettingsModel()
814 815 with pytest.raises(Exception) as exc_info:
815 816 model.get_repo_general_settings()
816 817 assert str(exc_info.value) == 'Repository is not specified'
817 818
818 819
819 820 class TestGetGlobalGeneralSettings(object):
820 821 def test_global_settings_are_returned(self, repo_stub):
821 822 model = VcsSettingsModel()
822 823 result = model.get_global_general_settings()
823 824 expected_result = model.global_settings.get_all_settings()
824 825 assert sorted(result) == sorted(expected_result)
825 826
826 827 def test_repo_uis_are_not_overriding_global_uis(
827 828 self, repo_stub, settings_util):
828 829 for key in VcsSettingsModel.GENERAL_SETTINGS:
829 830 settings_util.create_repo_rhodecode_setting(
830 831 repo_stub, key, 'abcde', type_='unicode')
831 832 Session().commit()
832 833
833 834 model = VcsSettingsModel(repo=repo_stub.repo_name)
834 835 result = model.get_global_general_settings()
835 836 expected_result = model.global_settings.get_all_settings()
836 837 assert sorted(result) == sorted(expected_result)
837 838
838 839
839 840 class TestGetGlobalUiSettings(object):
840 841 def test_global_uis_are_returned(self, repo_stub):
841 842 model = VcsSettingsModel()
842 843 result = model.get_global_ui_settings()
843 844 expected_result = model.global_settings.get_ui()
844 845 assert sorted(result) == sorted(expected_result)
845 846
846 847 def test_repo_uis_are_not_overriding_global_uis(
847 848 self, repo_stub, settings_util):
848 849 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
849 850 settings_util.create_repo_rhodecode_ui(
850 851 repo_stub, section, 'repo', key=key, active=False)
851 852 Session().commit()
852 853
853 854 model = VcsSettingsModel(repo=repo_stub.repo_name)
854 855 result = model.get_global_ui_settings()
855 856 expected_result = model.global_settings.get_ui()
856 857 assert sorted(result) == sorted(expected_result)
857 858
858 859 def test_ui_settings_filtered_by_section(
859 860 self, repo_stub, settings_util):
860 861 model = VcsSettingsModel(repo=repo_stub.repo_name)
861 862 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
862 863 result = model.get_global_ui_settings(section=section)
863 864 expected_result = model.global_settings.get_ui(section=section)
864 865 assert sorted(result) == sorted(expected_result)
865 866
866 867 def test_ui_settings_filtered_by_key(
867 868 self, repo_stub, settings_util):
868 869 model = VcsSettingsModel(repo=repo_stub.repo_name)
869 870 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
870 871 result = model.get_global_ui_settings(key=key)
871 872 expected_result = model.global_settings.get_ui(key=key)
872 873 assert sorted(result) == sorted(expected_result)
873 874
874 875
875 876 class TestGetGeneralSettings(object):
876 877 def test_global_settings_are_returned_when_inherited_is_true(
877 878 self, repo_stub, settings_util):
878 879 model = VcsSettingsModel(repo=repo_stub.repo_name)
879 880 model.inherit_global_settings = True
880 881 for key in VcsSettingsModel.GENERAL_SETTINGS:
881 882 settings_util.create_repo_rhodecode_setting(
882 883 repo_stub, key, 'abcde', type_='unicode')
883 884 Session().commit()
884 885
885 886 result = model.get_general_settings()
886 887 expected_result = model.get_global_general_settings()
887 888 assert sorted(result) == sorted(expected_result)
888 889
889 890 def test_repo_settings_are_returned_when_inherited_is_false(
890 891 self, repo_stub, settings_util):
891 892 model = VcsSettingsModel(repo=repo_stub.repo_name)
892 893 model.inherit_global_settings = False
893 894 for key in VcsSettingsModel.GENERAL_SETTINGS:
894 895 settings_util.create_repo_rhodecode_setting(
895 896 repo_stub, key, 'abcde', type_='unicode')
896 897 Session().commit()
897 898
898 899 result = model.get_general_settings()
899 900 expected_result = model.get_repo_general_settings()
900 901 assert sorted(result) == sorted(expected_result)
901 902
902 903 def test_global_settings_are_returned_when_no_repository_specified(self):
903 904 model = VcsSettingsModel()
904 905 result = model.get_general_settings()
905 906 expected_result = model.get_global_general_settings()
906 907 assert sorted(result) == sorted(expected_result)
907 908
908 909
909 910 class TestGetUiSettings(object):
910 911 def test_global_settings_are_returned_when_inherited_is_true(
911 912 self, repo_stub, settings_util):
912 913 model = VcsSettingsModel(repo=repo_stub.repo_name)
913 914 model.inherit_global_settings = True
914 915 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
915 916 settings_util.create_repo_rhodecode_ui(
916 917 repo_stub, section, 'repo', key=key, active=True)
917 918 Session().commit()
918 919
919 920 result = model.get_ui_settings()
920 921 expected_result = model.get_global_ui_settings()
921 922 assert sorted(result) == sorted(expected_result)
922 923
923 924 def test_repo_settings_are_returned_when_inherited_is_false(
924 925 self, repo_stub, settings_util):
925 926 model = VcsSettingsModel(repo=repo_stub.repo_name)
926 927 model.inherit_global_settings = False
927 928 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
928 929 settings_util.create_repo_rhodecode_ui(
929 930 repo_stub, section, 'repo', key=key, active=True)
930 931 Session().commit()
931 932
932 933 result = model.get_ui_settings()
933 934 expected_result = model.get_repo_ui_settings()
934 935 assert sorted(result) == sorted(expected_result)
935 936
936 937 def test_repo_settings_filtered_by_section_and_key(self, repo_stub):
937 938 model = VcsSettingsModel(repo=repo_stub.repo_name)
938 939 model.inherit_global_settings = False
939 940
940 941 args = ('section', 'key')
941 942 with mock.patch.object(model, 'get_repo_ui_settings') as settings_mock:
942 943 model.get_ui_settings(*args)
943 944 Session().commit()
944 945
945 946 settings_mock.assert_called_once_with(*args)
946 947
947 948 def test_global_settings_filtered_by_section_and_key(self):
948 949 model = VcsSettingsModel()
949 950 args = ('section', 'key')
950 951 with mock.patch.object(model, 'get_global_ui_settings') as (
951 952 settings_mock):
952 953 model.get_ui_settings(*args)
953 954 settings_mock.assert_called_once_with(*args)
954 955
955 956 def test_global_settings_are_returned_when_no_repository_specified(self):
956 957 model = VcsSettingsModel()
957 958 result = model.get_ui_settings()
958 959 expected_result = model.get_global_ui_settings()
959 960 assert sorted(result) == sorted(expected_result)
960 961
961 962
962 963 class TestGetSvnPatterns(object):
963 964 def test_repo_settings_filtered_by_section_and_key(self, repo_stub):
964 965 model = VcsSettingsModel(repo=repo_stub.repo_name)
965 966 args = ('section', )
966 967 with mock.patch.object(model, 'get_repo_ui_settings') as settings_mock:
967 968 model.get_svn_patterns(*args)
968 969
969 970 Session().commit()
970 971 settings_mock.assert_called_once_with(*args)
971 972
972 973 def test_global_settings_filtered_by_section_and_key(self):
973 974 model = VcsSettingsModel()
974 975 args = ('section', )
975 976 with mock.patch.object(model, 'get_global_ui_settings') as (
976 977 settings_mock):
977 978 model.get_svn_patterns(*args)
978 979 settings_mock.assert_called_once_with(*args)
979 980
980 981
981 982 class TestCreateOrUpdateRepoSettings(object):
982 983 FORM_DATA = {
983 984 'inherit_global_settings': False,
984 985 'hooks_changegroup_repo_size': False,
985 986 'hooks_changegroup_push_logger': False,
986 987 'hooks_outgoing_pull_logger': False,
987 988 'extensions_largefiles': False,
988 989 'extensions_evolve': False,
989 990 'vcs_git_lfs_enabled': False,
990 991 'phases_publish': 'False',
991 992 'rhodecode_pr_merge_enabled': False,
993 'rhodecode_auto_merge_enabled': False,
992 994 'rhodecode_use_outdated_comments': False,
993 995 'new_svn_branch': '',
994 996 'new_svn_tag': ''
995 997 }
996 998
997 999 def test_get_raises_exception_when_repository_not_specified(self):
998 1000 model = VcsSettingsModel()
999 1001 with pytest.raises(Exception) as exc_info:
1000 1002 model.create_or_update_repo_settings(data=self.FORM_DATA)
1001 1003 Session().commit()
1002 1004
1003 1005 assert str(exc_info.value) == 'Repository is not specified'
1004 1006
1005 1007 def test_only_svn_settings_are_updated_when_type_is_svn(self, backend_svn):
1006 1008 repo = backend_svn.create_repo()
1007 1009 model = VcsSettingsModel(repo=repo)
1008 1010 with self._patch_model(model) as mocks:
1009 1011 model.create_or_update_repo_settings(
1010 1012 data=self.FORM_DATA, inherit_global_settings=False)
1011 1013 Session().commit()
1012 1014
1013 1015 mocks['create_repo_svn_settings'].assert_called_once_with(
1014 1016 self.FORM_DATA)
1015 1017 non_called_methods = (
1016 1018 'create_or_update_repo_hook_settings',
1017 1019 'create_or_update_repo_pr_settings',
1018 1020 'create_or_update_repo_hg_settings')
1019 1021 for method in non_called_methods:
1020 1022 assert mocks[method].call_count == 0
1021 1023
1022 1024 def test_non_svn_settings_are_updated_when_type_is_hg(self, backend_hg):
1023 1025 repo = backend_hg.create_repo()
1024 1026 model = VcsSettingsModel(repo=repo)
1025 1027 with self._patch_model(model) as mocks:
1026 1028 model.create_or_update_repo_settings(
1027 1029 data=self.FORM_DATA, inherit_global_settings=False)
1028 1030 Session().commit()
1029 1031
1030 1032 assert mocks['create_repo_svn_settings'].call_count == 0
1031 1033 called_methods = (
1032 1034 'create_or_update_repo_hook_settings',
1033 1035 'create_or_update_repo_pr_settings',
1034 1036 'create_or_update_repo_hg_settings')
1035 1037 for method in called_methods:
1036 1038 mocks[method].assert_called_once_with(self.FORM_DATA)
1037 1039
1038 1040 def test_non_svn_and_hg_settings_are_updated_when_type_is_git(
1039 1041 self, backend_git):
1040 1042 repo = backend_git.create_repo()
1041 1043 model = VcsSettingsModel(repo=repo)
1042 1044 with self._patch_model(model) as mocks:
1043 1045 model.create_or_update_repo_settings(
1044 1046 data=self.FORM_DATA, inherit_global_settings=False)
1045 1047
1046 1048 assert mocks['create_repo_svn_settings'].call_count == 0
1047 1049 called_methods = (
1048 1050 'create_or_update_repo_hook_settings',
1049 1051 'create_or_update_repo_pr_settings')
1050 1052 non_called_methods = (
1051 1053 'create_repo_svn_settings',
1052 1054 'create_or_update_repo_hg_settings'
1053 1055 )
1054 1056 for method in called_methods:
1055 1057 mocks[method].assert_called_once_with(self.FORM_DATA)
1056 1058 for method in non_called_methods:
1057 1059 assert mocks[method].call_count == 0
1058 1060
1059 1061 def test_no_methods_are_called_when_settings_are_inherited(
1060 1062 self, backend):
1061 1063 repo = backend.create_repo()
1062 1064 model = VcsSettingsModel(repo=repo)
1063 1065 with self._patch_model(model) as mocks:
1064 1066 model.create_or_update_repo_settings(
1065 1067 data=self.FORM_DATA, inherit_global_settings=True)
1066 1068 for method_name in mocks:
1067 1069 assert mocks[method_name].call_count == 0
1068 1070
1069 1071 def test_cache_is_marked_for_invalidation(self, repo_stub):
1070 1072 model = VcsSettingsModel(repo=repo_stub)
1071 1073 invalidation_patcher = mock.patch(
1072 1074 'rhodecode.model.scm.ScmModel.mark_for_invalidation')
1073 1075 with invalidation_patcher as invalidation_mock:
1074 1076 model.create_or_update_repo_settings(
1075 1077 data=self.FORM_DATA, inherit_global_settings=True)
1076 1078 Session().commit()
1077 1079
1078 1080 invalidation_mock.assert_called_once_with(
1079 1081 repo_stub.repo_name, delete=True)
1080 1082
1081 1083 def test_inherit_flag_is_saved(self, repo_stub):
1082 1084 model = VcsSettingsModel(repo=repo_stub)
1083 1085 model.inherit_global_settings = True
1084 1086 with self._patch_model(model):
1085 1087 model.create_or_update_repo_settings(
1086 1088 data=self.FORM_DATA, inherit_global_settings=False)
1087 1089 Session().commit()
1088 1090
1089 1091 assert model.inherit_global_settings is False
1090 1092
1091 1093 def _patch_model(self, model):
1092 1094 return mock.patch.multiple(
1093 1095 model,
1094 1096 create_repo_svn_settings=mock.DEFAULT,
1095 1097 create_or_update_repo_hook_settings=mock.DEFAULT,
1096 1098 create_or_update_repo_pr_settings=mock.DEFAULT,
1097 1099 create_or_update_repo_hg_settings=mock.DEFAULT)
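To tie the template and model changes together, here is an illustrative, non-normative usage of the repository settings update exercised above, reusing the FORM_DATA shape from TestCreateOrUpdateRepoSettings; the repository name is a placeholder.

from rhodecode.model.meta import Session
from rhodecode.model.settings import VcsSettingsModel

# Opt a single repository in to automatic merge of approved pull requests.
form_data = dict(TestCreateOrUpdateRepoSettings.FORM_DATA,
                 rhodecode_auto_merge_enabled=True)
model = VcsSettingsModel(repo='some-repo')  # placeholder repository name
model.create_or_update_repo_settings(data=form_data,
                                     inherit_global_settings=False)
Session().commit()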