r5658:a109f5ac merge default, by ilin.s: added latest changes
@@ -1,667 +1,668
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import mock
20 20 import pytest
21 21
22 22 from rhodecode.lib import auth
23 23 from rhodecode.lib.utils2 import str2bool
24 24 from rhodecode.model.db import (
25 25 Repository, UserRepoToPerm, User)
26 26 from rhodecode.model.meta import Session
27 27 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
28 28 from rhodecode.model.user import UserModel
29 29 from rhodecode.tests import (
30 30 login_user_session, logout_user_session,
31 31 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
32 32 from rhodecode.tests.fixtures.rc_fixture import Fixture
33 33 from rhodecode.tests.utils import AssertResponse
34 34 from rhodecode.tests.routes import route_path
35 35
36 36 fixture = Fixture()
37 37
38 38
39 39 @pytest.mark.usefixtures("app")
40 40 class TestVcsSettings(object):
41 41 FORM_DATA = {
42 42 'inherit_global_settings': False,
43 43 'hooks_changegroup_repo_size': False,
44 44 'hooks_changegroup_push_logger': False,
45 45 'hooks_outgoing_pull_logger': False,
46 46 'extensions_largefiles': False,
47 47 'extensions_evolve': False,
48 48 'phases_publish': 'False',
49 49 'rhodecode_pr_merge_enabled': False,
50 'rhodecode_auto_merge_enabled': False,
50 51 'rhodecode_use_outdated_comments': False,
51 52 'new_svn_branch': '',
52 53 'new_svn_tag': ''
53 54 }
54 55
55 56 @pytest.mark.skip_backends('svn')
56 57 def test_global_settings_initial_values(self, autologin_user, backend):
57 58 repo_name = backend.repo_name
58 59 response = self.app.get(route_path('edit_repo_vcs', repo_name=repo_name))
59 60
60 61 expected_settings = (
61 'rhodecode_use_outdated_comments', 'rhodecode_pr_merge_enabled',
62 'rhodecode_use_outdated_comments', 'rhodecode_pr_merge_enabled', 'rhodecode_auto_merge_enabled',
62 63 'hooks_changegroup_repo_size', 'hooks_changegroup_push_logger',
63 64 'hooks_outgoing_pull_logger'
64 65 )
65 66 for setting in expected_settings:
66 67 self.assert_repo_value_equals_global_value(response, setting)
67 68
68 69 def test_show_settings_requires_repo_admin_permission(
69 70 self, backend, user_util, settings_util):
70 71 repo = backend.create_repo()
71 72 repo_name = repo.repo_name
72 73 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
73 74 user_util.grant_user_permission_to_repo(repo, user, 'repository.admin')
74 75 login_user_session(
75 76 self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
76 77 self.app.get(route_path('edit_repo_vcs', repo_name=repo_name), status=200)
77 78
78 79 def test_inherit_global_settings_flag_is_true_by_default(
79 80 self, autologin_user, backend):
80 81 repo_name = backend.repo_name
81 82 response = self.app.get(route_path('edit_repo_vcs', repo_name=repo_name))
82 83
83 84 assert_response = response.assert_response()
84 85 element = assert_response.get_element('#inherit_global_settings')
85 86 assert element.checked
86 87
87 88 @pytest.mark.parametrize('checked_value', [True, False])
88 89 def test_inherit_global_settings_value(
89 90 self, autologin_user, backend, checked_value, settings_util):
90 91 repo = backend.create_repo()
91 92 repo_name = repo.repo_name
92 93 settings_util.create_repo_rhodecode_setting(
93 94 repo, 'inherit_vcs_settings', checked_value, 'bool')
94 95 response = self.app.get(route_path('edit_repo_vcs', repo_name=repo_name))
95 96
96 97 assert_response = response.assert_response()
97 98 element = assert_response.get_element('#inherit_global_settings')
98 99 assert element.checked == checked_value
99 100
100 101 @pytest.mark.skip_backends('svn')
101 102 def test_hooks_settings_are_created(
102 103 self, autologin_user, backend, csrf_token):
103 104 repo_name = backend.repo_name
104 105 data = self.FORM_DATA.copy()
105 106 data['csrf_token'] = csrf_token
106 107 self.app.post(
107 108 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
108 109 settings = SettingsModel(repo=repo_name)
109 110 try:
110 111 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
111 112 ui = settings.get_ui_by_section_and_key(section, key)
112 113 assert ui.ui_active is False
113 114 finally:
114 115 self._cleanup_repo_settings(settings)
115 116
116 117 def test_hooks_settings_are_not_created_for_svn(
117 118 self, autologin_user, backend_svn, csrf_token):
118 119 repo_name = backend_svn.repo_name
119 120 data = self.FORM_DATA.copy()
120 121 data['csrf_token'] = csrf_token
121 122 self.app.post(
122 123 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
123 124 settings = SettingsModel(repo=repo_name)
124 125 try:
125 126 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
126 127 ui = settings.get_ui_by_section_and_key(section, key)
127 128 assert ui is None
128 129 finally:
129 130 self._cleanup_repo_settings(settings)
130 131
131 132 @pytest.mark.skip_backends('svn')
132 133 def test_hooks_settings_are_updated(
133 134 self, autologin_user, backend, csrf_token):
134 135 repo_name = backend.repo_name
135 136 settings = SettingsModel(repo=repo_name)
136 137 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
137 138 settings.create_ui_section_value(section, '', key=key, active=True)
138 139
139 140 data = self.FORM_DATA.copy()
140 141 data['csrf_token'] = csrf_token
141 142 self.app.post(
142 143 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
143 144 try:
144 145 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
145 146 ui = settings.get_ui_by_section_and_key(section, key)
146 147 assert ui.ui_active is False
147 148 finally:
148 149 self._cleanup_repo_settings(settings)
149 150
150 151 def test_hooks_settings_are_not_updated_for_svn(
151 152 self, autologin_user, backend_svn, csrf_token):
152 153 repo_name = backend_svn.repo_name
153 154 settings = SettingsModel(repo=repo_name)
154 155 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
155 156 settings.create_ui_section_value(section, '', key=key, active=True)
156 157
157 158 data = self.FORM_DATA.copy()
158 159 data['csrf_token'] = csrf_token
159 160 self.app.post(
160 161 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
161 162 try:
162 163 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
163 164 ui = settings.get_ui_by_section_and_key(section, key)
164 165 assert ui.ui_active is True
165 166 finally:
166 167 self._cleanup_repo_settings(settings)
167 168
168 169 @pytest.mark.skip_backends('svn')
169 170 def test_pr_settings_are_created(
170 171 self, autologin_user, backend, csrf_token):
171 172 repo_name = backend.repo_name
172 173 data = self.FORM_DATA.copy()
173 174 data['csrf_token'] = csrf_token
174 175 self.app.post(
175 176 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
176 177 settings = SettingsModel(repo=repo_name)
177 178 try:
178 179 for name in VcsSettingsModel.GENERAL_SETTINGS:
179 180 setting = settings.get_setting_by_name(name)
180 181 assert setting.app_settings_value is False
181 182 finally:
182 183 self._cleanup_repo_settings(settings)
183 184
184 185 def test_pr_settings_are_not_created_for_svn(
185 186 self, autologin_user, backend_svn, csrf_token):
186 187 repo_name = backend_svn.repo_name
187 188 data = self.FORM_DATA.copy()
188 189 data['csrf_token'] = csrf_token
189 190 self.app.post(
190 191 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
191 192 settings = SettingsModel(repo=repo_name)
192 193 try:
193 194 for name in VcsSettingsModel.GENERAL_SETTINGS:
194 195 setting = settings.get_setting_by_name(name)
195 196 assert setting is None
196 197 finally:
197 198 self._cleanup_repo_settings(settings)
198 199
199 200 def test_pr_settings_creation_requires_repo_admin_permission(
200 201 self, backend, user_util, settings_util, csrf_token):
201 202 repo = backend.create_repo()
202 203 repo_name = repo.repo_name
203 204
204 205 logout_user_session(self.app, csrf_token)
205 206 session = login_user_session(
206 207 self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
207 208 new_csrf_token = auth.get_csrf_token(session)
208 209
209 210 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
210 211 repo = Repository.get_by_repo_name(repo_name)
211 212 user_util.grant_user_permission_to_repo(repo, user, 'repository.admin')
212 213 data = self.FORM_DATA.copy()
213 214 data['csrf_token'] = new_csrf_token
214 215 settings = SettingsModel(repo=repo_name)
215 216
216 217 try:
217 218 self.app.post(
218 219 route_path('edit_repo_vcs_update', repo_name=repo_name), data,
219 220 status=302)
220 221 finally:
221 222 self._cleanup_repo_settings(settings)
222 223
223 224 @pytest.mark.skip_backends('svn')
224 225 def test_pr_settings_are_updated(
225 226 self, autologin_user, backend, csrf_token):
226 227 repo_name = backend.repo_name
227 228 settings = SettingsModel(repo=repo_name)
228 229 for name in VcsSettingsModel.GENERAL_SETTINGS:
229 230 settings.create_or_update_setting(name, True, 'bool')
230 231
231 232 data = self.FORM_DATA.copy()
232 233 data['csrf_token'] = csrf_token
233 234 self.app.post(
234 235 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
235 236 try:
236 237 for name in VcsSettingsModel.GENERAL_SETTINGS:
237 238 setting = settings.get_setting_by_name(name)
238 239 assert setting.app_settings_value is False
239 240 finally:
240 241 self._cleanup_repo_settings(settings)
241 242
242 243 def test_pr_settings_are_not_updated_for_svn(
243 244 self, autologin_user, backend_svn, csrf_token):
244 245 repo_name = backend_svn.repo_name
245 246 settings = SettingsModel(repo=repo_name)
246 247 for name in VcsSettingsModel.GENERAL_SETTINGS:
247 248 settings.create_or_update_setting(name, True, 'bool')
248 249
249 250 data = self.FORM_DATA.copy()
250 251 data['csrf_token'] = csrf_token
251 252 self.app.post(
252 253 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
253 254 try:
254 255 for name in VcsSettingsModel.GENERAL_SETTINGS:
255 256 setting = settings.get_setting_by_name(name)
256 257 assert setting.app_settings_value is True
257 258 finally:
258 259 self._cleanup_repo_settings(settings)
259 260
260 261 def test_svn_settings_are_created(
261 262 self, autologin_user, backend_svn, csrf_token, settings_util):
262 263 repo_name = backend_svn.repo_name
263 264 data = self.FORM_DATA.copy()
264 265 data['new_svn_tag'] = 'svn-tag'
265 266 data['new_svn_branch'] = 'svn-branch'
266 267 data['csrf_token'] = csrf_token
267 268
268 269 # Create a few global settings to make sure that uniqueness validators
269 270 # are not triggered
270 271 settings_util.create_rhodecode_ui(
271 272 VcsSettingsModel.SVN_BRANCH_SECTION, 'svn-branch')
272 273 settings_util.create_rhodecode_ui(
273 274 VcsSettingsModel.SVN_TAG_SECTION, 'svn-tag')
274 275
275 276 self.app.post(
276 277 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
277 278 settings = SettingsModel(repo=repo_name)
278 279 try:
279 280 svn_branches = settings.get_ui_by_section(
280 281 VcsSettingsModel.SVN_BRANCH_SECTION)
281 282 svn_branch_names = [b.ui_value for b in svn_branches]
282 283 svn_tags = settings.get_ui_by_section(
283 284 VcsSettingsModel.SVN_TAG_SECTION)
284 285 svn_tag_names = [b.ui_value for b in svn_tags]
285 286 assert 'svn-branch' in svn_branch_names
286 287 assert 'svn-tag' in svn_tag_names
287 288 finally:
288 289 self._cleanup_repo_settings(settings)
289 290
290 291 def test_svn_settings_are_unique(
291 292 self, autologin_user, backend_svn, csrf_token, settings_util):
292 293 repo = backend_svn.repo
293 294 repo_name = repo.repo_name
294 295 data = self.FORM_DATA.copy()
295 296 data['new_svn_tag'] = 'test_tag'
296 297 data['new_svn_branch'] = 'test_branch'
297 298 data['csrf_token'] = csrf_token
298 299 settings_util.create_repo_rhodecode_ui(
299 300 repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch')
300 301 settings_util.create_repo_rhodecode_ui(
301 302 repo, VcsSettingsModel.SVN_TAG_SECTION, 'test_tag')
302 303
303 304 response = self.app.post(
304 305 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=200)
305 306 response.mustcontain('Pattern already exists')
306 307
307 308 def test_svn_settings_with_empty_values_are_not_created(
308 309 self, autologin_user, backend_svn, csrf_token):
309 310 repo_name = backend_svn.repo_name
310 311 data = self.FORM_DATA.copy()
311 312 data['csrf_token'] = csrf_token
312 313 self.app.post(
313 314 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
314 315 settings = SettingsModel(repo=repo_name)
315 316 try:
316 317 svn_branches = settings.get_ui_by_section(
317 318 VcsSettingsModel.SVN_BRANCH_SECTION)
318 319 svn_tags = settings.get_ui_by_section(
319 320 VcsSettingsModel.SVN_TAG_SECTION)
320 321 assert len(svn_branches) == 0
321 322 assert len(svn_tags) == 0
322 323 finally:
323 324 self._cleanup_repo_settings(settings)
324 325
325 326 def test_svn_settings_are_shown_for_svn_repository(
326 327 self, autologin_user, backend_svn, csrf_token):
327 328 repo_name = backend_svn.repo_name
328 329 response = self.app.get(
329 330 route_path('edit_repo_vcs', repo_name=repo_name), status=200)
330 331 response.mustcontain('Subversion Settings')
331 332
332 333 @pytest.mark.skip_backends('svn')
333 334 def test_svn_settings_are_not_created_for_not_svn_repository(
334 335 self, autologin_user, backend, csrf_token):
335 336 repo_name = backend.repo_name
336 337 data = self.FORM_DATA.copy()
337 338 data['csrf_token'] = csrf_token
338 339 self.app.post(
339 340 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
340 341 settings = SettingsModel(repo=repo_name)
341 342 try:
342 343 svn_branches = settings.get_ui_by_section(
343 344 VcsSettingsModel.SVN_BRANCH_SECTION)
344 345 svn_tags = settings.get_ui_by_section(
345 346 VcsSettingsModel.SVN_TAG_SECTION)
346 347 assert len(svn_branches) == 0
347 348 assert len(svn_tags) == 0
348 349 finally:
349 350 self._cleanup_repo_settings(settings)
350 351
351 352 @pytest.mark.skip_backends('svn')
352 353 def test_svn_settings_are_shown_only_for_svn_repository(
353 354 self, autologin_user, backend, csrf_token):
354 355 repo_name = backend.repo_name
355 356 response = self.app.get(
356 357 route_path('edit_repo_vcs', repo_name=repo_name), status=200)
357 358 response.mustcontain(no='Subversion Settings')
358 359
359 360 def test_hg_settings_are_created(
360 361 self, autologin_user, backend_hg, csrf_token):
361 362 repo_name = backend_hg.repo_name
362 363 data = self.FORM_DATA.copy()
363 364 data['new_svn_tag'] = 'svn-tag'
364 365 data['new_svn_branch'] = 'svn-branch'
365 366 data['csrf_token'] = csrf_token
366 367 self.app.post(
367 368 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
368 369 settings = SettingsModel(repo=repo_name)
369 370 try:
370 371 largefiles_ui = settings.get_ui_by_section_and_key(
371 372 'extensions', 'largefiles')
372 373 assert largefiles_ui.ui_active is False
373 374 phases_ui = settings.get_ui_by_section_and_key(
374 375 'phases', 'publish')
375 376 assert str2bool(phases_ui.ui_value) is False
376 377 finally:
377 378 self._cleanup_repo_settings(settings)
378 379
379 380 def test_hg_settings_are_updated(
380 381 self, autologin_user, backend_hg, csrf_token):
381 382 repo_name = backend_hg.repo_name
382 383 settings = SettingsModel(repo=repo_name)
383 384 settings.create_ui_section_value(
384 385 'extensions', '', key='largefiles', active=True)
385 386 settings.create_ui_section_value(
386 387 'phases', '1', key='publish', active=True)
387 388
388 389 data = self.FORM_DATA.copy()
389 390 data['csrf_token'] = csrf_token
390 391 self.app.post(
391 392 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
392 393 try:
393 394 largefiles_ui = settings.get_ui_by_section_and_key(
394 395 'extensions', 'largefiles')
395 396 assert largefiles_ui.ui_active is False
396 397 phases_ui = settings.get_ui_by_section_and_key(
397 398 'phases', 'publish')
398 399 assert str2bool(phases_ui.ui_value) is False
399 400 finally:
400 401 self._cleanup_repo_settings(settings)
401 402
402 403 def test_hg_settings_are_shown_for_hg_repository(
403 404 self, autologin_user, backend_hg, csrf_token):
404 405 repo_name = backend_hg.repo_name
405 406 response = self.app.get(
406 407 route_path('edit_repo_vcs', repo_name=repo_name), status=200)
407 408 response.mustcontain('Mercurial Settings')
408 409
409 410 @pytest.mark.skip_backends('hg')
410 411 def test_hg_settings_are_created_only_for_hg_repository(
411 412 self, autologin_user, backend, csrf_token):
412 413 repo_name = backend.repo_name
413 414 data = self.FORM_DATA.copy()
414 415 data['csrf_token'] = csrf_token
415 416 self.app.post(
416 417 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
417 418 settings = SettingsModel(repo=repo_name)
418 419 try:
419 420 largefiles_ui = settings.get_ui_by_section_and_key(
420 421 'extensions', 'largefiles')
421 422 assert largefiles_ui is None
422 423 phases_ui = settings.get_ui_by_section_and_key(
423 424 'phases', 'publish')
424 425 assert phases_ui is None
425 426 finally:
426 427 self._cleanup_repo_settings(settings)
427 428
428 429 @pytest.mark.skip_backends('hg')
429 430 def test_hg_settings_are_shown_only_for_hg_repository(
430 431 self, autologin_user, backend, csrf_token):
431 432 repo_name = backend.repo_name
432 433 response = self.app.get(
433 434 route_path('edit_repo_vcs', repo_name=repo_name), status=200)
434 435 response.mustcontain(no='Mercurial Settings')
435 436
436 437 @pytest.mark.skip_backends('hg')
437 438 def test_hg_settings_are_updated_only_for_hg_repository(
438 439 self, autologin_user, backend, csrf_token):
439 440 repo_name = backend.repo_name
440 441 settings = SettingsModel(repo=repo_name)
441 442 settings.create_ui_section_value(
442 443 'extensions', '', key='largefiles', active=True)
443 444 settings.create_ui_section_value(
444 445 'phases', '1', key='publish', active=True)
445 446
446 447 data = self.FORM_DATA.copy()
447 448 data['csrf_token'] = csrf_token
448 449 self.app.post(
449 450 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
450 451 try:
451 452 largefiles_ui = settings.get_ui_by_section_and_key(
452 453 'extensions', 'largefiles')
453 454 assert largefiles_ui.ui_active is True
454 455 phases_ui = settings.get_ui_by_section_and_key(
455 456 'phases', 'publish')
456 457 assert phases_ui.ui_value == '1'
457 458 finally:
458 459 self._cleanup_repo_settings(settings)
459 460
460 461 def test_per_repo_svn_settings_are_displayed(
461 462 self, autologin_user, backend_svn, settings_util):
462 463 repo = backend_svn.create_repo()
463 464 repo_name = repo.repo_name
464 465 branches = [
465 466 settings_util.create_repo_rhodecode_ui(
466 467 repo, VcsSettingsModel.SVN_BRANCH_SECTION,
467 468 'branch_{}'.format(i))
468 469 for i in range(10)]
469 470 tags = [
470 471 settings_util.create_repo_rhodecode_ui(
471 472 repo, VcsSettingsModel.SVN_TAG_SECTION, 'tag_{}'.format(i))
472 473 for i in range(10)]
473 474
474 475 response = self.app.get(
475 476 route_path('edit_repo_vcs', repo_name=repo_name), status=200)
476 477 assert_response = response.assert_response()
477 478 for branch in branches:
478 479 css_selector = '[name=branch_value_{}]'.format(branch.ui_id)
479 480 element = assert_response.get_element(css_selector)
480 481 assert element.value == branch.ui_value
481 482 for tag in tags:
482 483 css_selector = '[name=tag_ui_value_new_{}]'.format(tag.ui_id)
483 484 element = assert_response.get_element(css_selector)
484 485 assert element.value == tag.ui_value
485 486
486 487 def test_per_repo_hg_and_pr_settings_are_not_displayed_for_svn(
487 488 self, autologin_user, backend_svn, settings_util):
488 489 repo = backend_svn.create_repo()
489 490 repo_name = repo.repo_name
490 491 response = self.app.get(
491 492 route_path('edit_repo_vcs', repo_name=repo_name), status=200)
492 493 response.mustcontain(no='<label>Hooks:</label>')
493 494 response.mustcontain(no='<label>Pull Request Settings:</label>')
494 495
495 496 def test_inherit_global_settings_value_is_saved(
496 497 self, autologin_user, backend, csrf_token):
497 498 repo_name = backend.repo_name
498 499 data = self.FORM_DATA.copy()
499 500 data['csrf_token'] = csrf_token
500 501 data['inherit_global_settings'] = True
501 502 self.app.post(
502 503 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
503 504
504 505 settings = SettingsModel(repo=repo_name)
505 506 vcs_settings = VcsSettingsModel(repo=repo_name)
506 507 try:
507 508 assert vcs_settings.inherit_global_settings is True
508 509 finally:
509 510 self._cleanup_repo_settings(settings)
510 511
511 512 def test_repo_cache_is_invalidated_when_settings_are_updated(
512 513 self, autologin_user, backend, csrf_token):
513 514 repo_name = backend.repo_name
514 515 data = self.FORM_DATA.copy()
515 516 data['csrf_token'] = csrf_token
516 517 data['inherit_global_settings'] = True
517 518 settings = SettingsModel(repo=repo_name)
518 519
519 520 invalidation_patcher = mock.patch(
520 521 'rhodecode.model.scm.ScmModel.mark_for_invalidation')
521 522 with invalidation_patcher as invalidation_mock:
522 523 self.app.post(
523 524 route_path('edit_repo_vcs_update', repo_name=repo_name), data,
524 525 status=302)
525 526 try:
526 527 invalidation_mock.assert_called_once_with(repo_name, delete=True)
527 528 finally:
528 529 self._cleanup_repo_settings(settings)
529 530
530 531 def test_other_settings_not_saved_inherit_global_settings_is_true(
531 532 self, autologin_user, backend, csrf_token):
532 533 repo_name = backend.repo_name
533 534 data = self.FORM_DATA.copy()
534 535 data['csrf_token'] = csrf_token
535 536 data['inherit_global_settings'] = True
536 537 self.app.post(
537 538 route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302)
538 539
539 540 settings = SettingsModel(repo=repo_name)
540 541 ui_settings = (
541 542 VcsSettingsModel.HOOKS_SETTINGS + VcsSettingsModel.HG_SETTINGS)
542 543
543 544 vcs_settings = []
544 545 try:
545 546 for section, key in ui_settings:
546 547 ui = settings.get_ui_by_section_and_key(section, key)
547 548 if ui:
548 549 vcs_settings.append(ui)
549 550 vcs_settings.extend(settings.get_ui_by_section(
550 551 VcsSettingsModel.SVN_BRANCH_SECTION))
551 552 vcs_settings.extend(settings.get_ui_by_section(
552 553 VcsSettingsModel.SVN_TAG_SECTION))
553 554 for name in VcsSettingsModel.GENERAL_SETTINGS:
554 555 setting = settings.get_setting_by_name(name)
555 556 if setting:
556 557 vcs_settings.append(setting)
557 558 assert vcs_settings == []
558 559 finally:
559 560 self._cleanup_repo_settings(settings)
560 561
561 562 def test_delete_svn_branch_and_tag_patterns(
562 563 self, autologin_user, backend_svn, settings_util, csrf_token, xhr_header):
563 564 repo = backend_svn.create_repo()
564 565 repo_name = repo.repo_name
565 566 branch = settings_util.create_repo_rhodecode_ui(
566 567 repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch',
567 568 cleanup=False)
568 569 tag = settings_util.create_repo_rhodecode_ui(
569 570 repo, VcsSettingsModel.SVN_TAG_SECTION, 'test_tag', cleanup=False)
570 571 data = {
571 572 'csrf_token': csrf_token
572 573 }
573 574 for id_ in (branch.ui_id, tag.ui_id):
574 575 data['delete_svn_pattern'] = id_
575 576 self.app.post(
576 577 route_path('edit_repo_vcs_svn_pattern_delete', repo_name=repo_name),
577 578 data, extra_environ=xhr_header, status=200)
578 579 settings = VcsSettingsModel(repo=repo_name)
579 580 assert settings.get_repo_svn_branch_patterns() == []
580 581
581 582 def test_delete_svn_branch_requires_repo_admin_permission(
582 583 self, backend_svn, user_util, settings_util, csrf_token, xhr_header):
583 584 repo = backend_svn.create_repo()
584 585 repo_name = repo.repo_name
585 586
586 587 logout_user_session(self.app, csrf_token)
587 588 session = login_user_session(
588 589 self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
589 590 csrf_token = auth.get_csrf_token(session)
590 591
591 592 repo = Repository.get_by_repo_name(repo_name)
592 593 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
593 594 user_util.grant_user_permission_to_repo(repo, user, 'repository.admin')
594 595 branch = settings_util.create_repo_rhodecode_ui(
595 596 repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch',
596 597 cleanup=False)
597 598 data = {
598 599 'csrf_token': csrf_token,
599 600 'delete_svn_pattern': branch.ui_id
600 601 }
601 602 self.app.post(
602 603 route_path('edit_repo_vcs_svn_pattern_delete', repo_name=repo_name),
603 604 data, extra_environ=xhr_header, status=200)
604 605
605 606 def test_delete_svn_branch_raises_400_when_not_found(
606 607 self, autologin_user, backend_svn, settings_util, csrf_token, xhr_header):
607 608 repo_name = backend_svn.repo_name
608 609 data = {
609 610 'delete_svn_pattern': 123,
610 611 'csrf_token': csrf_token
611 612 }
612 613 self.app.post(
613 614 route_path('edit_repo_vcs_svn_pattern_delete', repo_name=repo_name),
614 615 data, extra_environ=xhr_header, status=400)
615 616
616 617 def test_delete_svn_branch_raises_400_when_no_id_specified(
617 618 self, autologin_user, backend_svn, settings_util, csrf_token, xhr_header):
618 619 repo_name = backend_svn.repo_name
619 620 data = {
620 621 'csrf_token': csrf_token
621 622 }
622 623 self.app.post(
623 624 route_path('edit_repo_vcs_svn_pattern_delete', repo_name=repo_name),
624 625 data, extra_environ=xhr_header, status=400)
625 626
626 627 def _cleanup_repo_settings(self, settings_model):
627 628 cleanup = []
628 629 ui_settings = (
629 630 VcsSettingsModel.HOOKS_SETTINGS + VcsSettingsModel.HG_SETTINGS)
630 631
631 632 for section, key in ui_settings:
632 633 ui = settings_model.get_ui_by_section_and_key(section, key)
633 634 if ui:
634 635 cleanup.append(ui)
635 636
636 637 cleanup.extend(settings_model.get_ui_by_section(
637 638 VcsSettingsModel.INHERIT_SETTINGS))
638 639 cleanup.extend(settings_model.get_ui_by_section(
639 640 VcsSettingsModel.SVN_BRANCH_SECTION))
640 641 cleanup.extend(settings_model.get_ui_by_section(
641 642 VcsSettingsModel.SVN_TAG_SECTION))
642 643
643 644 for name in VcsSettingsModel.GENERAL_SETTINGS:
644 645 setting = settings_model.get_setting_by_name(name)
645 646 if setting:
646 647 cleanup.append(setting)
647 648
648 649 for object_ in cleanup:
649 650 Session().delete(object_)
650 651 Session().commit()
651 652
652 653 def assert_repo_value_equals_global_value(self, response, setting):
653 654 assert_response = response.assert_response()
654 655 global_css_selector = '[name={}_inherited]'.format(setting)
655 656 repo_css_selector = '[name={}]'.format(setting)
656 657 repo_element = assert_response.get_element(repo_css_selector)
657 658 global_element = assert_response.get_element(global_css_selector)
658 659 assert repo_element.value == global_element.value
659 660
660 661
661 662 def _get_permission_for_user(user, repo):
662 663 perm = UserRepoToPerm.query()\
663 664 .filter(UserRepoToPerm.repository ==
664 665 Repository.get_by_repo_name(repo))\
665 666 .filter(UserRepoToPerm.user == User.get_by_username(user))\
666 667 .all()
667 668 return perm
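
For context on the new `rhodecode_auto_merge_enabled` flag exercised by the tests above, the sketch below shows how such a per-repo boolean setting is written and read back through `SettingsModel`, mirroring the `create_or_update_setting` / `get_setting_by_name` calls that the tests themselves use. It is a minimal illustration, assuming an initialized RhodeCode test environment; the repository name is a placeholder, not taken from the diff.

# Minimal sketch, assuming an initialized RhodeCode environment and an existing
# repository; 'vcs_test_git' is a placeholder name chosen for illustration.
from rhodecode.model.settings import SettingsModel
from rhodecode.model.meta import Session

settings = SettingsModel(repo='vcs_test_git')

# store the per-repo flag that the form above posts as 'rhodecode_auto_merge_enabled'
settings.create_or_update_setting('rhodecode_auto_merge_enabled', False, 'bool')
Session().commit()

# read it back the same way the tests assert on it
setting = settings.get_setting_by_name('rhodecode_auto_merge_enabled')
assert setting.app_settings_value is False
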
@@ -1,474 +1,475
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import sys
21 21 import collections
22 22
23 23 import time
24 24 import logging.config
25 25
26 26 from paste.gzipper import make_gzip_middleware
27 27 import pyramid.events
28 28 from pyramid.wsgi import wsgiapp
29 29 from pyramid.config import Configurator
30 30 from pyramid.settings import asbool, aslist
31 31 from pyramid.httpexceptions import (
32 32 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
33 33 from pyramid.renderers import render_to_response
34 34
35 35 from rhodecode.model import meta
36 36 from rhodecode.config import patches
37 37
38 38 from rhodecode.config.environment import load_pyramid_environment, propagate_rhodecode_config
39 39
40 40 import rhodecode.events
41 41 from rhodecode.config.config_maker import sanitize_settings_and_apply_defaults
42 42 from rhodecode.lib.middleware.vcs import VCSMiddleware
43 43 from rhodecode.lib.request import Request
44 44 from rhodecode.lib.vcs import VCSCommunicationError
45 45 from rhodecode.lib.exceptions import VCSServerUnavailable
46 46 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
47 47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
48 48 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
49 49 from rhodecode.lib.utils2 import AttributeDict
50 50 from rhodecode.lib.exc_tracking import store_exception, format_exc
51 51 from rhodecode.subscribers import (
52 scan_repositories_if_enabled, write_js_routes_if_enabled,
52 auto_merge_pr_if_needed, scan_repositories_if_enabled, write_js_routes_if_enabled,
53 53 write_metadata_if_needed, write_usage_data, import_license_if_present)
54 54 from rhodecode.lib.statsd_client import StatsdClient
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 def is_http_error(response):
60 60 # error which should have traceback
61 61 return response.status_code > 499
62 62
63 63
64 64 def should_load_all():
65 65 """
66 66 Returns whether all application components should be loaded. In some cases it is
67 67 desirable to skip app loading for faster shell script execution
68 68 """
69 69 ssh_cmd = os.environ.get('RC_CMD_SSH_WRAPPER')
70 70 if ssh_cmd:
71 71 return False
72 72
73 73 return True
74 74
75 75
76 76 def make_pyramid_app(global_config, **settings):
77 77 """
78 78 Constructs the WSGI application based on Pyramid.
79 79
80 80 Specials:
81 81
82 82 * The application can also be integrated like a plugin via the call to
83 83 `includeme`. This is accompanied with the other utility functions which
84 84 are called. Changing this should be done with great care to not break
85 85 cases when these fragments are assembled from another place.
86 86
87 87 """
88 88 start_time = time.time()
89 89 log.info('Pyramid app config starting')
90 90
91 91 sanitize_settings_and_apply_defaults(global_config, settings)
92 92
93 93 # init and bootstrap StatsdClient
94 94 StatsdClient.setup(settings)
95 95
96 96 config = Configurator(settings=settings)
97 97 # Init our statsd at very start
98 98 config.registry.statsd = StatsdClient.statsd
99 99
100 100 # Apply compatibility patches
101 101 patches.inspect_getargspec()
102 102 patches.repoze_sendmail_lf_fix()
103 103
104 104 # first init, so load_pyramid_environment can access some critical data, like __file__
105 105 propagate_rhodecode_config(global_config, {}, {}, full=False)
106 106
107 107 load_pyramid_environment(global_config, settings)
108 108
109 109 # Static file view comes first
110 110 includeme_first(config)
111 111
112 112 includeme(config)
113 113
114 114 pyramid_app = config.make_wsgi_app()
115 115 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
116 116 pyramid_app.config = config
117 117
118 118 celery_settings = get_celery_config(settings)
119 119 config.configure_celery(celery_settings)
120 120
121 121 # final config set...
122 122 propagate_rhodecode_config(global_config, settings, config.registry.settings)
123 123
124 124 # creating the app uses a connection - return it after we are done
125 125 meta.Session.remove()
126 126
127 127 total_time = time.time() - start_time
128 128 log.info('Pyramid app created and configured in %.2fs', total_time)
129 129 return pyramid_app
130 130
131 131
132 132 def get_celery_config(settings):
133 133 """
134 134 Converts basic ini configuration into celery 4.X options
135 135 """
136 136
137 137 def key_converter(key_name):
138 138 pref = 'celery.'
139 139 if key_name.startswith(pref):
140 140 return key_name[len(pref):].replace('.', '_').lower()
141 141
142 142 def type_converter(parsed_key, value):
143 143 # cast to int
144 144 if value.isdigit():
145 145 return int(value)
146 146
147 147 # cast to bool
148 148 if value.lower() in ['true', 'false']:
149 149 return value.lower() == 'true'
150 150 return value
151 151
152 152 celery_config = {}
153 153 for k, v in settings.items():
154 154 pref = 'celery.'
155 155 if k.startswith(pref):
156 156 celery_config[key_converter(k)] = type_converter(key_converter(k), v)
157 157
158 158 # TODO: rethink if we want to support celerybeat-based file config, probably NOT
159 159 # beat_config = {}
160 160 # for section in parser.sections():
161 161 # if section.startswith('celerybeat:'):
162 162 # name = section.split(':', 1)[1]
163 163 # beat_config[name] = get_beat_config(parser, section)
164 164
165 165 # final compose of settings
166 166 celery_settings = {}
167 167
168 168 if celery_config:
169 169 celery_settings.update(celery_config)
170 170 # if beat_config:
171 171 # celery_settings.update({'beat_schedule': beat_config})
172 172
173 173 return celery_settings
174 174
175 175
176 176 def not_found_view(request):
177 177 """
178 178 This creates the view which should be registered as not-found-view to
179 179 pyramid.
180 180 """
181 181
182 182 if not getattr(request, 'vcs_call', None):
183 183 # handle like regular case with our error_handler
184 184 return error_handler(HTTPNotFound(), request)
185 185
186 186 # handle not found view as a vcs call
187 187 settings = request.registry.settings
188 188 ae_client = getattr(request, 'ae_client', None)
189 189 vcs_app = VCSMiddleware(
190 190 HTTPNotFound(), request.registry, settings,
191 191 appenlight_client=ae_client)
192 192
193 193 return wsgiapp(vcs_app)(None, request)
194 194
195 195
196 196 def error_handler(exception, request):
197 197 import rhodecode
198 198 from rhodecode.lib import helpers
199 199
200 200 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
201 201
202 202 base_response = HTTPInternalServerError()
203 203 # prefer original exception for the response since it may have headers set
204 204 if isinstance(exception, HTTPException):
205 205 base_response = exception
206 206 elif isinstance(exception, VCSCommunicationError):
207 207 base_response = VCSServerUnavailable()
208 208
209 209 if is_http_error(base_response):
210 210 traceback_info = format_exc(request.exc_info)
211 211 log.error(
212 212 'error occurred handling this request for path: %s, \n%s',
213 213 request.path, traceback_info)
214 214
215 215 error_explanation = base_response.explanation or str(base_response)
216 216 if base_response.status_code == 404:
217 217 error_explanation += " Alternatively, you may not have permission to access this page."
218 218 c = AttributeDict()
219 219 c.error_message = base_response.status
220 220 c.error_explanation = error_explanation
221 221 c.visual = AttributeDict()
222 222
223 223 c.visual.rhodecode_support_url = (
224 224 request.registry.settings.get('rhodecode_support_url') or
225 225 request.route_url('rhodecode_support')
226 226 )
227 227 c.redirect_time = 0
228 228 c.rhodecode_name = rhodecode_title
229 229 if not c.rhodecode_name:
230 230 c.rhodecode_name = 'Rhodecode'
231 231
232 232 c.causes = []
233 233 if is_http_error(base_response):
234 234 c.causes.append('Server is overloaded.')
235 235 c.causes.append('Server database connection is lost.')
236 236 c.causes.append('Server experienced an unhandled error.')
237 237
238 238 if hasattr(base_response, 'causes'):
239 239 c.causes = base_response.causes
240 240
241 241 c.messages = helpers.flash.pop_messages(request=request)
242 242 exc_info = sys.exc_info()
243 243 c.exception_id = id(exc_info)
244 244 c.show_exception_id = isinstance(base_response, VCSServerUnavailable) \
245 245 or base_response.status_code > 499
246 246 c.exception_id_url = request.route_url(
247 247 'admin_settings_exception_tracker_show', exception_id=c.exception_id)
248 248
249 249 debug_mode = rhodecode.ConfigGet().get_bool('debug')
250 250 if c.show_exception_id:
251 251 store_exception(c.exception_id, exc_info)
252 252 c.exception_debug = debug_mode
253 253 c.exception_config_ini = rhodecode.CONFIG.get('__file__')
254 254
255 255 if debug_mode:
256 256 try:
257 257 from rich.traceback import install
258 258 install(show_locals=True)
259 259 log.debug('Installing rich tracebacks...')
260 260 except ImportError:
261 261 pass
262 262
263 263 response = render_to_response(
264 264 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
265 265 response=base_response)
266 266
267 267 response.headers["X-RC-Exception-Id"] = str(c.exception_id)
268 268
269 269 statsd = request.registry.statsd
270 270 if statsd and base_response.status_code > 499:
271 271 exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
272 272 statsd.incr('rhodecode_exception_total',
273 273 tags=["exc_source:web",
274 274 f"http_code:{base_response.status_code}",
275 275 f"type:{exc_type}"])
276 276
277 277 return response
278 278
279 279
280 280 def includeme_first(config):
281 281 # redirect automatic browser favicon.ico requests to correct place
282 282 def favicon_redirect(context, request):
283 283 return HTTPFound(
284 284 request.static_path('rhodecode:public/images/favicon.ico'))
285 285
286 286 config.add_view(favicon_redirect, route_name='favicon')
287 287 config.add_route('favicon', '/favicon.ico')
288 288
289 289 def robots_redirect(context, request):
290 290 return HTTPFound(
291 291 request.static_path('rhodecode:public/robots.txt'))
292 292
293 293 config.add_view(robots_redirect, route_name='robots')
294 294 config.add_route('robots', '/robots.txt')
295 295
296 296 config.add_static_view(
297 297 '_static/deform', 'deform:static')
298 298 config.add_static_view(
299 299 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
300 300
301 301
302 302 ce_auth_resources = [
303 303 'rhodecode.authentication.plugins.auth_crowd',
304 304 'rhodecode.authentication.plugins.auth_headers',
305 305 'rhodecode.authentication.plugins.auth_jasig_cas',
306 306 'rhodecode.authentication.plugins.auth_ldap',
307 307 'rhodecode.authentication.plugins.auth_pam',
308 308 'rhodecode.authentication.plugins.auth_rhodecode',
309 309 'rhodecode.authentication.plugins.auth_token',
310 310 ]
311 311
312 312
313 313 def includeme(config, auth_resources=None):
314 314 from rhodecode.lib.celerylib.loader import configure_celery
315 315 log.debug('Initializing main includeme from %s', os.path.basename(__file__))
316 316 settings = config.registry.settings
317 317 config.set_request_factory(Request)
318 318
319 319 # plugin information
320 320 config.registry.rhodecode_plugins = collections.OrderedDict()
321 321
322 322 config.add_directive(
323 323 'register_rhodecode_plugin', register_rhodecode_plugin)
324 324
325 325 config.add_directive('configure_celery', configure_celery)
326 326
327 327 if settings.get('appenlight', False):
328 328 config.include('appenlight_client.ext.pyramid_tween')
329 329
330 330 load_all = should_load_all()
331 331
332 332 # Includes which are required. The application would fail without them.
333 333 config.include('pyramid_mako')
334 334 config.include('rhodecode.lib.rc_beaker')
335 335 config.include('rhodecode.lib.rc_cache')
336 336 config.include('rhodecode.lib.archive_cache')
337 337
338 338 config.include('rhodecode.apps._base.navigation')
339 339 config.include('rhodecode.apps._base.subscribers')
340 340 config.include('rhodecode.tweens')
341 341 config.include('rhodecode.authentication')
342 342
343 343 if load_all:
344 344
345 345 # load CE authentication plugins
346 346
347 347 if auth_resources:
348 348 ce_auth_resources.extend(auth_resources)
349 349
350 350 for resource in ce_auth_resources:
351 351 config.include(resource)
352 352
353 353 # Auto discover authentication plugins and include their configuration.
354 354 if asbool(settings.get('auth_plugin.import_legacy_plugins', 'true')):
355 355 from rhodecode.authentication import discover_legacy_plugins
356 356 discover_legacy_plugins(config)
357 357
358 358 # apps
359 359 if load_all:
360 360 log.debug('Starting config.include() calls')
361 361 config.include('rhodecode.api.includeme')
362 362 config.include('rhodecode.apps._base.includeme')
363 363 config.include('rhodecode.apps._base.navigation.includeme')
364 364 config.include('rhodecode.apps._base.subscribers.includeme')
365 365 config.include('rhodecode.apps.hovercards.includeme')
366 366 config.include('rhodecode.apps.ops.includeme')
367 367 config.include('rhodecode.apps.channelstream.includeme')
368 368 config.include('rhodecode.apps.file_store.includeme')
369 369 config.include('rhodecode.apps.admin.includeme')
370 370 config.include('rhodecode.apps.login.includeme')
371 371 config.include('rhodecode.apps.home.includeme')
372 372 config.include('rhodecode.apps.journal.includeme')
373 373
374 374 config.include('rhodecode.apps.repository.includeme')
375 375 config.include('rhodecode.apps.repo_group.includeme')
376 376 config.include('rhodecode.apps.user_group.includeme')
377 377 config.include('rhodecode.apps.search.includeme')
378 378 config.include('rhodecode.apps.user_profile.includeme')
379 379 config.include('rhodecode.apps.user_group_profile.includeme')
380 380 config.include('rhodecode.apps.my_account.includeme')
381 381 config.include('rhodecode.apps.gist.includeme')
382 382
383 383 config.include('rhodecode.apps.svn_support.includeme')
384 384 config.include('rhodecode.apps.ssh_support.includeme')
385 385 config.include('rhodecode.apps.debug_style')
386 386
387 387 if load_all:
388 388 config.include('rhodecode.integrations.includeme')
389 389 config.include('rhodecode.integrations.routes.includeme')
390 390
391 391 config.add_route('rhodecode_support', 'https://rhodecode.com/help/', static=True)
392 392 settings['default_locale_name'] = settings.get('lang', 'en')
393 393 config.add_translation_dirs('rhodecode:i18n/')
394 394
395 395 # Add subscribers.
396 396 if load_all:
397 397 log.debug('Adding subscribers...')
398 config.add_subscriber(auto_merge_pr_if_needed, rhodecode.events.PullRequestReviewEvent)
398 399 config.add_subscriber(scan_repositories_if_enabled,
399 400 pyramid.events.ApplicationCreated)
400 401 config.add_subscriber(write_metadata_if_needed,
401 402 pyramid.events.ApplicationCreated)
402 403 config.add_subscriber(write_usage_data,
403 404 pyramid.events.ApplicationCreated)
404 405 config.add_subscriber(write_js_routes_if_enabled,
405 406 pyramid.events.ApplicationCreated)
406 407 config.add_subscriber(import_license_if_present,
407 408 pyramid.events.ApplicationCreated)
408 409
409 410 # Set the default renderer for HTML templates to mako.
410 411 config.add_mako_renderer('.html')
411 412
412 413 config.add_renderer(
413 414 name='json_ext',
414 415 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
415 416
416 417 config.add_renderer(
417 418 name='string_html',
418 419 factory='rhodecode.lib.string_renderer.html')
419 420
420 421 # include RhodeCode plugins
421 422 includes = aslist(settings.get('rhodecode.includes', []))
422 423 log.debug('processing rhodecode.includes data...')
423 424 for inc in includes:
424 425 config.include(inc)
425 426
426 427 # custom not-found view; if our pyramid app doesn't know how to handle
427 428 # the request, pass it to the potential VCS handling app
428 429 config.add_notfound_view(not_found_view)
429 430 if not settings.get('debugtoolbar.enabled', False):
430 431 # with the debugtoolbar disabled, handle all exceptions via the error_handler
431 432 config.add_view(error_handler, context=Exception)
432 433
433 434 # all errors including 403/404/50X
434 435 config.add_view(error_handler, context=HTTPError)
435 436
436 437
437 438 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
438 439 """
439 440 Apply outer WSGI middlewares around the application.
440 441 """
441 442 registry = config.registry
442 443 settings = registry.settings
443 444
444 445 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
445 446 pyramid_app = HttpsFixup(pyramid_app, settings)
446 447
447 448 pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
448 449 pyramid_app, settings)
449 450 registry.ae_client = _ae_client
450 451
451 452 if settings['gzip_responses']:
452 453 pyramid_app = make_gzip_middleware(
453 454 pyramid_app, settings, compress_level=1)
454 455
455 456 # this should be the outermost middleware in the wsgi stack since
456 457 # middleware like Routes make database calls
457 458 def pyramid_app_with_cleanup(environ, start_response):
458 459 start = time.time()
459 460 try:
460 461 return pyramid_app(environ, start_response)
461 462 finally:
462 463 # Dispose current database session and rollback uncommitted
463 464 # transactions.
464 465 meta.Session.remove()
465 466
466 467 # In a single-threaded server on a non-sqlite db we should have
467 468 # '0 Current Checked out connections' at the end of a request;
468 469 # if not, then something, somewhere is leaving a connection open
469 470 pool = meta.get_engine().pool
470 471 log.debug('sa pool status: %s', pool.status())
471 472 total = time.time() - start
472 473 log.debug('Request processing finalized: %.4fs', total)
473 474
474 475 return pyramid_app_with_cleanup
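
For readers unfamiliar with the subscriber wiring added in this hunk (the new `auto_merge_pr_if_needed` handler registered for `rhodecode.events.PullRequestReviewEvent`), the following self-contained sketch shows how Pyramid's `add_subscriber` pairs a handler with an event type and how firing the event invokes it. The event class and handler below are hypothetical stand-ins, not RhodeCode's actual implementations.

# Self-contained Pyramid subscriber sketch; DummyReviewEvent and on_review are
# hypothetical stand-ins for RhodeCode's real event and handler.
from pyramid.config import Configurator


class DummyReviewEvent:
    """Hypothetical event object carrying whatever data the handler needs."""
    def __init__(self, pull_request_id):
        self.pull_request_id = pull_request_id


def on_review(event):
    # a subscriber receives the event instance passed to registry.notify()
    print(f'review event received for pull request {event.pull_request_id}')


config = Configurator()
config.add_subscriber(on_review, DummyReviewEvent)
config.commit()

# notifying the registry with an event instance calls every matching subscriber
config.registry.notify(DummyReviewEvent(pull_request_id=42))
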
@@ -1,662 +1,663
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 this module contains the form validation classes;
21 21 see http://formencode.org/module-formencode.validators.html
22 22 for a list of all available validators
23 23
24 24 we can create our own validators
25 25
26 26 The table below outlines the options which can be used in a schema in addition to the validators themselves
27 27 pre_validators [] These validators will be applied before the schema
28 28 chained_validators [] These validators will be applied after the schema
29 29 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
30 30 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
31 31 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default value has been specified and therefore missing keys shouldn't take a default value.
32 32 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
33 33
34 34
35 35 <name> = formencode.validators.<name of validator>
36 36 <name> must equal form name
37 37 list=[1,2,3,4,5]
38 38 for SELECT use formencode.All(OneOf(list), Int())
39 39
40 40 """
41 41
42 42 import deform
43 43 import logging
44 44 import formencode
45 45
46 46 from pkg_resources import resource_filename
47 47 from formencode import All, Pipe
48 48
49 49 from pyramid.threadlocal import get_current_request
50 50
51 51 from rhodecode import BACKENDS
52 52 from rhodecode.lib import helpers
53 53 from rhodecode.model import validators as v
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 deform_templates = resource_filename('deform', 'templates')
59 59 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
60 60 search_path = (rhodecode_templates, deform_templates)
61 61
62 62
63 63 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
64 64 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
65 65 def __call__(self, template_name, **kw):
66 66 kw['h'] = helpers
67 67 kw['request'] = get_current_request()
68 68 return self.load(template_name)(**kw)
69 69
70 70
71 71 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
72 72 deform.Form.set_default_renderer(form_renderer)
73 73
74 74
75 75 def LoginForm(localizer):
76 76 _ = localizer
77 77
78 78 class _LoginForm(formencode.Schema):
79 79 allow_extra_fields = True
80 80 filter_extra_fields = True
81 81 username = v.UnicodeString(
82 82 strip=True,
83 83 min=1,
84 84 not_empty=True,
85 85 messages={
86 86 'empty': _('Please enter a login'),
87 87 'tooShort': _('Enter a value %(min)i characters long or more')
88 88 }
89 89 )
90 90
91 91 password = v.UnicodeString(
92 92 strip=False,
93 93 min=3,
94 94 max=72,
95 95 not_empty=True,
96 96 messages={
97 97 'empty': _('Please enter a password'),
98 98 'tooShort': _('Enter %(min)i characters or more')}
99 99 )
100 100
101 101 remember = v.StringBoolean(if_missing=False)
102 102
103 103 chained_validators = [v.ValidAuth(localizer)]
104 104 return _LoginForm
105 105
106 106
107 107 def TOTPForm(localizer, user, allow_recovery_code_use=False):
108 108 _ = localizer
109 109
110 110 class _TOTPForm(formencode.Schema):
111 111 allow_extra_fields = True
112 112 filter_extra_fields = False
113 113 totp = v.Regex(r'^(?:\d{6}|[A-Z0-9]{32})$')
114 114 secret_totp = v.String()
115 115
116 116 def to_python(self, value, state=None):
117 117 validation_checks = [user.is_totp_valid]
118 118 if allow_recovery_code_use:
119 119 validation_checks.append(user.is_2fa_recovery_code_valid)
120 120 form_data = super().to_python(value, state)
121 121 received_code = form_data['totp']
122 122 secret = form_data.get('secret_totp')
123 123
124 124 if not any(map(lambda func: func(received_code, secret), validation_checks)):
125 125 error_msg = _('Code is invalid. Try again!')
126 126 raise formencode.Invalid(error_msg, v, state, error_dict={'totp': error_msg})
127 127 return form_data
128 128
129 129 return _TOTPForm
130 130
131 131
132 132 def WhitelistedVcsClientsForm(localizer):
133 133 _ = localizer
134 134
135 135 class _WhitelistedVcsClientsForm(formencode.Schema):
136 136 regexp = r'^(?:\s*[<>=~^!]*\s*\d{1,2}\.\d{1,2}(?:\.\d{1,2})?\s*|\*)\s*(?:,\s*[<>=~^!]*\s*\d{1,2}\.\d{1,2}(?:\.\d{1,2})?\s*|\s*\*\s*)*$'
137 137 allow_extra_fields = True
138 138 filter_extra_fields = True
139 139 git = v.Regex(regexp)
140 140 hg = v.Regex(regexp)
141 141 svn = v.Regex(regexp)
142 142
143 143 return _WhitelistedVcsClientsForm
144 144
145 145
146 146 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
147 147 old_data = old_data or {}
148 148 available_languages = available_languages or []
149 149 _ = localizer
150 150
151 151 class _UserForm(formencode.Schema):
152 152 allow_extra_fields = True
153 153 filter_extra_fields = True
154 154 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
155 155 v.ValidUsername(localizer, edit, old_data))
156 156 if edit:
157 157 new_password = All(
158 158 v.ValidPassword(localizer),
159 159 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
160 160 )
161 161 password_confirmation = All(
162 162 v.ValidPassword(localizer),
163 163 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
164 164 )
165 165 admin = v.StringBoolean(if_missing=False)
166 166 else:
167 167 password = All(
168 168 v.ValidPassword(localizer),
169 169 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
170 170 )
171 171 password_confirmation = All(
172 172 v.ValidPassword(localizer),
173 173 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
174 174 )
175 175
176 176 password_change = v.StringBoolean(if_missing=False)
177 177 create_repo_group = v.StringBoolean(if_missing=False)
178 178
179 179 active = v.StringBoolean(if_missing=False)
180 180 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
181 181 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
182 182 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
183 183 description = v.UnicodeString(strip=True, min=1, max=250, not_empty=False,
184 184 if_missing='')
185 185 extern_name = v.UnicodeString(strip=True)
186 186 extern_type = v.UnicodeString(strip=True)
187 187 language = v.OneOf(available_languages, hideList=False,
188 188 testValueList=True, if_missing=None)
189 189 chained_validators = [v.ValidPasswordsMatch(localizer)]
190 190 return _UserForm
191 191
192 192
193 193 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
194 194 old_data = old_data or {}
195 195 _ = localizer
196 196
197 197 class _UserGroupForm(formencode.Schema):
198 198 allow_extra_fields = True
199 199 filter_extra_fields = True
200 200
201 201 users_group_name = All(
202 202 v.UnicodeString(strip=True, min=1, not_empty=True),
203 203 v.ValidUserGroup(localizer, edit, old_data)
204 204 )
205 205 user_group_description = v.UnicodeString(strip=True, min=1,
206 206 not_empty=False)
207 207
208 208 users_group_active = v.StringBoolean(if_missing=False)
209 209
210 210 if edit:
211 211 # this is user group owner
212 212 user = All(
213 213 v.UnicodeString(not_empty=True),
214 214 v.ValidRepoUser(localizer, allow_disabled))
215 215 return _UserGroupForm
216 216
217 217
218 218 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
219 219 can_create_in_root=False, allow_disabled=False):
220 220 _ = localizer
221 221 old_data = old_data or {}
222 222 available_groups = available_groups or []
223 223
224 224 class _RepoGroupForm(formencode.Schema):
225 225 allow_extra_fields = True
226 226 filter_extra_fields = False
227 227
228 228 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
229 229 v.SlugifyName(localizer),)
230 230 group_description = v.UnicodeString(strip=True, min=1,
231 231 not_empty=False)
232 232 group_copy_permissions = v.StringBoolean(if_missing=False)
233 233
234 234 group_parent_id = v.OneOf(available_groups, hideList=False,
235 235 testValueList=True, not_empty=True)
236 236 enable_locking = v.StringBoolean(if_missing=False)
237 237 chained_validators = [
238 238 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
239 239
240 240 if edit:
241 241 # this is repo group owner
242 242 user = All(
243 243 v.UnicodeString(not_empty=True),
244 244 v.ValidRepoUser(localizer, allow_disabled))
245 245 return _RepoGroupForm
246 246
247 247
248 248 def RegisterForm(localizer, edit=False, old_data=None):
249 249 _ = localizer
250 250 old_data = old_data or {}
251 251
252 252 class _RegisterForm(formencode.Schema):
253 253 allow_extra_fields = True
254 254 filter_extra_fields = True
255 255 username = All(
256 256 v.ValidUsername(localizer, edit, old_data),
257 257 v.UnicodeString(strip=True, min=1, not_empty=True)
258 258 )
259 259 password = All(
260 260 v.ValidPassword(localizer),
261 261 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
262 262 )
263 263 password_confirmation = All(
264 264 v.ValidPassword(localizer),
265 265 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
266 266 )
267 267 active = v.StringBoolean(if_missing=False)
268 268 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
269 269 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
270 270 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
271 271
272 272 chained_validators = [v.ValidPasswordsMatch(localizer)]
273 273 return _RegisterForm
274 274
275 275
276 276 def PasswordResetForm(localizer):
277 277 _ = localizer
278 278
279 279 class _PasswordResetForm(formencode.Schema):
280 280 allow_extra_fields = True
281 281 filter_extra_fields = True
282 282 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
283 283 return _PasswordResetForm
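# Usage sketch (illustrative only; `request` and its attributes are assumed names,
# actual call sites may differ): the factories in this module return formencode
# Schema classes, which are instantiated and then used to validate request data:
#
#     import formencode
#
#     _form = PasswordResetForm(request.translate)()
#     try:
#         form_result = _form.to_python(dict(request.POST))
#     except formencode.Invalid as errors:
#         error_dict = errors.unpack_errors()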
284 284
285 285
286 286 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None, allow_disabled=False):
287 287 _ = localizer
288 288 old_data = old_data or {}
289 289 repo_groups = repo_groups or []
290 290 supported_backends = BACKENDS.keys()
291 291
292 292 class _RepoForm(formencode.Schema):
293 293 allow_extra_fields = True
294 294 filter_extra_fields = False
295 295 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
296 296 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
297 297 repo_group = All(v.CanWriteGroup(localizer, old_data),
298 298 v.OneOf(repo_groups, hideList=True))
299 299 repo_type = v.OneOf(supported_backends, required=False,
300 300 if_missing=old_data.get('repo_type'))
301 301 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
302 302 repo_private = v.StringBoolean(if_missing=False)
303 303 repo_copy_permissions = v.StringBoolean(if_missing=False)
304 304 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
305 305
306 306 repo_enable_statistics = v.StringBoolean(if_missing=False)
307 307 repo_enable_downloads = v.StringBoolean(if_missing=False)
308 308 repo_enable_locking = v.StringBoolean(if_missing=False)
309 309
310 310 if edit:
311 311 # this is repo owner
312 312 user = All(
313 313 v.UnicodeString(not_empty=True),
314 314 v.ValidRepoUser(localizer, allow_disabled))
315 315 clone_uri_change = v.UnicodeString(
316 316 not_empty=False, if_missing=v.Missing)
317 317
318 318 chained_validators = [v.ValidCloneUri(localizer),
319 319 v.ValidRepoName(localizer, edit, old_data)]
320 320 return _RepoForm
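# Validation order note: formencode runs the individual field validators (repo_name,
# repo_group, ...) first and then applies the chained_validators (ValidCloneUri,
# ValidRepoName) to the whole converted dict, so they can perform checks that need
# more than one field at a time.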
321 321
322 322
323 323 def RepoPermsForm(localizer):
324 324 _ = localizer
325 325
326 326 class _RepoPermsForm(formencode.Schema):
327 327 allow_extra_fields = True
328 328 filter_extra_fields = False
329 329 chained_validators = [v.ValidPerms(localizer, type_='repo')]
330 330 return _RepoPermsForm
331 331
332 332
333 333 def RepoGroupPermsForm(localizer, valid_recursive_choices):
334 334 _ = localizer
335 335
336 336 class _RepoGroupPermsForm(formencode.Schema):
337 337 allow_extra_fields = True
338 338 filter_extra_fields = False
339 339 recursive = v.OneOf(valid_recursive_choices)
340 340 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
341 341 return _RepoGroupPermsForm
342 342
343 343
344 344 def UserGroupPermsForm(localizer):
345 345 _ = localizer
346 346
347 347 class _UserPermsForm(formencode.Schema):
348 348 allow_extra_fields = True
349 349 filter_extra_fields = False
350 350 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
351 351 return _UserPermsForm
352 352
353 353
354 354 def RepoFieldForm(localizer):
355 355 _ = localizer
356 356
357 357 class _RepoFieldForm(formencode.Schema):
358 358 filter_extra_fields = True
359 359 allow_extra_fields = True
360 360
361 361 new_field_key = All(v.FieldKey(localizer),
362 362 v.UnicodeString(strip=True, min=3, not_empty=True))
363 363 new_field_value = v.UnicodeString(not_empty=False, if_missing='')
364 364 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
365 365 if_missing='str')
366 366 new_field_label = v.UnicodeString(not_empty=False)
367 367 new_field_desc = v.UnicodeString(not_empty=False)
368 368 return _RepoFieldForm
369 369
370 370
371 371 def RepoForkForm(localizer, edit=False, old_data=None,
372 372 supported_backends=BACKENDS.keys(), repo_groups=None):
373 373 _ = localizer
374 374 old_data = old_data or {}
375 375 repo_groups = repo_groups or []
376 376
377 377 class _RepoForkForm(formencode.Schema):
378 378 allow_extra_fields = True
379 379 filter_extra_fields = False
380 380 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
381 381 v.SlugifyName(localizer))
382 382 repo_group = All(v.CanWriteGroup(localizer),
383 383 v.OneOf(repo_groups, hideList=True))
384 384 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
385 385 description = v.UnicodeString(strip=True, min=1, not_empty=True)
386 386 private = v.StringBoolean(if_missing=False)
387 387 copy_permissions = v.StringBoolean(if_missing=False)
388 388 fork_parent_id = v.UnicodeString()
389 389 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
390 390 return _RepoForkForm
391 391
392 392
393 393 def ApplicationSettingsForm(localizer):
394 394 _ = localizer
395 395
396 396 class _ApplicationSettingsForm(formencode.Schema):
397 397 allow_extra_fields = True
398 398 filter_extra_fields = False
399 399 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
400 400 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
401 401 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
402 402 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
403 403 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
404 404 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
405 405 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
406 406 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
407 407 return _ApplicationSettingsForm
408 408
409 409
410 410 def ApplicationVisualisationForm(localizer):
411 411 from rhodecode.model.db import Repository
412 412 _ = localizer
413 413
414 414 class _ApplicationVisualisationForm(formencode.Schema):
415 415 allow_extra_fields = True
416 416 filter_extra_fields = False
417 417 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
418 418 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
419 419 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
420 420
421 421 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
422 422 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
423 423 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
424 424 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
425 425 rhodecode_show_version = v.StringBoolean(if_missing=False)
426 426 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
427 427 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
428 428 rhodecode_gravatar_url = v.UnicodeString(min=3)
429 429 rhodecode_clone_uri_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI)
430 430 rhodecode_clone_uri_id_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_ID)
431 431 rhodecode_clone_uri_ssh_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_SSH)
432 432 rhodecode_support_url = v.UnicodeString()
433 433 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
434 434 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
435 435 return _ApplicationVisualisationForm
436 436
437 437
438 438 class _BaseVcsSettingsForm(formencode.Schema):
439 439
440 440 allow_extra_fields = True
441 441 filter_extra_fields = False
442 442 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
443 443 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
444 444 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
445 445
446 446 # PR/Code-review
447 447 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
448 rhodecode_auto_merge_enabled = v.StringBoolean(if_missing=False)
448 449 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
449 450
450 451 # hg
451 452 extensions_largefiles = v.StringBoolean(if_missing=False)
452 453 extensions_evolve = v.StringBoolean(if_missing=False)
453 454 phases_publish = v.StringBoolean(if_missing=False)
454 455
455 456 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
456 457 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
457 458
458 459 # git
459 460 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
460 461 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
461 462 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
462 463
463 464 # cache
464 465 rhodecode_diff_cache = v.StringBoolean(if_missing=False)
465 466
466 467
467 468 def ApplicationUiSettingsForm(localizer):
468 469 _ = localizer
469 470
470 471 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
471 472 extensions_hggit = v.StringBoolean(if_missing=False)
472 473 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
473 474 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
474 475 return _ApplicationUiSettingsForm
475 476
476 477
477 478 def RepoVcsSettingsForm(localizer, repo_name):
478 479 _ = localizer
479 480
480 481 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
481 482 inherit_global_settings = v.StringBoolean(if_missing=False)
482 483 new_svn_branch = v.ValidSvnPattern(localizer,
483 484 section='vcs_svn_branch', repo_name=repo_name)
484 485 new_svn_tag = v.ValidSvnPattern(localizer,
485 486 section='vcs_svn_tag', repo_name=repo_name)
486 487 return _RepoVcsSettingsForm
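# Both _ApplicationUiSettingsForm and _RepoVcsSettingsForm build on _BaseVcsSettingsForm;
# the repository-level variant only adds inherit_global_settings and scopes the SVN
# branch/tag pattern validators to a single repo_name, so the global and per-repository
# VCS settings share one set of field definitions.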
487 488
488 489
489 490 def LabsSettingsForm(localizer):
490 491 _ = localizer
491 492
492 493 class _LabSettingsForm(formencode.Schema):
493 494 allow_extra_fields = True
494 495 filter_extra_fields = False
495 496 return _LabSettingsForm
496 497
497 498
498 499 def ApplicationPermissionsForm(
499 500 localizer, register_choices, password_reset_choices,
500 501 extern_activate_choices):
501 502 _ = localizer
502 503
503 504 class _DefaultPermissionsForm(formencode.Schema):
504 505 allow_extra_fields = True
505 506 filter_extra_fields = True
506 507
507 508 anonymous = v.StringBoolean(if_missing=False)
508 509 default_register = v.OneOf(register_choices)
509 510 default_register_message = v.UnicodeString()
510 511 default_password_reset = v.OneOf(password_reset_choices)
511 512 default_extern_activate = v.OneOf(extern_activate_choices)
512 513 return _DefaultPermissionsForm
513 514
514 515
515 516 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
516 517 user_group_perms_choices):
517 518 _ = localizer
518 519
519 520 class _ObjectPermissionsForm(formencode.Schema):
520 521 allow_extra_fields = True
521 522 filter_extra_fields = True
522 523 overwrite_default_repo = v.StringBoolean(if_missing=False)
523 524 overwrite_default_group = v.StringBoolean(if_missing=False)
524 525 overwrite_default_user_group = v.StringBoolean(if_missing=False)
525 526
526 527 default_repo_perm = v.OneOf(repo_perms_choices)
527 528 default_group_perm = v.OneOf(group_perms_choices)
528 529 default_user_group_perm = v.OneOf(user_group_perms_choices)
529 530
530 531 return _ObjectPermissionsForm
531 532
532 533
533 534 def BranchPermissionsForm(localizer, branch_perms_choices):
534 535 _ = localizer
535 536
536 537 class _BranchPermissionsForm(formencode.Schema):
537 538 allow_extra_fields = True
538 539 filter_extra_fields = True
539 540 overwrite_default_branch = v.StringBoolean(if_missing=False)
540 541 default_branch_perm = v.OneOf(branch_perms_choices)
541 542
542 543 return _BranchPermissionsForm
543 544
544 545
545 546 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
546 547 repo_group_create_choices, user_group_create_choices,
547 548 fork_choices, inherit_default_permissions_choices):
548 549 _ = localizer
549 550
550 551 class _DefaultPermissionsForm(formencode.Schema):
551 552 allow_extra_fields = True
552 553 filter_extra_fields = True
553 554
554 555 anonymous = v.StringBoolean(if_missing=False)
555 556
556 557 default_repo_create = v.OneOf(create_choices)
557 558 default_repo_create_on_write = v.OneOf(create_on_write_choices)
558 559 default_user_group_create = v.OneOf(user_group_create_choices)
559 560 default_repo_group_create = v.OneOf(repo_group_create_choices)
560 561 default_fork_create = v.OneOf(fork_choices)
561 562 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
562 563 return _DefaultPermissionsForm
563 564
564 565
565 566 def UserIndividualPermissionsForm(localizer):
566 567 _ = localizer
567 568
568 569 class _DefaultPermissionsForm(formencode.Schema):
569 570 allow_extra_fields = True
570 571 filter_extra_fields = True
571 572
572 573 inherit_default_permissions = v.StringBoolean(if_missing=False)
573 574 return _DefaultPermissionsForm
574 575
575 576
576 577 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
577 578 _ = localizer
578 579 old_data = old_data or {}
579 580
580 581 class _DefaultsForm(formencode.Schema):
581 582 allow_extra_fields = True
582 583 filter_extra_fields = True
583 584 default_repo_type = v.OneOf(supported_backends)
584 585 default_repo_private = v.StringBoolean(if_missing=False)
585 586 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
586 587 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
587 588 default_repo_enable_locking = v.StringBoolean(if_missing=False)
588 589 return _DefaultsForm
589 590
590 591
591 592 def AuthSettingsForm(localizer):
592 593 _ = localizer
593 594
594 595 class _AuthSettingsForm(formencode.Schema):
595 596 allow_extra_fields = True
596 597 filter_extra_fields = True
597 598 auth_plugins = All(v.ValidAuthPlugins(localizer),
598 599 v.UniqueListFromString(localizer)(not_empty=True))
599 600 return _AuthSettingsForm
600 601
601 602
602 603 def UserExtraEmailForm(localizer):
603 604 _ = localizer
604 605
605 606 class _UserExtraEmailForm(formencode.Schema):
606 607 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
607 608 return _UserExtraEmailForm
608 609
609 610
610 611 def UserExtraIpForm(localizer):
611 612 _ = localizer
612 613
613 614 class _UserExtraIpForm(formencode.Schema):
614 615 ip = v.ValidIp(localizer)(not_empty=True)
615 616 return _UserExtraIpForm
616 617
617 618
618 619 def PullRequestForm(localizer, repo_id):
619 620 _ = localizer
620 621
621 622 class ReviewerForm(formencode.Schema):
622 623 user_id = v.Int(not_empty=True)
623 624 reasons = All()
624 625 rules = All(v.UniqueList(localizer, convert=int)())
625 626 mandatory = v.StringBoolean()
626 627 role = v.String(if_missing='reviewer')
627 628
628 629 class ObserverForm(formencode.Schema):
629 630 user_id = v.Int(not_empty=True)
630 631 reasons = All()
631 632 rules = All(v.UniqueList(localizer, convert=int)())
632 633 mandatory = v.StringBoolean()
633 634 role = v.String(if_missing='observer')
634 635
635 636 class _PullRequestForm(formencode.Schema):
636 637 allow_extra_fields = True
637 638 filter_extra_fields = True
638 639
639 640 common_ancestor = v.UnicodeString(strip=True, required=True)
640 641 source_repo = v.UnicodeString(strip=True, required=True)
641 642 source_ref = v.UnicodeString(strip=True, required=True)
642 643 target_repo = v.UnicodeString(strip=True, required=True)
643 644 target_ref = v.UnicodeString(strip=True, required=True)
644 645 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
645 646 v.UniqueList(localizer)(not_empty=True))
646 647 review_members = formencode.ForEach(ReviewerForm())
647 648 observer_members = formencode.ForEach(ObserverForm())
648 649 pullrequest_title = v.UnicodeString(strip=True, required=True, min=1, max=255)
649 650 pullrequest_desc = v.UnicodeString(strip=True, required=False)
650 651 description_renderer = v.UnicodeString(strip=True, required=False)
651 652
652 653 return _PullRequestForm
653 654
654 655
655 656 def IssueTrackerPatternsForm(localizer):
656 657 _ = localizer
657 658
658 659 class _IssueTrackerPatternsForm(formencode.Schema):
659 660 allow_extra_fields = True
660 661 filter_extra_fields = False
661 662 chained_validators = [v.ValidPattern(localizer)]
662 663 return _IssueTrackerPatternsForm
@@ -1,2385 +1,2389
1 1 # Copyright (C) 2012-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 """
21 21 pull request model for RhodeCode
22 22 """
23 23
24 24 import logging
25 25 import os
26 26
27 27 import datetime
28 28 import urllib.request
29 29 import urllib.parse
30 30 import urllib.error
31 31 import collections
32 32
33 33 import dataclasses as dataclasses
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode.lib.vcs.nodes import FileNode
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from collections import OrderedDict
41 41 from rhodecode.lib.hook_daemon.utils import prepare_callback_daemon
42 42 from rhodecode.lib.ext_json import sjson as json
43 43 from rhodecode.lib.markup_renderer import (
44 44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 45 from rhodecode.lib.hash_utils import md5_safe
46 46 from rhodecode.lib.str_utils import safe_str
47 47 from rhodecode.lib.utils2 import AttributeDict, get_current_rhodecode_user
48 48 from rhodecode.lib.vcs.backends.base import (
49 49 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
50 50 TargetRefMissing, SourceRefMissing)
51 51 from rhodecode.lib.vcs.conf import settings as vcs_settings
52 52 from rhodecode.lib.vcs.exceptions import (
53 53 CommitDoesNotExistError, EmptyRepositoryError)
54 54 from rhodecode.model import BaseModel
55 55 from rhodecode.model.changeset_status import ChangesetStatusModel
56 56 from rhodecode.model.comment import CommentsModel
57 57 from rhodecode.model.db import (
58 58 aliased, null, lazyload, and_, or_, select, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
59 59 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
60 60 from rhodecode.model.meta import Session
61 61 from rhodecode.model.notification import NotificationModel, \
62 62 EmailNotificationModel
63 63 from rhodecode.model.scm import ScmModel
64 64 from rhodecode.model.settings import VcsSettingsModel
65 65
66 66
67 67 log = logging.getLogger(__name__)
68 68
69 69
70 70 # Data structure to hold the response data when updating commits during a pull
71 71 # request update.
72 72 class UpdateResponse(object):
73 73
74 74 def __init__(self, executed, reason, new, old, common_ancestor_id,
75 75 commit_changes, source_changed, target_changed):
76 76
77 77 self.executed = executed
78 78 self.reason = reason
79 79 self.new = new
80 80 self.old = old
81 81 self.common_ancestor_id = common_ancestor_id
82 82 self.changes = commit_changes
83 83 self.source_changed = source_changed
84 84 self.target_changed = target_changed
85 85
86 86
87 87 def get_diff_info(
88 88 source_repo, source_ref, target_repo, target_ref, get_authors=False,
89 89 get_commit_authors=True):
90 90 """
91 91 Calculates detailed diff information for use when previewing the creation of a pull request.
92 92 This is also used for the default reviewers logic.
93 93 """
94 94
95 95 source_scm = source_repo.scm_instance()
96 96 target_scm = target_repo.scm_instance()
97 97
98 98 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
99 99 if not ancestor_id:
100 100 raise ValueError(
101 101 'cannot calculate diff info without a common ancestor. '
102 102 'Make sure both repositories are related, and have a common forking commit.')
103 103
104 104 # the case here is that we want a simple diff without incoming commits,
105 105 # previewing what will be merged based only on commits in the source.
106 106 log.debug('Using ancestor %s as source_ref instead of %s',
107 107 ancestor_id, source_ref)
108 108
109 109 # source of changes now is the common ancestor
110 110 source_commit = source_scm.get_commit(commit_id=ancestor_id)
111 111 # the target commit becomes the source ref, as it is the last commit;
112 112 # for diff generation this logic gives the proper diff
113 113 target_commit = source_scm.get_commit(commit_id=source_ref)
114 114
115 115 vcs_diff = \
116 116 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
117 117 ignore_whitespace=False, context=3)
118 118
119 119 diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff',
120 120 diff_limit=0, file_limit=0, show_full_diff=True)
121 121
122 122 _parsed = diff_processor.prepare()
123 123
124 124 all_files = []
125 125 all_files_changes = []
126 126 changed_lines = {}
127 127 stats = [0, 0]
128 128 for f in _parsed:
129 129 all_files.append(f['filename'])
130 130 all_files_changes.append({
131 131 'filename': f['filename'],
132 132 'stats': f['stats']
133 133 })
134 134 stats[0] += f['stats']['added']
135 135 stats[1] += f['stats']['deleted']
136 136
137 137 changed_lines[f['filename']] = []
138 138 if len(f['chunks']) < 2:
139 139 continue
140 140 # first line is "context" information
141 141 for chunks in f['chunks'][1:]:
142 142 for chunk in chunks['lines']:
143 143 if chunk['action'] not in ('del', 'mod'):
144 144 continue
145 145 changed_lines[f['filename']].append(chunk['old_lineno'])
146 146
147 147 commit_authors = []
148 148 user_counts = {}
149 149 email_counts = {}
150 150 author_counts = {}
151 151 _commit_cache = {}
152 152
153 153 commits = []
154 154 if get_commit_authors:
155 155 log.debug('Obtaining commit authors from set of commits')
156 156 _compare_data = target_scm.compare(
157 157 target_ref, source_ref, source_scm, merge=True,
158 158 pre_load=["author", "date", "message"]
159 159 )
160 160
161 161 for commit in _compare_data:
162 162 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on the data returned
163 163 # by this function, which is later passed through JSON serialization
164 164 serialized_commit = dict(
165 165 author=commit.author,
166 166 date=commit.date,
167 167 message=commit.message,
168 168 commit_id=commit.raw_id,
169 169 raw_id=commit.raw_id
170 170 )
171 171 commits.append(serialized_commit)
172 172 user = User.get_from_cs_author(serialized_commit['author'])
173 173 if user and user not in commit_authors:
174 174 commit_authors.append(user)
175 175
176 176 # lines
177 177 if get_authors:
178 178 log.debug('Calculating authors of changed files')
179 179 target_commit = source_repo.get_commit(ancestor_id)
180 180
181 181 # TODO: change to operate in bytes..
182 182 for fname, lines in changed_lines.items():
183 183
184 184 try:
185 185 node = target_commit.get_node(fname, pre_load=["is_binary"])
186 186 except Exception:
187 187 log.exception("Failed to load node with path %s", fname)
188 188 continue
189 189
190 190 if not isinstance(node, FileNode):
191 191 continue
192 192
193 193 # NOTE(marcink): for a binary node we don't do annotation, we just use the last author
194 194 if node.is_binary:
195 195 author = node.last_commit.author
196 196 email = node.last_commit.author_email
197 197
198 198 user = User.get_from_cs_author(author)
199 199 if user:
200 200 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
201 201 author_counts[author] = author_counts.get(author, 0) + 1
202 202 email_counts[email] = email_counts.get(email, 0) + 1
203 203
204 204 continue
205 205
206 206 for annotation in node.annotate:
207 207 line_no, commit_id, get_commit_func, line_text = annotation
208 208 if line_no in lines:
209 209 if commit_id not in _commit_cache:
210 210 _commit_cache[commit_id] = get_commit_func()
211 211 commit = _commit_cache[commit_id]
212 212 author = commit.author
213 213 email = commit.author_email
214 214 user = User.get_from_cs_author(author)
215 215 if user:
216 216 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
217 217 author_counts[author] = author_counts.get(author, 0) + 1
218 218 email_counts[email] = email_counts.get(email, 0) + 1
219 219
220 220 log.debug('Default reviewers processing finished')
221 221
222 222 return {
223 223 'commits': commits,
224 224 'files': all_files_changes,
225 225 'stats': stats,
226 226 'ancestor': ancestor_id,
227 227 # original authors of modified files
228 228 'original_authors': {
229 229 'users': user_counts,
230 230 'authors': author_counts,
231 231 'emails': email_counts,
232 232 },
233 233 'commit_authors': commit_authors
234 234 }
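# Return value sketch (values illustrative): 'stats' is a two element [added, deleted]
# line counter summed over all parsed files, 'files' carries per-file stats, 'ancestor'
# is the common-ancestor commit id used as the diff base, and 'commit_authors' holds
# the User objects resolved from the compared commits (when get_commit_authors=True).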
235 235
236 236
237 237 class PullRequestModel(BaseModel):
238 238
239 239 cls = PullRequest
240 240
241 241 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
242 242
243 243 UPDATE_STATUS_MESSAGES = {
244 244 UpdateFailureReason.NONE: lazy_ugettext(
245 245 'Pull request update successful.'),
246 246 UpdateFailureReason.UNKNOWN: lazy_ugettext(
247 247 'Pull request update failed because of an unknown error.'),
248 248 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
249 249 'No update needed because the source and target have not changed.'),
250 250 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
251 251 'Pull request cannot be updated because the reference type is '
252 252 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
253 253 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
254 254 'This pull request cannot be updated because the target '
255 255 'reference is missing.'),
256 256 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
257 257 'This pull request cannot be updated because the source '
258 258 'reference is missing.'),
259 259 }
260 260 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
261 261 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
262 262
263 263 def __get_pull_request(self, pull_request):
264 264 return self._get_instance((
265 265 PullRequest, PullRequestVersion), pull_request)
266 266
267 267 def _check_perms(self, perms, pull_request, user, api=False):
268 268 if not api:
269 269 return h.HasRepoPermissionAny(*perms)(
270 270 user=user, repo_name=pull_request.target_repo.repo_name)
271 271 else:
272 272 return h.HasRepoPermissionAnyApi(*perms)(
273 273 user=user, repo_name=pull_request.target_repo.repo_name)
274 274
275 275 def check_user_read(self, pull_request, user, api=False):
276 276 _perms = ('repository.admin', 'repository.write', 'repository.read',)
277 277 return self._check_perms(_perms, pull_request, user, api)
278 278
279 279 def check_user_merge(self, pull_request, user, api=False):
280 280 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
281 281 return self._check_perms(_perms, pull_request, user, api)
282 282
283 283 def check_user_update(self, pull_request, user, api=False):
284 284 owner = user.user_id == pull_request.user_id
285 285 return self.check_user_merge(pull_request, user, api) or owner
286 286
287 287 def check_user_delete(self, pull_request, user):
288 288 owner = user.user_id == pull_request.user_id
289 289 _perms = ('repository.admin',)
290 290 return self._check_perms(_perms, pull_request, user) or owner
291 291
292 292 def is_user_reviewer(self, pull_request, user):
293 293 return user.user_id in [
294 294 x.user_id for x in
295 295 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
296 296 if x.user
297 297 ]
298 298
299 299 def check_user_change_status(self, pull_request, user, api=False):
300 300 return self.check_user_update(pull_request, user, api) \
301 301 or self.is_user_reviewer(pull_request, user)
302 302
303 303 def check_user_comment(self, pull_request, user):
304 304 owner = user.user_id == pull_request.user_id
305 305 return self.check_user_read(pull_request, user) or owner
306 306
307 307 def get(self, pull_request):
308 308 return self.__get_pull_request(pull_request)
309 309
310 310 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
311 311 statuses=None, opened_by=None, order_by=None,
312 312 order_dir='desc', only_created=False):
313 313 repo = None
314 314 if repo_name:
315 315 repo = self._get_repo(repo_name)
316 316
317 317 q = PullRequest.query()
318 318
319 319 if search_q:
320 320 like_expression = u'%{}%'.format(safe_str(search_q))
321 321 q = q.join(User, User.user_id == PullRequest.user_id)
322 322 q = q.filter(or_(
323 323 cast(PullRequest.pull_request_id, String).ilike(like_expression),
324 324 User.username.ilike(like_expression),
325 325 PullRequest.title.ilike(like_expression),
326 326 PullRequest.description.ilike(like_expression),
327 327 ))
328 328
329 329 # source or target
330 330 if repo and source:
331 331 q = q.filter(PullRequest.source_repo == repo)
332 332 elif repo:
333 333 q = q.filter(PullRequest.target_repo == repo)
334 334
335 335 # closed,opened
336 336 if statuses:
337 337 q = q.filter(PullRequest.status.in_(statuses))
338 338
339 339 # opened by filter
340 340 if opened_by:
341 341 q = q.filter(PullRequest.user_id.in_(opened_by))
342 342
343 343 # only get those that are in "created" state
344 344 if only_created:
345 345 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
346 346
347 347 order_map = {
348 348 'name_raw': PullRequest.pull_request_id,
349 349 'id': PullRequest.pull_request_id,
350 350 'title': PullRequest.title,
351 351 'updated_on_raw': PullRequest.updated_on,
352 352 'target_repo': PullRequest.target_repo_id
353 353 }
354 354 if order_by and order_by in order_map:
355 355 if order_dir == 'asc':
356 356 q = q.order_by(order_map[order_by].asc())
357 357 else:
358 358 q = q.order_by(order_map[order_by].desc())
359 359
360 360 return q
361 361
362 362 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
363 363 opened_by=None):
364 364 """
365 365 Count the number of pull requests for a specific repository.
366 366
367 367 :param repo_name: target or source repo
368 368 :param search_q: filter by text
369 369 :param source: boolean flag to specify if repo_name refers to source
370 370 :param statuses: list of pull request statuses
371 371 :param opened_by: author user of the pull request
372 372 :returns: int number of pull requests
373 373 """
374 374 q = self._prepare_get_all_query(
375 375 repo_name, search_q=search_q, source=source, statuses=statuses,
376 376 opened_by=opened_by)
377 377
378 378 return q.count()
379 379
380 380 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
381 381 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
382 382 """
383 383 Get all pull requests for a specific repository.
384 384
385 385 :param repo_name: target or source repo
386 386 :param search_q: filter by text
387 387 :param source: boolean flag to specify if repo_name refers to source
388 388 :param statuses: list of pull request statuses
389 389 :param opened_by: author user of the pull request
390 390 :param offset: pagination offset
391 391 :param length: length of returned list
392 392 :param order_by: order of the returned list
393 393 :param order_dir: 'asc' or 'desc' ordering direction
394 394 :returns: list of pull requests
395 395 """
396 396 q = self._prepare_get_all_query(
397 397 repo_name, search_q=search_q, source=source, statuses=statuses,
398 398 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
399 399
400 400 if length:
401 401 pull_requests = q.limit(length).offset(offset).all()
402 402 else:
403 403 pull_requests = q.all()
404 404
405 405 return pull_requests
406 406
407 407 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
408 408 """
409 409 Count the number of pull requests for a specific repository that are
410 410 awaiting review.
411 411
412 412 :param repo_name: target or source repo
413 413 :param search_q: filter by text
414 414 :param statuses: list of pull request statuses
415 415 :returns: int number of pull requests
416 416 """
417 417 pull_requests = self.get_awaiting_review(
418 418 repo_name, search_q=search_q, statuses=statuses)
419 419
420 420 return len(pull_requests)
421 421
422 422 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
423 423 offset=0, length=None, order_by=None, order_dir='desc'):
424 424 """
425 425 Get all pull requests for a specific repository that are awaiting
426 426 review.
427 427
428 428 :param repo_name: target or source repo
429 429 :param search_q: filter by text
430 430 :param statuses: list of pull request statuses
431 431 :param offset: pagination offset
432 432 :param length: length of returned list
433 433 :param order_by: order of the returned list
434 434 :param order_dir: 'asc' or 'desc' ordering direction
435 435 :returns: list of pull requests
436 436 """
437 437 pull_requests = self.get_all(
438 438 repo_name, search_q=search_q, statuses=statuses,
439 439 order_by=order_by, order_dir=order_dir)
440 440
441 441 _filtered_pull_requests = []
442 442 for pr in pull_requests:
443 443 status = pr.calculated_review_status()
444 444 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
445 445 ChangesetStatus.STATUS_UNDER_REVIEW]:
446 446 _filtered_pull_requests.append(pr)
447 447 if length:
448 448 return _filtered_pull_requests[offset:offset+length]
449 449 else:
450 450 return _filtered_pull_requests
451 451
452 452 def _prepare_awaiting_my_review_review_query(
453 453 self, repo_name, user_id, search_q=None, statuses=None,
454 454 order_by=None, order_dir='desc'):
455 455
456 456 for_review_statuses = [
457 457 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
458 458 ]
459 459
460 460 pull_request_alias = aliased(PullRequest)
461 461 status_alias = aliased(ChangesetStatus)
462 462 reviewers_alias = aliased(PullRequestReviewers)
463 463 repo_alias = aliased(Repository)
464 464
465 465 last_ver_subq = Session()\
466 466 .query(func.min(ChangesetStatus.version)) \
467 467 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
468 468 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
469 469 .subquery()
470 470
471 471 q = Session().query(pull_request_alias) \
472 472 .options(lazyload(pull_request_alias.author)) \
473 473 .join(reviewers_alias,
474 474 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
475 475 .join(repo_alias,
476 476 repo_alias.repo_id == pull_request_alias.target_repo_id) \
477 477 .outerjoin(status_alias,
478 478 and_(status_alias.user_id == reviewers_alias.user_id,
479 479 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
480 480 .filter(or_(status_alias.version == null(),
481 481 status_alias.version == last_ver_subq)) \
482 482 .filter(reviewers_alias.user_id == user_id) \
483 483 .filter(repo_alias.repo_name == repo_name) \
484 484 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
485 485 .group_by(pull_request_alias)
486 486
487 487 # closed,opened
488 488 if statuses:
489 489 q = q.filter(pull_request_alias.status.in_(statuses))
490 490
491 491 if search_q:
492 492 like_expression = u'%{}%'.format(safe_str(search_q))
493 493 q = q.join(User, User.user_id == pull_request_alias.user_id)
494 494 q = q.filter(or_(
495 495 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
496 496 User.username.ilike(like_expression),
497 497 pull_request_alias.title.ilike(like_expression),
498 498 pull_request_alias.description.ilike(like_expression),
499 499 ))
500 500
501 501 order_map = {
502 502 'name_raw': pull_request_alias.pull_request_id,
503 503 'title': pull_request_alias.title,
504 504 'updated_on_raw': pull_request_alias.updated_on,
505 505 'target_repo': pull_request_alias.target_repo_id
506 506 }
507 507 if order_by and order_by in order_map:
508 508 if order_dir == 'asc':
509 509 q = q.order_by(order_map[order_by].asc())
510 510 else:
511 511 q = q.order_by(order_map[order_by].desc())
512 512
513 513 return q
514 514
515 515 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
516 516 """
517 517 Count the number of pull requests for a specific repository that are
518 518 awaiting review from a specific user.
519 519
520 520 :param repo_name: target or source repo
521 521 :param user_id: reviewer user of the pull request
522 522 :param search_q: filter by text
523 523 :param statuses: list of pull request statuses
524 524 :returns: int number of pull requests
525 525 """
526 526 q = self._prepare_awaiting_my_review_review_query(
527 527 repo_name, user_id, search_q=search_q, statuses=statuses)
528 528 return q.count()
529 529
530 530 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
531 531 offset=0, length=None, order_by=None, order_dir='desc'):
532 532 """
533 533 Get all pull requests for a specific repository that are awaiting
534 534 review from a specific user.
535 535
536 536 :param repo_name: target or source repo
537 537 :param user_id: reviewer user of the pull request
538 538 :param search_q: filter by text
539 539 :param statuses: list of pull request statuses
540 540 :param offset: pagination offset
541 541 :param length: length of returned list
542 542 :param order_by: order of the returned list
543 543 :param order_dir: 'asc' or 'desc' ordering direction
544 544 :returns: list of pull requests
545 545 """
546 546
547 547 q = self._prepare_awaiting_my_review_review_query(
548 548 repo_name, user_id, search_q=search_q, statuses=statuses,
549 549 order_by=order_by, order_dir=order_dir)
550 550
551 551 if length:
552 552 pull_requests = q.limit(length).offset(offset).all()
553 553 else:
554 554 pull_requests = q.all()
555 555
556 556 return pull_requests
557 557
558 558 def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
559 559 order_by=None, order_dir='desc'):
560 560 """
561 561 return a query of pull requests the user is a creator of, or has been added to as a reviewer
562 562 """
563 563 q = PullRequest.query()
564 564 if user_id:
565 565
566 566 base_query = select(PullRequestReviewers)\
567 567 .where(PullRequestReviewers.user_id == user_id)\
568 568 .with_only_columns(PullRequestReviewers.pull_request_id)
569 569
570 570 user_filter = or_(
571 571 PullRequest.user_id == user_id,
572 572 PullRequest.pull_request_id.in_(base_query)
573 573 )
574 574 q = PullRequest.query().filter(user_filter)
575 575
576 576 # closed,opened
577 577 if statuses:
578 578 q = q.filter(PullRequest.status.in_(statuses))
579 579
580 580 if query:
581 581 like_expression = u'%{}%'.format(safe_str(query))
582 582 q = q.join(User, User.user_id == PullRequest.user_id)
583 583 q = q.filter(or_(
584 584 cast(PullRequest.pull_request_id, String).ilike(like_expression),
585 585 User.username.ilike(like_expression),
586 586 PullRequest.title.ilike(like_expression),
587 587 PullRequest.description.ilike(like_expression),
588 588 ))
589 589
590 590 order_map = {
591 591 'name_raw': PullRequest.pull_request_id,
592 592 'title': PullRequest.title,
593 593 'updated_on_raw': PullRequest.updated_on,
594 594 'target_repo': PullRequest.target_repo_id
595 595 }
596 596 if order_by and order_by in order_map:
597 597 if order_dir == 'asc':
598 598 q = q.order_by(order_map[order_by].asc())
599 599 else:
600 600 q = q.order_by(order_map[order_by].desc())
601 601
602 602 return q
603 603
604 604 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
605 605 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
606 606 return q.count()
607 607
608 608 def get_im_participating_in(
609 609 self, user_id=None, statuses=None, query='', offset=0,
610 610 length=None, order_by=None, order_dir='desc'):
611 611 """
612 612 Get all pull requests that I'm participating in as a reviewer, or that I have opened
613 613 """
614 614
615 615 q = self._prepare_im_participating_query(
616 616 user_id, statuses=statuses, query=query, order_by=order_by,
617 617 order_dir=order_dir)
618 618
619 619 if length:
620 620 pull_requests = q.limit(length).offset(offset).all()
621 621 else:
622 622 pull_requests = q.all()
623 623
624 624 return pull_requests
625 625
626 626 def _prepare_participating_in_for_review_query(
627 627 self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
628 628
629 629 for_review_statuses = [
630 630 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
631 631 ]
632 632
633 633 pull_request_alias = aliased(PullRequest)
634 634 status_alias = aliased(ChangesetStatus)
635 635 reviewers_alias = aliased(PullRequestReviewers)
636 636
637 637 last_ver_subq = Session()\
638 638 .query(func.min(ChangesetStatus.version)) \
639 639 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
640 640 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
641 641 .subquery()
642 642
643 643 q = Session().query(pull_request_alias) \
644 644 .options(lazyload(pull_request_alias.author)) \
645 645 .join(reviewers_alias,
646 646 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
647 647 .outerjoin(status_alias,
648 648 and_(status_alias.user_id == reviewers_alias.user_id,
649 649 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
650 650 .filter(or_(status_alias.version == null(),
651 651 status_alias.version == last_ver_subq)) \
652 652 .filter(reviewers_alias.user_id == user_id) \
653 653 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
654 654 .group_by(pull_request_alias)
655 655
656 656 # closed,opened
657 657 if statuses:
658 658 q = q.filter(pull_request_alias.status.in_(statuses))
659 659
660 660 if query:
661 661 like_expression = u'%{}%'.format(safe_str(query))
662 662 q = q.join(User, User.user_id == pull_request_alias.user_id)
663 663 q = q.filter(or_(
664 664 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
665 665 User.username.ilike(like_expression),
666 666 pull_request_alias.title.ilike(like_expression),
667 667 pull_request_alias.description.ilike(like_expression),
668 668 ))
669 669
670 670 order_map = {
671 671 'name_raw': pull_request_alias.pull_request_id,
672 672 'title': pull_request_alias.title,
673 673 'updated_on_raw': pull_request_alias.updated_on,
674 674 'target_repo': pull_request_alias.target_repo_id
675 675 }
676 676 if order_by and order_by in order_map:
677 677 if order_dir == 'asc':
678 678 q = q.order_by(order_map[order_by].asc())
679 679 else:
680 680 q = q.order_by(order_map[order_by].desc())
681 681
682 682 return q
683 683
684 684 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
685 685 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
686 686 return q.count()
687 687
688 688 def get_im_participating_in_for_review(
689 689 self, user_id, statuses=None, query='', offset=0,
690 690 length=None, order_by=None, order_dir='desc'):
691 691 """
692 692 Get all pull requests that need user approval or rejection
693 693 """
694 694
695 695 q = self._prepare_participating_in_for_review_query(
696 696 user_id, statuses=statuses, query=query, order_by=order_by,
697 697 order_dir=order_dir)
698 698
699 699 if length:
700 700 pull_requests = q.limit(length).offset(offset).all()
701 701 else:
702 702 pull_requests = q.all()
703 703
704 704 return pull_requests
705 705
706 706 def get_versions(self, pull_request):
707 707 """
708 708 returns versions of a pull request sorted by version ID ascending
709 709 """
710 710 return PullRequestVersion.query()\
711 711 .filter(PullRequestVersion.pull_request == pull_request)\
712 712 .order_by(PullRequestVersion.pull_request_version_id.asc())\
713 713 .all()
714 714
715 715 def get_pr_version(self, pull_request_id, version=None):
716 716 at_version = None
717 717
718 718 if version and version == 'latest':
719 719 pull_request_ver = PullRequest.get(pull_request_id)
720 720 pull_request_obj = pull_request_ver
721 721 _org_pull_request_obj = pull_request_obj
722 722 at_version = 'latest'
723 723 elif version:
724 724 pull_request_ver = PullRequestVersion.get_or_404(version)
725 725 pull_request_obj = pull_request_ver
726 726 _org_pull_request_obj = pull_request_ver.pull_request
727 727 at_version = pull_request_ver.pull_request_version_id
728 728 else:
729 729 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
730 730 pull_request_id)
731 731
732 732 pull_request_display_obj = PullRequest.get_pr_display_object(
733 733 pull_request_obj, _org_pull_request_obj)
734 734
735 735 return _org_pull_request_obj, pull_request_obj, \
736 736 pull_request_display_obj, at_version
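# The 4-tuple above is: the original PullRequest, the object actually used for display
# (a PullRequestVersion when a specific version was requested), the merged display
# object built by get_pr_display_object(), and the at_version marker ('latest', a
# version id, or None for the current, unversioned view).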
737 737
738 738 def pr_commits_versions(self, versions):
739 739 """
740 740 Maps the pull-request commits onto all known PR versions. This way we can obtain,
741 741 for each commit, the PR versions it was introduced in.
742 742 """
743 743 commit_versions = collections.defaultdict(list)
744 744 num_versions = [x.pull_request_version_id for x in versions]
745 745 for ver in versions:
746 746 for commit_id in ver.revisions:
747 747 ver_idx = ChangesetComment.get_index_from_version(
748 748 ver.pull_request_version_id, num_versions=num_versions)
749 749 commit_versions[commit_id].append(ver_idx)
750 750 return commit_versions
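# Example of the resulting mapping (hypothetical commit ids): if commit 'abc1' appears
# in two PR versions, commit_versions == {'abc1': [1, 2]}, where the numbers are the
# display indices computed by ChangesetComment.get_index_from_version().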
751 751
752 752 def create(self, created_by, source_repo, source_ref, target_repo,
753 753 target_ref, revisions, reviewers, observers, title, description=None,
754 754 common_ancestor_id=None,
755 755 description_renderer=None,
756 756 reviewer_data=None, translator=None, auth_user=None):
757 757 translator = translator or get_current_request().translate
758 758
759 759 created_by_user = self._get_user(created_by)
760 760 auth_user = auth_user or created_by_user.AuthUser()
761 761 source_repo = self._get_repo(source_repo)
762 762 target_repo = self._get_repo(target_repo)
763 763
764 764 pull_request = PullRequest()
765 765 pull_request.source_repo = source_repo
766 766 pull_request.source_ref = source_ref
767 767 pull_request.target_repo = target_repo
768 768 pull_request.target_ref = target_ref
769 769 pull_request.revisions = revisions
770 770 pull_request.title = title
771 771 pull_request.description = description
772 772 pull_request.description_renderer = description_renderer
773 773 pull_request.author = created_by_user
774 774 pull_request.reviewer_data = reviewer_data
775 775 pull_request.pull_request_state = pull_request.STATE_CREATING
776 776 pull_request.common_ancestor_id = common_ancestor_id
777 777
778 778 Session().add(pull_request)
779 779 Session().flush()
780 780
781 781 reviewer_ids = set()
782 782 # members / reviewers
783 783 for reviewer_object in reviewers:
784 784 user_id, reasons, mandatory, role, rules = reviewer_object
785 785 user = self._get_user(user_id)
786 786
787 787 # skip duplicates
788 788 if user.user_id in reviewer_ids:
789 789 continue
790 790
791 791 reviewer_ids.add(user.user_id)
792 792
793 793 reviewer = PullRequestReviewers()
794 794 reviewer.user = user
795 795 reviewer.pull_request = pull_request
796 796 reviewer.reasons = reasons
797 797 reviewer.mandatory = mandatory
798 798 reviewer.role = role
799 799
800 800 # NOTE(marcink): pick only first rule for now
801 801 rule_id = list(rules)[0] if rules else None
802 802 rule = RepoReviewRule.get(rule_id) if rule_id else None
803 803 if rule:
804 804 review_group = rule.user_group_vote_rule(user_id)
805 805 # we check if this particular reviewer is a member of a voting group
806 806 if review_group:
807 807 # NOTE(marcink):
808 808 # it can be that the user is a member of more than one group, but we pick the first,
809 809 # same as the default reviewers algorithm does
810 810 review_group = review_group[0]
811 811
812 812 rule_data = {
813 813 'rule_name':
814 814 rule.review_rule_name,
815 815 'rule_user_group_entry_id':
816 816 review_group.repo_review_rule_users_group_id,
817 817 'rule_user_group_name':
818 818 review_group.users_group.users_group_name,
819 819 'rule_user_group_members':
820 820 [x.user.username for x in review_group.users_group.members],
821 821 'rule_user_group_members_id':
822 822 [x.user.user_id for x in review_group.users_group.members],
823 823 }
824 824 # e.g. {'vote_rule': -1, 'mandatory': True}
825 825 rule_data.update(review_group.rule_data())
826 826
827 827 reviewer.rule_data = rule_data
828 828
829 829 Session().add(reviewer)
830 830 Session().flush()
831 831
832 832 for observer_object in observers:
833 833 user_id, reasons, mandatory, role, rules = observer_object
834 834 user = self._get_user(user_id)
835 835
836 836 # skip duplicates from reviewers
837 837 if user.user_id in reviewer_ids:
838 838 continue
839 839
840 840 #reviewer_ids.add(user.user_id)
841 841
842 842 observer = PullRequestReviewers()
843 843 observer.user = user
844 844 observer.pull_request = pull_request
845 845 observer.reasons = reasons
846 846 observer.mandatory = mandatory
847 847 observer.role = role
848 848
849 849 # NOTE(marcink): pick only first rule for now
850 850 rule_id = list(rules)[0] if rules else None
851 851 rule = RepoReviewRule.get(rule_id) if rule_id else None
852 852 if rule:
853 853 # TODO(marcink): do we need this for observers ??
854 854 pass
855 855
856 856 Session().add(observer)
857 857 Session().flush()
858 858
859 859 # Set approval status to "Under Review" for all commits which are
860 860 # part of this pull request.
861 861 ChangesetStatusModel().set_status(
862 862 repo=target_repo,
863 863 status=ChangesetStatus.STATUS_UNDER_REVIEW,
864 864 user=created_by_user,
865 865 pull_request=pull_request
866 866 )
867 867 # we commit early at this point. This has to do with the fact
868 868 # that the queries above do some row-locking. Because of that
869 869 # we need to commit and finish the transaction before the validate call below,
870 870 # which for large repos could take long, resulting in long row locks
871 871 Session().commit()
872 872
873 873 # prepare workspace, and run initial merge simulation. Set state during that
874 874 # operation
875 875 pull_request = PullRequest.get(pull_request.pull_request_id)
876 876
877 877 # set state to merging for the merge simulation, and to created once finished, so we mark
878 878 # that the simulation is working fine
879 879 with pull_request.set_state(PullRequest.STATE_MERGING,
880 880 final_state=PullRequest.STATE_CREATED) as state_obj:
881 881 MergeCheck.validate(
882 882 pull_request, auth_user=auth_user, translator=translator)
883 883
884 884 self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
885 885 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
886 886
887 887 creation_data = pull_request.get_api_data(with_merge_state=False)
888 888 self._log_audit_action(
889 889 'repo.pull_request.create', {'data': creation_data},
890 890 auth_user, pull_request)
891 891
892 892 return pull_request
893 893
894 894 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
895 895 pull_request = self.__get_pull_request(pull_request)
896 896 target_scm = pull_request.target_repo.scm_instance()
897 897 if action == 'create':
898 898 trigger_hook = hooks_utils.trigger_create_pull_request_hook
899 899 elif action == 'merge':
900 900 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
901 901 elif action == 'close':
902 902 trigger_hook = hooks_utils.trigger_close_pull_request_hook
903 903 elif action == 'review_status_change':
904 904 trigger_hook = hooks_utils.trigger_review_pull_request_hook
905 905 elif action == 'update':
906 906 trigger_hook = hooks_utils.trigger_update_pull_request_hook
907 907 elif action == 'comment':
908 908 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
909 909 elif action == 'comment_edit':
910 910 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
911 911 else:
912 912 return
913 913
914 914 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
915 915 pull_request, action, trigger_hook)
916 916 trigger_hook(
917 917 username=user.username,
918 918 repo_name=pull_request.target_repo.repo_name,
919 919 repo_type=target_scm.alias,
920 920 pull_request=pull_request,
921 921 data=data)
922 922
923 923 def _get_commit_ids(self, pull_request):
924 924 """
925 925 Return the commit ids of the merged pull request.
926 926
927 927 This method does not yet deal correctly with the lack of autoupdates
928 928 nor with implicit target updates.
929 929 For example: if a commit in the source repo is already in the target, it
930 930 will be reported anyway.
931 931 """
932 932 merge_rev = pull_request.merge_rev
933 933 if merge_rev is None:
934 934 raise ValueError('This pull request was not merged yet')
935 935
936 936 commit_ids = list(pull_request.revisions)
937 937 if merge_rev not in commit_ids:
938 938 commit_ids.append(merge_rev)
939 939
940 940 return commit_ids
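# Example (hypothetical ids): for a merged pull request with revisions ['aaa', 'bbb']
# and merge_rev 'ccc', this returns ['aaa', 'bbb', 'ccc']; if the merge commit is
# already part of the revisions it is simply not appended twice.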
941 941
942 942 def merge_repo(self, pull_request, user, extras):
943 943 repo_type = pull_request.source_repo.repo_type
944 944 log.debug("Merging pull request %s", pull_request)
945 945
946 946 extras['user_agent'] = '{}/internal-merge'.format(repo_type)
947 947 merge_state = self._merge_pull_request(pull_request, user, extras)
948 948 if merge_state.executed:
949 949 log.debug("Merge was successful, updating the pull request comments.")
950 950 self._comment_and_close_pr(pull_request, user, merge_state)
951 951
952 952 self._log_audit_action(
953 953 'repo.pull_request.merge',
954 954 {'merge_state': merge_state.__dict__},
955 955 user, pull_request)
956 956
957 957 else:
958 958 log.warning("Merge failed, not updating the pull request.")
959 959 return merge_state
960 960
961 961 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
962 962 target_vcs = pull_request.target_repo.scm_instance()
963 963 source_vcs = pull_request.source_repo.scm_instance()
964 964
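# The default merge commit message comes from vcs_settings.MERGE_MESSAGE_TMPL; the
# placeholders filled below (pr_id, pr_title, pr_desc, source_repo, source_ref_name,
# target_repo, target_ref_name) are the ones a custom merge_msg template can rely on.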
965 965 message = safe_str(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
966 966 pr_id=pull_request.pull_request_id,
967 967 pr_title=pull_request.title,
968 968 pr_desc=pull_request.description,
969 969 source_repo=source_vcs.name,
970 970 source_ref_name=pull_request.source_ref_parts.name,
971 971 target_repo=target_vcs.name,
972 972 target_ref_name=pull_request.target_ref_parts.name,
973 973 )
974 974
975 975 workspace_id = self._workspace_id(pull_request)
976 976 repo_id = pull_request.target_repo.repo_id
977 977 use_rebase = self._use_rebase_for_merging(pull_request)
978 978 close_branch = self._close_branch_before_merging(pull_request)
979 979 user_name = self._user_name_for_merging(pull_request, user)
980 980
981 981 target_ref = self._refresh_reference(
982 982 pull_request.target_ref_parts, target_vcs)
983 983
984 984 callback_daemon, extras = prepare_callback_daemon(extras, protocol=vcs_settings.HOOKS_PROTOCOL)
985 985
986 986 with callback_daemon:
987 987 # TODO: johbo: Implement a clean way to run a config_override
988 988 # for a single call.
989 989 target_vcs.config.set(
990 990 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
991 991
992 992 merge_state = target_vcs.merge(
993 993 repo_id, workspace_id, target_ref, source_vcs,
994 994 pull_request.source_ref_parts,
995 995 user_name=user_name, user_email=user.email,
996 996 message=message, use_rebase=use_rebase,
997 997 close_branch=close_branch)
998 998
999 999 return merge_state
1000 1000
1001 1001 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
1002 1002 pull_request.merge_rev = merge_state.merge_ref.commit_id
1003 1003 pull_request.updated_on = datetime.datetime.now()
1004 1004 close_msg = close_msg or 'Pull request merged and closed'
1005 1005
1006 1006 CommentsModel().create(
1007 1007 text=safe_str(close_msg),
1008 1008 repo=pull_request.target_repo.repo_id,
1009 1009 user=user.user_id,
1010 1010 pull_request=pull_request.pull_request_id,
1011 1011 f_path=None,
1012 1012 line_no=None,
1013 1013 closing_pr=True
1014 1014 )
1015 1015
1016 1016 Session().add(pull_request)
1017 1017 Session().flush()
1018 1018 # TODO: paris: replace invalidation with less radical solution
1019 1019 ScmModel().mark_for_invalidation(
1020 1020 pull_request.target_repo.repo_name)
1021 1021 self.trigger_pull_request_hook(pull_request, user, 'merge')
1022 1022
1023 1023 def has_valid_update_type(self, pull_request):
1024 1024 source_ref_type = pull_request.source_ref_parts.type
1025 1025 return source_ref_type in self.REF_TYPES
1026 1026
1027 1027 def get_flow_commits(self, pull_request):
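# Resolves the current source and target commits of the pull request,
# raising SourceRefMissing/TargetRefMissing when a ref cannot be resolved.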
1028 1028
1029 1029 # source repo
1030 1030 source_ref_name = pull_request.source_ref_parts.name
1031 1031 source_ref_type = pull_request.source_ref_parts.type
1032 1032 source_ref_id = pull_request.source_ref_parts.commit_id
1033 1033 source_repo = pull_request.source_repo.scm_instance()
1034 1034
1035 1035 try:
1036 1036 if source_ref_type in self.REF_TYPES:
1037 1037 source_commit = source_repo.get_commit(
1038 1038 source_ref_name, reference_obj=pull_request.source_ref_parts)
1039 1039 else:
1040 1040 source_commit = source_repo.get_commit(source_ref_id)
1041 1041 except CommitDoesNotExistError:
1042 1042 raise SourceRefMissing()
1043 1043
1044 1044 # target repo
1045 1045 target_ref_name = pull_request.target_ref_parts.name
1046 1046 target_ref_type = pull_request.target_ref_parts.type
1047 1047 target_ref_id = pull_request.target_ref_parts.commit_id
1048 1048 target_repo = pull_request.target_repo.scm_instance()
1049 1049
1050 1050 try:
1051 1051 if target_ref_type in self.REF_TYPES:
1052 1052 target_commit = target_repo.get_commit(
1053 1053 target_ref_name, reference_obj=pull_request.target_ref_parts)
1054 1054 else:
1055 1055 target_commit = target_repo.get_commit(target_ref_id)
1056 1056 except CommitDoesNotExistError:
1057 1057 raise TargetRefMissing()
1058 1058
1059 1059 return source_commit, target_commit
1060 1060
1061 1061 def update_commits(self, pull_request, updating_user):
1062 1062 """
1063 1063 Get the updated list of commits for the pull request
1064 1064 and return the new pull request version and the list
1065 1065 of commits processed by this update action
1066 1066
1067 1067 updating_user is the user_object who triggered the update
1068 1068 """
1069 1069 pull_request = self.__get_pull_request(pull_request)
1070 1070 source_ref_type = pull_request.source_ref_parts.type
1071 1071 source_ref_name = pull_request.source_ref_parts.name
1072 1072 source_ref_id = pull_request.source_ref_parts.commit_id
1073 1073
1074 1074 target_ref_type = pull_request.target_ref_parts.type
1075 1075 target_ref_name = pull_request.target_ref_parts.name
1076 1076 target_ref_id = pull_request.target_ref_parts.commit_id
1077 1077
1078 1078 if not self.has_valid_update_type(pull_request):
1079 1079 log.debug("Skipping update of pull request %s due to ref type: %s",
1080 1080 pull_request, source_ref_type)
1081 1081 return UpdateResponse(
1082 1082 executed=False,
1083 1083 reason=UpdateFailureReason.WRONG_REF_TYPE,
1084 1084 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1085 1085 source_changed=False, target_changed=False)
1086 1086
1087 1087 try:
1088 1088 source_commit, target_commit = self.get_flow_commits(pull_request)
1089 1089 except SourceRefMissing:
1090 1090 return UpdateResponse(
1091 1091 executed=False,
1092 1092 reason=UpdateFailureReason.MISSING_SOURCE_REF,
1093 1093 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1094 1094 source_changed=False, target_changed=False)
1095 1095 except TargetRefMissing:
1096 1096 return UpdateResponse(
1097 1097 executed=False,
1098 1098 reason=UpdateFailureReason.MISSING_TARGET_REF,
1099 1099 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1100 1100 source_changed=False, target_changed=False)
1101 1101
1102 1102 source_changed = source_ref_id != source_commit.raw_id
1103 1103 target_changed = target_ref_id != target_commit.raw_id
1104 1104
1105 1105 if not (source_changed or target_changed):
1106 1106 log.debug("Nothing changed in pull request %s", pull_request)
1107 1107 return UpdateResponse(
1108 1108 executed=False,
1109 1109 reason=UpdateFailureReason.NO_CHANGE,
1110 1110 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1111 1111 source_changed=source_changed, target_changed=target_changed)
1112 1112
1113 1113 change_in_found = 'target repo' if target_changed else 'source repo'
1114 1114 log.debug('Updating pull request because of change in %s detected',
1115 1115 change_in_found)
1116 1116
1117 1117 # Finally, an update is needed: on a source change we create a new
1118 1118 # version, otherwise we just update the existing pull request in place
1119 1119 if source_changed:
1120 1120 pull_request_version = self._create_version_from_snapshot(pull_request)
1121 1121 self._link_comments_to_version(pull_request_version)
1122 1122 else:
1123 1123 try:
1124 1124 ver = pull_request.versions[-1]
1125 1125 except IndexError:
1126 1126 ver = None
1127 1127
1128 1128 pull_request.pull_request_version_id = \
1129 1129 ver.pull_request_version_id if ver else None
1130 1130 pull_request_version = pull_request
1131 1131
1132 1132 source_repo = pull_request.source_repo.scm_instance()
1133 1133 target_repo = pull_request.target_repo.scm_instance()
1134 1134
1135 1135 # re-compute commit ids
1136 1136 old_commit_ids = pull_request.revisions
1137 1137 pre_load = ["author", "date", "message", "branch"]
1138 1138 commit_ranges = target_repo.compare(
1139 1139 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1140 1140 pre_load=pre_load)
1141 1141
1142 1142 target_ref = target_commit.raw_id
1143 1143 source_ref = source_commit.raw_id
1144 1144 ancestor_commit_id = target_repo.get_common_ancestor(
1145 1145 target_ref, source_ref, source_repo)
1146 1146
1147 1147 if not ancestor_commit_id:
1148 1148 raise ValueError(
1149 1149 'cannot calculate diff info without a common ancestor. '
1150 1150 'Make sure both repositories are related, and have a common forking commit.')
1151 1151
1152 1152 pull_request.common_ancestor_id = ancestor_commit_id
1153 1153
1154 1154 pull_request.source_ref = f'{source_ref_type}:{source_ref_name}:{source_commit.raw_id}'
1155 1155 pull_request.target_ref = f'{target_ref_type}:{target_ref_name}:{ancestor_commit_id}'
1156 1156
1157 1157 pull_request.revisions = [
1158 1158 commit.raw_id for commit in reversed(commit_ranges)]
1159 1159 pull_request.updated_on = datetime.datetime.now()
1160 1160 Session().add(pull_request)
1161 1161 new_commit_ids = pull_request.revisions
1162 1162
1163 1163 old_diff_data, new_diff_data = self._generate_update_diffs(
1164 1164 pull_request, pull_request_version)
1165 1165
1166 1166 # calculate commit and file changes
1167 1167 commit_changes = self._calculate_commit_id_changes(
1168 1168 old_commit_ids, new_commit_ids)
1169 1169 file_changes = self._calculate_file_changes(
1170 1170 old_diff_data, new_diff_data)
1171 1171
1172 1172 # set comments as outdated if DIFFS changed
1173 1173 CommentsModel().outdate_comments(
1174 1174 pull_request, old_diff_data=old_diff_data,
1175 1175 new_diff_data=new_diff_data)
1176 1176
1177 1177 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1178 1178 file_node_changes = (
1179 1179 file_changes.added or file_changes.modified or file_changes.removed)
1180 1180 pr_has_changes = valid_commit_changes or file_node_changes
1181 1181
1182 1182 # Add an automatic comment to the pull request if
1183 1183 # anything has changed
1184 1184 if pr_has_changes:
1185 1185 update_comment = CommentsModel().create(
1186 1186 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1187 1187 repo=pull_request.target_repo,
1188 1188 user=pull_request.author,
1189 1189 pull_request=pull_request,
1190 1190 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1191 1191
1192 1192 # Update status to "Under Review" for added commits
1193 1193 for commit_id in commit_changes.added:
1194 1194 ChangesetStatusModel().set_status(
1195 1195 repo=pull_request.source_repo,
1196 1196 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1197 1197 comment=update_comment,
1198 1198 user=pull_request.author,
1199 1199 pull_request=pull_request,
1200 1200 revision=commit_id)
1201 1201
1202 1202 # commit the changes collected so far
1203 1203 Session().commit()
1204 1204
1205 1205 if pr_has_changes:
1206 1206 # send update email to users
1207 1207 try:
1208 1208 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1209 1209 ancestor_commit_id=ancestor_commit_id,
1210 1210 commit_changes=commit_changes,
1211 1211 file_changes=file_changes)
1212 1212 Session().commit()
1213 1213 except Exception:
1214 1214 log.exception('Failed to send email notification to users')
1215 1215 Session().rollback()
1216 1216
1217 1217 log.debug(
1218 1218 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1219 1219 'removed_ids: %s', pull_request.pull_request_id,
1220 1220 commit_changes.added, commit_changes.common, commit_changes.removed)
1221 1221 log.debug(
1222 1222 'Updated pull request with the following file changes: %s',
1223 1223 file_changes)
1224 1224
1225 1225 log.info(
1226 1226 "Updated pull request %s from commit %s to commit %s, "
1227 1227 "stored new version %s of this pull request.",
1228 1228 pull_request.pull_request_id, source_ref_id,
1229 1229 pull_request.source_ref_parts.commit_id,
1230 1230 pull_request_version.pull_request_version_id)
1231 1231
1232 1232 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1233 1233
1234 1234 return UpdateResponse(
1235 1235 executed=True, reason=UpdateFailureReason.NONE,
1236 1236 old=pull_request, new=pull_request_version,
1237 1237 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1238 1238 source_changed=source_changed, target_changed=target_changed)
1239 1239
1240 1240 def _create_version_from_snapshot(self, pull_request):
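# Snapshots the current pull request state into a new PullRequestVersion
# row and flushes the session so the version id is immediately available.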
1241 1241 version = PullRequestVersion()
1242 1242 version.title = pull_request.title
1243 1243 version.description = pull_request.description
1244 1244 version.status = pull_request.status
1245 1245 version.pull_request_state = pull_request.pull_request_state
1246 1246 version.created_on = datetime.datetime.now()
1247 1247 version.updated_on = pull_request.updated_on
1248 1248 version.user_id = pull_request.user_id
1249 1249 version.source_repo = pull_request.source_repo
1250 1250 version.source_ref = pull_request.source_ref
1251 1251 version.target_repo = pull_request.target_repo
1252 1252 version.target_ref = pull_request.target_ref
1253 1253
1254 1254 version._last_merge_source_rev = pull_request._last_merge_source_rev
1255 1255 version._last_merge_target_rev = pull_request._last_merge_target_rev
1256 1256 version.last_merge_status = pull_request.last_merge_status
1257 1257 version.last_merge_metadata = pull_request.last_merge_metadata
1258 1258 version.shadow_merge_ref = pull_request.shadow_merge_ref
1259 1259 version.merge_rev = pull_request.merge_rev
1260 1260 version.reviewer_data = pull_request.reviewer_data
1261 1261
1262 1262 version.revisions = pull_request.revisions
1263 1263 version.common_ancestor_id = pull_request.common_ancestor_id
1264 1264 version.pull_request = pull_request
1265 1265 Session().add(version)
1266 1266 Session().flush()
1267 1267
1268 1268 return version
1269 1269
1270 1270 def _generate_update_diffs(self, pull_request, pull_request_version):
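# Computes the previous (version) diff and the current pull request diff;
# callers use both to detect file changes and to outdate stale comments.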
1271 1271
1272 1272 diff_context = (
1273 1273 self.DIFF_CONTEXT +
1274 1274 CommentsModel.needed_extra_diff_context())
1275 1275 hide_whitespace_changes = False
1276 1276 source_repo = pull_request_version.source_repo
1277 1277 source_ref_id = pull_request_version.source_ref_parts.commit_id
1278 1278 target_ref_id = pull_request_version.target_ref_parts.commit_id
1279 1279 old_diff = self._get_diff_from_pr_or_version(
1280 1280 source_repo, source_ref_id, target_ref_id,
1281 1281 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1282 1282
1283 1283 source_repo = pull_request.source_repo
1284 1284 source_ref_id = pull_request.source_ref_parts.commit_id
1285 1285 target_ref_id = pull_request.target_ref_parts.commit_id
1286 1286
1287 1287 new_diff = self._get_diff_from_pr_or_version(
1288 1288 source_repo, source_ref_id, target_ref_id,
1289 1289 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1290 1290
1291 1291 # NOTE: this was using diff_format='gitdiff'
1292 1292 old_diff_data = diffs.DiffProcessor(old_diff, diff_format='newdiff')
1293 1293 old_diff_data.prepare()
1294 1294 new_diff_data = diffs.DiffProcessor(new_diff, diff_format='newdiff')
1295 1295 new_diff_data.prepare()
1296 1296
1297 1297 return old_diff_data, new_diff_data
1298 1298
1299 1299 def _link_comments_to_version(self, pull_request_version):
1300 1300 """
1301 1301 Link all unlinked comments of this pull request to the given version.
1302 1302
1303 1303 :param pull_request_version: The `PullRequestVersion` to which
1304 1304 the comments shall be linked.
1305 1305
1306 1306 """
1307 1307 pull_request = pull_request_version.pull_request
1308 1308 comments = ChangesetComment.query()\
1309 1309 .filter(
1310 1310 # TODO: johbo: Should we query for the repo at all here?
1311 1311 # Pending decision on how comments of PRs are to be related
1312 1312 # to either the source repo, the target repo or no repo at all.
1313 1313 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1314 1314 ChangesetComment.pull_request == pull_request,
1315 1315 ChangesetComment.pull_request_version == null())\
1316 1316 .order_by(ChangesetComment.comment_id.asc())
1317 1317
1318 1318 # TODO: johbo: Find out why this breaks if it is done in a bulk
1319 1319 # operation.
1320 1320 for comment in comments:
1321 1321 comment.pull_request_version_id = (
1322 1322 pull_request_version.pull_request_version_id)
1323 1323 Session().add(comment)
1324 1324
1325 1325 def _calculate_commit_id_changes(self, old_ids, new_ids):
1326 1326 added = [x for x in new_ids if x not in old_ids]
1327 1327 common = [x for x in new_ids if x in old_ids]
1328 1328 removed = [x for x in old_ids if x not in new_ids]
1329 1329 total = new_ids
1330 1330 return ChangeTuple(added, common, removed, total)
1331 1331
1332 1332 def _calculate_file_changes(self, old_diff_data, new_diff_data):
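# Compares per-file md5 hashes of raw diffs between the old and new diff
# data to classify files as added, modified or removed by the update.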
1333 1333
1334 1334 old_files = OrderedDict()
1335 1335 for diff_data in old_diff_data.parsed_diff:
1336 1336 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1337 1337
1338 1338 added_files = []
1339 1339 modified_files = []
1340 1340 removed_files = []
1341 1341 for diff_data in new_diff_data.parsed_diff:
1342 1342 new_filename = diff_data['filename']
1343 1343 new_hash = md5_safe(diff_data['raw_diff'])
1344 1344
1345 1345 old_hash = old_files.get(new_filename)
1346 1346 if not old_hash:
1347 1347 # file is not present in the old diff; figure out from the parsed diff
1348 1348 # whether the operation was an ADD or a REMOVE
1349 1349 operations_dict = diff_data['stats']['ops']
1350 1350 if diffs.DEL_FILENODE in operations_dict:
1351 1351 removed_files.append(new_filename)
1352 1352 else:
1353 1353 added_files.append(new_filename)
1354 1354 else:
1355 1355 if new_hash != old_hash:
1356 1356 modified_files.append(new_filename)
1357 1357 # now remove the file from old, since we have already seen it
1358 1358 del old_files[new_filename]
1359 1359
1360 1360 # removed files are those present in old but not in NEW; since we
1361 1361 # delete old entries that also appear in the new diff, any left-overs
1362 1362 # are the removed files
1363 1363 removed_files.extend(old_files.keys())
1364 1364
1365 1365 return FileChangeTuple(added_files, modified_files, removed_files)
1366 1366
1367 1367 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1368 1368 """
1369 1369 Render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1370 1370 so it always looks the same regardless of which default renderer
1371 1371 the system is using.
1372 1372
1373 1373 :param ancestor_commit_id: ancestor raw_id
1374 1374 :param changes: changes named tuple
1375 1375 :param file_changes: file changes named tuple
1376 1376
1377 1377 """
1378 1378 new_status = ChangesetStatus.get_status_lbl(
1379 1379 ChangesetStatus.STATUS_UNDER_REVIEW)
1380 1380
1381 1381 changed_files = (
1382 1382 file_changes.added + file_changes.modified + file_changes.removed)
1383 1383
1384 1384 params = {
1385 1385 'under_review_label': new_status,
1386 1386 'added_commits': changes.added,
1387 1387 'removed_commits': changes.removed,
1388 1388 'changed_files': changed_files,
1389 1389 'added_files': file_changes.added,
1390 1390 'modified_files': file_changes.modified,
1391 1391 'removed_files': file_changes.removed,
1392 1392 'ancestor_commit_id': ancestor_commit_id
1393 1393 }
1394 1394 renderer = RstTemplateRenderer()
1395 1395 return renderer.render('pull_request_update.mako', **params)
1396 1396
1397 1397 def edit(self, pull_request, title, description, description_renderer, user):
1398 1398 pull_request = self.__get_pull_request(pull_request)
1399 1399 old_data = pull_request.get_api_data(with_merge_state=False)
1400 1400 if pull_request.is_closed():
1401 1401 raise ValueError('This pull request is closed')
1402 1402 if title:
1403 1403 pull_request.title = title
1404 1404 pull_request.description = description
1405 1405 pull_request.updated_on = datetime.datetime.now()
1406 1406 pull_request.description_renderer = description_renderer
1407 1407 Session().add(pull_request)
1408 1408 self._log_audit_action(
1409 1409 'repo.pull_request.edit', {'old_data': old_data},
1410 1410 user, pull_request)
1411 1411
1412 1412 def update_reviewers(self, pull_request, reviewer_data, user):
1413 1413 """
1414 1414 Update the reviewers in the pull request
1415 1415
1416 1416 :param pull_request: the pr to update
1417 1417 :param reviewer_data: list of tuples
1418 1418 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1419 1419 :param user: current user who triggers this action
1420 1420 """
1421 1421
1422 1422 pull_request = self.__get_pull_request(pull_request)
1423 1423 if pull_request.is_closed():
1424 1424 raise ValueError('This pull request is closed')
1425 1425
1426 1426 reviewers = {}
1427 1427 for user_id, reasons, mandatory, role, rules in reviewer_data:
1428 1428 if isinstance(user_id, (int, str)):
1429 1429 user_id = self._get_user(user_id).user_id
1430 1430 reviewers[user_id] = {
1431 1431 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1432 1432
1433 1433 reviewers_ids = set(reviewers.keys())
1434 1434 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1435 1435 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1436 1436
1437 1437 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1438 1438
1439 1439 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1440 1440 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1441 1441
1442 1442 log.debug("Adding %s reviewers", ids_to_add)
1443 1443 log.debug("Removing %s reviewers", ids_to_remove)
1444 1444 changed = False
1445 1445 added_audit_reviewers = []
1446 1446 removed_audit_reviewers = []
1447 1447
1448 1448 for uid in ids_to_add:
1449 1449 changed = True
1450 1450 _usr = self._get_user(uid)
1451 1451 reviewer = PullRequestReviewers()
1452 1452 reviewer.user = _usr
1453 1453 reviewer.pull_request = pull_request
1454 1454 reviewer.reasons = reviewers[uid]['reasons']
1455 1455 # NOTE(marcink): mandatory shouldn't be changed now
1456 1456 # reviewer.mandatory = reviewers[uid]['reasons']
1457 1457 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1458 1458 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1459 1459 Session().add(reviewer)
1460 1460 added_audit_reviewers.append(reviewer.get_dict())
1461 1461
1462 1462 for uid in ids_to_remove:
1463 1463 changed = True
1464 1464 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1465 1465 # This is an edge case that handles previous state of having the same reviewer twice.
1466 1466 # this CAN happen due to the lack of DB checks
1467 1467 reviewers = PullRequestReviewers.query()\
1468 1468 .filter(PullRequestReviewers.user_id == uid,
1469 1469 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1470 1470 PullRequestReviewers.pull_request == pull_request)\
1471 1471 .all()
1472 1472
1473 1473 for obj in reviewers:
1474 1474 removed_audit_reviewers.append(obj.get_dict())
1475 1475 Session().delete(obj)
1476 1476
1477 1477 if changed:
1478 1478 Session().expire_all()
1479 1479 pull_request.updated_on = datetime.datetime.now()
1480 1480 Session().add(pull_request)
1481 1481
1482 1482 # finally store audit logs
1483 1483 for user_data in added_audit_reviewers:
1484 1484 self._log_audit_action(
1485 1485 'repo.pull_request.reviewer.add', {'data': user_data},
1486 1486 user, pull_request)
1487 1487 for user_data in removed_audit_reviewers:
1488 1488 self._log_audit_action(
1489 1489 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1490 1490 user, pull_request)
1491 1491
1492 1492 self.notify_reviewers(pull_request, ids_to_add, user)
1493 1493 return ids_to_add, ids_to_remove
1494 1494
1495 1495 def update_observers(self, pull_request, observer_data, user):
1496 1496 """
1497 1497 Update the observers in the pull request
1498 1498
1499 1499 :param pull_request: the pr to update
1500 1500 :param observer_data: list of tuples
1501 1501 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1502 1502 :param user: current user who triggers this action
1503 1503 """
1504 1504 pull_request = self.__get_pull_request(pull_request)
1505 1505 if pull_request.is_closed():
1506 1506 raise ValueError('This pull request is closed')
1507 1507
1508 1508 observers = {}
1509 1509 for user_id, reasons, mandatory, role, rules in observer_data:
1510 1510 if isinstance(user_id, (int, str)):
1511 1511 user_id = self._get_user(user_id).user_id
1512 1512 observers[user_id] = {
1513 1513 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1514 1514
1515 1515 observers_ids = set(observers.keys())
1516 1516 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1517 1517 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1518 1518
1519 1519 current_observers_ids = set([x.user.user_id for x in current_observers])
1520 1520
1521 1521 ids_to_add = observers_ids.difference(current_observers_ids)
1522 1522 ids_to_remove = current_observers_ids.difference(observers_ids)
1523 1523
1524 1524 log.debug("Adding %s observer", ids_to_add)
1525 1525 log.debug("Removing %s observer", ids_to_remove)
1526 1526 changed = False
1527 1527 added_audit_observers = []
1528 1528 removed_audit_observers = []
1529 1529
1530 1530 for uid in ids_to_add:
1531 1531 changed = True
1532 1532 _usr = self._get_user(uid)
1533 1533 observer = PullRequestReviewers()
1534 1534 observer.user = _usr
1535 1535 observer.pull_request = pull_request
1536 1536 observer.reasons = observers[uid]['reasons']
1537 1537 # NOTE(marcink): mandatory shouldn't be changed now
1538 1538 # observer.mandatory = observer[uid]['reasons']
1539 1539
1540 1540 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1541 1541 observer.role = PullRequestReviewers.ROLE_OBSERVER
1542 1542 Session().add(observer)
1543 1543 added_audit_observers.append(observer.get_dict())
1544 1544
1545 1545 for uid in ids_to_remove:
1546 1546 changed = True
1547 1547 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1548 1548 # This is an edge case that handles previous state of having the same reviewer twice.
1549 1549 # this CAN happen due to the lack of DB checks
1550 1550 observers = PullRequestReviewers.query()\
1551 1551 .filter(PullRequestReviewers.user_id == uid,
1552 1552 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1553 1553 PullRequestReviewers.pull_request == pull_request)\
1554 1554 .all()
1555 1555
1556 1556 for obj in observers:
1557 1557 removed_audit_observers.append(obj.get_dict())
1558 1558 Session().delete(obj)
1559 1559
1560 1560 if changed:
1561 1561 Session().expire_all()
1562 1562 pull_request.updated_on = datetime.datetime.now()
1563 1563 Session().add(pull_request)
1564 1564
1565 1565 # finally store audit logs
1566 1566 for user_data in added_audit_observers:
1567 1567 self._log_audit_action(
1568 1568 'repo.pull_request.observer.add', {'data': user_data},
1569 1569 user, pull_request)
1570 1570 for user_data in removed_audit_observers:
1571 1571 self._log_audit_action(
1572 1572 'repo.pull_request.observer.delete', {'old_data': user_data},
1573 1573 user, pull_request)
1574 1574
1575 1575 self.notify_observers(pull_request, ids_to_add, user)
1576 1576 return ids_to_add, ids_to_remove
1577 1577
1578 1578 def get_url(self, pull_request, request=None, permalink=False):
1579 1579 if not request:
1580 1580 request = get_current_request()
1581 1581
1582 1582 if permalink:
1583 1583 return request.route_url(
1584 1584 'pull_requests_global',
1585 1585 pull_request_id=pull_request.pull_request_id,)
1586 1586 else:
1587 1587 return request.route_url('pullrequest_show',
1588 1588 repo_name=safe_str(pull_request.target_repo.repo_name),
1589 1589 pull_request_id=pull_request.pull_request_id,)
1590 1590
1591 1591 def get_shadow_clone_url(self, pull_request, request=None):
1592 1592 """
1593 1593 Returns a qualified URL pointing to the shadow repository. If this pull
1594 1594 request is closed there is no shadow repository and ``None`` will be
1595 1595 returned.
1596 1596 """
1597 1597 if pull_request.is_closed():
1598 1598 return None
1599 1599 else:
1600 1600 pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
1601 1601 return safe_str('{pr_url}/repository'.format(pr_url=pr_url))
1602 1602
1603 1603 def _notify_reviewers(self, pull_request, user_ids, role, user):
1604 1604 # notification to reviewers/observers
1605 1605 if not user_ids:
1606 1606 return
1607 1607
1608 1608 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1609 1609
1610 1610 pull_request_obj = pull_request
1611 1611 # get the current participants of this pull request
1612 1612 recipients = user_ids
1613 1613 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1614 1614
1615 1615 pr_source_repo = pull_request_obj.source_repo
1616 1616 pr_target_repo = pull_request_obj.target_repo
1617 1617
1618 1618 pr_url = h.route_url('pullrequest_show',
1619 1619 repo_name=pr_target_repo.repo_name,
1620 1620 pull_request_id=pull_request_obj.pull_request_id,)
1621 1621
1622 1622 # set some variables for email notification
1623 1623 pr_target_repo_url = h.route_url(
1624 1624 'repo_summary', repo_name=pr_target_repo.repo_name)
1625 1625
1626 1626 pr_source_repo_url = h.route_url(
1627 1627 'repo_summary', repo_name=pr_source_repo.repo_name)
1628 1628
1629 1629 # pull request specifics
1630 1630 pull_request_commits = [
1631 1631 (x.raw_id, x.message)
1632 1632 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1633 1633
1634 1634 current_rhodecode_user = user
1635 1635 kwargs = {
1636 1636 'user': current_rhodecode_user,
1637 1637 'pull_request_author': pull_request.author,
1638 1638 'pull_request': pull_request_obj,
1639 1639 'pull_request_commits': pull_request_commits,
1640 1640
1641 1641 'pull_request_target_repo': pr_target_repo,
1642 1642 'pull_request_target_repo_url': pr_target_repo_url,
1643 1643
1644 1644 'pull_request_source_repo': pr_source_repo,
1645 1645 'pull_request_source_repo_url': pr_source_repo_url,
1646 1646
1647 1647 'pull_request_url': pr_url,
1648 1648 'thread_ids': [pr_url],
1649 1649 'user_role': role
1650 1650 }
1651 1651
1652 1652 # create notification objects, and emails
1653 1653 NotificationModel().create(
1654 1654 created_by=current_rhodecode_user,
1655 1655 notification_subject='', # Filled in based on the notification_type
1656 1656 notification_body='', # Filled in based on the notification_type
1657 1657 notification_type=notification_type,
1658 1658 recipients=recipients,
1659 1659 email_kwargs=kwargs,
1660 1660 )
1661 1661
1662 1662 def notify_reviewers(self, pull_request, reviewers_ids, user):
1663 1663 return self._notify_reviewers(pull_request, reviewers_ids,
1664 1664 PullRequestReviewers.ROLE_REVIEWER, user)
1665 1665
1666 1666 def notify_observers(self, pull_request, observers_ids, user):
1667 1667 return self._notify_reviewers(pull_request, observers_ids,
1668 1668 PullRequestReviewers.ROLE_OBSERVER, user)
1669 1669
1670 1670 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1671 1671 commit_changes, file_changes):
1672 1672
1673 1673 updating_user_id = updating_user.user_id
1674 1674 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1675 1675 # NOTE(marcink): send notification to all other users except the
1676 1676 # person who updated the PR
1677 1677 recipients = reviewers.difference(set([updating_user_id]))
1678 1678
1679 1679 log.debug('Notify following recipients about pull-request update %s', recipients)
1680 1680
1681 1681 pull_request_obj = pull_request
1682 1682
1683 1683 # send email about the update
1684 1684 changed_files = (
1685 1685 file_changes.added + file_changes.modified + file_changes.removed)
1686 1686
1687 1687 pr_source_repo = pull_request_obj.source_repo
1688 1688 pr_target_repo = pull_request_obj.target_repo
1689 1689
1690 1690 pr_url = h.route_url('pullrequest_show',
1691 1691 repo_name=pr_target_repo.repo_name,
1692 1692 pull_request_id=pull_request_obj.pull_request_id,)
1693 1693
1694 1694 # set some variables for email notification
1695 1695 pr_target_repo_url = h.route_url(
1696 1696 'repo_summary', repo_name=pr_target_repo.repo_name)
1697 1697
1698 1698 pr_source_repo_url = h.route_url(
1699 1699 'repo_summary', repo_name=pr_source_repo.repo_name)
1700 1700
1701 1701 email_kwargs = {
1702 1702 'date': datetime.datetime.now(),
1703 1703 'updating_user': updating_user,
1704 1704
1705 1705 'pull_request': pull_request_obj,
1706 1706
1707 1707 'pull_request_target_repo': pr_target_repo,
1708 1708 'pull_request_target_repo_url': pr_target_repo_url,
1709 1709
1710 1710 'pull_request_source_repo': pr_source_repo,
1711 1711 'pull_request_source_repo_url': pr_source_repo_url,
1712 1712
1713 1713 'pull_request_url': pr_url,
1714 1714
1715 1715 'ancestor_commit_id': ancestor_commit_id,
1716 1716 'added_commits': commit_changes.added,
1717 1717 'removed_commits': commit_changes.removed,
1718 1718 'changed_files': changed_files,
1719 1719 'added_files': file_changes.added,
1720 1720 'modified_files': file_changes.modified,
1721 1721 'removed_files': file_changes.removed,
1722 1722 'thread_ids': [pr_url],
1723 1723 }
1724 1724
1725 1725 # create notification objects, and emails
1726 1726 NotificationModel().create(
1727 1727 created_by=updating_user,
1728 1728 notification_subject='', # Filled in based on the notification_type
1729 1729 notification_body='', # Filled in based on the notification_type
1730 1730 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1731 1731 recipients=recipients,
1732 1732 email_kwargs=email_kwargs,
1733 1733 )
1734 1734
1735 1735 def delete(self, pull_request, user=None):
1736 1736 if not user:
1737 1737 user = getattr(get_current_rhodecode_user(), 'username', None)
1738 1738
1739 1739 pull_request = self.__get_pull_request(pull_request)
1740 1740 old_data = pull_request.get_api_data(with_merge_state=False)
1741 1741 self._cleanup_merge_workspace(pull_request)
1742 1742 self._log_audit_action(
1743 1743 'repo.pull_request.delete', {'old_data': old_data},
1744 1744 user, pull_request)
1745 1745 Session().delete(pull_request)
1746 1746
1747 1747 def close_pull_request(self, pull_request, user):
1748 1748 pull_request = self.__get_pull_request(pull_request)
1749 1749 self._cleanup_merge_workspace(pull_request)
1750 1750 pull_request.status = PullRequest.STATUS_CLOSED
1751 1751 pull_request.updated_on = datetime.datetime.now()
1752 1752 Session().add(pull_request)
1753 1753 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1754 1754
1755 1755 pr_data = pull_request.get_api_data(with_merge_state=False)
1756 1756 self._log_audit_action(
1757 1757 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1758 1758
1759 1759 def close_pull_request_with_comment(
1760 1760 self, pull_request, user, repo, message=None, auth_user=None):
1761 1761
1762 1762 pull_request_review_status = pull_request.calculated_review_status()
1763 1763
1764 1764 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1765 1765 # approved only if we have voting consent
1766 1766 status = ChangesetStatus.STATUS_APPROVED
1767 1767 else:
1768 1768 status = ChangesetStatus.STATUS_REJECTED
1769 1769 status_lbl = ChangesetStatus.get_status_lbl(status)
1770 1770
1771 1771 default_message = (
1772 1772 'Closing with status change {transition_icon} {status}.'
1773 1773 ).format(transition_icon='>', status=status_lbl)
1774 1774 text = message or default_message
1775 1775
1776 1776 # create a comment, and link it to new status
1777 1777 comment = CommentsModel().create(
1778 1778 text=text,
1779 1779 repo=repo.repo_id,
1780 1780 user=user.user_id,
1781 1781 pull_request=pull_request.pull_request_id,
1782 1782 status_change=status_lbl,
1783 1783 status_change_type=status,
1784 1784 closing_pr=True,
1785 1785 auth_user=auth_user,
1786 1786 )
1787 1787
1788 1788 # calculate old status before we change it
1789 1789 old_calculated_status = pull_request.calculated_review_status()
1790 1790 ChangesetStatusModel().set_status(
1791 1791 repo.repo_id,
1792 1792 status,
1793 1793 user.user_id,
1794 1794 comment=comment,
1795 1795 pull_request=pull_request.pull_request_id
1796 1796 )
1797 1797
1798 1798 Session().flush()
1799 1799
1800 1800 self.trigger_pull_request_hook(pull_request, user, 'comment',
1801 1801 data={'comment': comment})
1802 1802
1803 1803 # we now calculate the status of the pull request again and, based on
1804 1804 # that calculation, trigger a status change. This can happen when an
1805 1805 # admin who is not a reviewer closes a PR: their vote does not change
1806 1806 # the status, whereas a reviewer's vote might.
1807 1807 calculated_status = pull_request.calculated_review_status()
1808 1808 if old_calculated_status != calculated_status:
1809 1809 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1810 1810 data={'status': calculated_status})
1811 1811
1812 1812 # finally close the PR
1813 1813 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1814 1814
1815 1815 return comment, status
1816 1816
1817 1817 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
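# Returns a (merge_response, merge_possible, message) tuple; short-circuits
# when merging is disabled, the pull request is closed, or the repository
# requirements (e.g. largefiles) are incompatible.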
1818 1818 _ = translator or get_current_request().translate
1819 1819
1820 1820 if not self._is_merge_enabled(pull_request):
1821 1821 return None, False, _('Server-side pull request merging is disabled.')
1822 1822
1823 1823 if pull_request.is_closed():
1824 1824 return None, False, _('This pull request is closed.')
1825 1825
1826 1826 merge_possible, msg = self._check_repo_requirements(
1827 1827 target=pull_request.target_repo, source=pull_request.source_repo,
1828 1828 translator=_)
1829 1829 if not merge_possible:
1830 1830 return None, merge_possible, msg
1831 1831
1832 1832 try:
1833 1833 merge_response = self._try_merge(
1834 1834 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1835 1835 log.debug("Merge response: %s", merge_response)
1836 1836 return merge_response, merge_response.possible, merge_response.merge_status_message
1837 1837 except NotImplementedError:
1838 1838 return None, False, _('Pull request merging is not supported.')
1839 1839
1840 1840 def _check_repo_requirements(self, target, source, translator):
1841 1841 """
1842 1842 Check if `target` and `source` have compatible requirements.
1843 1843
1844 1844 Currently this is just checking for largefiles.
1845 1845 """
1846 1846 _ = translator
1847 1847 target_has_largefiles = self._has_largefiles(target)
1848 1848 source_has_largefiles = self._has_largefiles(source)
1849 1849 merge_possible = True
1850 1850 message = u''
1851 1851
1852 1852 if target_has_largefiles != source_has_largefiles:
1853 1853 merge_possible = False
1854 1854 if source_has_largefiles:
1855 1855 message = _(
1856 1856 'Target repository large files support is disabled.')
1857 1857 else:
1858 1858 message = _(
1859 1859 'Source repository large files support is disabled.')
1860 1860
1861 1861 return merge_possible, message
1862 1862
1863 1863 def _has_largefiles(self, repo):
1864 1864 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1865 1865 'extensions', 'largefiles')
1866 1866 return largefiles_ui and largefiles_ui[0].active
1867 1867
1868 1868 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1869 1869 """
1870 1870 Try to merge the pull request and return the merge status.
1871 1871 """
1872 1872 log.debug(
1873 1873 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1874 1874 pull_request.pull_request_id, force_shadow_repo_refresh)
1875 1875 target_vcs = pull_request.target_repo.scm_instance()
1876 1876 # Refresh the target reference.
1877 1877 try:
1878 1878 target_ref = self._refresh_reference(
1879 1879 pull_request.target_ref_parts, target_vcs)
1880 1880 except CommitDoesNotExistError:
1881 1881 merge_state = MergeResponse(
1882 1882 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1883 1883 metadata={'target_ref': pull_request.target_ref_parts})
1884 1884 return merge_state
1885 1885
1886 1886 target_locked = pull_request.target_repo.locked
1887 1887 if target_locked and target_locked[0]:
1888 1888 locked_by = 'user:{}'.format(target_locked[0])
1889 1889 log.debug("The target repository is locked by %s.", locked_by)
1890 1890 merge_state = MergeResponse(
1891 1891 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1892 1892 metadata={'locked_by': locked_by})
1893 1893 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1894 1894 pull_request, target_ref):
1895 1895 log.debug("Refreshing the merge status of the repository.")
1896 1896 merge_state = self._refresh_merge_state(
1897 1897 pull_request, target_vcs, target_ref)
1898 1898 else:
1899 1899 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1900 1900 metadata = {
1901 1901 'unresolved_files': '',
1902 1902 'target_ref': pull_request.target_ref_parts,
1903 1903 'source_ref': pull_request.source_ref_parts,
1904 1904 }
1905 1905 if pull_request.last_merge_metadata:
1906 1906 metadata.update(pull_request.last_merge_metadata_parsed)
1907 1907
1908 1908 if not possible and target_ref.type == 'branch':
1909 1909 # NOTE(marcink): case for mercurial multiple heads on branch
1910 1910 heads = target_vcs._heads(target_ref.name)
1911 1911 if len(heads) != 1:
1912 1912 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1913 1913 metadata.update({
1914 1914 'heads': heads
1915 1915 })
1916 1916
1917 1917 merge_state = MergeResponse(
1918 1918 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1919 1919
1920 1920 return merge_state
1921 1921
1922 1922 def _refresh_reference(self, reference, vcs_repository):
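# Re-resolves the reference against the repository so a branch or bookmark
# that may have moved points at its current commit id.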
1923 1923 if reference.type in self.UPDATABLE_REF_TYPES:
1924 1924 name_or_id = reference.name
1925 1925 else:
1926 1926 name_or_id = reference.commit_id
1927 1927
1928 1928 refreshed_commit = vcs_repository.get_commit(name_or_id)
1929 1929 refreshed_reference = Reference(
1930 1930 reference.type, reference.name, refreshed_commit.raw_id)
1931 1931 return refreshed_reference
1932 1932
1933 1933 def _needs_merge_state_refresh(self, pull_request, target_reference):
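# The cached merge state is only reused when both the source tip and the
# target reference still match the last recorded merge revisions.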
1934 1934 return not (
1935 1935 pull_request.revisions and
1936 1936 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1937 1937 target_reference.commit_id == pull_request._last_merge_target_rev)
1938 1938
1939 1939 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1940 1940 workspace_id = self._workspace_id(pull_request)
1941 1941 source_vcs = pull_request.source_repo.scm_instance()
1942 1942 repo_id = pull_request.target_repo.repo_id
1943 1943 use_rebase = self._use_rebase_for_merging(pull_request)
1944 1944 close_branch = self._close_branch_before_merging(pull_request)
1945 1945 merge_state = target_vcs.merge(
1946 1946 repo_id, workspace_id,
1947 1947 target_reference, source_vcs, pull_request.source_ref_parts,
1948 1948 dry_run=True, use_rebase=use_rebase,
1949 1949 close_branch=close_branch)
1950 1950
1951 1951 # Do not store the response if there was an unknown error.
1952 1952 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1953 1953 pull_request._last_merge_source_rev = \
1954 1954 pull_request.source_ref_parts.commit_id
1955 1955 pull_request._last_merge_target_rev = target_reference.commit_id
1956 1956 pull_request.last_merge_status = merge_state.failure_reason
1957 1957 pull_request.last_merge_metadata = merge_state.metadata
1958 1958
1959 1959 pull_request.shadow_merge_ref = merge_state.merge_ref
1960 1960 Session().add(pull_request)
1961 1961 Session().commit()
1962 1962
1963 1963 return merge_state
1964 1964
1965 1965 def _workspace_id(self, pull_request):
1966 1966 workspace_id = 'pr-%s' % pull_request.pull_request_id
1967 1967 return workspace_id
1968 1968
1969 1969 def generate_repo_data(self, repo, commit_id=None, branch=None,
1970 1970 bookmark=None, translator=None):
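# Builds repo metadata plus ref choices (formatted for select2) used by
# the pull request source/target selectors.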
1971 1971 from rhodecode.model.repo import RepoModel
1972 1972
1973 1973 all_refs, selected_ref = \
1974 1974 self._get_repo_pullrequest_sources(
1975 1975 repo.scm_instance(), commit_id=commit_id,
1976 1976 branch=branch, bookmark=bookmark, translator=translator)
1977 1977
1978 1978 refs_select2 = []
1979 1979 for element in all_refs:
1980 1980 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1981 1981 refs_select2.append({'text': element[1], 'children': children})
1982 1982
1983 1983 return {
1984 1984 'user': {
1985 1985 'user_id': repo.user.user_id,
1986 1986 'username': repo.user.username,
1987 1987 'firstname': repo.user.first_name,
1988 1988 'lastname': repo.user.last_name,
1989 1989 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1990 1990 },
1991 1991 'name': repo.repo_name,
1992 1992 'link': RepoModel().get_url(repo),
1993 1993 'description': h.chop_at_smart(repo.description_safe, '\n'),
1994 1994 'refs': {
1995 1995 'all_refs': all_refs,
1996 1996 'selected_ref': selected_ref,
1997 1997 'select2_refs': refs_select2
1998 1998 }
1999 1999 }
2000 2000
2001 2001 def generate_pullrequest_title(self, source, source_ref, target):
2002 2002 return u'{source}#{at_ref} to {target}'.format(
2003 2003 source=source,
2004 2004 at_ref=source_ref,
2005 2005 target=target,
2006 2006 )
2007 2007
2008 2008 def _cleanup_merge_workspace(self, pull_request):
2009 2009 # Merging related cleanup
2010 2010 repo_id = pull_request.target_repo.repo_id
2011 2011 target_scm = pull_request.target_repo.scm_instance()
2012 2012 workspace_id = self._workspace_id(pull_request)
2013 2013
2014 2014 try:
2015 2015 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2016 2016 except NotImplementedError:
2017 2017 pass
2018 2018
2019 2019 def _get_repo_pullrequest_sources(
2020 2020 self, repo, commit_id=None, branch=None, bookmark=None,
2021 2021 translator=None):
2022 2022 """
2023 2023 Return a structure with repo's interesting commits, suitable for
2024 2024 the selectors in pullrequest controller
2025 2025
2026 2026 :param commit_id: a commit that must be in the list somehow
2027 2027 and selected by default
2028 2028 :param branch: a branch that must be in the list and selected
2029 2029 by default - even if closed
2030 2030 :param bookmark: a bookmark that must be in the list and selected
2031 2031 """
2032 2032 _ = translator or get_current_request().translate
2033 2033
2034 2034 commit_id = safe_str(commit_id) if commit_id else None
2035 2035 branch = safe_str(branch) if branch else None
2036 2036 bookmark = safe_str(bookmark) if bookmark else None
2037 2037
2038 2038 selected = None
2039 2039
2040 2040 # order matters: first source that has commit_id in it will be selected
2041 2041 sources = []
2042 2042 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
2043 2043 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
2044 2044
2045 2045 if commit_id:
2046 2046 ref_commit = (h.short_id(commit_id), commit_id)
2047 2047 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
2048 2048
2049 2049 sources.append(
2050 2050 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
2051 2051 )
2052 2052
2053 2053 groups = []
2054 2054
2055 2055 for group_key, ref_list, group_name, match in sources:
2056 2056 group_refs = []
2057 2057 for ref_name, ref_id in ref_list:
2058 2058 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
2059 2059 group_refs.append((ref_key, ref_name))
2060 2060
2061 2061 if not selected:
2062 2062 if set([commit_id, match]) & set([ref_id, ref_name]):
2063 2063 selected = ref_key
2064 2064
2065 2065 if group_refs:
2066 2066 groups.append((group_refs, group_name))
2067 2067
2068 2068 if not selected:
2069 2069 ref = commit_id or branch or bookmark
2070 2070 if ref:
2071 2071 raise CommitDoesNotExistError(
2072 2072 u'No commit refs could be found matching: {}'.format(ref))
2073 2073 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2074 2074 selected = u'branch:{}:{}'.format(
2075 2075 safe_str(repo.DEFAULT_BRANCH_NAME),
2076 2076 safe_str(repo.branches[repo.DEFAULT_BRANCH_NAME])
2077 2077 )
2078 2078 elif repo.commit_ids:
2079 2079 # make the user select in this case
2080 2080 selected = None
2081 2081 else:
2082 2082 raise EmptyRepositoryError()
2083 2083 return groups, selected
2084 2084
2085 2085 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2086 2086 hide_whitespace_changes, diff_context):
2087 2087
2088 2088 return self._get_diff_from_pr_or_version(
2089 2089 source_repo, source_ref_id, target_ref_id,
2090 2090 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2091 2091
2092 2092 def _get_diff_from_pr_or_version(
2093 2093 self, source_repo, source_ref_id, target_ref_id,
2094 2094 hide_whitespace_changes, diff_context):
2095 2095
2096 2096 target_commit = source_repo.get_commit(
2097 2097 commit_id=safe_str(target_ref_id))
2098 2098 source_commit = source_repo.get_commit(
2099 2099 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
2100 2100 if isinstance(source_repo, Repository):
2101 2101 vcs_repo = source_repo.scm_instance()
2102 2102 else:
2103 2103 vcs_repo = source_repo
2104 2104
2105 2105 # TODO: johbo: In the context of an update, we cannot reach
2106 2106 # the old commit anymore with our normal mechanisms. It needs
2107 2107 # some sort of special support in the vcs layer to avoid this
2108 2108 # workaround.
2109 2109 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
2110 2110 vcs_repo.alias == 'git'):
2111 2111 source_commit.raw_id = safe_str(source_ref_id)
2112 2112
2113 2113 log.debug('calculating diff between '
2114 2114 'source_ref:%s and target_ref:%s for repo `%s`',
2115 2115 target_ref_id, source_ref_id,
2116 2116 safe_str(vcs_repo.path))
2117 2117
2118 2118 vcs_diff = vcs_repo.get_diff(
2119 2119 commit1=target_commit, commit2=source_commit,
2120 2120 ignore_whitespace=hide_whitespace_changes, context=diff_context)
2121 2121 return vcs_diff
2122 2122
2123 2123 def _is_merge_enabled(self, pull_request):
2124 2124 return self._get_general_setting(
2125 2125 pull_request, 'rhodecode_pr_merge_enabled')
2126 2126
2127 def is_automatic_merge_enabled(self, pull_request):
2128 return self._get_general_setting(
2129 pull_request, 'rhodecode_auto_merge_enabled')
2130
2127 2131 def _use_rebase_for_merging(self, pull_request):
2128 2132 repo_type = pull_request.target_repo.repo_type
2129 2133 if repo_type == 'hg':
2130 2134 return self._get_general_setting(
2131 2135 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2132 2136 elif repo_type == 'git':
2133 2137 return self._get_general_setting(
2134 2138 pull_request, 'rhodecode_git_use_rebase_for_merging')
2135 2139
2136 2140 return False
2137 2141
2138 2142 def _user_name_for_merging(self, pull_request, user):
2139 2143 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2140 2144 if env_user_name_attr and hasattr(user, env_user_name_attr):
2141 2145 user_name_attr = env_user_name_attr
2142 2146 else:
2143 2147 user_name_attr = 'short_contact'
2144 2148
2145 2149 user_name = getattr(user, user_name_attr)
2146 2150 return user_name
2147 2151
2148 2152 def _close_branch_before_merging(self, pull_request):
2149 2153 repo_type = pull_request.target_repo.repo_type
2150 2154 if repo_type == 'hg':
2151 2155 return self._get_general_setting(
2152 2156 pull_request, 'rhodecode_hg_close_branch_before_merging')
2153 2157 elif repo_type == 'git':
2154 2158 return self._get_general_setting(
2155 2159 pull_request, 'rhodecode_git_close_branch_before_merging')
2156 2160
2157 2161 return False
2158 2162
2159 2163 def _get_general_setting(self, pull_request, settings_key, default=False):
2160 2164 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2161 2165 settings = settings_model.get_general_settings()
2162 2166 return settings.get(settings_key, default)
2163 2167
2164 2168 def _log_audit_action(self, action, action_data, user, pull_request):
2165 2169 audit_logger.store(
2166 2170 action=action,
2167 2171 action_data=action_data,
2168 2172 user=user,
2169 2173 repo=pull_request.target_repo)
2170 2174
2171 2175 def get_reviewer_functions(self):
2172 2176 """
2173 2177 Fetches functions for validation and fetching default reviewers.
2174 2178 If available we use the EE package, otherwise we fall back to the CE
2175 2179 package functions.
2176 2180 """
2177 2181 try:
2178 2182 from rc_reviewers.utils import get_default_reviewers_data
2179 2183 from rc_reviewers.utils import validate_default_reviewers
2180 2184 from rc_reviewers.utils import validate_observers
2181 2185 except ImportError:
2182 2186 from rhodecode.apps.repository.utils import get_default_reviewers_data
2183 2187 from rhodecode.apps.repository.utils import validate_default_reviewers
2184 2188 from rhodecode.apps.repository.utils import validate_observers
2185 2189
2186 2190 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2187 2191
2188 2192
2189 2193 class MergeCheck(object):
2190 2194 """
2191 2195 Performs merge checks and returns a check object which stores information
2192 2196 about merge errors and merge conditions
2193 2197 """
2194 2198 TODO_CHECK = 'todo'
2195 2199 PERM_CHECK = 'perm'
2196 2200 REVIEW_CHECK = 'review'
2197 2201 MERGE_CHECK = 'merge'
2198 2202 WIP_CHECK = 'wip'
2199 2203
2200 2204 def __init__(self):
2201 2205 self.review_status = None
2202 2206 self.merge_possible = None
2203 2207 self.merge_msg = ''
2204 2208 self.merge_response = None
2205 2209 self.failed = None
2206 2210 self.errors = []
2207 2211 self.error_details = OrderedDict()
2208 2212 self.source_commit = AttributeDict()
2209 2213 self.target_commit = AttributeDict()
2210 2214 self.reviewers_count = 0
2211 2215 self.observers_count = 0
2212 2216
2213 2217 def __repr__(self):
2214 2218 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2215 2219 self.merge_possible, self.failed, self.errors)
2216 2220
2217 2221 def push_error(self, error_type, message, error_key, details):
2218 2222 self.failed = True
2219 2223 self.errors.append([error_type, message])
2220 2224 self.error_details[error_key] = dict(
2221 2225 details=details,
2222 2226 error_type=error_type,
2223 2227 message=message
2224 2228 )
2225 2229
2226 2230 @classmethod
2227 2231 def validate(cls, pull_request, auth_user, translator, fail_early=False, force_shadow_repo_refresh=False):
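# Runs the merge checks in order: WIP marker, merge permission, target
# branch permission, review status, unresolved TODOs and finally the merge
# simulation itself; with fail_early=True it returns at the first failure.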
2228 2232 _ = translator
2229 2233 merge_check = cls()
2230 2234
2231 2235 # title has WIP:
2232 2236 if pull_request.work_in_progress:
2233 2237 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2234 2238
2235 2239 msg = _('WIP marker in title prevents an accidental merge.')
2236 2240 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2237 2241 if fail_early:
2238 2242 return merge_check
2239 2243
2240 2244 # permissions to merge
2241 2245 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2242 2246 if not user_allowed_to_merge:
2243 2247 log.debug("MergeCheck: cannot merge, approval is pending.")
2244 2248
2245 2249 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2246 2250 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2247 2251 if fail_early:
2248 2252 return merge_check
2249 2253
2250 2254 # permission to merge into the target branch
2251 2255 target_commit_id = pull_request.target_ref_parts.commit_id
2252 2256 if pull_request.target_ref_parts.type == 'branch':
2253 2257 branch_name = pull_request.target_ref_parts.name
2254 2258 else:
2255 2259 # for mercurial we can always figure out the branch from the commit,
2256 2260 # e.g. in the case of a bookmark
2257 2261 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2258 2262 branch_name = target_commit.branch
2259 2263
2260 2264 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2261 2265 pull_request.target_repo.repo_name, branch_name)
2262 2266 if branch_perm and branch_perm == 'branch.none':
2263 2267 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2264 2268 branch_name, rule)
2265 2269 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2266 2270 if fail_early:
2267 2271 return merge_check
2268 2272
2269 2273 # review status, must be always present
2270 2274 review_status = pull_request.calculated_review_status()
2271 2275 merge_check.review_status = review_status
2272 2276 merge_check.reviewers_count = pull_request.reviewers_count
2273 2277 merge_check.observers_count = pull_request.observers_count
2274 2278
2275 2279 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2276 2280 if not status_approved and merge_check.reviewers_count:
2277 2281 log.debug("MergeCheck: cannot merge, approval is pending.")
2278 2282 msg = _('Pull request reviewer approval is pending.')
2279 2283
2280 2284 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2281 2285
2282 2286 if fail_early:
2283 2287 return merge_check
2284 2288
2285 2289 # left over TODOs
2286 2290 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2287 2291 if todos:
2288 2292 log.debug("MergeCheck: cannot merge, %s unresolved TODOs left.", len(todos))
2289 2293
2290 2294 if len(todos) == 1:
2291 2295 msg = _('Cannot merge, {} TODO still not resolved.').format(len(todos))
2292 2296 else:
2293 2297 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2294 2298 len(todos))
2295 2299
2296 2300 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2297 2301
2298 2302 if fail_early:
2299 2303 return merge_check
2300 2304
2301 2305 # merge possible, here is the filesystem simulation + shadow repo
2302 2306 merge_response, merge_status, msg = PullRequestModel().merge_status(
2303 2307 pull_request, translator=translator,
2304 2308 force_shadow_repo_refresh=force_shadow_repo_refresh)
2305 2309
2306 2310 merge_check.merge_possible = merge_status
2307 2311 merge_check.merge_msg = msg
2308 2312 merge_check.merge_response = merge_response
2309 2313
2310 2314 source_ref_id = pull_request.source_ref_parts.commit_id
2311 2315 target_ref_id = pull_request.target_ref_parts.commit_id
2312 2316
2313 2317 try:
2314 2318 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2315 2319 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2316 2320 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2317 2321 merge_check.source_commit.current_raw_id = source_commit.raw_id
2318 2322 merge_check.source_commit.previous_raw_id = source_ref_id
2319 2323
2320 2324 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2321 2325 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2322 2326 merge_check.target_commit.current_raw_id = target_commit.raw_id
2323 2327 merge_check.target_commit.previous_raw_id = target_ref_id
2324 2328 except (SourceRefMissing, TargetRefMissing):
2325 2329 pass
2326 2330
2327 2331 if not merge_status:
2328 2332 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2329 2333 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2330 2334
2331 2335 if fail_early:
2332 2336 return merge_check
2333 2337
2334 2338 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2335 2339 return merge_check
2336 2340
2337 2341 @classmethod
2338 2342 def get_merge_conditions(cls, pull_request, translator):
2339 2343 _ = translator
2340 2344 merge_details = {}
2341 2345
2342 2346 model = PullRequestModel()
2343 2347 use_rebase = model._use_rebase_for_merging(pull_request)
2344 2348
2345 2349 if use_rebase:
2346 2350 merge_details['merge_strategy'] = dict(
2347 2351 details={},
2348 2352 message=_('Merge strategy: rebase')
2349 2353 )
2350 2354 else:
2351 2355 merge_details['merge_strategy'] = dict(
2352 2356 details={},
2353 2357 message=_('Merge strategy: explicit merge commit')
2354 2358 )
2355 2359
2356 2360 close_branch = model._close_branch_before_merging(pull_request)
2357 2361 if close_branch:
2358 2362 repo_type = pull_request.target_repo.repo_type
2359 2363 close_msg = ''
2360 2364 if repo_type == 'hg':
2361 2365 close_msg = _('Source branch will be closed before the merge.')
2362 2366 elif repo_type == 'git':
2363 2367 close_msg = _('Source branch will be deleted after the merge.')
2364 2368
2365 2369 merge_details['close_branch'] = dict(
2366 2370 details={},
2367 2371 message=close_msg
2368 2372 )
2369 2373
2370 2374 return merge_details
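For orientation, the mapping returned above has a small fixed shape; an illustrative value (messages taken from this method, assuming a Mercurial target with rebase and close-branch enabled):

    # illustrative only, not part of this change
    {
        'merge_strategy': {'details': {}, 'message': 'Merge strategy: rebase'},
        'close_branch': {'details': {}, 'message': 'Source branch will be closed before the merge.'},
    }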
2371 2375
2372 2376
2373 2377 @dataclasses.dataclass
2374 2378 class ChangeTuple:
2375 2379 added: list
2376 2380 common: list
2377 2381 removed: list
2378 2382 total: list
2379 2383
2380 2384
2381 2385 @dataclasses.dataclass
2382 2386 class FileChangeTuple:
2383 2387 added: list
2384 2388 modified: list
2385 2389 removed: list
@@ -1,864 +1,865
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import re
21 21 import logging
22 22 import time
23 23 import functools
24 24 from collections import namedtuple
25 25
26 26 from pyramid.threadlocal import get_current_request
27 27
28 28 from rhodecode.lib import rc_cache
29 29 from rhodecode.lib.hash_utils import sha1_safe
30 30 from rhodecode.lib.html_filters import sanitize_html
31 31 from rhodecode.lib.utils2 import (
32 32 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
33 33 from rhodecode.lib.vcs.backends import base
34 34 from rhodecode.lib.statsd_client import StatsdClient
35 35 from rhodecode.model import BaseModel
36 36 from rhodecode.model.db import (
37 37 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting)
38 38 from rhodecode.model.meta import Session
39 39
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43
44 44 UiSetting = namedtuple(
45 45 'UiSetting', ['section', 'key', 'value', 'active'])
46 46
47 47 SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
48 48
49 49
50 50 class SettingNotFound(Exception):
51 51 def __init__(self, setting_id):
52 52 msg = f'Setting `{setting_id}` is not found'
53 53 super().__init__(msg)
54 54
55 55
56 56 class SettingsModel(BaseModel):
57 57 BUILTIN_HOOKS = (
58 58 RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
59 59 RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
60 60 RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
61 61 RhodeCodeUi.HOOK_PUSH_KEY,)
62 62 HOOKS_SECTION = 'hooks'
63 63
64 64 def __init__(self, sa=None, repo=None):
65 65 self.repo = repo
66 66 self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
67 67 self.SettingsDbModel = (
68 68 RepoRhodeCodeSetting if repo else RhodeCodeSetting)
69 69 super().__init__(sa)
70 70
71 71 def get_keyname(self, key_name, prefix='rhodecode_'):
72 72 return f'{prefix}{key_name}'
73 73
74 74 def get_ui_by_key(self, key):
75 75 q = self.UiDbModel.query()
76 76 q = q.filter(self.UiDbModel.ui_key == key)
77 77 q = self._filter_by_repo(RepoRhodeCodeUi, q)
78 78 return q.scalar()
79 79
80 80 def get_ui_by_section(self, section):
81 81 q = self.UiDbModel.query()
82 82 q = q.filter(self.UiDbModel.ui_section == section)
83 83 q = self._filter_by_repo(RepoRhodeCodeUi, q)
84 84 return q.all()
85 85
86 86 def get_ui_by_section_and_key(self, section, key):
87 87 q = self.UiDbModel.query()
88 88 q = q.filter(self.UiDbModel.ui_section == section)
89 89 q = q.filter(self.UiDbModel.ui_key == key)
90 90 q = self._filter_by_repo(RepoRhodeCodeUi, q)
91 91 return q.scalar()
92 92
93 93 def get_ui(self, section=None, key=None):
94 94 q = self.UiDbModel.query()
95 95 q = self._filter_by_repo(RepoRhodeCodeUi, q)
96 96
97 97 if section:
98 98 q = q.filter(self.UiDbModel.ui_section == section)
99 99 if key:
100 100 q = q.filter(self.UiDbModel.ui_key == key)
101 101
102 102 # TODO: mikhail: add caching
103 103 result = [
104 104 UiSetting(
105 105 section=safe_str(r.ui_section), key=safe_str(r.ui_key),
106 106 value=safe_str(r.ui_value), active=r.ui_active
107 107 )
108 108 for r in q.all()
109 109 ]
110 110 return result
111 111
112 112 def get_builtin_hooks(self):
113 113 q = self.UiDbModel.query()
114 114 q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
115 115 return self._get_hooks(q)
116 116
117 117 def get_custom_hooks(self):
118 118 q = self.UiDbModel.query()
119 119 q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
120 120 return self._get_hooks(q)
121 121
122 122 def create_ui_section_value(self, section, val, key=None, active=True):
123 123 new_ui = self.UiDbModel()
124 124 new_ui.ui_section = section
125 125 new_ui.ui_value = val
126 126 new_ui.ui_active = active
127 127
128 128 repository_id = ''
129 129 if self.repo:
130 130 repo = self._get_repo(self.repo)
131 131 repository_id = repo.repo_id
132 132 new_ui.repository_id = repository_id
133 133
134 134 if not key:
135 135             # keys are unique, so they need extra info appended to keep them unique per repository
136 136 if self.repo:
137 137 key = sha1_safe(f'{section}{val}{repository_id}')
138 138 else:
139 139 key = sha1_safe(f'{section}{val}')
140 140
141 141 new_ui.ui_key = key
142 142
143 143 Session().add(new_ui)
144 144 return new_ui
145 145
146 146 def create_or_update_hook(self, key, value):
147 147 ui = (
148 148 self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
149 149 self.UiDbModel())
150 150 ui.ui_section = self.HOOKS_SECTION
151 151 ui.ui_active = True
152 152 ui.ui_key = key
153 153 ui.ui_value = value
154 154
155 155 if self.repo:
156 156 repo = self._get_repo(self.repo)
157 157 repository_id = repo.repo_id
158 158 ui.repository_id = repository_id
159 159
160 160 Session().add(ui)
161 161 return ui
162 162
163 163 def delete_ui(self, id_):
164 164 ui = self.UiDbModel.get(id_)
165 165 if not ui:
166 166 raise SettingNotFound(id_)
167 167 Session().delete(ui)
168 168
169 169 def get_setting_by_name(self, name):
170 170 q = self._get_settings_query()
171 171 q = q.filter(self.SettingsDbModel.app_settings_name == name)
172 172 return q.scalar()
173 173
174 174 def create_or_update_setting(
175 175 self, name, val: Optional | str = Optional(''), type_: Optional | str = Optional('unicode')):
176 176 """
177 177         Creates or updates a RhodeCode setting. If an update is triggered, it
178 178         only updates the parameters that are explicitly set; arguments left as
179 179         Optional instances are skipped.
180 180
181 181 :param name:
182 182 :param val:
183 183 :param type_:
184 184 :return:
185 185 """
186 186
187 187 res = self.get_setting_by_name(name)
188 188 repo = self._get_repo(self.repo) if self.repo else None
189 189
190 190 if not res:
191 191 val = Optional.extract(val)
192 192 type_ = Optional.extract(type_)
193 193
194 194 args = (
195 195 (repo.repo_id, name, val, type_)
196 196 if repo else (name, val, type_))
197 197 res = self.SettingsDbModel(*args)
198 198
199 199 else:
200 200 if self.repo:
201 201 res.repository_id = repo.repo_id
202 202
203 203 res.app_settings_name = name
204 204 if not isinstance(type_, Optional):
205 205 # update if set
206 206 res.app_settings_type = type_
207 207 if not isinstance(val, Optional):
208 208 # update if set
209 209 res.app_settings_value = val
210 210
211 211 Session().add(res)
212 212 return res
213 213
214 214 def get_cache_region(self):
215 215 repo = self._get_repo(self.repo) if self.repo else None
216 216 cache_key = f"repo.v1.{repo.repo_id}" if repo else "repo.v1.ALL"
217 217 cache_namespace_uid = f'cache_settings.{cache_key}'
218 218 region = rc_cache.get_or_create_region('cache_general', cache_namespace_uid)
219 219 return region, cache_namespace_uid
220 220
221 221 def invalidate_settings_cache(self, hard=False):
222 222 region, namespace_key = self.get_cache_region()
223 223         log.debug('Invalidating cache [%s]: region %s, cache_key: %s',
224 224 'invalidate_settings_cache', region, namespace_key)
225 225
226 226 # we use hard cleanup if invalidation is sent
227 227 rc_cache.clear_cache_namespace(region, namespace_key, method=rc_cache.CLEAR_DELETE)
228 228
229 229 def get_cache_call_method(self, cache=True):
230 230 region, cache_key = self.get_cache_region()
231 231
232 232 @region.conditional_cache_on_arguments(condition=cache)
233 233 def _get_all_settings(name, key):
234 234 q = self._get_settings_query()
235 235 if not q:
236 236                 raise Exception('Could not get application settings!')
237 237
238 238 settings = {
239 239 self.get_keyname(res.app_settings_name): res.app_settings_value
240 240 for res in q
241 241 }
242 242 return settings
243 243 return _get_all_settings
244 244
245 245 def get_all_settings(self, cache=False, from_request=True):
246 246 # defines if we use GLOBAL, or PER_REPO
247 247 repo = self._get_repo(self.repo) if self.repo else None
248 248
249 249 # initially try the request context; this is the fastest
250 250         # only the global config is fetched this way, not repo-specific settings
251 251 if from_request and not repo:
252 252 request = get_current_request()
253 253
254 254 if request and hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
255 255 rc_config = request.call_context.rc_config
256 256 if rc_config:
257 257 return rc_config
258 258
259 259 _region, cache_key = self.get_cache_region()
260 260 _get_all_settings = self.get_cache_call_method(cache=cache)
261 261
262 262 start = time.time()
263 263 result = _get_all_settings('rhodecode_settings', cache_key)
264 264 compute_time = time.time() - start
265 265 log.debug('cached method:%s took %.4fs', _get_all_settings.__name__, compute_time)
266 266
267 267 statsd = StatsdClient.statsd
268 268 if statsd:
269 269 elapsed_time_ms = round(1000.0 * compute_time) # use ms only
270 270 statsd.timing("rhodecode_settings_timing.histogram", elapsed_time_ms,
271 271 use_decimals=False)
272 272
273 273 log.debug('Fetching app settings for key: %s took: %.4fs: cache: %s', cache_key, compute_time, cache)
274 274
275 275 return result
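As a hedged sketch of how the cached read path above combines with the explicit invalidation (method names from this class; a cache hit depends on the request context and the 'cache_general' region):

    # illustrative only
    model = SettingsModel()
    cached = model.get_all_settings(cache=True)   # request context first, then the cache region
    model.invalidate_settings_cache()             # clears the cache namespace for this scope
    fresh = model.get_all_settings(cache=True, from_request=False)  # recomputed on the next call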
276 276
277 277 def get_auth_settings(self):
278 278 q = self._get_settings_query()
279 279 q = q.filter(
280 280 self.SettingsDbModel.app_settings_name.startswith('auth_'))
281 281 rows = q.all()
282 282 auth_settings = {
283 283 row.app_settings_name: row.app_settings_value for row in rows}
284 284 return auth_settings
285 285
286 286 def get_auth_plugins(self):
287 287 auth_plugins = self.get_setting_by_name("auth_plugins")
288 288 return auth_plugins.app_settings_value
289 289
290 290 def get_default_repo_settings(self, strip_prefix=False):
291 291 q = self._get_settings_query()
292 292 q = q.filter(
293 293 self.SettingsDbModel.app_settings_name.startswith('default_'))
294 294 rows = q.all()
295 295
296 296 result = {}
297 297 for row in rows:
298 298 key = row.app_settings_name
299 299 if strip_prefix:
300 300 key = remove_prefix(key, prefix='default_')
301 301 result.update({key: row.app_settings_value})
302 302 return result
303 303
304 304 def get_repo(self):
305 305 repo = self._get_repo(self.repo)
306 306 if not repo:
307 307 raise Exception(
308 308 f'Repository `{self.repo}` cannot be found inside the database')
309 309 return repo
310 310
311 311 def _filter_by_repo(self, model, query):
312 312 if self.repo:
313 313 repo = self.get_repo()
314 314 query = query.filter(model.repository_id == repo.repo_id)
315 315 return query
316 316
317 317 def _get_hooks(self, query):
318 318 query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
319 319 query = self._filter_by_repo(RepoRhodeCodeUi, query)
320 320 return query.all()
321 321
322 322 def _get_settings_query(self):
323 323 q = self.SettingsDbModel.query()
324 324 return self._filter_by_repo(RepoRhodeCodeSetting, q)
325 325
326 326 def list_enabled_social_plugins(self, settings):
327 327 enabled = []
328 328 for plug in SOCIAL_PLUGINS_LIST:
329 329 if str2bool(settings.get(f'rhodecode_auth_{plug}_enabled')):
330 330 enabled.append(plug)
331 331 return enabled
332 332
333 333
334 334 def assert_repo_settings(func):
335 335 @functools.wraps(func)
336 336 def _wrapper(self, *args, **kwargs):
337 337 if not self.repo_settings:
338 338 raise Exception('Repository is not specified')
339 339 return func(self, *args, **kwargs)
340 340 return _wrapper
341 341
342 342
343 343 class IssueTrackerSettingsModel(object):
344 344 INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
345 345 SETTINGS_PREFIX = 'issuetracker_'
346 346
347 347 def __init__(self, sa=None, repo=None):
348 348 self.global_settings = SettingsModel(sa=sa)
349 349 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
350 350
351 351 @property
352 352 def inherit_global_settings(self):
353 353 if not self.repo_settings:
354 354 return True
355 355 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
356 356 return setting.app_settings_value if setting else True
357 357
358 358 @inherit_global_settings.setter
359 359 def inherit_global_settings(self, value):
360 360 if self.repo_settings:
361 361 settings = self.repo_settings.create_or_update_setting(
362 362 self.INHERIT_SETTINGS, value, type_='bool')
363 363 Session().add(settings)
364 364
365 365 def _get_keyname(self, key, uid, prefix='rhodecode_'):
366 366 return f'{prefix}{self.SETTINGS_PREFIX}{key}_{uid}'
367 367
368 368 def _make_dict_for_settings(self, qs):
369 369 prefix_match = self._get_keyname('pat', '',)
370 370
371 371 issuetracker_entries = {}
372 372 # create keys
373 373 for k, v in qs.items():
374 374 if k.startswith(prefix_match):
375 375 uid = k[len(prefix_match):]
376 376 issuetracker_entries[uid] = None
377 377
378 378 def url_cleaner(input_str):
379 379 input_str = input_str.replace('"', '').replace("'", '')
380 380 input_str = sanitize_html(input_str, strip=True)
381 381 return input_str
382 382
383 383 # populate
384 384 for uid in issuetracker_entries:
385 385 url_data = qs.get(self._get_keyname('url', uid))
386 386
387 387 pat = qs.get(self._get_keyname('pat', uid))
388 388 try:
389 389 pat_compiled = re.compile(r'%s' % pat)
390 390 except re.error:
391 391 pat_compiled = None
392 392
393 393 issuetracker_entries[uid] = AttributeDict({
394 394 'pat': pat,
395 395 'pat_compiled': pat_compiled,
396 396 'url': url_cleaner(
397 397 qs.get(self._get_keyname('url', uid)) or ''),
398 398 'pref': sanitize_html(
399 399 qs.get(self._get_keyname('pref', uid)) or ''),
400 400 'desc': qs.get(
401 401 self._get_keyname('desc', uid)),
402 402 })
403 403
404 404 return issuetracker_entries
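For illustration, the flattened keys this method parses follow the rhodecode_issuetracker_<field>_<uid> scheme; a hypothetical input and the resulting entry (tracker URL and pattern are made up):

    # hypothetical example
    qs = {
        'rhodecode_issuetracker_pat_default': r'#(?P<issue_id>\d+)',
        'rhodecode_issuetracker_url_default': 'https://tracker.example.com/issue/${issue_id}',
        'rhodecode_issuetracker_pref_default': '#',
        'rhodecode_issuetracker_desc_default': 'Example tracker',
    }
    # _make_dict_for_settings(qs) ->
    # {'default': AttributeDict(pat='#(?P<issue_id>\\d+)', pat_compiled=<re>, url='https://...', pref='#', desc='Example tracker')}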
405 405
406 406 def get_global_settings(self, cache=False):
407 407 """
408 408 Returns list of global issue tracker settings
409 409 """
410 410 defaults = self.global_settings.get_all_settings(cache=cache)
411 411 settings = self._make_dict_for_settings(defaults)
412 412 return settings
413 413
414 414 def get_repo_settings(self, cache=False):
415 415 """
416 416 Returns list of issue tracker settings per repository
417 417 """
418 418 if not self.repo_settings:
419 419 raise Exception('Repository is not specified')
420 420 all_settings = self.repo_settings.get_all_settings(cache=cache)
421 421 settings = self._make_dict_for_settings(all_settings)
422 422 return settings
423 423
424 424 def get_settings(self, cache=False):
425 425 if self.inherit_global_settings:
426 426 return self.get_global_settings(cache=cache)
427 427 else:
428 428 return self.get_repo_settings(cache=cache)
429 429
430 430 def delete_entries(self, uid):
431 431 if self.repo_settings:
432 432 all_patterns = self.get_repo_settings()
433 433 settings_model = self.repo_settings
434 434 else:
435 435 all_patterns = self.get_global_settings()
436 436 settings_model = self.global_settings
437 437 entries = all_patterns.get(uid, [])
438 438
439 439 for del_key in entries:
440 440 setting_name = self._get_keyname(del_key, uid, prefix='')
441 441 entry = settings_model.get_setting_by_name(setting_name)
442 442 if entry:
443 443 Session().delete(entry)
444 444
445 445 Session().commit()
446 446
447 447 def create_or_update_setting(
448 448 self, name, val=Optional(''), type_=Optional('unicode')):
449 449 if self.repo_settings:
450 450 setting = self.repo_settings.create_or_update_setting(
451 451 name, val, type_)
452 452 else:
453 453 setting = self.global_settings.create_or_update_setting(
454 454 name, val, type_)
455 455 return setting
456 456
457 457
458 458 class VcsSettingsModel(object):
459 459
460 460 INHERIT_SETTINGS = 'inherit_vcs_settings'
461 461 GENERAL_SETTINGS = (
462 462 'use_outdated_comments',
463 463 'pr_merge_enabled',
464 'auto_merge_enabled',
464 465 'hg_use_rebase_for_merging',
465 466 'hg_close_branch_before_merging',
466 467 'git_use_rebase_for_merging',
467 468 'git_close_branch_before_merging',
468 469 'diff_cache',
469 470 )
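Each name in this tuple is read from and written to form data with the 'rhodecode_' prefix (see get_keyname and _create_or_update_general_settings below); for example, the newly added entry travels as:

    # 'auto_merge_enabled' -> form/settings key 'rhodecode_auto_merge_enabled' (stored as a bool setting)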
470 471
471 472 HOOKS_SETTINGS = (
472 473 ('hooks', 'changegroup.repo_size'),
473 474 ('hooks', 'changegroup.push_logger'),
474 475 ('hooks', 'outgoing.pull_logger'),
475 476 )
476 477 HG_SETTINGS = (
477 478 ('extensions', 'largefiles'),
478 479 ('phases', 'publish'),
479 480 ('extensions', 'evolve'),
480 481 ('extensions', 'topic'),
481 482 ('experimental', 'evolution'),
482 483 ('experimental', 'evolution.exchange'),
483 484 )
484 485 GIT_SETTINGS = (
485 486 ('vcs_git_lfs', 'enabled'),
486 487 )
487 488 GLOBAL_HG_SETTINGS = (
488 489 ('extensions', 'largefiles'),
489 490 ('phases', 'publish'),
490 491 ('extensions', 'evolve'),
491 492 ('extensions', 'topic'),
492 493 ('experimental', 'evolution'),
493 494 ('experimental', 'evolution.exchange'),
494 495 )
495 496
496 497 GLOBAL_GIT_SETTINGS = (
497 498 ('vcs_git_lfs', 'enabled'),
498 499 )
499 500
500 501 SVN_BRANCH_SECTION = 'vcs_svn_branch'
501 502 SVN_TAG_SECTION = 'vcs_svn_tag'
502 503 PATH_SETTING = ('paths', '/')
503 504
504 505 def __init__(self, sa=None, repo=None):
505 506 self.global_settings = SettingsModel(sa=sa)
506 507 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
507 508 self._ui_settings = (
508 509 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
509 510 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
510 511
511 512 @property
512 513 @assert_repo_settings
513 514 def inherit_global_settings(self):
514 515 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
515 516 return setting.app_settings_value if setting else True
516 517
517 518 @inherit_global_settings.setter
518 519 @assert_repo_settings
519 520 def inherit_global_settings(self, value):
520 521 self.repo_settings.create_or_update_setting(
521 522 self.INHERIT_SETTINGS, value, type_='bool')
522 523
523 524 def get_keyname(self, key_name, prefix='rhodecode_'):
524 525 return f'{prefix}{key_name}'
525 526
526 527 def get_global_svn_branch_patterns(self):
527 528 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
528 529
529 530 @assert_repo_settings
530 531 def get_repo_svn_branch_patterns(self):
531 532 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
532 533
533 534 def get_global_svn_tag_patterns(self):
534 535 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
535 536
536 537 @assert_repo_settings
537 538 def get_repo_svn_tag_patterns(self):
538 539 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
539 540
540 541 def get_global_settings(self):
541 542 return self._collect_all_settings(global_=True)
542 543
543 544 @assert_repo_settings
544 545 def get_repo_settings(self):
545 546 return self._collect_all_settings(global_=False)
546 547
547 548 @assert_repo_settings
548 549 def get_repo_settings_inherited(self):
549 550 global_settings = self.get_global_settings()
550 551 global_settings.update(self.get_repo_settings())
551 552 return global_settings
552 553
553 554 @assert_repo_settings
554 555 def create_or_update_repo_settings(
555 556 self, data, inherit_global_settings=False):
556 557 from rhodecode.model.scm import ScmModel
557 558
558 559 self.inherit_global_settings = inherit_global_settings
559 560
560 561 repo = self.repo_settings.get_repo()
561 562 if not inherit_global_settings:
562 563 if repo.repo_type == 'svn':
563 564 self.create_repo_svn_settings(data)
564 565 else:
565 566 self.create_or_update_repo_hook_settings(data)
566 567 self.create_or_update_repo_pr_settings(data)
567 568
568 569 if repo.repo_type == 'hg':
569 570 self.create_or_update_repo_hg_settings(data)
570 571
571 572 if repo.repo_type == 'git':
572 573 self.create_or_update_repo_git_settings(data)
573 574
574 575 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
575 576
576 577 @assert_repo_settings
577 578 def create_or_update_repo_hook_settings(self, data):
578 579 for section, key in self.HOOKS_SETTINGS:
579 580 data_key = self._get_form_ui_key(section, key)
580 581 if data_key not in data:
581 582 raise ValueError(
582 583 f'The given data does not contain {data_key} key')
583 584
584 585 active = data.get(data_key)
585 586 repo_setting = self.repo_settings.get_ui_by_section_and_key(
586 587 section, key)
587 588 if not repo_setting:
588 589 global_setting = self.global_settings.\
589 590 get_ui_by_section_and_key(section, key)
590 591 self.repo_settings.create_ui_section_value(
591 592 section, global_setting.ui_value, key=key, active=active)
592 593 else:
593 594 repo_setting.ui_active = active
594 595 Session().add(repo_setting)
595 596
596 597 def update_global_hook_settings(self, data):
597 598 for section, key in self.HOOKS_SETTINGS:
598 599 data_key = self._get_form_ui_key(section, key)
599 600 if data_key not in data:
600 601 raise ValueError(
601 602 f'The given data does not contain {data_key} key')
602 603 active = data.get(data_key)
603 604 repo_setting = self.global_settings.get_ui_by_section_and_key(
604 605 section, key)
605 606 repo_setting.ui_active = active
606 607 Session().add(repo_setting)
607 608
608 609 @assert_repo_settings
609 610 def create_or_update_repo_pr_settings(self, data):
610 611 return self._create_or_update_general_settings(
611 612 self.repo_settings, data)
612 613
613 614 def create_or_update_global_pr_settings(self, data):
614 615 return self._create_or_update_general_settings(
615 616 self.global_settings, data)
616 617
617 618 @assert_repo_settings
618 619 def create_repo_svn_settings(self, data):
619 620 return self._create_svn_settings(self.repo_settings, data)
620 621
621 622 def _set_evolution(self, settings, is_enabled):
622 623 if is_enabled:
623 624 # if evolve is active set evolution=all
624 625
625 626 self._create_or_update_ui(
626 627 settings, *('experimental', 'evolution'), value='all',
627 628 active=True)
628 629 self._create_or_update_ui(
629 630 settings, *('experimental', 'evolution.exchange'), value='yes',
630 631 active=True)
631 632 # if evolve is active set topics server support
632 633 self._create_or_update_ui(
633 634 settings, *('extensions', 'topic'), value='',
634 635 active=True)
635 636
636 637 else:
637 638 self._create_or_update_ui(
638 639 settings, *('experimental', 'evolution'), value='',
639 640 active=False)
640 641 self._create_or_update_ui(
641 642 settings, *('experimental', 'evolution.exchange'), value='no',
642 643 active=False)
643 644 self._create_or_update_ui(
644 645 settings, *('extensions', 'topic'), value='',
645 646 active=False)
646 647
647 648 @assert_repo_settings
648 649 def create_or_update_repo_hg_settings(self, data):
649 650 largefiles, phases, evolve = \
650 651 self.HG_SETTINGS[:3]
651 652 largefiles_key, phases_key, evolve_key = \
652 653 self._get_settings_keys(self.HG_SETTINGS[:3], data)
653 654
654 655 self._create_or_update_ui(
655 656 self.repo_settings, *largefiles, value='',
656 657 active=data[largefiles_key])
657 658 self._create_or_update_ui(
658 659 self.repo_settings, *evolve, value='',
659 660 active=data[evolve_key])
660 661 self._set_evolution(self.repo_settings, is_enabled=data[evolve_key])
661 662
662 663 self._create_or_update_ui(
663 664 self.repo_settings, *phases, value=safe_str(data[phases_key]))
664 665
665 666 def create_or_update_global_hg_settings(self, data):
666 667 opts_len = 3
667 668 largefiles, phases, evolve \
668 669 = self.GLOBAL_HG_SETTINGS[:opts_len]
669 670 largefiles_key, phases_key, evolve_key \
670 671 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:opts_len], data)
671 672
672 673 self._create_or_update_ui(
673 674 self.global_settings, *largefiles, value='',
674 675 active=data[largefiles_key])
675 676 self._create_or_update_ui(
676 677 self.global_settings, *phases, value=safe_str(data[phases_key]))
677 678 self._create_or_update_ui(
678 679 self.global_settings, *evolve, value='',
679 680 active=data[evolve_key])
680 681 self._set_evolution(self.global_settings, is_enabled=data[evolve_key])
681 682
682 683 def create_or_update_repo_git_settings(self, data):
683 684 # NOTE(marcink): # comma makes unpack work properly
684 685 lfs_enabled, \
685 686 = self.GIT_SETTINGS
686 687
687 688 lfs_enabled_key, \
688 689 = self._get_settings_keys(self.GIT_SETTINGS, data)
689 690
690 691 self._create_or_update_ui(
691 692 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
692 693 active=data[lfs_enabled_key])
693 694
694 695 def create_or_update_global_git_settings(self, data):
695 696 lfs_enabled = self.GLOBAL_GIT_SETTINGS[0]
696 697 lfs_enabled_key = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)[0]
697 698
698 699 self._create_or_update_ui(
699 700 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
700 701 active=data[lfs_enabled_key])
701 702
702 703 def create_or_update_global_svn_settings(self, data):
703 704 # branch/tags patterns
704 705 self._create_svn_settings(self.global_settings, data)
705 706
706 707 @assert_repo_settings
707 708 def delete_repo_svn_pattern(self, id_):
708 709 ui = self.repo_settings.UiDbModel.get(id_)
709 710 if ui and ui.repository.repo_name == self.repo_settings.repo:
710 711 # only delete if it's the same repo as initialized settings
711 712 self.repo_settings.delete_ui(id_)
712 713 else:
713 714             # raise an error as if the option could not be found
714 715 self.repo_settings.delete_ui(-1)
715 716
716 717 def delete_global_svn_pattern(self, id_):
717 718 self.global_settings.delete_ui(id_)
718 719
719 720 @assert_repo_settings
720 721 def get_repo_ui_settings(self, section=None, key=None):
721 722 global_uis = self.global_settings.get_ui(section, key)
722 723 repo_uis = self.repo_settings.get_ui(section, key)
723 724
724 725 filtered_repo_uis = self._filter_ui_settings(repo_uis)
725 726 filtered_repo_uis_keys = [
726 727 (s.section, s.key) for s in filtered_repo_uis]
727 728
728 729 def _is_global_ui_filtered(ui):
729 730 return (
730 731 (ui.section, ui.key) in filtered_repo_uis_keys
731 732 or ui.section in self._svn_sections)
732 733
733 734 filtered_global_uis = [
734 735 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
735 736
736 737 return filtered_global_uis + filtered_repo_uis
737 738
738 739 def get_global_ui_settings(self, section=None, key=None):
739 740 return self.global_settings.get_ui(section, key)
740 741
741 742 def get_ui_settings_as_config_obj(self, section=None, key=None):
742 743 config = base.Config()
743 744
744 745 ui_settings = self.get_ui_settings(section=section, key=key)
745 746
746 747 for entry in ui_settings:
747 748 config.set(entry.section, entry.key, entry.value)
748 749
749 750 return config
750 751
751 752 def get_ui_settings(self, section=None, key=None):
752 753 if not self.repo_settings or self.inherit_global_settings:
753 754 return self.get_global_ui_settings(section, key)
754 755 else:
755 756 return self.get_repo_ui_settings(section, key)
756 757
757 758 def get_svn_patterns(self, section=None):
758 759 if not self.repo_settings:
759 760 return self.get_global_ui_settings(section)
760 761 else:
761 762 return self.get_repo_ui_settings(section)
762 763
763 764 @assert_repo_settings
764 765 def get_repo_general_settings(self):
765 766 global_settings = self.global_settings.get_all_settings()
766 767 repo_settings = self.repo_settings.get_all_settings()
767 768 filtered_repo_settings = self._filter_general_settings(repo_settings)
768 769 global_settings.update(filtered_repo_settings)
769 770 return global_settings
770 771
771 772 def get_global_general_settings(self):
772 773 return self.global_settings.get_all_settings()
773 774
774 775 def get_general_settings(self):
775 776 if not self.repo_settings or self.inherit_global_settings:
776 777 return self.get_global_general_settings()
777 778 else:
778 779 return self.get_repo_general_settings()
779 780
780 781 def _filter_ui_settings(self, settings):
781 782 filtered_settings = [
782 783 s for s in settings if self._should_keep_setting(s)]
783 784 return filtered_settings
784 785
785 786 def _should_keep_setting(self, setting):
786 787 keep = (
787 788 (setting.section, setting.key) in self._ui_settings or
788 789 setting.section in self._svn_sections)
789 790 return keep
790 791
791 792 def _filter_general_settings(self, settings):
792 793 keys = [self.get_keyname(key) for key in self.GENERAL_SETTINGS]
793 794 return {
794 795 k: settings[k]
795 796 for k in settings if k in keys}
796 797
797 798 def _collect_all_settings(self, global_=False):
798 799 settings = self.global_settings if global_ else self.repo_settings
799 800 result = {}
800 801
801 802 for section, key in self._ui_settings:
802 803 ui = settings.get_ui_by_section_and_key(section, key)
803 804 result_key = self._get_form_ui_key(section, key)
804 805
805 806 if ui:
806 807 if section in ('hooks', 'extensions'):
807 808 result[result_key] = ui.ui_active
808 809 elif result_key in ['vcs_git_lfs_enabled']:
809 810 result[result_key] = ui.ui_active
810 811 else:
811 812 result[result_key] = ui.ui_value
812 813
813 814 for name in self.GENERAL_SETTINGS:
814 815 setting = settings.get_setting_by_name(name)
815 816 if setting:
816 817 result_key = self.get_keyname(name)
817 818 result[result_key] = setting.app_settings_value
818 819
819 820 return result
820 821
821 822 def _get_form_ui_key(self, section, key):
822 823 return '{section}_{key}'.format(
823 824 section=section, key=key.replace('.', '_'))
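A quick illustration of the mapping this helper produces; these form keys line up with the checkbox names used in the vcs settings template later in this change:

    # ('hooks', 'changegroup.repo_size') -> 'hooks_changegroup_repo_size'
    # ('vcs_git_lfs', 'enabled')         -> 'vcs_git_lfs_enabled'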
824 825
825 826 def _create_or_update_ui(
826 827 self, settings, section, key, value=None, active=None):
827 828 ui = settings.get_ui_by_section_and_key(section, key)
828 829 if not ui:
829 830 active = True if active is None else active
830 831 settings.create_ui_section_value(
831 832 section, value, key=key, active=active)
832 833 else:
833 834 if active is not None:
834 835 ui.ui_active = active
835 836 if value is not None:
836 837 ui.ui_value = value
837 838 Session().add(ui)
838 839
839 840 def _create_svn_settings(self, settings, data):
840 841 svn_settings = {
841 842 'new_svn_branch': self.SVN_BRANCH_SECTION,
842 843 'new_svn_tag': self.SVN_TAG_SECTION
843 844 }
844 845 for key in svn_settings:
845 846 if data.get(key):
846 847 settings.create_ui_section_value(svn_settings[key], data[key])
847 848
848 849 def _create_or_update_general_settings(self, settings, data):
849 850 for name in self.GENERAL_SETTINGS:
850 851 data_key = self.get_keyname(name)
851 852 if data_key not in data:
852 853 raise ValueError(
853 854 f'The given data does not contain {data_key} key')
854 855 setting = settings.create_or_update_setting(
855 856 name, data[data_key], 'bool')
856 857 Session().add(setting)
857 858
858 859 def _get_settings_keys(self, settings, data):
859 860 data_keys = [self._get_form_ui_key(*s) for s in settings]
860 861 for data_key in data_keys:
861 862 if data_key not in data:
862 863 raise ValueError(
863 864 f'The given data does not contain {data_key} key')
864 865 return data_keys
@@ -1,422 +1,450
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18 import io
19 19 import shlex
20 20
21 21 import math
22 22 import re
23 23 import os
24 24 import datetime
25 25 import logging
26 26 import queue
27 27 import subprocess
28 28
29 29
30 30 from dateutil.parser import parse
31 31 from pyramid.interfaces import IRoutesMapper
32 32 from pyramid.settings import asbool
33 33 from pyramid.path import AssetResolver
34 34 from threading import Thread
35 35
36 36 from rhodecode.config.jsroutes import generate_jsroutes_content
37 37 from rhodecode.lib.base import get_auth_user
38 38 from rhodecode.lib.celerylib.loader import set_celery_conf
39 39
40 40 import rhodecode
41 41
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
46 46 def add_renderer_globals(event):
47 47 from rhodecode.lib import helpers
48 48
49 49 # TODO: When executed in pyramid view context the request is not available
50 50 # in the event. Find a better solution to get the request.
51 51 from pyramid.threadlocal import get_current_request
52 52 request = event['request'] or get_current_request()
53 53
54 54 # Add Pyramid translation as '_' to context
55 55 event['_'] = request.translate
56 56 event['_ungettext'] = request.plularize
57 57 event['h'] = helpers
58 58
59 59
60 def auto_merge_pr_if_needed(event):
61 from rhodecode.model.db import PullRequest
62 from rhodecode.model.pull_request import (
63 PullRequestModel, ChangesetStatus, MergeCheck
64 )
65
66 pr_event_data = event.as_dict()['pullrequest']
67 pull_request = PullRequest.get(pr_event_data['pull_request_id'])
68 calculated_status = pr_event_data['status']
69 if (calculated_status == ChangesetStatus.STATUS_APPROVED
70 and PullRequestModel().is_automatic_merge_enabled(pull_request)):
71 user = pull_request.author.AuthUser()
72
73 merge_check = MergeCheck.validate(
74 pull_request, user, translator=lambda x: x, fail_early=True
75 )
76 if merge_check.merge_possible:
77 from rhodecode.lib.base import vcs_operation_context
78 extras = vcs_operation_context(
79 event.request.environ, repo_name=pull_request.target_repo.repo_name,
80 username=user.username, action='push',
81 scm=pull_request.target_repo.repo_type)
82 from rc_ee.lib.celerylib.tasks import auto_merge_repo
83 auto_merge_repo.apply_async(
84 args=(pull_request.pull_request_id, extras)
85 )
86
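A subscriber like this is presumably registered against the pull request review event during application configuration; a hedged wiring sketch (the exact event class name is an assumption, it is not shown in this change):

    # hypothetical registration, e.g. in the pyramid includeme()/main() setup:
    # from rhodecode import events
    # config.add_subscriber(auto_merge_pr_if_needed, events.PullRequestReviewEvent)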
87
60 88 def set_user_lang(event):
61 89 request = event.request
62 90 cur_user = getattr(request, 'user', None)
63 91
64 92 if cur_user:
65 93 user_lang = cur_user.get_instance().user_data.get('language')
66 94 if user_lang:
67 95 log.debug('lang: setting current user:%s language to: %s', cur_user, user_lang)
68 96 event.request._LOCALE_ = user_lang
69 97
70 98
71 99 def update_celery_conf(event):
72 100 log.debug('Setting celery config from new request')
73 101 set_celery_conf(request=event.request, registry=event.request.registry)
74 102
75 103
76 104 def add_request_user_context(event):
77 105 """
78 106 Adds auth user into request context
79 107 """
80 108
81 109 request = event.request
82 110 # access req_id as soon as possible
83 111 req_id = request.req_id
84 112
85 113 if hasattr(request, 'vcs_call'):
86 114 # skip vcs calls
87 115 return
88 116
89 117 if hasattr(request, 'rpc_method'):
90 118 # skip api calls
91 119 return
92 120
93 121 auth_user, auth_token = get_auth_user(request)
94 122 request.user = auth_user
95 123 request.user_auth_token = auth_token
96 124 request.environ['rc_auth_user'] = auth_user
97 125 request.environ['rc_auth_user_id'] = str(auth_user.user_id)
98 126 request.environ['rc_req_id'] = req_id
99 127
100 128
101 129 def reset_log_bucket(event):
102 130 """
103 131 reset the log bucket on new request
104 132 """
105 133 request = event.request
106 134 request.req_id_records_init()
107 135
108 136
109 137 def scan_repositories_if_enabled(event):
110 138 """
111 139 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
112 140 does a repository scan if enabled in the settings.
113 141 """
114 142
115 143 settings = event.app.registry.settings
116 144 vcs_server_enabled = settings['vcs.server.enable']
117 145 import_on_startup = settings['startup.import_repos']
118 146
119 147 if vcs_server_enabled and import_on_startup:
120 148 from rhodecode.model.scm import ScmModel
121 149 from rhodecode.lib.utils import repo2db_mapper
122 150 scm = ScmModel()
123 151 repositories = scm.repo_scan(scm.repos_path)
124 152 repo2db_mapper(repositories)
125 153
126 154
127 155 def write_metadata_if_needed(event):
128 156 """
129 157 Writes upgrade metadata
130 158 """
131 159 import rhodecode
132 160 from rhodecode.lib import system_info
133 161 from rhodecode.lib import ext_json
134 162
135 163 fname = '.rcmetadata.json'
136 164 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
137 165 metadata_destination = os.path.join(ini_loc, fname)
138 166
139 167 def get_update_age():
140 168 now = datetime.datetime.utcnow()
141 169
142 170 with open(metadata_destination, 'rb') as f:
143 171 data = ext_json.json.loads(f.read())
144 172 if 'created_on' in data:
145 173 update_date = parse(data['created_on'])
146 174 diff = now - update_date
147 175 return diff.total_seconds() / 60.0
148 176
149 177 return 0
150 178
151 179 def write():
152 180 configuration = system_info.SysInfo(
153 181 system_info.rhodecode_config)()['value']
154 182 license_token = configuration['config']['license_token']
155 183
156 184 setup = dict(
157 185 workers=configuration['config']['server:main'].get(
158 186 'workers', '?'),
159 187 worker_type=configuration['config']['server:main'].get(
160 188 'worker_class', 'sync'),
161 189 )
162 190 dbinfo = system_info.SysInfo(system_info.database_info)()['value']
163 191 del dbinfo['url']
164 192
165 193 metadata = dict(
166 194 desc='upgrade metadata info',
167 195 license_token=license_token,
168 196 created_on=datetime.datetime.utcnow().isoformat(),
169 197 usage=system_info.SysInfo(system_info.usage_info)()['value'],
170 198 platform=system_info.SysInfo(system_info.platform_type)()['value'],
171 199 database=dbinfo,
172 200 cpu=system_info.SysInfo(system_info.cpu)()['value'],
173 201 memory=system_info.SysInfo(system_info.memory)()['value'],
174 202 setup=setup
175 203 )
176 204
177 205 with open(metadata_destination, 'wb') as f:
178 206 f.write(ext_json.json.dumps(metadata))
179 207
180 208 settings = event.app.registry.settings
181 209 if settings.get('metadata.skip'):
182 210 return
183 211
184 212     # only write this every 24h; worker restarts caused unwanted delays
185 213 try:
186 214 age_in_min = get_update_age()
187 215 except Exception:
188 216 age_in_min = 0
189 217
190 218     if age_in_min and age_in_min < 60 * 24:
191 219 return
192 220
193 221 try:
194 222 write()
195 223 except Exception:
196 224 pass
197 225
198 226
199 227 def write_usage_data(event):
200 228 import rhodecode
201 229 from rhodecode.lib import system_info
202 230 from rhodecode.lib import ext_json
203 231
204 232 settings = event.app.registry.settings
205 233 instance_tag = settings.get('metadata.write_usage_tag')
206 234 if not settings.get('metadata.write_usage'):
207 235 return
208 236
209 237 def get_update_age(dest_file):
210 238 now = datetime.datetime.now(datetime.UTC)
211 239
212 240 with open(dest_file, 'rb') as f:
213 241 data = ext_json.json.loads(f.read())
214 242 if 'created_on' in data:
215 243 update_date = parse(data['created_on'])
216 244 diff = now - update_date
217 245 return math.ceil(diff.total_seconds() / 60.0)
218 246
219 247 return 0
220 248
221 249 utc_date = datetime.datetime.now(datetime.UTC)
222 250 hour_quarter = int(math.ceil((utc_date.hour + utc_date.minute/60.0) / 6.))
223 251 fname = f'.rc_usage_{utc_date.year}{utc_date.month:02d}{utc_date.day:02d}_{hour_quarter}.json'
224 252 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
225 253
226 254 usage_dir = os.path.join(ini_loc, '.rcusage')
227 255 if not os.path.isdir(usage_dir):
228 256 os.makedirs(usage_dir)
229 257 usage_metadata_destination = os.path.join(usage_dir, fname)
230 258
231 259 try:
232 260 age_in_min = get_update_age(usage_metadata_destination)
233 261 except Exception:
234 262 age_in_min = 0
235 263
236 264     # write at most once every 6 hours
237 265 if age_in_min and age_in_min < 60 * 6:
238 266 log.debug('Usage file created %s minutes ago, skipping (threshold: %s minutes)...',
239 267 age_in_min, 60 * 6)
240 268 return
241 269
242 270 def write(dest_file):
243 271 configuration = system_info.SysInfo(system_info.rhodecode_config)()['value']
244 272 license_token = configuration['config']['license_token']
245 273
246 274 metadata = dict(
247 275 desc='Usage data',
248 276 instance_tag=instance_tag,
249 277 license_token=license_token,
250 278 created_on=datetime.datetime.utcnow().isoformat(),
251 279 usage=system_info.SysInfo(system_info.usage_info)()['value'],
252 280 )
253 281
254 282 with open(dest_file, 'wb') as f:
255 283 f.write(ext_json.formatted_json(metadata))
256 284
257 285 try:
258 286 log.debug('Writing usage file at: %s', usage_metadata_destination)
259 287 write(usage_metadata_destination)
260 288 except Exception:
261 289 pass
262 290
263 291
264 292 def write_js_routes_if_enabled(event):
265 293 registry = event.app.registry
266 294
267 295 mapper = registry.queryUtility(IRoutesMapper)
268 296 _argument_prog = re.compile(r'\{(.*?)\}|:\((.*)\)')
269 297
270 298 def _extract_route_information(route):
271 299 """
272 300 Convert a route into tuple(name, path, args), eg:
273 301 ('show_user', '/profile/%(username)s', ['username'])
274 302 """
275 303
276 304 route_path = route.pattern
277 305 pattern = route.pattern
278 306
279 307 def replace(matchobj):
280 308 if matchobj.group(1):
281 309 return "%%(%s)s" % matchobj.group(1).split(':')[0]
282 310 else:
283 311 return "%%(%s)s" % matchobj.group(2)
284 312
285 313 route_path = _argument_prog.sub(replace, route_path)
286 314
287 315 if not route_path.startswith('/'):
288 316 route_path = f'/{route_path}'
289 317
290 318 return (
291 319 route.name,
292 320 route_path,
293 321 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
294 322 for arg in _argument_prog.findall(pattern)]
295 323 )
296 324
297 325 def get_routes():
298 326 # pyramid routes
299 327 for route in mapper.get_routes():
300 328 if not route.name.startswith('__'):
301 329 yield _extract_route_information(route)
302 330
303 331 if asbool(registry.settings.get('generate_js_files', 'false')):
304 332 static_path = AssetResolver().resolve('rhodecode:public').abspath()
305 333 jsroutes = get_routes()
306 334 jsroutes_file_content = generate_jsroutes_content(jsroutes)
307 335 jsroutes_file_path = os.path.join(
308 336 static_path, 'js', 'rhodecode', 'routes.js')
309 337
310 338 try:
311 339 with open(jsroutes_file_path, 'w', encoding='utf-8') as f:
312 340 f.write(jsroutes_file_content)
313 341 log.debug('generated JS files in %s', jsroutes_file_path)
314 342 except Exception:
315 343 log.exception('Failed to write routes.js into %s', jsroutes_file_path)
316 344
317 345
318 346 def import_license_if_present(event):
319 347 """
320 348 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
321 349     imports a license key based on the presence of the configured license file.
322 350 """
323 351 settings = event.app.registry.settings
324 352
325 353 rhodecode_edition_id = settings.get('rhodecode.edition_id')
326 354 license_file_path = settings.get('license.import_path')
327 355 force = settings.get('license.import_path_mode') == 'force'
328 356
329 357 if license_file_path and rhodecode_edition_id == 'EE':
330 358         log.debug('license.import_path= is set, importing license from %s', license_file_path)
331 359 from rhodecode.model.meta import Session
332 360 from rhodecode.model.license import apply_license_from_file
333 361 try:
334 362 apply_license_from_file(license_file_path, force=force)
335 363 Session().commit()
336 364 except OSError:
337 365 log.exception('Failed to import license from %s, make sure this file exists', license_file_path)
338 366
339 367
340 368 class Subscriber(object):
341 369 """
342 370 Base class for subscribers to the pyramid event system.
343 371 """
344 372 def __call__(self, event):
345 373 self.run(event)
346 374
347 375 def run(self, event):
348 376 raise NotImplementedError('Subclass has to implement this.')
349 377
350 378
351 379 class AsyncSubscriber(Subscriber):
352 380 """
353 381 Subscriber that handles the execution of events in a separate task to not
354 382 block the execution of the code which triggers the event. It puts the
355 383 received events into a queue from which the worker process takes them in
356 384 order.
357 385 """
358 386 def __init__(self):
359 387 self._stop = False
360 388 self._eventq = queue.Queue()
361 389 self._worker = self.create_worker()
362 390 self._worker.start()
363 391
364 392 def __call__(self, event):
365 393 self._eventq.put(event)
366 394
367 395 def create_worker(self):
368 396 worker = Thread(target=self.do_work)
369 397 worker.daemon = True
370 398 return worker
371 399
372 400 def stop_worker(self):
373 401         self._stop = True
374 402 self._eventq.put(None)
375 403 self._worker.join()
376 404
377 405 def do_work(self):
378 406 while not self._stop:
379 407 event = self._eventq.get()
380 408 if event is not None:
381 409 self.run(event)
382 410
383 411
384 412 class AsyncSubprocessSubscriber(AsyncSubscriber):
385 413 """
386 414 Subscriber that uses the subprocess module to execute a command if an
387 415 event is received. Events are handled asynchronously::
388 416
389 417 subscriber = AsyncSubprocessSubscriber('ls -la', timeout=10)
390 418 subscriber(dummyEvent) # running __call__(event)
391 419
392 420 """
393 421
394 422 def __init__(self, cmd, timeout=None):
395 423 if not isinstance(cmd, (list, tuple)):
396 424 cmd = shlex.split(cmd)
397 425 super().__init__()
398 426 self._cmd = cmd
399 427 self._timeout = timeout
400 428
401 429 def run(self, event):
402 430 cmd = self._cmd
403 431 timeout = self._timeout
404 432 log.debug('Executing command %s.', cmd)
405 433
406 434 try:
407 435 output = subprocess.check_output(
408 436 cmd, timeout=timeout, stderr=subprocess.STDOUT)
409 437 log.debug('Command finished %s', cmd)
410 438 if output:
411 439 log.debug('Command output: %s', output)
412 440 except subprocess.TimeoutExpired as e:
413 441 log.exception('Timeout while executing command.')
414 442 if e.output:
415 443 log.error('Command output: %s', e.output)
416 444 except subprocess.CalledProcessError as e:
417 445 log.exception('Error while executing command.')
418 446 if e.output:
419 447 log.error('Command output: %s', e.output)
420 448 except Exception:
421 449 log.exception(
422 450 'Exception while executing command %s.', cmd)
@@ -1,308 +1,325
1 1 ## snippet for displaying vcs settings
2 2 ## usage:
3 3 ## <%namespace name="vcss" file="/base/vcssettings.mako"/>
4 4 ## ${vcss.vcs_settings_fields()}
5 5
6 6 <%def name="vcs_settings_fields(suffix='', svn_branch_patterns=None, svn_tag_patterns=None, repo_type=None, display_globals=False, **kwargs)">
7 7 % if display_globals:
8 8
9 9 % endif
10 10
11 11 % if display_globals or repo_type in ['git', 'hg']:
12 12 <div class="panel panel-default">
13 13 <div class="panel-heading" id="vcs-hooks-options">
14 14 <h3 class="panel-title">${_('Internal Hooks')}<a class="permalink" href="#vcs-hooks-options"> ΒΆ</a></h3>
15 15 </div>
16 16 <div class="panel-body">
17 17 <div class="field">
18 18 <div class="checkbox">
19 19 ${h.checkbox('hooks_changegroup_repo_size' + suffix, 'True', **kwargs)}
20 20 <label for="hooks_changegroup_repo_size${suffix}">${_('Show repository size after push')}</label>
21 21 </div>
22 22
23 23 <div class="label">
24 24 <span class="help-block">${_('Trigger a hook that calculates repository size after each push.')}</span>
25 25 </div>
26 26 <div class="checkbox">
27 27 ${h.checkbox('hooks_changegroup_push_logger' + suffix, 'True', **kwargs)}
28 28 <label for="hooks_changegroup_push_logger${suffix}">${_('Execute pre/post push hooks')}</label>
29 29 </div>
30 30 <div class="label">
31 31                         <span class="help-block">${_('Execute built-in pre/post push hooks. This also executes rcextensions hooks.')}</span>
32 32 </div>
33 33 <div class="checkbox">
34 34 ${h.checkbox('hooks_outgoing_pull_logger' + suffix, 'True', **kwargs)}
35 35 <label for="hooks_outgoing_pull_logger${suffix}">${_('Execute pre/post pull hooks')}</label>
36 36 </div>
37 37 <div class="label">
38 38                         <span class="help-block">${_('Execute built-in pre/post pull hooks. This also executes rcextensions hooks.')}</span>
39 39 </div>
40 40 </div>
41 41 </div>
42 42 </div>
43 43 % endif
44 44
45 45 % if display_globals or repo_type in ['hg']:
46 46 <div class="panel panel-default">
47 47 <div class="panel-heading" id="vcs-hg-options">
48 48 <h3 class="panel-title">${_('Mercurial Settings')}<a class="permalink" href="#vcs-hg-options"> ΒΆ</a></h3>
49 49 </div>
50 50 <div class="panel-body">
51 51 <div class="checkbox">
52 52 ${h.checkbox('extensions_largefiles' + suffix, 'True', **kwargs)}
53 53 <label for="extensions_largefiles${suffix}">${_('Enable largefiles extension')}</label>
54 54 </div>
55 55 <div class="label">
56 56 % if display_globals:
57 57 <span class="help-block">${_('Enable Largefiles extensions for all repositories.')}</span>
58 58 % else:
59 59 <span class="help-block">${_('Enable Largefiles extensions for this repository.')}</span>
60 60 % endif
61 61 </div>
62 62
63 63 <div class="checkbox">
64 64 ${h.checkbox('phases_publish' + suffix, 'True', **kwargs)}
65 65 <label for="phases_publish${suffix}">${_('Set repositories as publishing') if display_globals else _('Set repository as publishing')}</label>
66 66 </div>
67 67 <div class="label">
68 68 <span class="help-block">${_('When this is enabled all commits in the repository are seen as public commits by clients.')}</span>
69 69 </div>
70 70
71 71 <div class="checkbox">
72 72 ${h.checkbox('extensions_evolve' + suffix, 'True', **kwargs)}
73 73 <label for="extensions_evolve${suffix}">${_('Enable Evolve and Topic extension')}</label>
74 74 </div>
75 75 <div class="label">
76 76 % if display_globals:
77 77 <span class="help-block">${_('Enable Evolve and Topic extensions for all repositories.')}</span>
78 78 % else:
79 79 <span class="help-block">${_('Enable Evolve and Topic extensions for this repository.')}</span>
80 80 % endif
81 81 </div>
82 82
83 83 </div>
84 84 </div>
85 85 % endif
86 86
87 87 % if display_globals or repo_type in ['git']:
88 88 <div class="panel panel-default">
89 89 <div class="panel-heading" id="vcs-git-options">
90 90 <h3 class="panel-title">${_('Git Settings')}<a class="permalink" href="#vcs-git-options"> ΒΆ</a></h3>
91 91 </div>
92 92 <div class="panel-body">
93 93 <div class="checkbox">
94 94 ${h.checkbox('vcs_git_lfs_enabled' + suffix, 'True', **kwargs)}
95 95 <label for="vcs_git_lfs_enabled${suffix}">${_('Enable lfs extension')}</label>
96 96 </div>
97 97 <div class="label">
98 98 % if display_globals:
99 99 <span class="help-block">${_('Enable lfs extensions for all repositories.')}</span>
100 100 % else:
101 101 <span class="help-block">${_('Enable lfs extensions for this repository.')}</span>
102 102 % endif
103 103 </div>
104 104 </div>
105 105 </div>
106 106 % endif
107 107
108 108 % if display_globals or repo_type in ['svn']:
109 109 <div class="panel panel-default">
110 110 <div class="panel-heading" id="vcs-svn-options">
111 111 <h3 class="panel-title">${_('Subversion Settings')}<a class="permalink" href="#vcs-svn-options"> ΒΆ</a></h3>
112 112 </div>
113 113 <div class="panel-body">
114 114 % if display_globals:
115 115 <div class="field">
116 116 <div class="content" >
117 117 <label>${_('mod_dav config')}</label><br/>
118 118 <code>path: ${c.svn_config_path}</code>
119 119 </div>
120 120 <br/>
121 121
122 122 <div>
123 123
124 124 % if c.svn_generate_config:
125 125 <span class="buttons">
126 126 <button class="btn btn-primary" id="vcs_svn_generate_cfg">${_('Re-generate Apache Config')}</button>
127 127 </span>
128 128 % endif
129 129 </div>
130 130 </div>
131 131 % endif
132 132
133 133 <div class="field">
134 134 <div class="content" >
135 135 <label>${_('Repository patterns')}</label><br/>
136 136 </div>
137 137 </div>
138 138 <div class="label">
139 139                     <span class="help-block">${_('Patterns for identifying SVN branches and tags. For recursive search, use "*". E.g. "/branches/*"')}</span>
140 140 </div>
141 141
142 142 <div class="field branch_patterns">
143 143 <div class="input" >
144 144 <label>${_('Branches')}:</label><br/>
145 145 </div>
146 146 % if svn_branch_patterns:
147 147 % for branch in svn_branch_patterns:
148 148 <div class="input adjacent" id="${'id%s' % branch.ui_id}">
149 149 ${h.hidden('branch_ui_key' + suffix, branch.ui_key)}
150 150 ${h.text('branch_value_%d' % branch.ui_id + suffix, branch.ui_value, size=59, readonly="readonly", class_='disabled')}
151 151 % if kwargs.get('disabled') != 'disabled':
152 152 <span class="btn btn-x" onclick="ajaxDeletePattern(${branch.ui_id},'${'id%s' % branch.ui_id}')">
153 153 ${_('Delete')}
154 154 </span>
155 155 % endif
156 156 </div>
157 157 % endfor
158 158 %endif
159 159 </div>
160 160 % if kwargs.get('disabled') != 'disabled':
161 161 <div class="field branch_patterns">
162 162 <div class="input" >
163 163 ${h.text('new_svn_branch',size=59,placeholder='New branch pattern')}
164 164 </div>
165 165 </div>
166 166 % endif
167 167 <div class="field tag_patterns">
168 168 <div class="input" >
169 169 <label>${_('Tags')}:</label><br/>
170 170 </div>
171 171 % if svn_tag_patterns:
172 172 % for tag in svn_tag_patterns:
173 173 <div class="input" id="${'id%s' % tag.ui_id + suffix}">
174 174 ${h.hidden('tag_ui_key' + suffix, tag.ui_key)}
175 175 ${h.text('tag_ui_value_new_%d' % tag.ui_id + suffix, tag.ui_value, size=59, readonly="readonly", class_='disabled tag_input')}
176 176 % if kwargs.get('disabled') != 'disabled':
177 177 <span class="btn btn-x" onclick="ajaxDeletePattern(${tag.ui_id},'${'id%s' % tag.ui_id}')">
178 178 ${_('Delete')}
179 179 </span>
180 180 %endif
181 181 </div>
182 182 % endfor
183 183 % endif
184 184 </div>
185 185 % if kwargs.get('disabled') != 'disabled':
186 186 <div class="field tag_patterns">
187 187 <div class="input" >
188 188 ${h.text('new_svn_tag' + suffix, size=59, placeholder='New tag pattern')}
189 189 </div>
190 190 </div>
191 191 %endif
192 192 </div>
193 193 </div>
194 194 % else:
195 195 ${h.hidden('new_svn_branch' + suffix, '')}
196 196 ${h.hidden('new_svn_tag' + suffix, '')}
197 197 % endif
198 198
199 199
200 200 % if display_globals or repo_type in ['hg', 'git']:
201 201 <div class="panel panel-default">
202 202 <div class="panel-heading" id="vcs-pull-requests-options">
203 203 <h3 class="panel-title">${_('Pull Request Settings')}<a class="permalink" href="#vcs-pull-requests-options"> ¶</a></h3>
204 204 </div>
205 205 <div class="panel-body">
206 206 <div class="checkbox">
207 207 ${h.checkbox('rhodecode_pr_merge_enabled' + suffix, 'True', **kwargs)}
208 208 <label for="rhodecode_pr_merge_enabled${suffix}">${_('Enable server-side merge for pull requests')}</label>
209 209 </div>
210 210 <div class="label">
211 211 <span class="help-block">${_('Note: when this feature is enabled, it only runs hooks defined in the rcextension package. Custom hooks added on the Admin -> Settings -> Hooks page will not be run when pull requests are automatically merged from the web interface.')}</span>
212 212 </div>
213 %if c.rhodecode_edition_id != 'EE':
214 <div class="checkbox">
215 <input type="checkbox" id="rhodecode_auto_merge_enabled${suffix}" disabled>
216 <label for="rhodecode_auto_merge_enabled${suffix}">${_('Enable automatic merge for approved pull requests')}</label>
217 </div>
218 <div class="label">
219 <span class="help-block">${_('This feature is available in RhodeCode EE edition only. Contact {sales_email} to obtain a trial license.').format(sales_email='<a href="mailto:sales@rhodecode.com">sales@rhodecode.com</a>')|n}</span>
220 </div>
221 %else:
222 <div class="checkbox">
223 ${h.checkbox('rhodecode_auto_merge_enabled' + suffix, 'True', **kwargs)}
224 <label for="rhodecode_auto_merge_enabled${suffix}">${_('Enable automatic merge for approved pull requests')}</label>
225 </div>
226 <div class="label">
227 <span class="help-block">${_('When this is enabled, the pull request will be merged once it has at least one reviewer and is approved.')}</span>
228 </div>
229 %endif
213 230 <div class="checkbox">
214 231 ${h.checkbox('rhodecode_use_outdated_comments' + suffix, 'True', **kwargs)}
215 232 <label for="rhodecode_use_outdated_comments${suffix}">${_('Invalidate and relocate inline comments during update')}</label>
216 233 </div>
217 234 <div class="label">
218 235 <span class="help-block">${_('During the update of a pull request, the position of inline comments will be updated and outdated inline comments will be hidden.')}</span>
219 236 </div>
220 237 </div>
221 238 </div>
222 239 % endif
223 240
224 241 % if display_globals or repo_type in ['hg', 'git', 'svn']:
225 242 <div class="panel panel-default">
226 243 <div class="panel-heading" id="vcs-diff-cache-options">
227 244 <h3 class="panel-title">${_('Diff cache')}<a class="permalink" href="#vcs-diff-cache-options"> ¶</a></h3>
228 245 </div>
229 246 <div class="panel-body">
230 247 <div class="checkbox">
231 248 ${h.checkbox('rhodecode_diff_cache' + suffix, 'True', **kwargs)}
232 249 <label for="rhodecode_diff_cache${suffix}">${_('Enable caching diffs for pull requests cache and commits')}</label>
233 250 </div>
234 251 </div>
235 252 </div>
236 253 % endif
237 254
238 255 % if display_globals or repo_type in ['hg',]:
239 256 <div class="panel panel-default">
240 257 <div class="panel-heading" id="vcs-hg-pull-requests-options">
241 258 <h3 class="panel-title">${_('Mercurial Pull Request Settings')}<a class="permalink" href="#vcs-hg-pull-requests-options"> ¶</a></h3>
242 259 </div>
243 260 <div class="panel-body">
244 261 ## Specific HG settings
245 262 <div class="checkbox">
246 263 ${h.checkbox('rhodecode_hg_use_rebase_for_merging' + suffix, 'True', **kwargs)}
247 264 <label for="rhodecode_hg_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
248 265 </div>
249 266 <div class="label">
250 267 <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
251 268 </div>
252 269
253 270 <div class="checkbox">
254 271 ${h.checkbox('rhodecode_hg_close_branch_before_merging' + suffix, 'True', **kwargs)}
255 272 <label for="rhodecode_hg_close_branch_before_merging${suffix}">${_('Close branch before merging it')}</label>
256 273 </div>
257 274 <div class="label">
258 275 <span class="help-block">${_('Close branch before merging it into the destination branch. No effect when the rebase strategy is used.')}</span>
259 276 </div>
260 277
261 278
262 279 </div>
263 280 </div>
264 281 % endif
265 282
266 283 % if display_globals or repo_type in ['git']:
267 284 <div class="panel panel-default">
268 285 <div class="panel-heading" id="vcs-git-pull-requests-options">
269 286 <h3 class="panel-title">${_('Git Pull Request Settings')}<a class="permalink" href="#vcs-git-pull-requests-options"> ¶</a></h3>
270 287 </div>
271 288 <div class="panel-body">
272 289 ## <div class="checkbox">
273 290 ## ${h.checkbox('rhodecode_git_use_rebase_for_merging' + suffix, 'True', **kwargs)}
274 291 ## <label for="rhodecode_git_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
275 292 ## </div>
276 293 ## <div class="label">
277 294 ## <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
278 295 ## </div>
279 296
280 297 <div class="checkbox">
281 298 ${h.checkbox('rhodecode_git_close_branch_before_merging' + suffix, 'True', **kwargs)}
282 299 <label for="rhodecode_git_close_branch_before_merging${suffix}">${_('Delete branch after merging it')}</label>
283 300 </div>
284 301 <div class="label">
285 302 <span class="help-block">${_('Delete branch after merging it into destination branch.')}</span>
286 303 </div>
287 304 </div>
288 305 </div>
289 306 % endif
290 307
291 308 <script type="text/javascript">
292 309
293 310 $(document).ready(function() {
294 311 /* On click handler for the `Generate Apache Config` button. It sends a
295 312 POST request to trigger the (re)generation of the mod_dav_svn config. */
296 313 $('#vcs_svn_generate_cfg').on('click', function(event) {
297 314 event.preventDefault();
298 315 var url = "${h.route_path('admin_settings_vcs_svn_generate_cfg')}";
299 316 var jqxhr = $.post(url, {'csrf_token': CSRF_TOKEN});
300 317 jqxhr.done(function(data) {
301 318 $.Topic('/notifications').publish(data);
302 319 });
303 320 });
304 321 });
305 322
306 323 </script>
307 324 </%def>
308 325
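A brief aside on how the new checkbox above ends up persisted: the form field names map one-to-one onto the general settings handled by VcsSettingsModel, which the test file below exercises. The following is only a minimal sketch, assuming a hypothetical repository name and illustrative flag values; it uses only the calls that appear in those tests.

    # Sketch only: persist the PR-related general settings, including the new
    # auto-merge flag, for a single repository. 'some-repo' is a placeholder.
    from rhodecode.model.meta import Session
    from rhodecode.model.settings import VcsSettingsModel

    general_form_data = {
        'rhodecode_pr_merge_enabled': True,
        'rhodecode_auto_merge_enabled': True,   # flag added by this change
        'rhodecode_use_outdated_comments': True,
        'rhodecode_hg_use_rebase_for_merging': False,
        'rhodecode_hg_close_branch_before_merging': False,
        'rhodecode_git_use_rebase_for_merging': False,
        'rhodecode_git_close_branch_before_merging': False,
        'rhodecode_diff_cache': True,
    }

    model = VcsSettingsModel(repo='some-repo')
    model.create_or_update_repo_pr_settings(general_form_data)
    Session().commit()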
@@ -1,1096 +1,1098
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import mock
20 20 import pytest
21 21
22 22 from rhodecode.lib.utils2 import str2bool
23 23 from rhodecode.model.meta import Session
24 24 from rhodecode.model.settings import VcsSettingsModel, UiSetting
25 25
26 26
27 27 HOOKS_FORM_DATA = {
28 28 'hooks_changegroup_repo_size': True,
29 29 'hooks_changegroup_push_logger': True,
30 30 'hooks_outgoing_pull_logger': True
31 31 }
32 32
33 33 SVN_FORM_DATA = {
34 34 'new_svn_branch': 'test-branch',
35 35 'new_svn_tag': 'test-tag'
36 36 }
37 37
38 38 GENERAL_FORM_DATA = {
39 39 'rhodecode_pr_merge_enabled': True,
40 'rhodecode_auto_merge_enabled': True,
40 41 'rhodecode_use_outdated_comments': True,
41 42 'rhodecode_hg_use_rebase_for_merging': True,
42 43 'rhodecode_hg_close_branch_before_merging': True,
43 44 'rhodecode_git_use_rebase_for_merging': True,
44 45 'rhodecode_git_close_branch_before_merging': True,
45 46 'rhodecode_diff_cache': True,
46 47 }
47 48
48 49
49 50 class TestInheritGlobalSettingsProperty(object):
50 51 def test_get_raises_exception_when_repository_not_specified(self):
51 52 model = VcsSettingsModel()
52 53 with pytest.raises(Exception) as exc_info:
53 54 model.inherit_global_settings
54 55 assert str(exc_info.value) == 'Repository is not specified'
55 56
56 57 def test_true_is_returned_when_value_is_not_found(self, repo_stub):
57 58 model = VcsSettingsModel(repo=repo_stub.repo_name)
58 59 assert model.inherit_global_settings is True
59 60
60 61 def test_value_is_returned(self, repo_stub, settings_util):
61 62 model = VcsSettingsModel(repo=repo_stub.repo_name)
62 63 settings_util.create_repo_rhodecode_setting(
63 64 repo_stub, VcsSettingsModel.INHERIT_SETTINGS, False, 'bool')
64 65 assert model.inherit_global_settings is False
65 66
66 67 def test_value_is_set(self, repo_stub):
67 68 model = VcsSettingsModel(repo=repo_stub.repo_name)
68 69 model.inherit_global_settings = False
69 70 setting = model.repo_settings.get_setting_by_name(
70 71 VcsSettingsModel.INHERIT_SETTINGS)
71 72 try:
72 73 assert setting.app_settings_type == 'bool'
73 74 assert setting.app_settings_value is False
74 75 finally:
75 76 Session().delete(setting)
76 77 Session().commit()
77 78
78 79 def test_set_raises_exception_when_repository_not_specified(self):
79 80 model = VcsSettingsModel()
80 81 with pytest.raises(Exception) as exc_info:
81 82 model.inherit_global_settings = False
82 83 assert str(exc_info.value) == 'Repository is not specified'
83 84
84 85
85 86 class TestVcsSettingsModel(object):
86 87 def test_global_svn_branch_patterns(self):
87 88 model = VcsSettingsModel()
88 89 expected_result = {'test': 'test'}
89 90 with mock.patch.object(model, 'global_settings') as settings_mock:
90 91 get_settings = settings_mock.get_ui_by_section
91 92 get_settings.return_value = expected_result
92 93 settings_mock.return_value = expected_result
93 94 result = model.get_global_svn_branch_patterns()
94 95
95 96 get_settings.assert_called_once_with(model.SVN_BRANCH_SECTION)
96 97 assert expected_result == result
97 98
98 99 def test_repo_svn_branch_patterns(self):
99 100 model = VcsSettingsModel()
100 101 expected_result = {'test': 'test'}
101 102 with mock.patch.object(model, 'repo_settings') as settings_mock:
102 103 get_settings = settings_mock.get_ui_by_section
103 104 get_settings.return_value = expected_result
104 105 settings_mock.return_value = expected_result
105 106 result = model.get_repo_svn_branch_patterns()
106 107
107 108 get_settings.assert_called_once_with(model.SVN_BRANCH_SECTION)
108 109 assert expected_result == result
109 110
110 111 def test_repo_svn_branch_patterns_raises_exception_when_repo_is_not_set(
111 112 self):
112 113 model = VcsSettingsModel()
113 114 with pytest.raises(Exception) as exc_info:
114 115 model.get_repo_svn_branch_patterns()
115 116 assert str(exc_info.value) == 'Repository is not specified'
116 117
117 118 def test_global_svn_tag_patterns(self):
118 119 model = VcsSettingsModel()
119 120 expected_result = {'test': 'test'}
120 121 with mock.patch.object(model, 'global_settings') as settings_mock:
121 122 get_settings = settings_mock.get_ui_by_section
122 123 get_settings.return_value = expected_result
123 124 settings_mock.return_value = expected_result
124 125 result = model.get_global_svn_tag_patterns()
125 126
126 127 get_settings.assert_called_once_with(model.SVN_TAG_SECTION)
127 128 assert expected_result == result
128 129
129 130 def test_repo_svn_tag_patterns(self):
130 131 model = VcsSettingsModel()
131 132 expected_result = {'test': 'test'}
132 133 with mock.patch.object(model, 'repo_settings') as settings_mock:
133 134 get_settings = settings_mock.get_ui_by_section
134 135 get_settings.return_value = expected_result
135 136 settings_mock.return_value = expected_result
136 137 result = model.get_repo_svn_tag_patterns()
137 138
138 139 get_settings.assert_called_once_with(model.SVN_TAG_SECTION)
139 140 assert expected_result == result
140 141
141 142 def test_repo_svn_tag_patterns_raises_exception_when_repo_is_not_set(self):
142 143 model = VcsSettingsModel()
143 144 with pytest.raises(Exception) as exc_info:
144 145 model.get_repo_svn_tag_patterns()
145 146 assert str(exc_info.value) == 'Repository is not specified'
146 147
147 148 def test_get_global_settings(self):
148 149 expected_result = {'test': 'test'}
149 150 model = VcsSettingsModel()
150 151 with mock.patch.object(model, '_collect_all_settings') as collect_mock:
151 152 collect_mock.return_value = expected_result
152 153 result = model.get_global_settings()
153 154
154 155 collect_mock.assert_called_once_with(global_=True)
155 156 assert result == expected_result
156 157
157 158 def test_get_repo_settings(self, repo_stub):
158 159 model = VcsSettingsModel(repo=repo_stub.repo_name)
159 160 expected_result = {'test': 'test'}
160 161 with mock.patch.object(model, '_collect_all_settings') as collect_mock:
161 162 collect_mock.return_value = expected_result
162 163 result = model.get_repo_settings()
163 164
164 165 collect_mock.assert_called_once_with(global_=False)
165 166 assert result == expected_result
166 167
167 168 @pytest.mark.parametrize('settings, global_', [
168 169 ('global_settings', True),
169 170 ('repo_settings', False)
170 171 ])
171 172 def test_collect_all_settings(self, settings, global_):
172 173 model = VcsSettingsModel()
173 174 result_mock = self._mock_result()
174 175
175 176 settings_patch = mock.patch.object(model, settings)
176 177 with settings_patch as settings_mock:
177 178 settings_mock.get_ui_by_section_and_key.return_value = result_mock
178 179 settings_mock.get_setting_by_name.return_value = result_mock
179 180 result = model._collect_all_settings(global_=global_)
180 181
181 182 ui_settings = model.HG_SETTINGS + model.GIT_SETTINGS + model.HOOKS_SETTINGS
182 183 self._assert_get_settings_calls(
183 184 settings_mock, ui_settings, model.GENERAL_SETTINGS)
184 185 self._assert_collect_all_settings_result(
185 186 ui_settings, model.GENERAL_SETTINGS, result)
186 187
187 188 @pytest.mark.parametrize('settings, global_', [
188 189 ('global_settings', True),
189 190 ('repo_settings', False)
190 191 ])
191 192 def test_collect_all_settings_without_empty_value(self, settings, global_):
192 193 model = VcsSettingsModel()
193 194
194 195 settings_patch = mock.patch.object(model, settings)
195 196 with settings_patch as settings_mock:
196 197 settings_mock.get_ui_by_section_and_key.return_value = None
197 198 settings_mock.get_setting_by_name.return_value = None
198 199 result = model._collect_all_settings(global_=global_)
199 200
200 201 assert result == {}
201 202
202 203 def _mock_result(self):
203 204 result_mock = mock.Mock()
204 205 result_mock.ui_value = 'ui_value'
205 206 result_mock.ui_active = True
206 207 result_mock.app_settings_value = 'setting_value'
207 208 return result_mock
208 209
209 210 def _assert_get_settings_calls(
210 211 self, settings_mock, ui_settings, general_settings):
211 212 assert (
212 213 settings_mock.get_ui_by_section_and_key.call_count ==
213 214 len(ui_settings))
214 215 assert (
215 216 settings_mock.get_setting_by_name.call_count ==
216 217 len(general_settings))
217 218
218 219 for section, key in ui_settings:
219 220 expected_call = mock.call(section, key)
220 221 assert (
221 222 expected_call in
222 223 settings_mock.get_ui_by_section_and_key.call_args_list)
223 224
224 225 for name in general_settings:
225 226 expected_call = mock.call(name)
226 227 assert (
227 228 expected_call in
228 229 settings_mock.get_setting_by_name.call_args_list)
229 230
230 231 def _assert_collect_all_settings_result(
231 232 self, ui_settings, general_settings, result):
232 233 expected_result = {}
233 234 for section, key in ui_settings:
234 235 key = '{}_{}'.format(section, key.replace('.', '_'))
235 236
236 237 if section in ('extensions', 'hooks'):
237 238 value = True
238 239 elif key in ['vcs_git_lfs_enabled']:
239 240 value = True
240 241 else:
241 242 value = 'ui_value'
242 243 expected_result[key] = value
243 244
244 245 for name in general_settings:
245 246 key = 'rhodecode_' + name
246 247 expected_result[key] = 'setting_value'
247 248
248 249 assert expected_result == result
249 250
250 251
251 252 class TestCreateOrUpdateRepoHookSettings(object):
252 253 def test_create_when_no_repo_object_found(self, repo_stub):
253 254 model = VcsSettingsModel(repo=repo_stub.repo_name)
254 255
255 256 self._create_settings(model, HOOKS_FORM_DATA)
256 257
257 258 cleanup = []
258 259 try:
259 260 for section, key in model.HOOKS_SETTINGS:
260 261 ui = model.repo_settings.get_ui_by_section_and_key(
261 262 section, key)
262 263 assert ui.ui_active is True
263 264 cleanup.append(ui)
264 265 finally:
265 266 for ui in cleanup:
266 267 Session().delete(ui)
267 268 Session().commit()
268 269
269 270 def test_create_raises_exception_when_data_incomplete(self, repo_stub):
270 271 model = VcsSettingsModel(repo=repo_stub.repo_name)
271 272
272 273 deleted_key = 'hooks_changegroup_repo_size'
273 274 data = HOOKS_FORM_DATA.copy()
274 275 data.pop(deleted_key)
275 276
276 277 with pytest.raises(ValueError) as exc_info:
277 278 model.create_or_update_repo_hook_settings(data)
278 279 Session().commit()
279 280
280 281 msg = 'The given data does not contain {} key'.format(deleted_key)
281 282 assert str(exc_info.value) == msg
282 283
283 284 def test_update_when_repo_object_found(self, repo_stub, settings_util):
284 285 model = VcsSettingsModel(repo=repo_stub.repo_name)
285 286 for section, key in model.HOOKS_SETTINGS:
286 287 settings_util.create_repo_rhodecode_ui(
287 288 repo_stub, section, None, key=key, active=False)
288 289 model.create_or_update_repo_hook_settings(HOOKS_FORM_DATA)
289 290 Session().commit()
290 291
291 292 for section, key in model.HOOKS_SETTINGS:
292 293 ui = model.repo_settings.get_ui_by_section_and_key(section, key)
293 294 assert ui.ui_active is True
294 295
295 296 def _create_settings(self, model, data):
296 297 global_patch = mock.patch.object(model, 'global_settings')
297 298 global_setting = mock.Mock()
298 299 global_setting.ui_value = 'Test value'
299 300 with global_patch as global_mock:
300 301 global_mock.get_ui_by_section_and_key.return_value = global_setting
301 302 model.create_or_update_repo_hook_settings(HOOKS_FORM_DATA)
302 303 Session().commit()
303 304
304 305
305 306 class TestUpdateGlobalHookSettings(object):
306 307 def test_update_raises_exception_when_data_incomplete(self):
307 308 model = VcsSettingsModel()
308 309
309 310 deleted_key = 'hooks_changegroup_repo_size'
310 311 data = HOOKS_FORM_DATA.copy()
311 312 data.pop(deleted_key)
312 313
313 314 with pytest.raises(ValueError) as exc_info:
314 315 model.update_global_hook_settings(data)
315 316 Session().commit()
316 317
317 318 msg = 'The given data does not contain {} key'.format(deleted_key)
318 319 assert str(exc_info.value) == msg
319 320
320 321 def test_update_global_hook_settings(self, settings_util):
321 322 model = VcsSettingsModel()
322 323 setting_mock = mock.MagicMock()
323 324 setting_mock.ui_active = False
324 325 get_settings_patcher = mock.patch.object(
325 326 model.global_settings, 'get_ui_by_section_and_key',
326 327 return_value=setting_mock)
327 328 session_patcher = mock.patch('rhodecode.model.settings.Session')
328 329 with get_settings_patcher as get_settings_mock, session_patcher:
329 330 model.update_global_hook_settings(HOOKS_FORM_DATA)
330 331 Session().commit()
331 332
332 333 assert setting_mock.ui_active is True
333 334 assert get_settings_mock.call_count == 3
334 335
335 336
336 337 class TestCreateOrUpdateRepoGeneralSettings(object):
337 338 def test_calls_create_or_update_general_settings(self, repo_stub):
338 339 model = VcsSettingsModel(repo=repo_stub.repo_name)
339 340 create_patch = mock.patch.object(
340 341 model, '_create_or_update_general_settings')
341 342 with create_patch as create_mock:
342 343 model.create_or_update_repo_pr_settings(GENERAL_FORM_DATA)
343 344 Session().commit()
344 345
345 346 create_mock.assert_called_once_with(
346 347 model.repo_settings, GENERAL_FORM_DATA)
347 348
348 349 def test_raises_exception_when_repository_is_not_specified(self):
349 350 model = VcsSettingsModel()
350 351 with pytest.raises(Exception) as exc_info:
351 352 model.create_or_update_repo_pr_settings(GENERAL_FORM_DATA)
352 353 assert str(exc_info.value) == 'Repository is not specified'
353 354
354 355
355 356 class TestCreateOrUpdateGlobalGeneralSettings(object):
356 357 def test_calls_create_or_update_general_settings(self):
357 358 model = VcsSettingsModel()
358 359 create_patch = mock.patch.object(
359 360 model, '_create_or_update_general_settings')
360 361 with create_patch as create_mock:
361 362 model.create_or_update_global_pr_settings(GENERAL_FORM_DATA)
362 363 create_mock.assert_called_once_with(
363 364 model.global_settings, GENERAL_FORM_DATA)
364 365
365 366
366 367 class TestCreateOrUpdateGeneralSettings(object):
367 368 def test_create_when_no_repo_settings_found(self, repo_stub):
368 369 model = VcsSettingsModel(repo=repo_stub.repo_name)
369 370 model._create_or_update_general_settings(
370 371 model.repo_settings, GENERAL_FORM_DATA)
371 372
372 373 cleanup = []
373 374 try:
374 375 for name in model.GENERAL_SETTINGS:
375 376 setting = model.repo_settings.get_setting_by_name(name)
376 377 assert setting.app_settings_value is True
377 378 cleanup.append(setting)
378 379 finally:
379 380 for setting in cleanup:
380 381 Session().delete(setting)
381 382 Session().commit()
382 383
383 384 def test_create_raises_exception_when_data_incomplete(self, repo_stub):
384 385 model = VcsSettingsModel(repo=repo_stub.repo_name)
385 386
386 387 deleted_key = 'rhodecode_pr_merge_enabled'
387 388 data = GENERAL_FORM_DATA.copy()
388 389 data.pop(deleted_key)
389 390
390 391 with pytest.raises(ValueError) as exc_info:
391 392 model._create_or_update_general_settings(model.repo_settings, data)
392 393 Session().commit()
393 394
394 395 msg = 'The given data does not contain {} key'.format(deleted_key)
395 396 assert str(exc_info.value) == msg
396 397
397 398 def test_update_when_repo_setting_found(self, repo_stub, settings_util):
398 399 model = VcsSettingsModel(repo=repo_stub.repo_name)
399 400 for name in model.GENERAL_SETTINGS:
400 401 settings_util.create_repo_rhodecode_setting(
401 402 repo_stub, name, False, 'bool')
402 403
403 404 model._create_or_update_general_settings(
404 405 model.repo_settings, GENERAL_FORM_DATA)
405 406 Session().commit()
406 407
407 408 for name in model.GENERAL_SETTINGS:
408 409 setting = model.repo_settings.get_setting_by_name(name)
409 410 assert setting.app_settings_value is True
410 411
411 412
412 413 class TestCreateRepoSvnSettings(object):
413 414 def test_calls_create_svn_settings(self, repo_stub):
414 415 model = VcsSettingsModel(repo=repo_stub.repo_name)
415 416 with mock.patch.object(model, '_create_svn_settings') as create_mock:
416 417 model.create_repo_svn_settings(SVN_FORM_DATA)
417 418 Session().commit()
418 419
419 420 create_mock.assert_called_once_with(model.repo_settings, SVN_FORM_DATA)
420 421
421 422 def test_raises_exception_when_repository_is_not_specified(self):
422 423 model = VcsSettingsModel()
423 424 with pytest.raises(Exception) as exc_info:
424 425 model.create_repo_svn_settings(SVN_FORM_DATA)
425 426 Session().commit()
426 427
427 428 assert str(exc_info.value) == 'Repository is not specified'
428 429
429 430
430 431 class TestCreateSvnSettings(object):
431 432 def test_create(self, repo_stub):
432 433 model = VcsSettingsModel(repo=repo_stub.repo_name)
433 434 model._create_svn_settings(model.repo_settings, SVN_FORM_DATA)
434 435 Session().commit()
435 436
436 437 branch_ui = model.repo_settings.get_ui_by_section(
437 438 model.SVN_BRANCH_SECTION)
438 439 tag_ui = model.repo_settings.get_ui_by_section(
439 440 model.SVN_TAG_SECTION)
440 441
441 442 try:
442 443 assert len(branch_ui) == 1
443 444 assert len(tag_ui) == 1
444 445 finally:
445 446 Session().delete(branch_ui[0])
446 447 Session().delete(tag_ui[0])
447 448 Session().commit()
448 449
449 450 def test_create_tag(self, repo_stub):
450 451 model = VcsSettingsModel(repo=repo_stub.repo_name)
451 452 data = SVN_FORM_DATA.copy()
452 453 data.pop('new_svn_branch')
453 454 model._create_svn_settings(model.repo_settings, data)
454 455 Session().commit()
455 456
456 457 branch_ui = model.repo_settings.get_ui_by_section(
457 458 model.SVN_BRANCH_SECTION)
458 459 tag_ui = model.repo_settings.get_ui_by_section(
459 460 model.SVN_TAG_SECTION)
460 461
461 462 try:
462 463 assert len(branch_ui) == 0
463 464 assert len(tag_ui) == 1
464 465 finally:
465 466 Session().delete(tag_ui[0])
466 467 Session().commit()
467 468
468 469 def test_create_nothing_when_no_svn_settings_specified(self, repo_stub):
469 470 model = VcsSettingsModel(repo=repo_stub.repo_name)
470 471 model._create_svn_settings(model.repo_settings, {})
471 472 Session().commit()
472 473
473 474 branch_ui = model.repo_settings.get_ui_by_section(
474 475 model.SVN_BRANCH_SECTION)
475 476 tag_ui = model.repo_settings.get_ui_by_section(
476 477 model.SVN_TAG_SECTION)
477 478
478 479 assert len(branch_ui) == 0
479 480 assert len(tag_ui) == 0
480 481
481 482 def test_create_nothing_when_empty_settings_specified(self, repo_stub):
482 483 model = VcsSettingsModel(repo=repo_stub.repo_name)
483 484 data = {
484 485 'new_svn_branch': '',
485 486 'new_svn_tag': ''
486 487 }
487 488 model._create_svn_settings(model.repo_settings, data)
488 489 Session().commit()
489 490
490 491 branch_ui = model.repo_settings.get_ui_by_section(
491 492 model.SVN_BRANCH_SECTION)
492 493 tag_ui = model.repo_settings.get_ui_by_section(
493 494 model.SVN_TAG_SECTION)
494 495
495 496 assert len(branch_ui) == 0
496 497 assert len(tag_ui) == 0
497 498
498 499
499 500 class TestCreateOrUpdateUi(object):
500 501 def test_create(self, repo_stub):
501 502 model = VcsSettingsModel(repo=repo_stub.repo_name)
502 503 model._create_or_update_ui(
503 504 model.repo_settings, 'test-section', 'test-key', active=False,
504 505 value='False')
505 506 Session().commit()
506 507
507 508 created_ui = model.repo_settings.get_ui_by_section_and_key(
508 509 'test-section', 'test-key')
509 510
510 511 try:
511 512 assert created_ui.ui_active is False
512 513 assert str2bool(created_ui.ui_value) is False
513 514 finally:
514 515 Session().delete(created_ui)
515 516 Session().commit()
516 517
517 518 def test_update(self, repo_stub, settings_util):
518 519 model = VcsSettingsModel(repo=repo_stub.repo_name)
519 520 # only the first three HG settings are relevant here
520 521 largefiles, phases, evolve = model.HG_SETTINGS[:3]
521 522
522 523 section = 'test-section'
523 524 key = 'test-key'
524 525 settings_util.create_repo_rhodecode_ui(
525 526 repo_stub, section, 'True', key=key, active=True)
526 527
527 528 model._create_or_update_ui(
528 529 model.repo_settings, section, key, active=False, value='False')
529 530 Session().commit()
530 531
531 532 created_ui = model.repo_settings.get_ui_by_section_and_key(
532 533 section, key)
533 534 assert created_ui.ui_active is False
534 535 assert str2bool(created_ui.ui_value) is False
535 536
536 537
537 538 class TestCreateOrUpdateRepoHgSettings(object):
538 539 FORM_DATA = {
539 540 'extensions_largefiles': False,
540 541 'extensions_evolve': False,
541 542 'phases_publish': False
542 543 }
543 544
544 545 def test_creates_repo_hg_settings_when_data_is_correct(self, repo_stub):
545 546 model = VcsSettingsModel(repo=repo_stub.repo_name)
546 547 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
547 548 model.create_or_update_repo_hg_settings(self.FORM_DATA)
548 549 expected_calls = [
549 550 mock.call(model.repo_settings, 'extensions', 'largefiles', active=False, value=''),
550 551 mock.call(model.repo_settings, 'extensions', 'evolve', active=False, value=''),
551 552 mock.call(model.repo_settings, 'experimental', 'evolution', active=False, value=''),
552 553 mock.call(model.repo_settings, 'experimental', 'evolution.exchange', active=False, value='no'),
553 554 mock.call(model.repo_settings, 'extensions', 'topic', active=False, value=''),
554 555 mock.call(model.repo_settings, 'phases', 'publish', value='False'),
555 556 ]
556 557 assert expected_calls == create_mock.call_args_list
557 558
558 559 @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys())
559 560 def test_key_is_not_found(self, repo_stub, field_to_remove):
560 561 model = VcsSettingsModel(repo=repo_stub.repo_name)
561 562 data = self.FORM_DATA.copy()
562 563 data.pop(field_to_remove)
563 564 with pytest.raises(ValueError) as exc_info:
564 565 model.create_or_update_repo_hg_settings(data)
565 566 Session().commit()
566 567
567 568 expected_message = 'The given data does not contain {} key'.format(
568 569 field_to_remove)
569 570 assert str(exc_info.value) == expected_message
570 571
571 572 def test_create_raises_exception_when_repository_not_specified(self):
572 573 model = VcsSettingsModel()
573 574 with pytest.raises(Exception) as exc_info:
574 575 model.create_or_update_repo_hg_settings(self.FORM_DATA)
575 576 Session().commit()
576 577
577 578 assert str(exc_info.value) == 'Repository is not specified'
578 579
579 580
580 581 class TestCreateOrUpdateGlobalHgSettings(object):
581 582 FORM_DATA = {
582 583 'extensions_largefiles': False,
583 584 'phases_publish': False,
584 585 'extensions_evolve': False
585 586 }
586 587
587 588 def test_creates_global_hg_settings_when_data_is_correct(self):
588 589 model = VcsSettingsModel()
589 590 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
590 591 model.create_or_update_global_hg_settings(self.FORM_DATA)
591 592 Session().commit()
592 593
593 594 expected_calls = [
594 595 mock.call(model.global_settings, 'extensions', 'largefiles', active=False, value=''),
595 596 mock.call(model.global_settings, 'phases', 'publish', value='False'),
596 597 mock.call(model.global_settings, 'extensions', 'evolve', active=False, value=''),
597 598 mock.call(model.global_settings, 'experimental', 'evolution', active=False, value=''),
598 599 mock.call(model.global_settings, 'experimental', 'evolution.exchange', active=False, value='no'),
599 600 mock.call(model.global_settings, 'extensions', 'topic', active=False, value=''),
600 601 ]
601 602
602 603 assert expected_calls == create_mock.call_args_list
603 604
604 605 @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys())
605 606 def test_key_is_not_found(self, repo_stub, field_to_remove):
606 607 model = VcsSettingsModel(repo=repo_stub.repo_name)
607 608 data = self.FORM_DATA.copy()
608 609 data.pop(field_to_remove)
609 610 with pytest.raises(Exception) as exc_info:
610 611 model.create_or_update_global_hg_settings(data)
611 612 Session().commit()
612 613
613 614 expected_message = 'The given data does not contain {} key'.format(
614 615 field_to_remove)
615 616 assert str(exc_info.value) == expected_message
616 617
617 618
618 619 class TestCreateOrUpdateGlobalGitSettings(object):
619 620 FORM_DATA = {
620 621 'vcs_git_lfs_enabled': False,
621 622 }
622 623
623 624 def test_creates_global_git_settings_when_data_is_correct(self):
624 625 model = VcsSettingsModel()
625 626 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
626 627 model.create_or_update_global_git_settings(self.FORM_DATA)
627 628 Session().commit()
628 629
629 630 expected_calls = [
630 631 mock.call(model.global_settings, 'vcs_git_lfs', 'enabled', active=False, value=False),
631 632 ]
632 633 assert expected_calls == create_mock.call_args_list
633 634
634 635
635 636 class TestDeleteRepoSvnPattern(object):
636 637 def test_success_when_repo_is_set(self, backend_svn, settings_util):
637 638 repo = backend_svn.create_repo()
638 639 repo_name = repo.repo_name
639 640
640 641 model = VcsSettingsModel(repo=repo_name)
641 642 entry = settings_util.create_repo_rhodecode_ui(
642 643 repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'svn-branch')
643 644 Session().commit()
644 645
645 646 model.delete_repo_svn_pattern(entry.ui_id)
646 647
647 648 def test_fail_when_delete_id_from_other_repo(self, backend_svn):
648 649 repo_name = backend_svn.repo_name
649 650 model = VcsSettingsModel(repo=repo_name)
650 651 delete_ui_patch = mock.patch.object(model.repo_settings, 'delete_ui')
651 652 with delete_ui_patch as delete_ui_mock:
652 653 model.delete_repo_svn_pattern(123)
653 654 Session().commit()
654 655
655 656 delete_ui_mock.assert_called_once_with(-1)
656 657
657 658 def test_raises_exception_when_repository_is_not_specified(self):
658 659 model = VcsSettingsModel()
659 660 with pytest.raises(Exception) as exc_info:
660 661 model.delete_repo_svn_pattern(123)
661 662 assert str(exc_info.value) == 'Repository is not specified'
662 663
663 664
664 665 class TestDeleteGlobalSvnPattern(object):
665 666 def test_delete_global_svn_pattern_calls_delete_ui(self):
666 667 model = VcsSettingsModel()
667 668 delete_ui_patch = mock.patch.object(model.global_settings, 'delete_ui')
668 669 with delete_ui_patch as delete_ui_mock:
669 670 model.delete_global_svn_pattern(123)
670 671 delete_ui_mock.assert_called_once_with(123)
671 672
672 673
673 674 class TestFilterUiSettings(object):
674 675 def test_settings_are_filtered(self):
675 676 model = VcsSettingsModel()
676 677 repo_settings = [
677 678 UiSetting('extensions', 'largefiles', '', True),
678 679 UiSetting('phases', 'publish', 'True', True),
679 680 UiSetting('hooks', 'changegroup.repo_size', 'hook', True),
680 681 UiSetting('hooks', 'changegroup.push_logger', 'hook', True),
681 682 UiSetting('hooks', 'outgoing.pull_logger', 'hook', True),
682 683 UiSetting(
683 684 'vcs_svn_branch', '84223c972204fa545ca1b22dac7bef5b68d7442d',
684 685 'test_branch', True),
685 686 UiSetting(
686 687 'vcs_svn_tag', '84229c972204fa545ca1b22dac7bef5b68d7442d',
687 688 'test_tag', True),
688 689 ]
689 690 non_repo_settings = [
690 691 UiSetting('largefiles', 'usercache', '/example/largefiles-store', True),
691 692 UiSetting('test', 'outgoing.pull_logger', 'hook', True),
692 693 UiSetting('hooks', 'test2', 'hook', True),
693 694 UiSetting(
694 695 'vcs_svn_repo', '84229c972204fa545ca1b22dac7bef5b68d7442d',
695 696 'test_tag', True),
696 697 ]
697 698 settings = repo_settings + non_repo_settings
698 699 filtered_settings = model._filter_ui_settings(settings)
699 700 assert sorted(filtered_settings) == sorted(repo_settings)
700 701
701 702
702 703 class TestFilterGeneralSettings(object):
703 704 def test_settings_are_filtered(self):
704 705 model = VcsSettingsModel()
705 706 settings = {
706 707 'rhodecode_abcde': 'value1',
707 708 'rhodecode_vwxyz': 'value2',
708 709 }
709 710 general_settings = {
710 711 'rhodecode_{}'.format(key): 'value'
711 712 for key in VcsSettingsModel.GENERAL_SETTINGS
712 713 }
713 714 settings.update(general_settings)
714 715
715 716 filtered_settings = model._filter_general_settings(settings)
716 717 assert sorted(filtered_settings) == sorted(general_settings)
717 718
718 719
719 720 class TestGetRepoUiSettings(object):
720 721 def test_global_uis_are_returned_when_no_repo_uis_found(
721 722 self, repo_stub):
722 723 model = VcsSettingsModel(repo=repo_stub.repo_name)
723 724 result = model.get_repo_ui_settings()
724 725 svn_sections = (
725 726 VcsSettingsModel.SVN_TAG_SECTION,
726 727 VcsSettingsModel.SVN_BRANCH_SECTION)
727 728 expected_result = [
728 729 s for s in model.global_settings.get_ui()
729 730 if s.section not in svn_sections]
730 731 assert sorted(result) == sorted(expected_result)
731 732
732 733 def test_repo_uis_are_overriding_global_uis(
733 734 self, repo_stub, settings_util):
734 735 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
735 736 settings_util.create_repo_rhodecode_ui(
736 737 repo_stub, section, 'repo', key=key, active=False)
737 738 model = VcsSettingsModel(repo=repo_stub.repo_name)
738 739 result = model.get_repo_ui_settings()
739 740 for setting in result:
740 741 locator = (setting.section, setting.key)
741 742 if locator in VcsSettingsModel.HOOKS_SETTINGS:
742 743 assert setting.value == 'repo'
743 744
744 745 assert setting.active is False
745 746
746 747 def test_global_svn_patterns_are_not_in_list(
747 748 self, repo_stub, settings_util):
748 749 svn_sections = (
749 750 VcsSettingsModel.SVN_TAG_SECTION,
750 751 VcsSettingsModel.SVN_BRANCH_SECTION)
751 752 for section in svn_sections:
752 753 settings_util.create_rhodecode_ui(
753 754 section, 'repo', key='deadbeef' + section, active=False)
754 755 Session().commit()
755 756
756 757 model = VcsSettingsModel(repo=repo_stub.repo_name)
757 758 result = model.get_repo_ui_settings()
758 759 for setting in result:
759 760 assert setting.section not in svn_sections
760 761
761 762 def test_repo_uis_filtered_by_section_are_returned(
762 763 self, repo_stub, settings_util):
763 764 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
764 765 settings_util.create_repo_rhodecode_ui(
765 766 repo_stub, section, 'repo', key=key, active=False)
766 767 model = VcsSettingsModel(repo=repo_stub.repo_name)
767 768 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
768 769 result = model.get_repo_ui_settings(section=section)
769 770 for setting in result:
770 771 assert setting.section == section
771 772
772 773 def test_repo_uis_filtered_by_key_are_returned(
773 774 self, repo_stub, settings_util):
774 775 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
775 776 settings_util.create_repo_rhodecode_ui(
776 777 repo_stub, section, 'repo', key=key, active=False)
777 778 model = VcsSettingsModel(repo=repo_stub.repo_name)
778 779 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
779 780 result = model.get_repo_ui_settings(key=key)
780 781 for setting in result:
781 782 assert setting.key == key
782 783
783 784 def test_raises_exception_when_repository_is_not_specified(self):
784 785 model = VcsSettingsModel()
785 786 with pytest.raises(Exception) as exc_info:
786 787 model.get_repo_ui_settings()
787 788 assert str(exc_info.value) == 'Repository is not specified'
788 789
789 790
790 791 class TestGetRepoGeneralSettings(object):
791 792 def test_global_settings_are_returned_when_no_repo_settings_found(
792 793 self, repo_stub):
793 794 model = VcsSettingsModel(repo=repo_stub.repo_name)
794 795 result = model.get_repo_general_settings()
795 796 expected_result = model.global_settings.get_all_settings()
796 797 assert sorted(result) == sorted(expected_result)
797 798
798 799 def test_repo_uis_are_overriding_global_uis(
799 800 self, repo_stub, settings_util):
800 801 for key in VcsSettingsModel.GENERAL_SETTINGS:
801 802 settings_util.create_repo_rhodecode_setting(
802 803 repo_stub, key, 'abcde', type_='unicode')
803 804 Session().commit()
804 805
805 806 model = VcsSettingsModel(repo=repo_stub.repo_name)
806 807 result = model.get_repo_ui_settings()
807 808 for key in result:
808 809 if key in VcsSettingsModel.GENERAL_SETTINGS:
809 810 assert result[key] == 'abcde'
810 811
811 812 def test_raises_exception_when_repository_is_not_specified(self):
812 813 model = VcsSettingsModel()
813 814 with pytest.raises(Exception) as exc_info:
814 815 model.get_repo_general_settings()
815 816 assert str(exc_info.value) == 'Repository is not specified'
816 817
817 818
818 819 class TestGetGlobalGeneralSettings(object):
819 820 def test_global_settings_are_returned(self, repo_stub):
820 821 model = VcsSettingsModel()
821 822 result = model.get_global_general_settings()
822 823 expected_result = model.global_settings.get_all_settings()
823 824 assert sorted(result) == sorted(expected_result)
824 825
825 826 def test_repo_uis_are_not_overriding_global_uis(
826 827 self, repo_stub, settings_util):
827 828 for key in VcsSettingsModel.GENERAL_SETTINGS:
828 829 settings_util.create_repo_rhodecode_setting(
829 830 repo_stub, key, 'abcde', type_='unicode')
830 831 Session().commit()
831 832
832 833 model = VcsSettingsModel(repo=repo_stub.repo_name)
833 834 result = model.get_global_general_settings()
834 835 expected_result = model.global_settings.get_all_settings()
835 836 assert sorted(result) == sorted(expected_result)
836 837
837 838
838 839 class TestGetGlobalUiSettings(object):
839 840 def test_global_uis_are_returned(self, repo_stub):
840 841 model = VcsSettingsModel()
841 842 result = model.get_global_ui_settings()
842 843 expected_result = model.global_settings.get_ui()
843 844 assert sorted(result) == sorted(expected_result)
844 845
845 846 def test_repo_uis_are_not_overriding_global_uis(
846 847 self, repo_stub, settings_util):
847 848 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
848 849 settings_util.create_repo_rhodecode_ui(
849 850 repo_stub, section, 'repo', key=key, active=False)
850 851 Session().commit()
851 852
852 853 model = VcsSettingsModel(repo=repo_stub.repo_name)
853 854 result = model.get_global_ui_settings()
854 855 expected_result = model.global_settings.get_ui()
855 856 assert sorted(result) == sorted(expected_result)
856 857
857 858 def test_ui_settings_filtered_by_section(
858 859 self, repo_stub, settings_util):
859 860 model = VcsSettingsModel(repo=repo_stub.repo_name)
860 861 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
861 862 result = model.get_global_ui_settings(section=section)
862 863 expected_result = model.global_settings.get_ui(section=section)
863 864 assert sorted(result) == sorted(expected_result)
864 865
865 866 def test_ui_settings_filtered_by_key(
866 867 self, repo_stub, settings_util):
867 868 model = VcsSettingsModel(repo=repo_stub.repo_name)
868 869 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
869 870 result = model.get_global_ui_settings(key=key)
870 871 expected_result = model.global_settings.get_ui(key=key)
871 872 assert sorted(result) == sorted(expected_result)
872 873
873 874
874 875 class TestGetGeneralSettings(object):
875 876 def test_global_settings_are_returned_when_inherited_is_true(
876 877 self, repo_stub, settings_util):
877 878 model = VcsSettingsModel(repo=repo_stub.repo_name)
878 879 model.inherit_global_settings = True
879 880 for key in VcsSettingsModel.GENERAL_SETTINGS:
880 881 settings_util.create_repo_rhodecode_setting(
881 882 repo_stub, key, 'abcde', type_='unicode')
882 883 Session().commit()
883 884
884 885 result = model.get_general_settings()
885 886 expected_result = model.get_global_general_settings()
886 887 assert sorted(result) == sorted(expected_result)
887 888
888 889 def test_repo_settings_are_returned_when_inherited_is_false(
889 890 self, repo_stub, settings_util):
890 891 model = VcsSettingsModel(repo=repo_stub.repo_name)
891 892 model.inherit_global_settings = False
892 893 for key in VcsSettingsModel.GENERAL_SETTINGS:
893 894 settings_util.create_repo_rhodecode_setting(
894 895 repo_stub, key, 'abcde', type_='unicode')
895 896 Session().commit()
896 897
897 898 result = model.get_general_settings()
898 899 expected_result = model.get_repo_general_settings()
899 900 assert sorted(result) == sorted(expected_result)
900 901
901 902 def test_global_settings_are_returned_when_no_repository_specified(self):
902 903 model = VcsSettingsModel()
903 904 result = model.get_general_settings()
904 905 expected_result = model.get_global_general_settings()
905 906 assert sorted(result) == sorted(expected_result)
906 907
907 908
908 909 class TestGetUiSettings(object):
909 910 def test_global_settings_are_returned_when_inherited_is_true(
910 911 self, repo_stub, settings_util):
911 912 model = VcsSettingsModel(repo=repo_stub.repo_name)
912 913 model.inherit_global_settings = True
913 914 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
914 915 settings_util.create_repo_rhodecode_ui(
915 916 repo_stub, section, 'repo', key=key, active=True)
916 917 Session().commit()
917 918
918 919 result = model.get_ui_settings()
919 920 expected_result = model.get_global_ui_settings()
920 921 assert sorted(result) == sorted(expected_result)
921 922
922 923 def test_repo_settings_are_returned_when_inherited_is_false(
923 924 self, repo_stub, settings_util):
924 925 model = VcsSettingsModel(repo=repo_stub.repo_name)
925 926 model.inherit_global_settings = False
926 927 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
927 928 settings_util.create_repo_rhodecode_ui(
928 929 repo_stub, section, 'repo', key=key, active=True)
929 930 Session().commit()
930 931
931 932 result = model.get_ui_settings()
932 933 expected_result = model.get_repo_ui_settings()
933 934 assert sorted(result) == sorted(expected_result)
934 935
935 936 def test_repo_settings_filtered_by_section_and_key(self, repo_stub):
936 937 model = VcsSettingsModel(repo=repo_stub.repo_name)
937 938 model.inherit_global_settings = False
938 939
939 940 args = ('section', 'key')
940 941 with mock.patch.object(model, 'get_repo_ui_settings') as settings_mock:
941 942 model.get_ui_settings(*args)
942 943 Session().commit()
943 944
944 945 settings_mock.assert_called_once_with(*args)
945 946
946 947 def test_global_settings_filtered_by_section_and_key(self):
947 948 model = VcsSettingsModel()
948 949 args = ('section', 'key')
949 950 with mock.patch.object(model, 'get_global_ui_settings') as (
950 951 settings_mock):
951 952 model.get_ui_settings(*args)
952 953 settings_mock.assert_called_once_with(*args)
953 954
954 955 def test_global_settings_are_returned_when_no_repository_specified(self):
955 956 model = VcsSettingsModel()
956 957 result = model.get_ui_settings()
957 958 expected_result = model.get_global_ui_settings()
958 959 assert sorted(result) == sorted(expected_result)
959 960
960 961
961 962 class TestGetSvnPatterns(object):
962 963 def test_repo_settings_filtered_by_section_and_key(self, repo_stub):
963 964 model = VcsSettingsModel(repo=repo_stub.repo_name)
964 965 args = ('section', )
965 966 with mock.patch.object(model, 'get_repo_ui_settings') as settings_mock:
966 967 model.get_svn_patterns(*args)
967 968
968 969 Session().commit()
969 970 settings_mock.assert_called_once_with(*args)
970 971
971 972 def test_global_settings_filtered_by_section_and_key(self):
972 973 model = VcsSettingsModel()
973 974 args = ('section', )
974 975 with mock.patch.object(model, 'get_global_ui_settings') as (
975 976 settings_mock):
976 977 model.get_svn_patterns(*args)
977 978 settings_mock.assert_called_once_with(*args)
978 979
979 980
980 981 class TestCreateOrUpdateRepoSettings(object):
981 982 FORM_DATA = {
982 983 'inherit_global_settings': False,
983 984 'hooks_changegroup_repo_size': False,
984 985 'hooks_changegroup_push_logger': False,
985 986 'hooks_outgoing_pull_logger': False,
986 987 'extensions_largefiles': False,
987 988 'extensions_evolve': False,
988 989 'vcs_git_lfs_enabled': False,
989 990 'phases_publish': 'False',
990 991 'rhodecode_pr_merge_enabled': False,
992 'rhodecode_auto_merge_enabled': False,
991 993 'rhodecode_use_outdated_comments': False,
992 994 'new_svn_branch': '',
993 995 'new_svn_tag': ''
994 996 }
995 997
996 998 def test_get_raises_exception_when_repository_not_specified(self):
997 999 model = VcsSettingsModel()
998 1000 with pytest.raises(Exception) as exc_info:
999 1001 model.create_or_update_repo_settings(data=self.FORM_DATA)
1000 1002 Session().commit()
1001 1003
1002 1004 assert str(exc_info.value) == 'Repository is not specified'
1003 1005
1004 1006 def test_only_svn_settings_are_updated_when_type_is_svn(self, backend_svn):
1005 1007 repo = backend_svn.create_repo()
1006 1008 model = VcsSettingsModel(repo=repo)
1007 1009 with self._patch_model(model) as mocks:
1008 1010 model.create_or_update_repo_settings(
1009 1011 data=self.FORM_DATA, inherit_global_settings=False)
1010 1012 Session().commit()
1011 1013
1012 1014 mocks['create_repo_svn_settings'].assert_called_once_with(
1013 1015 self.FORM_DATA)
1014 1016 non_called_methods = (
1015 1017 'create_or_update_repo_hook_settings',
1016 1018 'create_or_update_repo_pr_settings',
1017 1019 'create_or_update_repo_hg_settings')
1018 1020 for method in non_called_methods:
1019 1021 assert mocks[method].call_count == 0
1020 1022
1021 1023 def test_non_svn_settings_are_updated_when_type_is_hg(self, backend_hg):
1022 1024 repo = backend_hg.create_repo()
1023 1025 model = VcsSettingsModel(repo=repo)
1024 1026 with self._patch_model(model) as mocks:
1025 1027 model.create_or_update_repo_settings(
1026 1028 data=self.FORM_DATA, inherit_global_settings=False)
1027 1029 Session().commit()
1028 1030
1029 1031 assert mocks['create_repo_svn_settings'].call_count == 0
1030 1032 called_methods = (
1031 1033 'create_or_update_repo_hook_settings',
1032 1034 'create_or_update_repo_pr_settings',
1033 1035 'create_or_update_repo_hg_settings')
1034 1036 for method in called_methods:
1035 1037 mocks[method].assert_called_once_with(self.FORM_DATA)
1036 1038
1037 1039 def test_non_svn_and_hg_settings_are_updated_when_type_is_git(
1038 1040 self, backend_git):
1039 1041 repo = backend_git.create_repo()
1040 1042 model = VcsSettingsModel(repo=repo)
1041 1043 with self._patch_model(model) as mocks:
1042 1044 model.create_or_update_repo_settings(
1043 1045 data=self.FORM_DATA, inherit_global_settings=False)
1044 1046
1045 1047 assert mocks['create_repo_svn_settings'].call_count == 0
1046 1048 called_methods = (
1047 1049 'create_or_update_repo_hook_settings',
1048 1050 'create_or_update_repo_pr_settings')
1049 1051 non_called_methods = (
1050 1052 'create_repo_svn_settings',
1051 1053 'create_or_update_repo_hg_settings'
1052 1054 )
1053 1055 for method in called_methods:
1054 1056 mocks[method].assert_called_once_with(self.FORM_DATA)
1055 1057 for method in non_called_methods:
1056 1058 assert mocks[method].call_count == 0
1057 1059
1058 1060 def test_no_methods_are_called_when_settings_are_inherited(
1059 1061 self, backend):
1060 1062 repo = backend.create_repo()
1061 1063 model = VcsSettingsModel(repo=repo)
1062 1064 with self._patch_model(model) as mocks:
1063 1065 model.create_or_update_repo_settings(
1064 1066 data=self.FORM_DATA, inherit_global_settings=True)
1065 1067 for method_name in mocks:
1066 1068 assert mocks[method_name].call_count == 0
1067 1069
1068 1070 def test_cache_is_marked_for_invalidation(self, repo_stub):
1069 1071 model = VcsSettingsModel(repo=repo_stub)
1070 1072 invalidation_patcher = mock.patch(
1071 1073 'rhodecode.model.scm.ScmModel.mark_for_invalidation')
1072 1074 with invalidation_patcher as invalidation_mock:
1073 1075 model.create_or_update_repo_settings(
1074 1076 data=self.FORM_DATA, inherit_global_settings=True)
1075 1077 Session().commit()
1076 1078
1077 1079 invalidation_mock.assert_called_once_with(
1078 1080 repo_stub.repo_name, delete=True)
1079 1081
1080 1082 def test_inherit_flag_is_saved(self, repo_stub):
1081 1083 model = VcsSettingsModel(repo=repo_stub)
1082 1084 model.inherit_global_settings = True
1083 1085 with self._patch_model(model):
1084 1086 model.create_or_update_repo_settings(
1085 1087 data=self.FORM_DATA, inherit_global_settings=False)
1086 1088 Session().commit()
1087 1089
1088 1090 assert model.inherit_global_settings is False
1089 1091
1090 1092 def _patch_model(self, model):
1091 1093 return mock.patch.multiple(
1092 1094 model,
1093 1095 create_repo_svn_settings=mock.DEFAULT,
1094 1096 create_or_update_repo_hook_settings=mock.DEFAULT,
1095 1097 create_or_update_repo_pr_settings=mock.DEFAULT,
1096 1098 create_or_update_repo_hg_settings=mock.DEFAULT)