fix(mercurial): dropped support for deprecated hgsubversion, fixes RCCE-12
super-admin
r5250:c24343a3 default
@@ -1,695 +1,678 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import mock
21 21 import pytest
22 22
23 23 import rhodecode
24 24 from rhodecode.apps._base import ADMIN_PREFIX
25 25 from rhodecode.lib.hash_utils import md5_safe
26 26 from rhodecode.model.db import RhodeCodeUi
27 27 from rhodecode.model.meta import Session
28 28 from rhodecode.model.settings import SettingsModel, IssueTrackerSettingsModel
29 29 from rhodecode.tests import assert_session_flash
30 30 from rhodecode.tests.routes import route_path
31 31
32 32
33 33 UPDATE_DATA_QUALNAME = 'rhodecode.model.update.UpdateModel.get_update_data'
34 34
35 35
36 36 @pytest.mark.usefixtures('autologin_user', 'app')
37 37 class TestAdminSettingsController(object):
38 38
39 39 @pytest.mark.parametrize('urlname', [
40 40 'admin_settings_vcs',
41 41 'admin_settings_mapping',
42 42 'admin_settings_global',
43 43 'admin_settings_visual',
44 44 'admin_settings_email',
45 45 'admin_settings_hooks',
46 46 'admin_settings_search',
47 47 ])
48 48 def test_simple_get(self, urlname):
49 49 self.app.get(route_path(urlname))
50 50
51 51 def test_create_custom_hook(self, csrf_token):
52 52 response = self.app.post(
53 53 route_path('admin_settings_hooks_update'),
54 54 params={
55 55 'new_hook_ui_key': 'test_hooks_1',
56 56 'new_hook_ui_value': 'cd /tmp',
57 57 'csrf_token': csrf_token})
58 58
59 59 response = response.follow()
60 60 response.mustcontain('test_hooks_1')
61 61 response.mustcontain('cd /tmp')
62 62
63 63 def test_create_custom_hook_delete(self, csrf_token):
64 64 response = self.app.post(
65 65 route_path('admin_settings_hooks_update'),
66 66 params={
67 67 'new_hook_ui_key': 'test_hooks_2',
68 68 'new_hook_ui_value': 'cd /tmp2',
69 69 'csrf_token': csrf_token})
70 70
71 71 response = response.follow()
72 72 response.mustcontain('test_hooks_2')
73 73 response.mustcontain('cd /tmp2')
74 74
75 75 hook_id = SettingsModel().get_ui_by_key('test_hooks_2').ui_id
76 76
77 77 # delete
78 78 self.app.post(
79 79 route_path('admin_settings_hooks_delete'),
80 80 params={'hook_id': hook_id, 'csrf_token': csrf_token})
81 81 response = self.app.get(route_path('admin_settings_hooks'))
82 82 response.mustcontain(no=['test_hooks_2'])
83 83 response.mustcontain(no=['cd /tmp2'])
84 84
85 85
86 86 @pytest.mark.usefixtures('autologin_user', 'app')
87 87 class TestAdminSettingsGlobal(object):
88 88
89 89 def test_pre_post_code_code_active(self, csrf_token):
90 90 pre_code = 'rc-pre-code-187652122'
91 91 post_code = 'rc-postcode-98165231'
92 92
93 93 response = self.post_and_verify_settings({
94 94 'rhodecode_pre_code': pre_code,
95 95 'rhodecode_post_code': post_code,
96 96 'csrf_token': csrf_token,
97 97 })
98 98
99 99 response = response.follow()
100 100 response.mustcontain(pre_code, post_code)
101 101
102 102 def test_pre_post_code_code_inactive(self, csrf_token):
103 103 pre_code = 'rc-pre-code-187652122'
104 104 post_code = 'rc-postcode-98165231'
105 105 response = self.post_and_verify_settings({
106 106 'rhodecode_pre_code': '',
107 107 'rhodecode_post_code': '',
108 108 'csrf_token': csrf_token,
109 109 })
110 110
111 111 response = response.follow()
112 112 response.mustcontain(no=[pre_code, post_code])
113 113
114 114 def test_captcha_activate(self, csrf_token):
115 115 self.post_and_verify_settings({
116 116 'rhodecode_captcha_private_key': '1234567890',
117 117 'rhodecode_captcha_public_key': '1234567890',
118 118 'csrf_token': csrf_token,
119 119 })
120 120
121 121 response = self.app.get(ADMIN_PREFIX + '/register')
122 122 response.mustcontain('captcha')
123 123
124 124 def test_captcha_deactivate(self, csrf_token):
125 125 self.post_and_verify_settings({
126 126 'rhodecode_captcha_private_key': '',
127 127 'rhodecode_captcha_public_key': '1234567890',
128 128 'csrf_token': csrf_token,
129 129 })
130 130
131 131 response = self.app.get(ADMIN_PREFIX + '/register')
132 132 response.mustcontain(no=['captcha'])
133 133
134 134 def test_title_change(self, csrf_token):
135 135 old_title = 'RhodeCode'
136 136
137 137 for new_title in ['Changed', 'Żółwik', old_title]:
138 138 response = self.post_and_verify_settings({
139 139 'rhodecode_title': new_title,
140 140 'csrf_token': csrf_token,
141 141 })
142 142
143 143 response = response.follow()
144 144 response.mustcontain(new_title)
145 145
146 146 def post_and_verify_settings(self, settings):
147 147 old_title = 'RhodeCode'
148 148 old_realm = 'RhodeCode authentication'
149 149 params = {
150 150 'rhodecode_title': old_title,
151 151 'rhodecode_realm': old_realm,
152 152 'rhodecode_pre_code': '',
153 153 'rhodecode_post_code': '',
154 154 'rhodecode_captcha_private_key': '',
155 155 'rhodecode_captcha_public_key': '',
156 156 'rhodecode_create_personal_repo_group': False,
157 157 'rhodecode_personal_repo_group_pattern': '${username}',
158 158 }
159 159 params.update(settings)
160 160 response = self.app.post(
161 161 route_path('admin_settings_global_update'), params=params)
162 162
163 163 assert_session_flash(response, 'Updated application settings')
164 164
165 165 app_settings = SettingsModel().get_all_settings()
166 166 del settings['csrf_token']
167 167 for key, value in settings.items():
168 168 assert app_settings[key] == value
169 169
170 170 return response
171 171
172 172
173 173 @pytest.mark.usefixtures('autologin_user', 'app')
174 174 class TestAdminSettingsVcs(object):
175 175
176 176 def test_contains_svn_default_patterns(self):
177 177 response = self.app.get(route_path('admin_settings_vcs'))
178 178 expected_patterns = [
179 179 '/trunk',
180 180 '/branches/*',
181 181 '/tags/*',
182 182 ]
183 183 for pattern in expected_patterns:
184 184 response.mustcontain(pattern)
185 185
186 186 def test_add_new_svn_branch_and_tag_pattern(
187 187 self, backend_svn, form_defaults, disable_sql_cache,
188 188 csrf_token):
189 189 form_defaults.update({
190 190 'new_svn_branch': '/exp/branches/*',
191 191 'new_svn_tag': '/important_tags/*',
192 192 'csrf_token': csrf_token,
193 193 })
194 194
195 195 response = self.app.post(
196 196 route_path('admin_settings_vcs_update'),
197 197 params=form_defaults, status=302)
198 198 response = response.follow()
199 199
200 200 # Expect to find the new values on the page
201 201 response.mustcontain('/exp/branches/*')
202 202 response.mustcontain('/important_tags/*')
203 203
204 204 # Expect that those patterns are used to match branches and tags now
205 205 repo = backend_svn['svn-simple-layout'].scm_instance()
206 206 assert 'exp/branches/exp-sphinx-docs' in repo.branches
207 207 assert 'important_tags/v0.5' in repo.tags
208 208
209 209 def test_add_same_svn_value_twice_shows_an_error_message(
210 210 self, form_defaults, csrf_token, settings_util):
211 211 settings_util.create_rhodecode_ui('vcs_svn_branch', '/test')
212 212 settings_util.create_rhodecode_ui('vcs_svn_tag', '/test')
213 213
214 214 response = self.app.post(
215 215 route_path('admin_settings_vcs_update'),
216 216 params={
217 217 'paths_root_path': form_defaults['paths_root_path'],
218 218 'new_svn_branch': '/test',
219 219 'new_svn_tag': '/test',
220 220 'csrf_token': csrf_token,
221 221 },
222 222 status=200)
223 223
224 224 response.mustcontain("Pattern already exists")
225 225 response.mustcontain("Some form inputs contain invalid data.")
226 226
227 227 @pytest.mark.parametrize('section', [
228 228 'vcs_svn_branch',
229 229 'vcs_svn_tag',
230 230 ])
231 231 def test_delete_svn_patterns(
232 232 self, section, csrf_token, settings_util):
233 233 setting = settings_util.create_rhodecode_ui(
234 234 section, '/test_delete', cleanup=False)
235 235
236 236 self.app.post(
237 237 route_path('admin_settings_vcs_svn_pattern_delete'),
238 238 params={
239 239 'delete_svn_pattern': setting.ui_id,
240 240 'csrf_token': csrf_token},
241 241 headers={'X-REQUESTED-WITH': 'XMLHttpRequest'})
242 242
243 243 @pytest.mark.parametrize('section', [
244 244 'vcs_svn_branch',
245 245 'vcs_svn_tag',
246 246 ])
247 247 def test_delete_svn_patterns_raises_404_when_no_xhr(
248 248 self, section, csrf_token, settings_util):
249 249 setting = settings_util.create_rhodecode_ui(section, '/test_delete')
250 250
251 251 self.app.post(
252 252 route_path('admin_settings_vcs_svn_pattern_delete'),
253 253 params={
254 254 'delete_svn_pattern': setting.ui_id,
255 255 'csrf_token': csrf_token},
256 256 status=404)
257 257
258 def test_extensions_hgsubversion(self, form_defaults, csrf_token):
259 form_defaults.update({
260 'csrf_token': csrf_token,
261 'extensions_hgsubversion': 'True',
262 })
263 response = self.app.post(
264 route_path('admin_settings_vcs_update'),
265 params=form_defaults,
266 status=302)
267
268 response = response.follow()
269 extensions_input = (
270 '<input id="extensions_hgsubversion" '
271 'name="extensions_hgsubversion" type="checkbox" '
272 'value="True" checked="checked" />')
273 response.mustcontain(extensions_input)
274
275 258 def test_extensions_hgevolve(self, form_defaults, csrf_token):
276 259 form_defaults.update({
277 260 'csrf_token': csrf_token,
278 261 'extensions_evolve': 'True',
279 262 })
280 263 response = self.app.post(
281 264 route_path('admin_settings_vcs_update'),
282 265 params=form_defaults,
283 266 status=302)
284 267
285 268 response = response.follow()
286 269 extensions_input = (
287 270 '<input id="extensions_evolve" '
288 271 'name="extensions_evolve" type="checkbox" '
289 272 'value="True" checked="checked" />')
290 273 response.mustcontain(extensions_input)
291 274
292 275 def test_has_a_section_for_pull_request_settings(self):
293 276 response = self.app.get(route_path('admin_settings_vcs'))
294 277 response.mustcontain('Pull Request Settings')
295 278
296 279 def test_has_an_input_for_invalidation_of_inline_comments(self):
297 280 response = self.app.get(route_path('admin_settings_vcs'))
298 281 assert_response = response.assert_response()
299 282 assert_response.one_element_exists(
300 283 '[name=rhodecode_use_outdated_comments]')
301 284
302 285 @pytest.mark.parametrize('new_value', [True, False])
303 286 def test_allows_to_change_invalidation_of_inline_comments(
304 287 self, form_defaults, csrf_token, new_value):
305 288 setting_key = 'use_outdated_comments'
306 289 setting = SettingsModel().create_or_update_setting(
307 290 setting_key, not new_value, 'bool')
308 291 Session().add(setting)
309 292 Session().commit()
310 293
311 294 form_defaults.update({
312 295 'csrf_token': csrf_token,
313 296 'rhodecode_use_outdated_comments': str(new_value),
314 297 })
315 298 response = self.app.post(
316 299 route_path('admin_settings_vcs_update'),
317 300 params=form_defaults,
318 301 status=302)
319 302 response = response.follow()
320 303 setting = SettingsModel().get_setting_by_name(setting_key)
321 304 assert setting.app_settings_value is new_value
322 305
323 306 @pytest.mark.parametrize('new_value', [True, False])
324 307 def test_allows_to_change_hg_rebase_merge_strategy(
325 308 self, form_defaults, csrf_token, new_value):
326 309 setting_key = 'hg_use_rebase_for_merging'
327 310
328 311 form_defaults.update({
329 312 'csrf_token': csrf_token,
330 313 'rhodecode_' + setting_key: str(new_value),
331 314 })
332 315
333 316 with mock.patch.dict(
334 317 rhodecode.CONFIG, {'labs_settings_active': 'true'}):
335 318 self.app.post(
336 319 route_path('admin_settings_vcs_update'),
337 320 params=form_defaults,
338 321 status=302)
339 322
340 323 setting = SettingsModel().get_setting_by_name(setting_key)
341 324 assert setting.app_settings_value is new_value
342 325
343 326 @pytest.fixture()
344 327 def disable_sql_cache(self, request):
345 328 # patch _do_orm_execute so it returns None, similar to when we don't use a cached query
346 329 patcher = mock.patch(
347 330 'rhodecode.lib.caching_query.ORMCache._do_orm_execute', return_value=None)
348 331 request.addfinalizer(patcher.stop)
349 332 patcher.start()
350 333
351 334 @pytest.fixture()
352 335 def form_defaults(self):
353 336 from rhodecode.apps.admin.views.settings import AdminSettingsView
354 337 return AdminSettingsView._form_defaults()
355 338
356 339 # TODO: johbo: What we really want is to checkpoint before a test run and
357 340 # reset the session afterwards.
358 341 @pytest.fixture(scope='class', autouse=True)
359 342 def cleanup_settings(self, request, baseapp):
360 343 ui_id = RhodeCodeUi.ui_id
361 344 original_ids = [r.ui_id for r in RhodeCodeUi.query().with_entities(ui_id)]
362 345
363 346 @request.addfinalizer
364 347 def cleanup():
365 348 RhodeCodeUi.query().filter(
366 349 ui_id.notin_(original_ids)).delete(False)
367 350
368 351
369 352 @pytest.mark.usefixtures('autologin_user', 'app')
370 353 class TestLabsSettings(object):
371 354 def test_get_settings_page_disabled(self):
372 355 with mock.patch.dict(
373 356 rhodecode.CONFIG, {'labs_settings_active': 'false'}):
374 357
375 358 response = self.app.get(
376 359 route_path('admin_settings_labs'), status=302)
377 360
378 361 assert response.location.endswith(route_path('admin_settings'))
379 362
380 363 def test_get_settings_page_enabled(self):
381 364 from rhodecode.apps.admin.views import settings
382 365 lab_settings = [
383 366 settings.LabSetting(
384 367 key='rhodecode_bool',
385 368 type='bool',
386 369 group='bool group',
387 370 label='bool label',
388 371 help='bool help'
389 372 ),
390 373 settings.LabSetting(
391 374 key='rhodecode_text',
392 375 type='unicode',
393 376 group='text group',
394 377 label='text label',
395 378 help='text help'
396 379 ),
397 380 ]
398 381 with mock.patch.dict(rhodecode.CONFIG,
399 382 {'labs_settings_active': 'true'}):
400 383 with mock.patch.object(settings, '_LAB_SETTINGS', lab_settings):
401 384 response = self.app.get(route_path('admin_settings_labs'))
402 385
403 386 assert '<label>bool group:</label>' in response
404 387 assert '<label for="rhodecode_bool">bool label</label>' in response
405 388 assert '<p class="help-block">bool help</p>' in response
406 389 assert 'name="rhodecode_bool" type="checkbox"' in response
407 390
408 391 assert '<label>text group:</label>' in response
409 392 assert '<label for="rhodecode_text">text label</label>' in response
410 393 assert '<p class="help-block">text help</p>' in response
411 394 assert 'name="rhodecode_text" size="60" type="text"' in response
412 395
413 396
414 397 @pytest.mark.usefixtures('app')
415 398 class TestOpenSourceLicenses(object):
416 399
417 400 def test_records_are_displayed(self, autologin_user):
418 401 sample_licenses = [
419 402 {
420 403 "license": [
421 404 {
422 405 "fullName": "BSD 4-clause \"Original\" or \"Old\" License",
423 406 "shortName": "bsdOriginal",
424 407 "spdxId": "BSD-4-Clause",
425 408 "url": "http://spdx.org/licenses/BSD-4-Clause.html"
426 409 }
427 410 ],
428 411 "name": "python2.7-coverage-3.7.1"
429 412 },
430 413 {
431 414 "license": [
432 415 {
433 416 "fullName": "MIT License",
434 417 "shortName": "mit",
435 418 "spdxId": "MIT",
436 419 "url": "http://spdx.org/licenses/MIT.html"
437 420 }
438 421 ],
439 422 "name": "python2.7-bootstrapped-pip-9.0.1"
440 423 },
441 424 ]
442 425 read_licenses_patch = mock.patch(
443 426 'rhodecode.apps.admin.views.open_source_licenses.read_opensource_licenses',
444 427 return_value=sample_licenses)
445 428 with read_licenses_patch:
446 429 response = self.app.get(
447 430 route_path('admin_settings_open_source'), status=200)
448 431
449 432 assert_response = response.assert_response()
450 433 assert_response.element_contains(
451 434 '.panel-heading', 'Licenses of Third Party Packages')
452 435 for license_data in sample_licenses:
453 436 response.mustcontain(license_data["license"][0]["spdxId"])
454 437 assert_response.element_contains('.panel-body', license_data["name"])
455 438
456 439 def test_records_can_be_read(self, autologin_user):
457 440 response = self.app.get(
458 441 route_path('admin_settings_open_source'), status=200)
459 442 assert_response = response.assert_response()
460 443 assert_response.element_contains(
461 444 '.panel-heading', 'Licenses of Third Party Packages')
462 445
463 446 def test_forbidden_when_normal_user(self, autologin_regular_user):
464 447 self.app.get(
465 448 route_path('admin_settings_open_source'), status=404)
466 449
467 450
468 451 @pytest.mark.usefixtures('app')
469 452 class TestUserSessions(object):
470 453
471 454 def test_forbidden_when_normal_user(self, autologin_regular_user):
472 455 self.app.get(route_path('admin_settings_sessions'), status=404)
473 456
474 457 def test_show_sessions_page(self, autologin_user):
475 458 response = self.app.get(route_path('admin_settings_sessions'), status=200)
476 459 response.mustcontain('file')
477 460
478 461 def test_cleanup_old_sessions(self, autologin_user, csrf_token):
479 462
480 463 post_data = {
481 464 'csrf_token': csrf_token,
482 465 'expire_days': '60'
483 466 }
484 467 response = self.app.post(
485 468 route_path('admin_settings_sessions_cleanup'), params=post_data,
486 469 status=302)
487 470 assert_session_flash(response, 'Cleaned up old sessions')
488 471
489 472
490 473 @pytest.mark.usefixtures('app')
491 474 class TestAdminSystemInfo(object):
492 475
493 476 def test_forbidden_when_normal_user(self, autologin_regular_user):
494 477 self.app.get(route_path('admin_settings_system'), status=404)
495 478
496 479 def test_system_info_page(self, autologin_user):
497 480 response = self.app.get(route_path('admin_settings_system'))
498 481 response.mustcontain('RhodeCode Community Edition, version {}'.format(
499 482 rhodecode.__version__))
500 483
501 484 def test_system_update_new_version(self, autologin_user):
502 485 update_data = {
503 486 'versions': [
504 487 {
505 488 'version': '100.3.1415926535',
506 489 'general': 'The latest version we are ever going to ship'
507 490 },
508 491 {
509 492 'version': '0.0.0',
510 493 'general': 'The first version we ever shipped'
511 494 }
512 495 ]
513 496 }
514 497 with mock.patch(UPDATE_DATA_QUALNAME, return_value=update_data):
515 498 response = self.app.get(route_path('admin_settings_system_update'))
516 499 response.mustcontain('A <b>new version</b> is available')
517 500
518 501 def test_system_update_nothing_new(self, autologin_user):
519 502 update_data = {
520 503 'versions': [
521 504 {
522 505 'version': '0.0.0',
523 506 'general': 'The first version we ever shipped'
524 507 }
525 508 ]
526 509 }
527 510 with mock.patch(UPDATE_DATA_QUALNAME, return_value=update_data):
528 511 response = self.app.get(route_path('admin_settings_system_update'))
529 512 response.mustcontain(
530 513 'This instance is already running the <b>latest</b> stable version')
531 514
532 515 def test_system_update_bad_response(self, autologin_user):
533 516 with mock.patch(UPDATE_DATA_QUALNAME, side_effect=ValueError('foo')):
534 517 response = self.app.get(route_path('admin_settings_system_update'))
535 518 response.mustcontain(
536 519 'Bad data sent from update server')
537 520
538 521
539 522 @pytest.mark.usefixtures("app")
540 523 class TestAdminSettingsIssueTracker(object):
541 524 RC_PREFIX = 'rhodecode_'
542 525 SHORT_PATTERN_KEY = 'issuetracker_pat_'
543 526 PATTERN_KEY = RC_PREFIX + SHORT_PATTERN_KEY
544 527 DESC_KEY = RC_PREFIX + 'issuetracker_desc_'
545 528
546 529 def test_issuetracker_index(self, autologin_user):
547 530 response = self.app.get(route_path('admin_settings_issuetracker'))
548 531 assert response.status_code == 200
549 532
550 533 def test_add_empty_issuetracker_pattern(
551 534 self, request, autologin_user, csrf_token):
552 535 post_url = route_path('admin_settings_issuetracker_update')
553 536 post_data = {
554 537 'csrf_token': csrf_token
555 538 }
556 539 self.app.post(post_url, post_data, status=302)
557 540
558 541 def test_add_issuetracker_pattern(
559 542 self, request, autologin_user, csrf_token):
560 543 pattern = 'issuetracker_pat'
561 544 another_pattern = pattern+'1'
562 545 post_url = route_path('admin_settings_issuetracker_update')
563 546 post_data = {
564 547 'new_pattern_pattern_0': pattern,
565 548 'new_pattern_url_0': 'http://url',
566 549 'new_pattern_prefix_0': 'prefix',
567 550 'new_pattern_description_0': 'description',
568 551 'new_pattern_pattern_1': another_pattern,
569 552 'new_pattern_url_1': 'https://url1',
570 553 'new_pattern_prefix_1': 'prefix1',
571 554 'new_pattern_description_1': 'description1',
572 555 'csrf_token': csrf_token
573 556 }
574 557 self.app.post(post_url, post_data, status=302)
575 558 settings = SettingsModel().get_all_settings()
576 559 self.uid = md5_safe(pattern)
577 560 assert settings[self.PATTERN_KEY+self.uid] == pattern
578 561 self.another_uid = md5_safe(another_pattern)
579 562 assert settings[self.PATTERN_KEY+self.another_uid] == another_pattern
580 563
581 564 @request.addfinalizer
582 565 def cleanup():
583 566 defaults = SettingsModel().get_all_settings()
584 567
585 568 entries = [name for name in defaults if (
586 569 (self.uid in name) or (self.another_uid in name))]
587 570 start = len(self.RC_PREFIX)
588 571 for del_key in entries:
589 572 # TODO: anderson: get_by_name needs name without prefix
590 573 entry = SettingsModel().get_setting_by_name(del_key[start:])
591 574 Session().delete(entry)
592 575
593 576 Session().commit()
594 577
595 578 def test_edit_issuetracker_pattern(
596 579 self, autologin_user, backend, csrf_token, request):
597 580
598 581 old_pattern = 'issuetracker_pat1'
599 582 old_uid = md5_safe(old_pattern)
600 583
601 584 post_url = route_path('admin_settings_issuetracker_update')
602 585 post_data = {
603 586 'new_pattern_pattern_0': old_pattern,
604 587 'new_pattern_url_0': 'http://url',
605 588 'new_pattern_prefix_0': 'prefix',
606 589 'new_pattern_description_0': 'description',
607 590
608 591 'csrf_token': csrf_token
609 592 }
610 593 self.app.post(post_url, post_data, status=302)
611 594
612 595 new_pattern = 'issuetracker_pat1_edited'
613 596 self.new_uid = md5_safe(new_pattern)
614 597
615 598 post_url = route_path('admin_settings_issuetracker_update')
616 599 post_data = {
617 600 'new_pattern_pattern_{}'.format(old_uid): new_pattern,
618 601 'new_pattern_url_{}'.format(old_uid): 'https://url_edited',
619 602 'new_pattern_prefix_{}'.format(old_uid): 'prefix_edited',
620 603 'new_pattern_description_{}'.format(old_uid): 'description_edited',
621 604 'uid': old_uid,
622 605 'csrf_token': csrf_token
623 606 }
624 607 self.app.post(post_url, post_data, status=302)
625 608
626 609 settings = SettingsModel().get_all_settings()
627 610 assert settings[self.PATTERN_KEY+self.new_uid] == new_pattern
628 611 assert settings[self.DESC_KEY + self.new_uid] == 'description_edited'
629 612 assert self.PATTERN_KEY+old_uid not in settings
630 613
631 614 @request.addfinalizer
632 615 def cleanup():
633 616 IssueTrackerSettingsModel().delete_entries(old_uid)
634 617 IssueTrackerSettingsModel().delete_entries(self.new_uid)
635 618
636 619 def test_replace_issuetracker_pattern_description(
637 620 self, autologin_user, csrf_token, request, settings_util):
638 621 prefix = 'issuetracker'
639 622 pattern = 'issuetracker_pat'
640 623 self.uid = md5_safe(pattern)
641 624 pattern_key = '_'.join([prefix, 'pat', self.uid])
642 625 rc_pattern_key = '_'.join(['rhodecode', pattern_key])
643 626 desc_key = '_'.join([prefix, 'desc', self.uid])
644 627 rc_desc_key = '_'.join(['rhodecode', desc_key])
645 628 new_description = 'new_description'
646 629
647 630 settings_util.create_rhodecode_setting(
648 631 pattern_key, pattern, 'unicode', cleanup=False)
649 632 settings_util.create_rhodecode_setting(
650 633 desc_key, 'old description', 'unicode', cleanup=False)
651 634
652 635 post_url = route_path('admin_settings_issuetracker_update')
653 636 post_data = {
654 637 'new_pattern_pattern_0': pattern,
655 638 'new_pattern_url_0': 'https://url',
656 639 'new_pattern_prefix_0': 'prefix',
657 640 'new_pattern_description_0': new_description,
658 641 'uid': self.uid,
659 642 'csrf_token': csrf_token
660 643 }
661 644 self.app.post(post_url, post_data, status=302)
662 645 settings = SettingsModel().get_all_settings()
663 646 assert settings[rc_pattern_key] == pattern
664 647 assert settings[rc_desc_key] == new_description
665 648
666 649 @request.addfinalizer
667 650 def cleanup():
668 651 IssueTrackerSettingsModel().delete_entries(self.uid)
669 652
670 653 def test_delete_issuetracker_pattern(
671 654 self, autologin_user, backend, csrf_token, settings_util, xhr_header):
672 655
673 656 old_pattern = 'issuetracker_pat_deleted'
674 657 old_uid = md5_safe(old_pattern)
675 658
676 659 post_url = route_path('admin_settings_issuetracker_update')
677 660 post_data = {
678 661 'new_pattern_pattern_0': old_pattern,
679 662 'new_pattern_url_0': 'http://url',
680 663 'new_pattern_prefix_0': 'prefix',
681 664 'new_pattern_description_0': 'description',
682 665
683 666 'csrf_token': csrf_token
684 667 }
685 668 self.app.post(post_url, post_data, status=302)
686 669
687 670 post_url = route_path('admin_settings_issuetracker_delete')
688 671 post_data = {
689 672 'uid': old_uid,
690 673 'csrf_token': csrf_token
691 674 }
692 675 self.app.post(post_url, post_data, extra_environ=xhr_header, status=200)
693 676 settings = SettingsModel().get_all_settings()
694 677 assert self.PATTERN_KEY+old_uid not in settings
695 678 assert self.DESC_KEY + old_uid not in settings
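
The removed test above covered the now-dropped hgsubversion checkbox. A minimal follow-up sketch (hypothetical, not part of this commit) that asserts the option is no longer rendered, assuming the same TestAdminSettingsVcs fixtures and helpers used above:

    def test_extensions_hgsubversion_not_rendered(self):
        # hypothetical check: after dropping hgsubversion support, the VCS
        # settings page should no longer offer an hgsubversion checkbox
        response = self.app.get(route_path('admin_settings_vcs'))
        response.mustcontain(no=['extensions_hgsubversion'])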
@@ -1,687 +1,679 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Database creation, and setup module for RhodeCode Enterprise. Used for creation
21 21 of database as well as for migration operations
22 22 """
23 23
24 24 import os
25 25 import sys
26 26 import time
27 27 import uuid
28 28 import logging
29 29 import getpass
30 30 from os.path import dirname as dn, join as jn
31 31
32 32 from sqlalchemy.engine import create_engine
33 33
34 34 from rhodecode import __dbversion__
35 35 from rhodecode.model import init_model
36 36 from rhodecode.model.user import UserModel
37 37 from rhodecode.model.db import (
38 38 User, Permission, RhodeCodeUi, RhodeCodeSetting, UserToPerm,
39 39 DbMigrateVersion, RepoGroup, UserRepoGroupToPerm, CacheKey, Repository)
40 40 from rhodecode.model.meta import Session, Base
41 41 from rhodecode.model.permission import PermissionModel
42 42 from rhodecode.model.repo import RepoModel
43 43 from rhodecode.model.repo_group import RepoGroupModel
44 44 from rhodecode.model.settings import SettingsModel
45 45
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 def notify(msg):
51 51 """
52 52 Notification for migrations messages
53 53 """
54 54 ml = len(msg) + (4 * 2)
55 55 print((('\n%s\n*** %s ***\n%s' % ('*' * ml, msg, '*' * ml)).upper()))
56 56
57 57
58 58 class DbManage(object):
59 59
60 60 def __init__(self, log_sql, dbconf, root, tests=False,
61 61 SESSION=None, cli_args=None, enc_key=b''):
62 62
63 63 self.dbname = dbconf.split('/')[-1]
64 64 self.tests = tests
65 65 self.root = root
66 66 self.dburi = dbconf
67 67 self.log_sql = log_sql
68 68 self.cli_args = cli_args or {}
69 69 self.sa = None
70 70 self.engine = None
71 71 self.enc_key = enc_key
72 72 # sets .sa .engine
73 73 self.init_db(SESSION=SESSION)
74 74
75 75 self.ask_ok = self.get_ask_ok_func(self.cli_args.get('force_ask'))
76 76
77 77 def db_exists(self):
78 78 if not self.sa:
79 79 self.init_db()
80 80 try:
81 81 self.sa.query(RhodeCodeUi)\
82 82 .filter(RhodeCodeUi.ui_key == '/')\
83 83 .scalar()
84 84 return True
85 85 except Exception:
86 86 return False
87 87 finally:
88 88 self.sa.rollback()
89 89
90 90 def get_ask_ok_func(self, param):
91 91 if param not in [None]:
92 92 # return a function lambda that has a default set to param
93 93 return lambda *args, **kwargs: param
94 94 else:
95 95 from rhodecode.lib.utils import ask_ok
96 96 return ask_ok
97 97
98 98 def init_db(self, SESSION=None):
99 99
100 100 if SESSION:
101 101 self.sa = SESSION
102 102 self.engine = SESSION.bind
103 103 else:
104 104 # init new sessions
105 105 engine = create_engine(self.dburi, echo=self.log_sql)
106 106 init_model(engine, encryption_key=self.enc_key)
107 107 self.sa = Session()
108 108 self.engine = engine
109 109
110 110 def create_tables(self, override=False):
111 111 """
112 112 Create an auth database
113 113 """
114 114
115 115 log.info("Existing database with the same name is going to be destroyed.")
116 116 log.info("Setup command will run DROP ALL command on that database.")
117 117 engine = self.engine
118 118
119 119 if self.tests:
120 120 destroy = True
121 121 else:
122 122 destroy = self.ask_ok('Are you sure that you want to destroy the old database? [y/n]')
123 123 if not destroy:
124 124 log.info('db tables bootstrap: Nothing done.')
125 125 sys.exit(0)
126 126 if destroy:
127 127 Base.metadata.drop_all(bind=engine)
128 128
129 129 checkfirst = not override
130 130 Base.metadata.create_all(bind=engine, checkfirst=checkfirst)
131 131 log.info('Created tables for %s', self.dbname)
132 132
133 133 def set_db_version(self):
134 134 ver = DbMigrateVersion()
135 135 ver.version = __dbversion__
136 136 ver.repository_id = 'rhodecode_db_migrations'
137 137 ver.repository_path = 'versions'
138 138 self.sa.add(ver)
139 139 log.info('db version set to: %s', __dbversion__)
140 140
141 141 def run_post_migration_tasks(self):
142 142 """
143 143 Run various tasks after the migrations have been performed
144 144 """
145 145 # delete cache keys on each upgrade
146 146 total = CacheKey.query().count()
147 147 log.info("Deleting (%s) cache keys now...", total)
148 148 CacheKey.delete_all_cache()
149 149
150 150 def upgrade(self, version=None):
151 151 """
152 152 Upgrades given database schema to given revision following
153 153 all needed steps, to perform the upgrade
154 154
155 155 """
156 156
157 157 from rhodecode.lib.dbmigrate.migrate.versioning import api
158 158 from rhodecode.lib.dbmigrate.migrate.exceptions import DatabaseNotControlledError
159 159
160 160 if 'sqlite' in self.dburi:
161 161 print(
162 162 '********************** WARNING **********************\n'
163 163 'Make sure your version of sqlite is at least 3.7.X. \n'
164 164 'Earlier versions are known to fail on some migrations\n'
165 165 '*****************************************************\n')
166 166
167 167 upgrade = self.ask_ok(
168 168 'You are about to perform a database upgrade. Make '
169 169 'sure you have backed up your database. '
170 170 'Continue ? [y/n]')
171 171 if not upgrade:
172 172 log.info('No upgrade performed')
173 173 sys.exit(0)
174 174
175 175 repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
176 176 'rhodecode/lib/dbmigrate')
177 177 db_uri = self.dburi
178 178
179 179 if version:
180 180 DbMigrateVersion.set_version(version)
181 181
182 182 try:
183 183 curr_version = api.db_version(db_uri, repository_path)
184 184 msg = (f'Found current database db_uri under version '
185 185 f'control with version {curr_version}')
186 186
187 187 except (RuntimeError, DatabaseNotControlledError):
188 188 curr_version = 1
189 189 msg = f'Current database is not under version control. ' \
190 190 f'Setting as version {curr_version}'
191 191 api.version_control(db_uri, repository_path, curr_version)
192 192
193 193 notify(msg)
194 194
195 195 if curr_version == __dbversion__:
196 196 log.info('This database is already at the newest version')
197 197 sys.exit(0)
198 198
199 199 upgrade_steps = list(range(curr_version + 1, __dbversion__ + 1))
200 200 notify(f'attempting to upgrade database from '
201 201 f'version {curr_version} to version {__dbversion__}')
202 202
203 203 # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
204 204 _step = None
205 205 for step in upgrade_steps:
206 206 notify(f'performing upgrade step {step}')
207 207 time.sleep(0.5)
208 208
209 209 api.upgrade(db_uri, repository_path, step)
210 210 self.sa.rollback()
211 211 notify(f'schema upgrade for step {step} completed')
212 212
213 213 _step = step
214 214
215 215 self.run_post_migration_tasks()
216 216 notify(f'upgrade to version {step} successful')
217 217
218 218 def fix_repo_paths(self):
219 219 """
220 220 Fixes an old RhodeCode version path into new one without a '*'
221 221 """
222 222
223 223 paths = self.sa.query(RhodeCodeUi)\
224 224 .filter(RhodeCodeUi.ui_key == '/')\
225 225 .scalar()
226 226
227 227 paths.ui_value = paths.ui_value.replace('*', '')
228 228
229 229 try:
230 230 self.sa.add(paths)
231 231 self.sa.commit()
232 232 except Exception:
233 233 self.sa.rollback()
234 234 raise
235 235
236 236 def fix_default_user(self):
237 237 """
238 238 Fixes an old default user with some 'nicer' default values,
239 239 used mostly for anonymous access
240 240 """
241 241 def_user = self.sa.query(User)\
242 242 .filter(User.username == User.DEFAULT_USER)\
243 243 .one()
244 244
245 245 def_user.name = 'Anonymous'
246 246 def_user.lastname = 'User'
247 247 def_user.email = User.DEFAULT_USER_EMAIL
248 248
249 249 try:
250 250 self.sa.add(def_user)
251 251 self.sa.commit()
252 252 except Exception:
253 253 self.sa.rollback()
254 254 raise
255 255
256 256 def fix_settings(self):
257 257 """
258 258 Fixes rhodecode settings and adds ga_code key for google analytics
259 259 """
260 260
261 261 hgsettings3 = RhodeCodeSetting('ga_code', '')
262 262
263 263 try:
264 264 self.sa.add(hgsettings3)
265 265 self.sa.commit()
266 266 except Exception:
267 267 self.sa.rollback()
268 268 raise
269 269
270 270 def create_admin_and_prompt(self):
271 271
272 272 # defaults
273 273 defaults = self.cli_args
274 274 username = defaults.get('username')
275 275 password = defaults.get('password')
276 276 email = defaults.get('email')
277 277
278 278 if username is None:
279 279 username = input('Specify admin username:')
280 280 if password is None:
281 281 password = self._get_admin_password()
282 282 if not password:
283 283 # second try
284 284 password = self._get_admin_password()
285 285 if not password:
286 286 sys.exit()
287 287 if email is None:
288 288 email = input('Specify admin email:')
289 289 api_key = self.cli_args.get('api_key')
290 290 self.create_user(username, password, email, True,
291 291 strict_creation_check=False,
292 292 api_key=api_key)
293 293
294 294 def _get_admin_password(self):
295 295 password = getpass.getpass('Specify admin password '
296 296 '(min 6 chars):')
297 297 confirm = getpass.getpass('Confirm password:')
298 298
299 299 if password != confirm:
300 300 log.error('passwords mismatch')
301 301 return False
302 302 if len(password) < 6:
303 303 log.error('password is too short - use at least 6 characters')
304 304 return False
305 305
306 306 return password
307 307
308 308 def create_test_admin_and_users(self):
309 309 log.info('creating admin and regular test users')
310 310 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, \
311 311 TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \
312 312 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \
313 313 TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \
314 314 TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL
315 315
316 316 self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
317 317 TEST_USER_ADMIN_EMAIL, True, api_key=True)
318 318
319 319 self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
320 320 TEST_USER_REGULAR_EMAIL, False, api_key=True)
321 321
322 322 self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS,
323 323 TEST_USER_REGULAR2_EMAIL, False, api_key=True)
324 324
325 325 def create_ui_settings(self, repo_store_path):
326 326 """
327 327 Creates ui settings, fills out hooks
328 328 and disables dotencode
329 329 """
330 330 settings_model = SettingsModel(sa=self.sa)
331 331 from rhodecode.lib.vcs.backends.hg import largefiles_store
332 332 from rhodecode.lib.vcs.backends.git import lfs_store
333 333
334 334 # Build HOOKS
335 335 hooks = [
336 336 (RhodeCodeUi.HOOK_REPO_SIZE, 'python:vcsserver.hooks.repo_size'),
337 337
338 338 # HG
339 339 (RhodeCodeUi.HOOK_PRE_PULL, 'python:vcsserver.hooks.pre_pull'),
340 340 (RhodeCodeUi.HOOK_PULL, 'python:vcsserver.hooks.log_pull_action'),
341 341 (RhodeCodeUi.HOOK_PRE_PUSH, 'python:vcsserver.hooks.pre_push'),
342 342 (RhodeCodeUi.HOOK_PRETX_PUSH, 'python:vcsserver.hooks.pre_push'),
343 343 (RhodeCodeUi.HOOK_PUSH, 'python:vcsserver.hooks.log_push_action'),
344 344 (RhodeCodeUi.HOOK_PUSH_KEY, 'python:vcsserver.hooks.key_push'),
345 345
346 346 ]
347 347
348 348 for key, value in hooks:
349 349 hook_obj = settings_model.get_ui_by_key(key)
350 350 hooks2 = hook_obj if hook_obj else RhodeCodeUi()
351 351 hooks2.ui_section = 'hooks'
352 352 hooks2.ui_key = key
353 353 hooks2.ui_value = value
354 354 self.sa.add(hooks2)
355 355
356 356 # enable largefiles
357 357 largefiles = RhodeCodeUi()
358 358 largefiles.ui_section = 'extensions'
359 359 largefiles.ui_key = 'largefiles'
360 360 largefiles.ui_value = ''
361 361 self.sa.add(largefiles)
362 362
363 363 # set default largefiles cache dir, defaults to
364 364 # /repo_store_location/.cache/largefiles
365 365 largefiles = RhodeCodeUi()
366 366 largefiles.ui_section = 'largefiles'
367 367 largefiles.ui_key = 'usercache'
368 368 largefiles.ui_value = largefiles_store(repo_store_path)
369 369
370 370 self.sa.add(largefiles)
371 371
372 372 # set default lfs cache dir, defaults to
373 373 # /repo_store_location/.cache/lfs_store
374 374 lfsstore = RhodeCodeUi()
375 375 lfsstore.ui_section = 'vcs_git_lfs'
376 376 lfsstore.ui_key = 'store_location'
377 377 lfsstore.ui_value = lfs_store(repo_store_path)
378 378
379 379 self.sa.add(lfsstore)
380 380
381 # enable hgsubversion disabled by default
382 hgsubversion = RhodeCodeUi()
383 hgsubversion.ui_section = 'extensions'
384 hgsubversion.ui_key = 'hgsubversion'
385 hgsubversion.ui_value = ''
386 hgsubversion.ui_active = False
387 self.sa.add(hgsubversion)
388
389 381 # enable hgevolve disabled by default
390 382 hgevolve = RhodeCodeUi()
391 383 hgevolve.ui_section = 'extensions'
392 384 hgevolve.ui_key = 'evolve'
393 385 hgevolve.ui_value = ''
394 386 hgevolve.ui_active = False
395 387 self.sa.add(hgevolve)
396 388
397 389 hgevolve = RhodeCodeUi()
398 390 hgevolve.ui_section = 'experimental'
399 391 hgevolve.ui_key = 'evolution'
400 392 hgevolve.ui_value = ''
401 393 hgevolve.ui_active = False
402 394 self.sa.add(hgevolve)
403 395
404 396 hgevolve = RhodeCodeUi()
405 397 hgevolve.ui_section = 'experimental'
406 398 hgevolve.ui_key = 'evolution.exchange'
407 399 hgevolve.ui_value = ''
408 400 hgevolve.ui_active = False
409 401 self.sa.add(hgevolve)
410 402
411 403 hgevolve = RhodeCodeUi()
412 404 hgevolve.ui_section = 'extensions'
413 405 hgevolve.ui_key = 'topic'
414 406 hgevolve.ui_value = ''
415 407 hgevolve.ui_active = False
416 408 self.sa.add(hgevolve)
417 409
418 410 # enable hggit disabled by default
419 411 hggit = RhodeCodeUi()
420 412 hggit.ui_section = 'extensions'
421 413 hggit.ui_key = 'hggit'
422 414 hggit.ui_value = ''
423 415 hggit.ui_active = False
424 416 self.sa.add(hggit)
425 417
426 418 # set svn branch defaults
427 419 branches = ["/branches/*", "/trunk"]
428 420 tags = ["/tags/*"]
429 421
430 422 for branch in branches:
431 423 settings_model.create_ui_section_value(
432 424 RhodeCodeUi.SVN_BRANCH_ID, branch)
433 425
434 426 for tag in tags:
435 427 settings_model.create_ui_section_value(RhodeCodeUi.SVN_TAG_ID, tag)
436 428
437 429 def create_auth_plugin_options(self, skip_existing=False):
438 430 """
439 431 Create default auth plugin settings, and make it active
440 432
441 433 :param skip_existing:
442 434 """
443 435 defaults = [
444 436 ('auth_plugins',
445 437 'egg:rhodecode-enterprise-ce#token,egg:rhodecode-enterprise-ce#rhodecode',
446 438 'list'),
447 439
448 440 ('auth_authtoken_enabled',
449 441 'True',
450 442 'bool'),
451 443
452 444 ('auth_rhodecode_enabled',
453 445 'True',
454 446 'bool'),
455 447 ]
456 448 for k, v, t in defaults:
457 449 if (skip_existing and
458 450 SettingsModel().get_setting_by_name(k) is not None):
459 451 log.debug('Skipping option %s', k)
460 452 continue
461 453 setting = RhodeCodeSetting(k, v, t)
462 454 self.sa.add(setting)
463 455
464 456 def create_default_options(self, skip_existing=False):
465 457 """Creates default settings"""
466 458
467 459 for k, v, t in [
468 460 ('default_repo_enable_locking', False, 'bool'),
469 461 ('default_repo_enable_downloads', False, 'bool'),
470 462 ('default_repo_enable_statistics', False, 'bool'),
471 463 ('default_repo_private', False, 'bool'),
472 464 ('default_repo_type', 'hg', 'unicode')]:
473 465
474 466 if (skip_existing and
475 467 SettingsModel().get_setting_by_name(k) is not None):
476 468 log.debug('Skipping option %s', k)
477 469 continue
478 470 setting = RhodeCodeSetting(k, v, t)
479 471 self.sa.add(setting)
480 472
481 473 def fixup_groups(self):
482 474 def_usr = User.get_default_user()
483 475 for g in RepoGroup.query().all():
484 476 g.group_name = g.get_new_name(g.name)
485 477 self.sa.add(g)
486 478 # get default perm
487 479 default = UserRepoGroupToPerm.query()\
488 480 .filter(UserRepoGroupToPerm.group == g)\
489 481 .filter(UserRepoGroupToPerm.user == def_usr)\
490 482 .scalar()
491 483
492 484 if default is None:
493 485 log.debug('missing default permission for group %s adding', g)
494 486 perm_obj = RepoGroupModel()._create_default_perms(g)
495 487 self.sa.add(perm_obj)
496 488
497 489 def reset_permissions(self, username):
498 490 """
499 491 Resets permissions to default state, useful when old systems had
500 492 bad permissions, we must clean them up
501 493
502 494 :param username:
503 495 """
504 496 default_user = User.get_by_username(username)
505 497 if not default_user:
506 498 return
507 499
508 500 u2p = UserToPerm.query()\
509 501 .filter(UserToPerm.user == default_user).all()
510 502 fixed = False
511 503 if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS):
512 504 for p in u2p:
513 505 Session().delete(p)
514 506 fixed = True
515 507 self.populate_default_permissions()
516 508 return fixed
517 509
518 510 def config_prompt(self, test_repo_path='', retries=3):
519 511 defaults = self.cli_args
520 512 _path = defaults.get('repos_location')
521 513 if retries == 3:
522 514 log.info('Setting up repositories config')
523 515
524 516 if _path is not None:
525 517 path = _path
526 518 elif not self.tests and not test_repo_path:
527 519 path = input(
528 520 'Enter a valid absolute path to store repositories. '
529 521 'All repositories in that path will be added automatically:'
530 522 )
531 523 else:
532 524 path = test_repo_path
533 525 path_ok = True
534 526
535 527 # check proper dir
536 528 if not os.path.isdir(path):
537 529 path_ok = False
538 530 log.error('Given path %s is not a valid directory', path)
539 531
540 532 elif not os.path.isabs(path):
541 533 path_ok = False
542 534 log.error('Given path %s is not an absolute path', path)
543 535
544 536 # check if path is at least readable.
545 537 if not os.access(path, os.R_OK):
546 538 path_ok = False
547 539 log.error('Given path %s is not readable', path)
548 540
549 541 # check write access, warn user about non writeable paths
550 542 elif not os.access(path, os.W_OK) and path_ok:
551 543 log.warning('No write permission to given path %s', path)
552 544
553 545 q = (f'Given path {path} is not writeable, do you want to '
554 546 f'continue with read only mode ? [y/n]')
555 547 if not self.ask_ok(q):
556 548 log.error('Canceled by user')
557 549 sys.exit(-1)
558 550
559 551 if retries == 0:
560 552 sys.exit('max retries reached')
561 553 if not path_ok:
562 554 retries -= 1
563 555 return self.config_prompt(test_repo_path, retries)
564 556
565 557 real_path = os.path.normpath(os.path.realpath(path))
566 558
567 559 if real_path != os.path.normpath(path):
568 560 q = (f'Path looks like a symlink, RhodeCode Enterprise will store '
569 561 f'given path as {real_path} ? [y/n]')
570 562 if not self.ask_ok(q):
571 563 log.error('Canceled by user')
572 564 sys.exit(-1)
573 565
574 566 return real_path
575 567
576 568 def create_settings(self, path):
577 569
578 570 self.create_ui_settings(path)
579 571
580 572 ui_config = [
581 573 ('web', 'push_ssl', 'False'),
582 574 ('web', 'allow_archive', 'gz zip bz2'),
583 575 ('web', 'allow_push', '*'),
584 576 ('web', 'baseurl', '/'),
585 577 ('paths', '/', path),
586 578 ('phases', 'publish', 'True')
587 579 ]
588 580 for section, key, value in ui_config:
589 581 ui_conf = RhodeCodeUi()
590 582 setattr(ui_conf, 'ui_section', section)
591 583 setattr(ui_conf, 'ui_key', key)
592 584 setattr(ui_conf, 'ui_value', value)
593 585 self.sa.add(ui_conf)
594 586
595 587 # rhodecode app settings
596 588 settings = [
597 589 ('realm', 'RhodeCode', 'unicode'),
598 590 ('title', '', 'unicode'),
599 591 ('pre_code', '', 'unicode'),
600 592 ('post_code', '', 'unicode'),
601 593
602 594 # Visual
603 595 ('show_public_icon', True, 'bool'),
604 596 ('show_private_icon', True, 'bool'),
605 597 ('stylify_metatags', True, 'bool'),
606 598 ('dashboard_items', 100, 'int'),
607 599 ('admin_grid_items', 25, 'int'),
608 600
609 601 ('markup_renderer', 'markdown', 'unicode'),
610 602
611 603 ('repository_fields', True, 'bool'),
612 604 ('show_version', True, 'bool'),
613 605 ('show_revision_number', True, 'bool'),
614 606 ('show_sha_length', 12, 'int'),
615 607
616 608 ('use_gravatar', False, 'bool'),
617 609 ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'),
618 610
619 611 ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'),
620 612 ('clone_uri_id_tmpl', Repository.DEFAULT_CLONE_URI_ID, 'unicode'),
621 613 ('clone_uri_ssh_tmpl', Repository.DEFAULT_CLONE_URI_SSH, 'unicode'),
622 614 ('support_url', '', 'unicode'),
623 615 ('update_url', RhodeCodeSetting.DEFAULT_UPDATE_URL, 'unicode'),
624 616
625 617 # VCS Settings
626 618 ('pr_merge_enabled', True, 'bool'),
627 619 ('use_outdated_comments', True, 'bool'),
628 620 ('diff_cache', True, 'bool'),
629 621 ]
630 622
631 623 for key, val, type_ in settings:
632 624 sett = RhodeCodeSetting(key, val, type_)
633 625 self.sa.add(sett)
634 626
635 627 self.create_auth_plugin_options()
636 628 self.create_default_options()
637 629
638 630 log.info('created ui config')
639 631
640 632 def create_user(self, username, password, email='', admin=False,
641 633 strict_creation_check=True, api_key=None):
642 634 log.info('creating user `%s`', username)
643 635 user = UserModel().create_or_update(
644 636 username, password, email, firstname='RhodeCode', lastname='Admin',
645 637 active=True, admin=admin, extern_type="rhodecode",
646 638 strict_creation_check=strict_creation_check)
647 639
648 640 if api_key:
649 641 log.info('setting a new default auth token for user `%s`', username)
650 642 UserModel().add_auth_token(
651 643 user=user, lifetime_minutes=-1,
652 644 role=UserModel.auth_token_role.ROLE_ALL,
653 645 description='BUILTIN TOKEN')
654 646
655 647 def create_default_user(self):
656 648 log.info('creating default user')
657 649 # create default user for handling default permissions.
658 650 user = UserModel().create_or_update(username=User.DEFAULT_USER,
659 651 password=str(uuid.uuid1())[:20],
660 652 email=User.DEFAULT_USER_EMAIL,
661 653 firstname='Anonymous',
662 654 lastname='User',
663 655 strict_creation_check=False)
664 656 # based on configuration options activate/de-activate this user which
665 657 # controls anonymous access
666 658 if self.cli_args.get('public_access') is False:
667 659 log.info('Public access disabled')
668 660 user.active = False
669 661 Session().add(user)
670 662 Session().commit()
671 663
672 664 def create_permissions(self):
673 665 """
674 666 Creates all permissions defined in the system
675 667 """
676 668 # module.(access|create|change|delete)_[name]
677 669 # module.(none|read|write|admin)
678 670 log.info('creating permissions')
679 671 PermissionModel(self.sa).create_permissions()
680 672
681 673 def populate_default_permissions(self):
682 674 """
683 675 Populate default permissions. It will create only the default
684 676 permissions that are missing, and not alter already defined ones
685 677 """
686 678 log.info('creating default user permissions')
687 679 PermissionModel(self.sa).create_default_user_permissions(user=User.DEFAULT_USER)
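
The hunk above stops creating the extensions/hgsubversion ui row for new databases. A minimal cleanup sketch (an assumption, not part of this commit) for removing a leftover row from an existing database, using the RhodeCodeUi and Session models imported in the file above:

    # hypothetical cleanup sketch, not shipped with this change
    from rhodecode.model.db import RhodeCodeUi
    from rhodecode.model.meta import Session

    def drop_hgsubversion_ui_entry():
        # look up the old extensions/hgsubversion row, if it still exists
        entry = RhodeCodeUi.query()\
            .filter(RhodeCodeUi.ui_section == 'extensions')\
            .filter(RhodeCodeUi.ui_key == 'hgsubversion')\
            .first()
        if entry is not None:
            Session().delete(entry)
            Session().commit()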
@@ -1,1983 +1,1988 b''
1 1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Base module for all VCS systems
21 21 """
22 22 import os
23 23 import re
24 24 import time
25 25 import shutil
26 26 import datetime
27 27 import fnmatch
28 28 import itertools
29 29 import logging
30 30 import dataclasses
31 31 import warnings
32 32
33 33 from zope.cachedescriptors.property import Lazy as LazyProperty
34 34
35 35
36 36 import rhodecode
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib.utils2 import safe_str, CachedProperty
39 39 from rhodecode.lib.vcs.utils import author_name, author_email
40 40 from rhodecode.lib.vcs.conf import settings
41 41 from rhodecode.lib.vcs.exceptions import (
42 42 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
43 43 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
44 44 NodeDoesNotExistError, NodeNotChangedError, VCSError,
45 45 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
46 46 RepositoryError)
47 47
48 48
49 49 log = logging.getLogger(__name__)
50 50
51 51
52 52 FILEMODE_DEFAULT = 0o100644
53 53 FILEMODE_EXECUTABLE = 0o100755
54 54 EMPTY_COMMIT_ID = '0' * 40
55 55
56 56
57 57 @dataclasses.dataclass
58 58 class Reference:
59 59 type: str
60 60 name: str
61 61 commit_id: str
62 62
63 63 def __iter__(self):
64 64 yield self.type
65 65 yield self.name
66 66 yield self.commit_id
67 67
68 68 @property
69 69 def branch(self):
70 70 if self.type == 'branch':
71 71 return self.name
72 72
73 73 @property
74 74 def bookmark(self):
75 75 if self.type == 'book':
76 76 return self.name
77 77
78 78 @property
79 79 def to_str(self):
80 80 return reference_to_unicode(self)
81 81
82 82 def asdict(self):
83 83 return dict(
84 84 type=self.type,
85 85 name=self.name,
86 86 commit_id=self.commit_id
87 87 )
88 88
89 89
90 90 def unicode_to_reference(raw: str):
91 91 """
92 92 Convert a unicode (or string) to a reference object.
93 93 If unicode evaluates to False it returns None.
94 94 """
95 95 if raw:
96 96 refs = raw.split(':')
97 97 return Reference(*refs)
98 98 else:
99 99 return None
100 100
101 101
102 102 def reference_to_unicode(ref: Reference):
103 103 """
104 104 Convert a reference object to unicode.
105 105 If reference is None it returns None.
106 106 """
107 107 if ref:
108 108 return ':'.join(ref)
109 109 else:
110 110 return None
111 111
112 112
113 113 class MergeFailureReason(object):
114 114 """
115 115 Enumeration with all the reasons why the server side merge could fail.
116 116
117 117 DO NOT change the number of the reasons, as they may be stored in the
118 118 database.
119 119
120 120 Changing the name of a reason is acceptable and encouraged to deprecate old
121 121 reasons.
122 122 """
123 123
124 124 # Everything went well.
125 125 NONE = 0
126 126
127 127 # An unexpected exception was raised. Check the logs for more details.
128 128 UNKNOWN = 1
129 129
130 130 # The merge was not successful, there are conflicts.
131 131 MERGE_FAILED = 2
132 132
133 133 # The merge succeeded but we could not push it to the target repository.
134 134 PUSH_FAILED = 3
135 135
136 136 # The specified target is not a head in the target repository.
137 137 TARGET_IS_NOT_HEAD = 4
138 138
139 139 # The source repository contains more branches than the target. Pushing
140 140 # the merge will create additional branches in the target.
141 141 HG_SOURCE_HAS_MORE_BRANCHES = 5
142 142
143 143 # The target reference has multiple heads. That does not allow to correctly
144 144 # identify the target location. This could only happen for mercurial
145 145 # branches.
146 146 HG_TARGET_HAS_MULTIPLE_HEADS = 6
147 147
148 148 # The target repository is locked
149 149 TARGET_IS_LOCKED = 7
150 150
151 151 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
152 152 # A involved commit could not be found.
153 153 _DEPRECATED_MISSING_COMMIT = 8
154 154
155 155 # The target repo reference is missing.
156 156 MISSING_TARGET_REF = 9
157 157
158 158 # The source repo reference is missing.
159 159 MISSING_SOURCE_REF = 10
160 160
161 161 # The merge was not successful, there are conflicts related to sub
162 162 # repositories.
163 163 SUBREPO_MERGE_FAILED = 11
164 164
165 165
166 166 class UpdateFailureReason(object):
167 167 """
168 168 Enumeration with all the reasons why the pull request update could fail.
169 169
170 170 DO NOT change the number of the reasons, as they may be stored in the
171 171 database.
172 172
173 173 Changing the name of a reason is acceptable and encouraged to deprecate old
174 174 reasons.
175 175 """
176 176
177 177 # Everything went well.
178 178 NONE = 0
179 179
180 180 # An unexpected exception was raised. Check the logs for more details.
181 181 UNKNOWN = 1
182 182
183 183 # The pull request is up to date.
184 184 NO_CHANGE = 2
185 185
186 186 # The pull request has a reference type that is not supported for update.
187 187 WRONG_REF_TYPE = 3
188 188
189 189 # Update failed because the target reference is missing.
190 190 MISSING_TARGET_REF = 4
191 191
192 192 # Update failed because the source reference is missing.
193 193 MISSING_SOURCE_REF = 5
194 194
195 195
196 196 class MergeResponse(object):
197 197
198 198 # uses .format(**metadata) for variables
199 199 MERGE_STATUS_MESSAGES = {
200 200 MergeFailureReason.NONE: lazy_ugettext(
201 201 'This pull request can be automatically merged.'),
202 202 MergeFailureReason.UNKNOWN: lazy_ugettext(
203 203 'This pull request cannot be merged because of an unhandled exception. '
204 204 '{exception}'),
205 205 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
206 206 'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
207 207 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
208 208 'This pull request could not be merged because push to '
209 209 'target:`{target}@{merge_commit}` failed.'),
210 210 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
211 211 'This pull request cannot be merged because the target '
212 212 '`{target_ref.name}` is not a head.'),
213 213 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
214 214 'This pull request cannot be merged because the source contains '
215 215 'more branches than the target.'),
216 216 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
217 217 'This pull request cannot be merged because the target `{target_ref.name}` '
218 218 'has multiple heads: `{heads}`.'),
219 219 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
220 220 'This pull request cannot be merged because the target repository is '
221 221 'locked by {locked_by}.'),
222 222
223 223 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
224 224 'This pull request cannot be merged because the target '
225 225 'reference `{target_ref.name}` is missing.'),
226 226 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
227 227 'This pull request cannot be merged because the source '
228 228 'reference `{source_ref.name}` is missing.'),
229 229 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
230 230 'This pull request cannot be merged because of conflicts related '
231 231 'to sub repositories.'),
232 232
233 233 # Deprecations
234 234 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
235 235 'This pull request cannot be merged because the target or the '
236 236 'source reference is missing.'),
237 237
238 238 }
239 239
240 240 def __init__(self, possible, executed, merge_ref: Reference, failure_reason, metadata=None):
241 241 self.possible = possible
242 242 self.executed = executed
243 243 self.merge_ref = merge_ref
244 244 self.failure_reason = failure_reason
245 245 self.metadata = metadata or {}
246 246
247 247 def __repr__(self):
248 248 return f'<MergeResponse:{self.label} {self.failure_reason}>'
249 249
250 250 def __eq__(self, other):
251 251 same_instance = isinstance(other, self.__class__)
252 252 return same_instance \
253 253 and self.possible == other.possible \
254 254 and self.executed == other.executed \
255 255 and self.failure_reason == other.failure_reason
256 256
257 257 @property
258 258 def label(self):
259 259 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
260 260 not k.startswith('_'))
261 261 return label_dict.get(self.failure_reason)
262 262
263 263 @property
264 264 def merge_status_message(self):
265 265 """
266 266 Return a human friendly error message for the given merge status code.
267 267 """
268 268 msg = safe_str(self.MERGE_STATUS_MESSAGES[self.failure_reason])
269 269
270 270 try:
271 271 return msg.format(**self.metadata)
272 272 except Exception:
273 273 log.exception('Failed to format %s message', self)
274 274 return msg
275 275
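    # A minimal usage sketch (values below are illustrative only): build a
    # response for a failed merge and render its human readable message.
    #
    #   resp = MergeResponse(
    #       possible=False, executed=False, merge_ref=None,
    #       failure_reason=MergeFailureReason.MERGE_FAILED,
    #       metadata={'unresolved_files': '`docs/index.rst`'})
    #   resp.label                 # -> 'MERGE_FAILED'
    #   resp.merge_status_message  # -> 'This pull request cannot be merged
    #                              #     because of merge conflicts. ...'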
276 276 def asdict(self):
277 277 data = {}
278 278 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
279 279 'merge_status_message']:
280 280 data[k] = getattr(self, k)
281 281 return data
282 282
283 283
284 284 class TargetRefMissing(ValueError):
285 285 pass
286 286
287 287
288 288 class SourceRefMissing(ValueError):
289 289 pass
290 290
291 291
292 292 class BaseRepository(object):
293 293 """
294 294 Base Repository for final backends
295 295
296 296 .. attribute:: DEFAULT_BRANCH_NAME
297 297
298 298         name of default branch (i.e. "trunk" for svn, "master" for git, etc.)
299 299
300 300 .. attribute:: commit_ids
301 301
302 302 list of all available commit ids, in ascending order
303 303
304 304 .. attribute:: path
305 305
306 306 absolute path to the repository
307 307
308 308 .. attribute:: bookmarks
309 309
310 310 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
311 311 there are no bookmarks or the backend implementation does not support
312 312 bookmarks.
313 313
314 314 .. attribute:: tags
315 315
316 316 Mapping from name to :term:`Commit ID` of the tag.
317 317
318 318 """
319 319
320 320 DEFAULT_BRANCH_NAME = None
321 321 DEFAULT_CONTACT = "Unknown"
322 322 DEFAULT_DESCRIPTION = "unknown"
323 323 EMPTY_COMMIT_ID = '0' * 40
324 324 COMMIT_ID_PAT = re.compile(r'[0-9a-fA-F]{40}')
325 325
326 326 path = None
327 327
328 328 _is_empty = None
329 329 _commit_ids = {}
330 330
331 331 def __init__(self, repo_path, config=None, create=False, **kwargs):
332 332 """
333 333         Initializes repository. Raises RepositoryError if the repository could
334 334         not be found at the given ``repo_path``, or if a directory at ``repo_path``
335 335         already exists and ``create`` is set to True.
336 336
337 337 :param repo_path: local path of the repository
338 338 :param config: repository configuration
339 339         :param create=False: if set to True, the repository will be created.
340 340         :param src_url=None: if set, should be a proper url from which the
341 341             repository would be cloned; requires the ``create`` parameter to be
342 342             set to True - raises RepositoryError if ``src_url`` is set and
343 343             ``create`` evaluates to False
344 344 """
345 345 raise NotImplementedError
346 346
347 347 def __repr__(self):
348 348 return f'<{self.__class__.__name__} at {self.path}>'
349 349
350 350 def __len__(self):
351 351 return self.count()
352 352
353 353 def __eq__(self, other):
354 354 same_instance = isinstance(other, self.__class__)
355 355 return same_instance and other.path == self.path
356 356
357 357 def __ne__(self, other):
358 358 return not self.__eq__(other)
359 359
360 360 def get_create_shadow_cache_pr_path(self, db_repo):
361 361 path = db_repo.cached_diffs_dir
362 362 if not os.path.exists(path):
363 363 os.makedirs(path, 0o755)
364 364 return path
365 365
366 366 @classmethod
367 367 def get_default_config(cls, default=None):
368 368 config = Config()
369 369 if default and isinstance(default, list):
370 370 for section, key, val in default:
371 371 config.set(section, key, val)
372 372 return config
373 373
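    # A minimal usage sketch (the section/key/value triple is illustrative only):
    #
    #   config = BaseRepository.get_default_config(
    #       default=[('extensions', 'largefiles', '')])
    #   config.get('extensions', 'largefiles')  # -> ''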
374 374 @LazyProperty
375 375 def _remote(self):
376 376 raise NotImplementedError
377 377
378 378 def _heads(self, branch=None):
379 379 return []
380 380
381 381 @LazyProperty
382 382 def EMPTY_COMMIT(self):
383 383 return EmptyCommit(self.EMPTY_COMMIT_ID)
384 384
385 385 @LazyProperty
386 386 def alias(self):
387 387 for k, v in settings.BACKENDS.items():
388 388 if v.split('.')[-1] == str(self.__class__.__name__):
389 389 return k
390 390
391 391 @LazyProperty
392 392 def name(self):
393 393 return safe_str(os.path.basename(self.path))
394 394
395 395 @LazyProperty
396 396 def description(self):
397 397 raise NotImplementedError
398 398
399 399 def refs(self):
400 400 """
401 401 returns a `dict` with branches, bookmarks, tags, and closed_branches
402 402 for this repository
403 403 """
404 404 return dict(
405 405 branches=self.branches,
406 406 branches_closed=self.branches_closed,
407 407 tags=self.tags,
408 408 bookmarks=self.bookmarks
409 409 )
410 410
411 411 @LazyProperty
412 412 def branches(self):
413 413 """
414 414 A `dict` which maps branch names to commit ids.
415 415 """
416 416 raise NotImplementedError
417 417
418 418 @LazyProperty
419 419 def branches_closed(self):
420 420 """
421 421         A `dict` which maps closed branch names to commit ids.
422 422 """
423 423 raise NotImplementedError
424 424
425 425 @LazyProperty
426 426 def bookmarks(self):
427 427 """
428 428         A `dict` which maps bookmark names to commit ids.
429 429 """
430 430 raise NotImplementedError
431 431
432 432 @LazyProperty
433 433 def tags(self):
434 434 """
435 435         A `dict` which maps tag names to commit ids.
436 436 """
437 437 raise NotImplementedError
438 438
439 439 @LazyProperty
440 440 def size(self):
441 441 """
442 442 Returns combined size in bytes for all repository files
443 443 """
444 444 tip = self.get_commit()
445 445 return tip.size
446 446
447 447 def size_at_commit(self, commit_id):
448 448 commit = self.get_commit(commit_id)
449 449 return commit.size
450 450
451 451 def _check_for_empty(self):
452 452 no_commits = len(self._commit_ids) == 0
453 453 if no_commits:
454 454 # check on remote to be sure
455 455 return self._remote.is_empty()
456 456 else:
457 457 return False
458 458
459 459 def is_empty(self):
460 460 if rhodecode.is_test:
461 461 return self._check_for_empty()
462 462
463 463 if self._is_empty is None:
464 464 # cache empty for production, but not tests
465 465 self._is_empty = self._check_for_empty()
466 466
467 467 return self._is_empty
468 468
469 469 @staticmethod
470 470 def check_url(url, config):
471 471 """
472 472 Function will check given url and try to verify if it's a valid
473 473 link.
474 474 """
475 475 raise NotImplementedError
476 476
477 477 @staticmethod
478 478 def is_valid_repository(path):
479 479 """
480 480 Check if given `path` contains a valid repository of this backend
481 481 """
482 482 raise NotImplementedError
483 483
484 484 # ==========================================================================
485 485 # COMMITS
486 486 # ==========================================================================
487 487
488 488 @CachedProperty
489 489 def commit_ids(self):
490 490 raise NotImplementedError
491 491
492 492 def append_commit_id(self, commit_id):
493 493 if commit_id not in self.commit_ids:
494 494 self._rebuild_cache(self.commit_ids + [commit_id])
495 495
496 496 # clear cache
497 497 self._invalidate_prop_cache('commit_ids')
498 498 self._is_empty = False
499 499
500 500 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
501 501 translate_tag=None, maybe_unreachable=False, reference_obj=None):
502 502 """
503 503 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
504 504 are both None, most recent commit is returned.
505 505
506 506 :param pre_load: Optional. List of commit attributes to load.
507 507
508 508 :raises ``EmptyRepositoryError``: if there are no commits
509 509 """
510 510 raise NotImplementedError
511 511
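    # A minimal usage sketch (assumes ``repo`` is a concrete backend instance;
    # the commit id is illustrative only):
    #
    #   tip = repo.get_commit()                      # most recent commit
    #   first = repo.get_commit(commit_idx=0, pre_load=['author', 'date'])
    #   by_id = repo.get_commit(commit_id='deadbeef' * 5)  # full 40 char id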
512 512 def __iter__(self):
513 513 for commit_id in self.commit_ids:
514 514 yield self.get_commit(commit_id=commit_id)
515 515
516 516 def get_commits(
517 517 self, start_id=None, end_id=None, start_date=None, end_date=None,
518 518 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
519 519 """
520 520 Returns iterator of `BaseCommit` objects from start to end
521 521 not inclusive. This should behave just like a list, ie. end is not
522 522 inclusive.
523 523
524 524 :param start_id: None or str, must be a valid commit id
525 525 :param end_id: None or str, must be a valid commit id
526 526 :param start_date:
527 527 :param end_date:
528 528 :param branch_name:
529 529 :param show_hidden:
530 530 :param pre_load:
531 531 :param translate_tags:
532 532 """
533 533 raise NotImplementedError
534 534
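    # A minimal usage sketch (assumes ``repo`` is a concrete backend instance):
    #
    #   commits = repo.get_commits(branch_name=repo.DEFAULT_BRANCH_NAME,
    #                              pre_load=['author', 'message'])
    #   for commit in commits:
    #       print(commit.short_id, commit.author)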
535 535 def __getitem__(self, key):
536 536 """
537 537 Allows index based access to the commit objects of this repository.
538 538 """
539 539 pre_load = ["author", "branch", "date", "message", "parents"]
540 540 if isinstance(key, slice):
541 541 return self._get_range(key, pre_load)
542 542 return self.get_commit(commit_idx=key, pre_load=pre_load)
543 543
544 544 def _get_range(self, slice_obj, pre_load):
545 545 for commit_id in self.commit_ids.__getitem__(slice_obj):
546 546 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
547 547
548 548 def count(self):
549 549 return len(self.commit_ids)
550 550
551 551 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
552 552 """
553 553 Creates and returns a tag for the given ``commit_id``.
554 554
555 555 :param name: name for new tag
556 556 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
557 557 :param commit_id: commit id for which new tag would be created
558 558 :param message: message of the tag's commit
559 559 :param date: date of tag's commit
560 560
561 561 :raises TagAlreadyExistError: if tag with same name already exists
562 562 """
563 563 raise NotImplementedError
564 564
565 565 def remove_tag(self, name, user, message=None, date=None):
566 566 """
567 567 Removes tag with the given ``name``.
568 568
569 569 :param name: name of the tag to be removed
570 570 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
571 571 :param message: message of the tag's removal commit
572 572 :param date: date of tag's removal commit
573 573
574 574         :raises TagDoesNotExistError: if tag with given name does not exist
575 575 """
576 576 raise NotImplementedError
577 577
578 578 def get_diff(
579 579 self, commit1, commit2, path=None, ignore_whitespace=False,
580 580 context=3, path1=None):
581 581 """
582 582 Returns (git like) *diff*, as plain text. Shows changes introduced by
583 583 `commit2` since `commit1`.
584 584
585 585 :param commit1: Entry point from which diff is shown. Can be
586 586 ``self.EMPTY_COMMIT`` - in this case, patch showing all
587 587 the changes since empty state of the repository until `commit2`
588 588 :param commit2: Until which commit changes should be shown.
589 589 :param path: Can be set to a path of a file to create a diff of that
590 590 file. If `path1` is also set, this value is only associated to
591 591 `commit2`.
592 592 :param ignore_whitespace: If set to ``True``, would not show whitespace
593 593 changes. Defaults to ``False``.
594 594 :param context: How many lines before/after changed lines should be
595 595 shown. Defaults to ``3``.
596 596 :param path1: Can be set to a path to associate with `commit1`. This
597 597 parameter works only for backends which support diff generation for
598 598 different paths. Other backends will raise a `ValueError` if `path1`
599 599 is set and has a different value than `path`.
600 600 :param file_path: filter this diff by given path pattern
601 601 """
602 602 raise NotImplementedError
603 603
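    # A minimal usage sketch (assumes ``repo`` is a concrete backend instance;
    # the file path is illustrative only):
    #
    #   diff = repo.get_diff(repo.EMPTY_COMMIT, repo.get_commit(),
    #                        path='README.rst', ignore_whitespace=True, context=5)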
604 604 def strip(self, commit_id, branch=None):
605 605 """
606 606 Strip given commit_id from the repository
607 607 """
608 608 raise NotImplementedError
609 609
610 610 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
611 611 """
612 612 Return a latest common ancestor commit if one exists for this repo
613 613 `commit_id1` vs `commit_id2` from `repo2`.
614 614
615 615         :param commit_id1: Commit id from this repository to use as a
616 616             target for the comparison.
617 617 :param commit_id2: Source commit id to use for comparison.
618 618 :param repo2: Source repository to use for comparison.
619 619 """
620 620 raise NotImplementedError
621 621
622 622 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
623 623 """
624 624 Compare this repository's revision `commit_id1` with `commit_id2`.
625 625
626 626 Returns a tuple(commits, ancestor) that would be merged from
627 627 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
628 628 will be returned as ancestor.
629 629
630 630         :param commit_id1: Commit id from this repository to use as a
631 631             target for the comparison.
632 632 :param commit_id2: Source commit id to use for comparison.
633 633 :param repo2: Source repository to use for comparison.
634 634 :param merge: If set to ``True`` will do a merge compare which also
635 635 returns the common ancestor.
636 636 :param pre_load: Optional. List of commit attributes to load.
637 637 """
638 638 raise NotImplementedError
639 639
640 640 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
641 641 user_name='', user_email='', message='', dry_run=False,
642 642 use_rebase=False, close_branch=False):
643 643 """
644 644 Merge the revisions specified in `source_ref` from `source_repo`
645 645 onto the `target_ref` of this repository.
646 646
647 647         `source_ref` and `target_ref` are named tuples with the following
648 648         fields: `type`, `name` and `commit_id`.
649 649
650 650 Returns a MergeResponse named tuple with the following fields
651 651 'possible', 'executed', 'source_commit', 'target_commit',
652 652 'merge_commit'.
653 653
654 654 :param repo_id: `repo_id` target repo id.
655 655 :param workspace_id: `workspace_id` unique identifier.
656 656 :param target_ref: `target_ref` points to the commit on top of which
657 657 the `source_ref` should be merged.
658 658 :param source_repo: The repository that contains the commits to be
659 659 merged.
660 660 :param source_ref: `source_ref` points to the topmost commit from
661 661 the `source_repo` which should be merged.
662 662 :param user_name: Merge commit `user_name`.
663 663 :param user_email: Merge commit `user_email`.
664 664 :param message: Merge commit `message`.
665 665 :param dry_run: If `True` the merge will not take place.
666 666 :param use_rebase: If `True` commits from the source will be rebased
667 667 on top of the target instead of being merged.
668 668         :param close_branch: If `True` the branch will be closed before merging it
669 669 """
670 670 if dry_run:
671 671 message = message or settings.MERGE_DRY_RUN_MESSAGE
672 672 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
673 673 user_name = user_name or settings.MERGE_DRY_RUN_USER
674 674 else:
675 675 if not user_name:
676 676 raise ValueError('user_name cannot be empty')
677 677 if not user_email:
678 678 raise ValueError('user_email cannot be empty')
679 679 if not message:
680 680 raise ValueError('message cannot be empty')
681 681
682 682 try:
683 683 return self._merge_repo(
684 684 repo_id, workspace_id, target_ref, source_repo,
685 685 source_ref, message, user_name, user_email, dry_run=dry_run,
686 686 use_rebase=use_rebase, close_branch=close_branch)
687 687 except RepositoryError as exc:
688 688 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
689 689 return MergeResponse(
690 690 False, False, None, MergeFailureReason.UNKNOWN,
691 691 metadata={'exception': str(exc)})
692 692
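    # A minimal dry-run sketch (assumes ``target_ref``/``source_ref`` are
    # ``Reference`` tuples and ``source_repo`` is another backend instance;
    # the ids are illustrative only):
    #
    #   response = repo.merge(
    #       repo_id=1, workspace_id='pr-42', target_ref=target_ref,
    #       source_repo=source_repo, source_ref=source_ref, dry_run=True)
    #   response.possible, response.failure_reason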
693 693 def _merge_repo(self, repo_id, workspace_id, target_ref,
694 694 source_repo, source_ref, merge_message,
695 695 merger_name, merger_email, dry_run=False,
696 696 use_rebase=False, close_branch=False):
697 697 """Internal implementation of merge."""
698 698 raise NotImplementedError
699 699
700 700 def _maybe_prepare_merge_workspace(
701 701 self, repo_id, workspace_id, target_ref, source_ref):
702 702 """
703 703 Create the merge workspace.
704 704
705 705 :param workspace_id: `workspace_id` unique identifier.
706 706 """
707 707 raise NotImplementedError
708 708
709 709 @classmethod
710 710 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
711 711 """
712 712         Legacy shadow repository path schema that was used previously. We still
713 713         need it for backward compatibility.
714 714 """
715 715 return os.path.join(
716 716 os.path.dirname(repo_path),
717 717 f'.__shadow_{os.path.basename(repo_path)}_{workspace_id}')
718 718
719 719 @classmethod
720 720 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
721 721 # The name of the shadow repository must start with '.', so it is
722 722 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
723 723 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
724 724 if os.path.exists(legacy_repository_path):
725 725 return legacy_repository_path
726 726 else:
727 727 return os.path.join(
728 728 os.path.dirname(repo_path),
729 729 f'.__shadow_repo_{repo_id}_{workspace_id}')
730 730
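    # A minimal sketch of the resulting path (values are illustrative only):
    #
    #   BaseRepository._get_shadow_repository_path(
    #       '/srv/repos/project', repo_id=7, workspace_id='pr-12')
    #   # -> '/srv/repos/.__shadow_repo_7_pr-12' (unless the legacy path exists)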
731 731 def cleanup_merge_workspace(self, repo_id, workspace_id):
732 732 """
733 733 Remove merge workspace.
734 734
735 735         This function MUST NOT fail if there is no workspace associated with
736 736         the given `workspace_id`.
737 737
738 738 :param workspace_id: `workspace_id` unique identifier.
739 739 """
740 740 shadow_repository_path = self._get_shadow_repository_path(
741 741 self.path, repo_id, workspace_id)
742 742 shadow_repository_path_del = '{}.{}.delete'.format(
743 743 shadow_repository_path, time.time())
744 744
745 745 # move the shadow repo, so it never conflicts with the one used.
746 746 # we use this method because shutil.rmtree had some edge case problems
747 747 # removing symlinked repositories
748 748 if not os.path.isdir(shadow_repository_path):
749 749 return
750 750
751 751 shutil.move(shadow_repository_path, shadow_repository_path_del)
752 752 try:
753 753 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
754 754 except Exception:
755 755 log.exception('Failed to gracefully remove shadow repo under %s',
756 756 shadow_repository_path_del)
757 757 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
758 758
759 759 # ========== #
760 760 # COMMIT API #
761 761 # ========== #
762 762
763 763 @LazyProperty
764 764 def in_memory_commit(self):
765 765 """
766 766 Returns :class:`InMemoryCommit` object for this repository.
767 767 """
768 768 raise NotImplementedError
769 769
770 770 # ======================== #
771 771 # UTILITIES FOR SUBCLASSES #
772 772 # ======================== #
773 773
774 774 def _validate_diff_commits(self, commit1, commit2):
775 775 """
776 776 Validates that the given commits are related to this repository.
777 777
778 778 Intended as a utility for sub classes to have a consistent validation
779 779 of input parameters in methods like :meth:`get_diff`.
780 780 """
781 781 self._validate_commit(commit1)
782 782 self._validate_commit(commit2)
783 783 if (isinstance(commit1, EmptyCommit) and
784 784 isinstance(commit2, EmptyCommit)):
785 785 raise ValueError("Cannot compare two empty commits")
786 786
787 787 def _validate_commit(self, commit):
788 788 if not isinstance(commit, BaseCommit):
789 789 raise TypeError(
790 790 "%s is not of type BaseCommit" % repr(commit))
791 791 if commit.repository != self and not isinstance(commit, EmptyCommit):
792 792 raise ValueError(
793 793 "Commit %s must be a valid commit from this repository %s, "
794 794                 "but is related to repository %s instead." %
795 795 (commit, self, commit.repository))
796 796
797 797 def _validate_commit_id(self, commit_id):
798 798 if not isinstance(commit_id, str):
799 799 raise TypeError(f"commit_id must be a string value got {type(commit_id)} instead")
800 800
801 801 def _validate_commit_idx(self, commit_idx):
802 802 if not isinstance(commit_idx, int):
803 803 raise TypeError(f"commit_idx must be a numeric value, got {type(commit_idx)}")
804 804
805 805 def _validate_branch_name(self, branch_name):
806 806 if branch_name and branch_name not in self.branches_all:
807 807 msg = (f"Branch {branch_name} not found in {self}")
808 808 raise BranchDoesNotExistError(msg)
809 809
810 810 #
811 811 # Supporting deprecated API parts
812 812 # TODO: johbo: consider to move this into a mixin
813 813 #
814 814
815 815 @property
816 816 def EMPTY_CHANGESET(self):
817 817 warnings.warn(
818 818 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
819 819 return self.EMPTY_COMMIT_ID
820 820
821 821 @property
822 822 def revisions(self):
823 823 warnings.warn("Use commits attribute instead", DeprecationWarning)
824 824 return self.commit_ids
825 825
826 826 @revisions.setter
827 827 def revisions(self, value):
828 828 warnings.warn("Use commits attribute instead", DeprecationWarning)
829 829 self.commit_ids = value
830 830
831 831 def get_changeset(self, revision=None, pre_load=None):
832 832 warnings.warn("Use get_commit instead", DeprecationWarning)
833 833 commit_id = None
834 834 commit_idx = None
835 835 if isinstance(revision, str):
836 836 commit_id = revision
837 837 else:
838 838 commit_idx = revision
839 839 return self.get_commit(
840 840 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
841 841
842 842 def get_changesets(
843 843 self, start=None, end=None, start_date=None, end_date=None,
844 844 branch_name=None, pre_load=None):
845 845 warnings.warn("Use get_commits instead", DeprecationWarning)
846 846 start_id = self._revision_to_commit(start)
847 847 end_id = self._revision_to_commit(end)
848 848 return self.get_commits(
849 849 start_id=start_id, end_id=end_id, start_date=start_date,
850 850 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
851 851
852 852 def _revision_to_commit(self, revision):
853 853 """
854 854 Translates a revision to a commit_id
855 855
856 856 Helps to support the old changeset based API which allows to use
857 857 commit ids and commit indices interchangeable.
858 858 """
859 859 if revision is None:
860 860 return revision
861 861
862 862 if isinstance(revision, str):
863 863 commit_id = revision
864 864 else:
865 865 commit_id = self.commit_ids[revision]
866 866 return commit_id
867 867
868 868 @property
869 869 def in_memory_changeset(self):
870 870 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
871 871 return self.in_memory_commit
872 872
873 873 def get_path_permissions(self, username):
874 874 """
875 875 Returns a path permission checker or None if not supported
876 876
877 877 :param username: session user name
878 878 :return: an instance of BasePathPermissionChecker or None
879 879 """
880 880 return None
881 881
882 882 def install_hooks(self, force=False):
883 883 return self._remote.install_hooks(force)
884 884
885 885 def get_hooks_info(self):
886 886 return self._remote.get_hooks_info()
887 887
888 888 def vcsserver_invalidate_cache(self, delete=False):
889 889 return self._remote.vcsserver_invalidate_cache(delete)
890 890
891 891
892 892 class BaseCommit(object):
893 893 """
894 894     Each backend should implement its own commit representation.
895 895
896 896 **Attributes**
897 897
898 898 ``repository``
899 899 repository object within which commit exists
900 900
901 901 ``id``
902 902         The commit id; may be the same as ``raw_id`` or, e.g. for
903 903         mercurial's tip, just ``tip``.
904 904
905 905 ``raw_id``
906 906         raw commit representation (i.e. the full 40 character sha for the
907 907         git backend)
908 908
909 909 ``short_id``
910 910 shortened (if apply) version of ``raw_id``; it would be simple
911 911 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
912 912 as ``raw_id`` for subversion
913 913
914 914 ``idx``
915 915 commit index
916 916
917 917 ``files``
918 918 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
919 919
920 920 ``dirs``
921 921 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
922 922
923 923 ``nodes``
924 924 combined list of ``Node`` objects
925 925
926 926 ``author``
927 927 author of the commit, as unicode
928 928
929 929 ``message``
930 930 message of the commit, as unicode
931 931
932 932 ``parents``
933 933 list of parent commits
934 934
935 935 """
936 936 repository = None
937 937 branch = None
938 938
939 939 """
940 940 Depending on the backend this should be set to the branch name of the
941 941 commit. Backends not supporting branches on commits should leave this
942 942 value as ``None``.
943 943 """
944 944
945 945 _ARCHIVE_PREFIX_TEMPLATE = '{repo_name}-{short_id}'
946 946 """
947 947 This template is used to generate a default prefix for repository archives
948 948 if no prefix has been specified.
949 949 """
950 950
951 951 def __repr__(self):
952 952 return self.__str__()
953 953
954 954 def __str__(self):
955 955 return f'<{self.__class__.__name__} at {self.idx}:{self.short_id}>'
956 956
957 957 def __eq__(self, other):
958 958 same_instance = isinstance(other, self.__class__)
959 959 return same_instance and self.raw_id == other.raw_id
960 960
961 961 def __json__(self):
962 962 parents = []
963 963 try:
964 964 for parent in self.parents:
965 965 parents.append({'raw_id': parent.raw_id})
966 966 except NotImplementedError:
967 967 # empty commit doesn't have parents implemented
968 968 pass
969 969
970 970 return {
971 971 'short_id': self.short_id,
972 972 'raw_id': self.raw_id,
973 973 'revision': self.idx,
974 974 'message': self.message,
975 975 'date': self.date,
976 976 'author': self.author,
977 977 'parents': parents,
978 978 'branch': self.branch
979 979 }
980 980
981 981 def __getstate__(self):
982 982 d = self.__dict__.copy()
983 983 d.pop('_remote', None)
984 984 d.pop('repository', None)
985 985 return d
986 986
987 987 def get_remote(self):
988 988 return self._remote
989 989
990 990 def serialize(self):
991 991 return self.__json__()
992 992
993 993 def _get_refs(self):
994 994 return {
995 995 'branches': [self.branch] if self.branch else [],
996 996 'bookmarks': getattr(self, 'bookmarks', []),
997 997 'tags': self.tags
998 998 }
999 999
1000 1000 @LazyProperty
1001 1001 def last(self):
1002 1002 """
1003 1003 ``True`` if this is last commit in repository, ``False``
1004 1004 otherwise; trying to access this attribute while there is no
1005 1005 commits would raise `EmptyRepositoryError`
1006 1006 """
1007 1007 if self.repository is None:
1008 1008             raise CommitError("Cannot check if it's the most recent commit: repository is not set")
1009 1009 return self.raw_id == self.repository.commit_ids[-1]
1010 1010
1011 1011 @LazyProperty
1012 1012 def parents(self):
1013 1013 """
1014 1014 Returns list of parent commits.
1015 1015 """
1016 1016 raise NotImplementedError
1017 1017
1018 1018 @LazyProperty
1019 1019 def first_parent(self):
1020 1020 """
1021 1021 Returns list of parent commits.
1022 1022 """
1023 1023 return self.parents[0] if self.parents else EmptyCommit()
1024 1024
1025 1025 @property
1026 1026 def merge(self):
1027 1027 """
1028 1028 Returns boolean if commit is a merge.
1029 1029 """
1030 1030 return len(self.parents) > 1
1031 1031
1032 1032 @LazyProperty
1033 1033 def children(self):
1034 1034 """
1035 1035 Returns list of child commits.
1036 1036 """
1037 1037 raise NotImplementedError
1038 1038
1039 1039 @LazyProperty
1040 1040 def id(self):
1041 1041 """
1042 1042 Returns string identifying this commit.
1043 1043 """
1044 1044 raise NotImplementedError
1045 1045
1046 1046 @LazyProperty
1047 1047 def raw_id(self):
1048 1048 """
1049 1049 Returns raw string identifying this commit.
1050 1050 """
1051 1051 raise NotImplementedError
1052 1052
1053 1053 @LazyProperty
1054 1054 def short_id(self):
1055 1055 """
1056 1056 Returns shortened version of ``raw_id`` attribute, as string,
1057 1057 identifying this commit, useful for presentation to users.
1058 1058 """
1059 1059 raise NotImplementedError
1060 1060
1061 1061 @LazyProperty
1062 1062 def idx(self):
1063 1063 """
1064 1064 Returns integer identifying this commit.
1065 1065 """
1066 1066 raise NotImplementedError
1067 1067
1068 1068 @LazyProperty
1069 1069 def committer(self):
1070 1070 """
1071 1071 Returns committer for this commit
1072 1072 """
1073 1073 raise NotImplementedError
1074 1074
1075 1075 @LazyProperty
1076 1076 def committer_name(self):
1077 1077 """
1078 1078 Returns committer name for this commit
1079 1079 """
1080 1080
1081 1081 return author_name(self.committer)
1082 1082
1083 1083 @LazyProperty
1084 1084 def committer_email(self):
1085 1085 """
1086 1086 Returns committer email address for this commit
1087 1087 """
1088 1088
1089 1089 return author_email(self.committer)
1090 1090
1091 1091 @LazyProperty
1092 1092 def author(self):
1093 1093 """
1094 1094 Returns author for this commit
1095 1095 """
1096 1096
1097 1097 raise NotImplementedError
1098 1098
1099 1099 @LazyProperty
1100 1100 def author_name(self):
1101 1101 """
1102 1102 Returns author name for this commit
1103 1103 """
1104 1104
1105 1105 return author_name(self.author)
1106 1106
1107 1107 @LazyProperty
1108 1108 def author_email(self):
1109 1109 """
1110 1110 Returns author email address for this commit
1111 1111 """
1112 1112
1113 1113 return author_email(self.author)
1114 1114
1115 1115 def get_file_mode(self, path: bytes):
1116 1116 """
1117 1117 Returns stat mode of the file at `path`.
1118 1118 """
1119 1119 raise NotImplementedError
1120 1120
1121 1121 def is_link(self, path):
1122 1122 """
1123 1123 Returns ``True`` if given `path` is a symlink
1124 1124 """
1125 1125 raise NotImplementedError
1126 1126
1127 1127 def is_node_binary(self, path):
1128 1128 """
1129 1129         Returns ``True`` if given `path` is a binary file
1130 1130 """
1131 1131 raise NotImplementedError
1132 1132
1133 1133 def node_md5_hash(self, path):
1134 1134 """
1135 1135         Returns the md5 hash of the node's data
1136 1136 """
1137 1137 raise NotImplementedError
1138 1138
1139 1139 def get_file_content(self, path) -> bytes:
1140 1140 """
1141 1141 Returns content of the file at the given `path`.
1142 1142 """
1143 1143 raise NotImplementedError
1144 1144
1145 1145 def get_file_content_streamed(self, path):
1146 1146 """
1147 1147 returns a streaming response from vcsserver with file content
1148 1148 """
1149 1149 raise NotImplementedError
1150 1150
1151 1151 def get_file_size(self, path):
1152 1152 """
1153 1153 Returns size of the file at the given `path`.
1154 1154 """
1155 1155 raise NotImplementedError
1156 1156
1157 1157 def get_path_commit(self, path, pre_load=None):
1158 1158 """
1159 1159 Returns last commit of the file at the given `path`.
1160 1160
1161 1161 :param pre_load: Optional. List of commit attributes to load.
1162 1162 """
1163 1163 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1164 1164 if not commits:
1165 1165 raise RepositoryError(
1166 1166 'Failed to fetch history for path {}. '
1167 1167 'Please check if such path exists in your repository'.format(
1168 1168 path))
1169 1169 return commits[0]
1170 1170
1171 1171 def get_path_history(self, path, limit=None, pre_load=None):
1172 1172 """
1173 1173 Returns history of file as reversed list of :class:`BaseCommit`
1174 1174 objects for which file at given `path` has been modified.
1175 1175
1176 1176 :param limit: Optional. Allows to limit the size of the returned
1177 1177 history. This is intended as a hint to the underlying backend, so
1178 1178 that it can apply optimizations depending on the limit.
1179 1179 :param pre_load: Optional. List of commit attributes to load.
1180 1180 """
1181 1181 raise NotImplementedError
1182 1182
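    # A minimal usage sketch (assumes ``commit`` is a concrete commit instance;
    # the file path is illustrative only):
    #
    #   last = commit.get_path_commit('setup.py')
    #   history = commit.get_path_history('setup.py', limit=10, pre_load=['date'])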
1183 1183 def get_file_annotate(self, path, pre_load=None):
1184 1184 """
1185 1185 Returns a generator of four element tuples with
1186 1186 lineno, sha, commit lazy loader and line
1187 1187
1188 1188 :param pre_load: Optional. List of commit attributes to load.
1189 1189 """
1190 1190 raise NotImplementedError
1191 1191
1192 1192 def get_nodes(self, path, pre_load=None):
1193 1193 """
1194 1194 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1195 1195 state of commit at the given ``path``.
1196 1196
1197 1197 :raises ``CommitError``: if node at the given ``path`` is not
1198 1198 instance of ``DirNode``
1199 1199 """
1200 1200 raise NotImplementedError
1201 1201
1202 1202 def get_node(self, path):
1203 1203 """
1204 1204 Returns ``Node`` object from the given ``path``.
1205 1205
1206 1206 :raises ``NodeDoesNotExistError``: if there is no node at the given
1207 1207 ``path``
1208 1208 """
1209 1209 raise NotImplementedError
1210 1210
1211 1211 def get_largefile_node(self, path):
1212 1212 """
1213 1213         Returns the path to a largefile from Mercurial/Git-lfs storage,
1214 1214         or None if it's not a largefile node.
1215 1215 """
1216 1216 return None
1217 1217
1218 1218 def archive_repo(self, archive_name_key, kind='tgz', subrepos=None,
1219 1219 archive_dir_name=None, write_metadata=False, mtime=None,
1220 1220 archive_at_path='/', cache_config=None):
1221 1221 """
1222 1222 Creates an archive containing the contents of the repository.
1223 1223
1224 1224         :param archive_name_key: unique key under which this archive should be generated
1225 1225 :param kind: one of the following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1226 1226 :param archive_dir_name: name of root directory in archive.
1227 1227 Default is repository name and commit's short_id joined with dash:
1228 1228 ``"{repo_name}-{short_id}"``.
1229 1229 :param write_metadata: write a metadata file into archive.
1230 1230 :param mtime: custom modification time for archive creation, defaults
1231 1231 to time.time() if not given.
1232 1232 :param archive_at_path: pack files at this path (default '/')
1233 1233 :param cache_config: config spec to send to vcsserver to configure the backend to store files
1234 1234
1235 1235 :raise VCSError: If prefix has a problem.
1236 1236 """
1237 1237 cache_config = cache_config or {}
1238 1238 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1239 1239 if kind not in allowed_kinds:
1240 1240 raise ImproperArchiveTypeError(
1241 1241 f'Archive kind ({kind}) not supported use one of {allowed_kinds}')
1242 1242
1243 1243 archive_dir_name = self._validate_archive_prefix(archive_dir_name)
1244 1244         mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
1245 1245 commit_id = self.raw_id
1246 1246
1247 1247 return self.repository._remote.archive_repo(
1248 1248 archive_name_key, kind, mtime, archive_at_path,
1249 1249 archive_dir_name, commit_id, cache_config)
1250 1250
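    # A minimal usage sketch (key and directory name are illustrative only):
    #
    #   commit.archive_repo('archive-key-1234', kind='zip',
    #                       archive_dir_name='myrepo-abcdef123456')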
1251 1251 def _validate_archive_prefix(self, archive_dir_name):
1252 1252 if archive_dir_name is None:
1253 1253 archive_dir_name = self._ARCHIVE_PREFIX_TEMPLATE.format(
1254 1254 repo_name=safe_str(self.repository.name),
1255 1255 short_id=self.short_id)
1256 1256 elif not isinstance(archive_dir_name, str):
1257 1257 raise ValueError(f"archive_dir_name is not str object but: {type(archive_dir_name)}")
1258 1258 elif archive_dir_name.startswith('/'):
1259 1259 raise VCSError("Prefix cannot start with leading slash")
1260 1260 elif archive_dir_name.strip() == '':
1261 1261 raise VCSError("Prefix cannot be empty")
1262 1262 elif not archive_dir_name.isascii():
1263 1263 raise VCSError("Prefix cannot contain non ascii characters")
1264 1264 return archive_dir_name
1265 1265
1266 1266 @LazyProperty
1267 1267 def root(self):
1268 1268 """
1269 1269 Returns ``RootNode`` object for this commit.
1270 1270 """
1271 1271 return self.get_node('')
1272 1272
1273 1273 def next(self, branch=None):
1274 1274 """
1275 1275         Returns the next commit from the current one; if `branch` is given it
1276 1276         will return the next commit belonging to this branch
1277 1277
1278 1278 :param branch: show commits within the given named branch
1279 1279 """
1280 1280 indexes = range(self.idx + 1, self.repository.count())
1281 1281 return self._find_next(indexes, branch)
1282 1282
1283 1283 def prev(self, branch=None):
1284 1284 """
1285 1285         Returns the previous commit from the current one; if `branch` is given
1286 1286         it will return the previous commit belonging to this branch
1287 1287
1288 1288 :param branch: show commit within the given named branch
1289 1289 """
1290 1290 indexes = range(self.idx - 1, -1, -1)
1291 1291 return self._find_next(indexes, branch)
1292 1292
1293 1293 def _find_next(self, indexes, branch=None):
1294 1294 if branch and self.branch != branch:
1295 1295 raise VCSError('Branch option used on commit not belonging '
1296 1296 'to that branch')
1297 1297
1298 1298 for next_idx in indexes:
1299 1299 commit = self.repository.get_commit(commit_idx=next_idx)
1300 1300 if branch and branch != commit.branch:
1301 1301 continue
1302 1302 return commit
1303 1303 raise CommitDoesNotExistError
1304 1304
1305 1305 def diff(self, ignore_whitespace=True, context=3):
1306 1306 """
1307 1307 Returns a `Diff` object representing the change made by this commit.
1308 1308 """
1309 1309 parent = self.first_parent
1310 1310 diff = self.repository.get_diff(
1311 1311 parent, self,
1312 1312 ignore_whitespace=ignore_whitespace,
1313 1313 context=context)
1314 1314 return diff
1315 1315
1316 1316 @LazyProperty
1317 1317 def added(self):
1318 1318 """
1319 1319 Returns list of added ``FileNode`` objects.
1320 1320 """
1321 1321 raise NotImplementedError
1322 1322
1323 1323 @LazyProperty
1324 1324 def changed(self):
1325 1325 """
1326 1326 Returns list of modified ``FileNode`` objects.
1327 1327 """
1328 1328 raise NotImplementedError
1329 1329
1330 1330 @LazyProperty
1331 1331 def removed(self):
1332 1332 """
1333 1333 Returns list of removed ``FileNode`` objects.
1334 1334 """
1335 1335 raise NotImplementedError
1336 1336
1337 1337 @LazyProperty
1338 1338 def size(self):
1339 1339 """
1340 1340 Returns total number of bytes from contents of all filenodes.
1341 1341 """
1342 1342 return sum(node.size for node in self.get_filenodes_generator())
1343 1343
1344 1344 def walk(self, topurl=''):
1345 1345 """
1346 1346         Similar to the os.walk method. Instead of the filesystem it walks
1347 1347         through the commit, starting at the given ``topurl``. Returns a
1348 1348         generator of tuples (top_node, dirnodes, filenodes).
1349 1349 """
1350 1350 from rhodecode.lib.vcs.nodes import DirNode
1351 1351
1352 1352 if isinstance(topurl, DirNode):
1353 1353 top_node = topurl
1354 1354 else:
1355 1355 top_node = self.get_node(topurl)
1356 1356
1357 1357 has_default_pre_load = False
1358 1358 if isinstance(top_node, DirNode):
1359 1359             # used to inject, as we walk, the same defaults as the given top_node
1360 1360 default_pre_load = top_node.default_pre_load
1361 1361 has_default_pre_load = True
1362 1362
1363 1363 if not top_node.is_dir():
1364 1364 return
1365 1365 yield top_node, top_node.dirs, top_node.files
1366 1366 for dir_node in top_node.dirs:
1367 1367 if has_default_pre_load:
1368 1368 dir_node.default_pre_load = default_pre_load
1369 1369 yield from self.walk(dir_node)
1370 1370
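    # A minimal usage sketch (assumes ``commit`` is a concrete commit instance;
    # the top path is illustrative only):
    #
    #   for top_node, dirs, files in commit.walk('docs'):
    #       for file_node in files:
    #           print(file_node.path)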
1371 1371 def get_filenodes_generator(self):
1372 1372 """
1373 1373 Returns generator that yields *all* file nodes.
1374 1374 """
1375 1375 for topnode, dirs, files in self.walk():
1376 1376 yield from files
1377 1377
1378 1378 #
1379 1379 # Utilities for sub classes to support consistent behavior
1380 1380 #
1381 1381
1382 1382 def no_node_at_path(self, path):
1383 1383 return NodeDoesNotExistError(
1384 1384 f"There is no file nor directory at the given path: "
1385 1385 f"`{safe_str(path)}` at commit {self.short_id}")
1386 1386
1387 1387 def _fix_path(self, path: str) -> str:
1388 1388 """
1389 1389 Paths are stored without trailing slash so we need to get rid off it if
1390 1390 needed.
1391 1391 """
1392 1392 return safe_str(path).rstrip('/')
1393 1393
1394 1394 #
1395 1395 # Deprecated API based on changesets
1396 1396 #
1397 1397
1398 1398 @property
1399 1399 def revision(self):
1400 1400 warnings.warn("Use idx instead", DeprecationWarning)
1401 1401 return self.idx
1402 1402
1403 1403 @revision.setter
1404 1404 def revision(self, value):
1405 1405 warnings.warn("Use idx instead", DeprecationWarning)
1406 1406 self.idx = value
1407 1407
1408 1408 def get_file_changeset(self, path):
1409 1409 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1410 1410 return self.get_path_commit(path)
1411 1411
1412 1412
1413 1413 class BaseChangesetClass(type):
1414 1414
1415 1415 def __instancecheck__(self, instance):
1416 1416 return isinstance(instance, BaseCommit)
1417 1417
1418 1418
1419 1419 class BaseChangeset(BaseCommit, metaclass=BaseChangesetClass):
1420 1420
1421 1421 def __new__(cls, *args, **kwargs):
1422 1422 warnings.warn(
1423 1423 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1424 1424 return super().__new__(cls, *args, **kwargs)
1425 1425
1426 1426
1427 1427 class BaseInMemoryCommit(object):
1428 1428 """
1429 1429 Represents differences between repository's state (most recent head) and
1430 1430 changes made *in place*.
1431 1431
1432 1432 **Attributes**
1433 1433
1434 1434 ``repository``
1435 1435 repository object for this in-memory-commit
1436 1436
1437 1437 ``added``
1438 1438 list of ``FileNode`` objects marked as *added*
1439 1439
1440 1440 ``changed``
1441 1441 list of ``FileNode`` objects marked as *changed*
1442 1442
1443 1443 ``removed``
1444 1444 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1445 1445 *removed*
1446 1446
1447 1447 ``parents``
1448 1448 list of :class:`BaseCommit` instances representing parents of
1449 1449 in-memory commit. Should always be 2-element sequence.
1450 1450
1451 1451 """
1452 1452
1453 1453 def __init__(self, repository):
1454 1454 self.repository = repository
1455 1455 self.added = []
1456 1456 self.changed = []
1457 1457 self.removed = []
1458 1458 self.parents = []
1459 1459
1460 1460 def add(self, *filenodes):
1461 1461 """
1462 1462 Marks given ``FileNode`` objects as *to be committed*.
1463 1463
1464 1464 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1465 1465 latest commit
1466 1466 :raises ``NodeAlreadyAddedError``: if node with same path is already
1467 1467 marked as *added*
1468 1468 """
1469 1469 # Check if not already marked as *added* first
1470 1470 for node in filenodes:
1471 1471 if node.path in (n.path for n in self.added):
1472 1472 raise NodeAlreadyAddedError(
1473 1473 "Such FileNode %s is already marked for addition"
1474 1474 % node.path)
1475 1475 for node in filenodes:
1476 1476 self.added.append(node)
1477 1477
1478 1478 def change(self, *filenodes):
1479 1479 """
1480 1480 Marks given ``FileNode`` objects to be *changed* in next commit.
1481 1481
1482 1482 :raises ``EmptyRepositoryError``: if there are no commits yet
1483 1483 :raises ``NodeAlreadyExistsError``: if node with same path is already
1484 1484 marked to be *changed*
1485 1485 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1486 1486 marked to be *removed*
1487 1487 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1488 1488 commit
1489 1489         :raises ``NodeNotChangedError``: if node hasn't really been changed
1490 1490 """
1491 1491 for node in filenodes:
1492 1492 if node.path in (n.path for n in self.removed):
1493 1493 raise NodeAlreadyRemovedError(
1494 1494 "Node at %s is already marked as removed" % node.path)
1495 1495 try:
1496 1496 self.repository.get_commit()
1497 1497 except EmptyRepositoryError:
1498 1498 raise EmptyRepositoryError(
1499 1499 "Nothing to change - try to *add* new nodes rather than "
1500 1500 "changing them")
1501 1501 for node in filenodes:
1502 1502 if node.path in (n.path for n in self.changed):
1503 1503 raise NodeAlreadyChangedError(
1504 1504 "Node at '%s' is already marked as changed" % node.path)
1505 1505 self.changed.append(node)
1506 1506
1507 1507 def remove(self, *filenodes):
1508 1508 """
1509 1509 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1510 1510 *removed* in next commit.
1511 1511
1512 1512 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1513 1513 be *removed*
1514 1514 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1515 1515 be *changed*
1516 1516 """
1517 1517 for node in filenodes:
1518 1518 if node.path in (n.path for n in self.removed):
1519 1519 raise NodeAlreadyRemovedError(
1520 1520                     "Node is already marked for removal at %s" % node.path)
1521 1521 if node.path in (n.path for n in self.changed):
1522 1522 raise NodeAlreadyChangedError(
1523 1523 "Node is already marked to be changed at %s" % node.path)
1524 1524 # We only mark node as *removed* - real removal is done by
1525 1525 # commit method
1526 1526 self.removed.append(node)
1527 1527
1528 1528 def reset(self):
1529 1529 """
1530 1530 Resets this instance to initial state (cleans ``added``, ``changed``
1531 1531 and ``removed`` lists).
1532 1532 """
1533 1533 self.added = []
1534 1534 self.changed = []
1535 1535 self.removed = []
1536 1536 self.parents = []
1537 1537
1538 1538 def get_ipaths(self):
1539 1539 """
1540 1540 Returns generator of paths from nodes marked as added, changed or
1541 1541 removed.
1542 1542 """
1543 1543 for node in itertools.chain(self.added, self.changed, self.removed):
1544 1544 yield node.path
1545 1545
1546 1546 def get_paths(self):
1547 1547 """
1548 1548 Returns list of paths from nodes marked as added, changed or removed.
1549 1549 """
1550 1550 return list(self.get_ipaths())
1551 1551
1552 1552 def check_integrity(self, parents=None):
1553 1553 """
1554 1554 Checks in-memory commit's integrity. Also, sets parents if not
1555 1555 already set.
1556 1556
1557 1557         :raises CommitError: if any error occurs (e.g.
1558 1558 ``NodeDoesNotExistError``).
1559 1559 """
1560 1560 if not self.parents:
1561 1561 parents = parents or []
1562 1562 if len(parents) == 0:
1563 1563 try:
1564 1564 parents = [self.repository.get_commit(), None]
1565 1565 except EmptyRepositoryError:
1566 1566 parents = [None, None]
1567 1567 elif len(parents) == 1:
1568 1568 parents += [None]
1569 1569 self.parents = parents
1570 1570
1571 1571 # Local parents, only if not None
1572 1572 parents = [p for p in self.parents if p]
1573 1573
1574 1574 # Check nodes marked as added
1575 1575 for p in parents:
1576 1576 for node in self.added:
1577 1577 try:
1578 1578 p.get_node(node.path)
1579 1579 except NodeDoesNotExistError:
1580 1580 pass
1581 1581 else:
1582 1582 raise NodeAlreadyExistsError(
1583 1583 f"Node `{node.path}` already exists at {p}")
1584 1584
1585 1585 # Check nodes marked as changed
1586 1586 missing = set(self.changed)
1587 1587 not_changed = set(self.changed)
1588 1588 if self.changed and not parents:
1589 1589 raise NodeDoesNotExistError(str(self.changed[0].path))
1590 1590 for p in parents:
1591 1591 for node in self.changed:
1592 1592 try:
1593 1593 old = p.get_node(node.path)
1594 1594 missing.remove(node)
1595 1595 # if content actually changed, remove node from not_changed
1596 1596 if old.content != node.content:
1597 1597 not_changed.remove(node)
1598 1598 except NodeDoesNotExistError:
1599 1599 pass
1600 1600 if self.changed and missing:
1601 1601 raise NodeDoesNotExistError(
1602 1602 f"Node `{node.path}` marked as modified but missing in parents: {parents}")
1603 1603
1604 1604 if self.changed and not_changed:
1605 1605 raise NodeNotChangedError(
1606 1606 "Node `%s` wasn't actually changed (parents: %s)"
1607 1607 % (not_changed.pop().path, parents))
1608 1608
1609 1609 # Check nodes marked as removed
1610 1610 if self.removed and not parents:
1611 1611 raise NodeDoesNotExistError(
1612 1612 "Cannot remove node at %s as there "
1613 1613 "were no parents specified" % self.removed[0].path)
1614 1614 really_removed = set()
1615 1615 for p in parents:
1616 1616 for node in self.removed:
1617 1617 try:
1618 1618 p.get_node(node.path)
1619 1619 really_removed.add(node)
1620 1620 except CommitError:
1621 1621 pass
1622 1622 not_removed = set(self.removed) - really_removed
1623 1623 if not_removed:
1624 1624 # TODO: johbo: This code branch does not seem to be covered
1625 1625 raise NodeDoesNotExistError(
1626 1626 "Cannot remove node at %s from "
1627 1627 "following parents: %s" % (not_removed, parents))
1628 1628
1629 1629 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1630 1630 """
1631 1631 Performs in-memory commit (doesn't check workdir in any way) and
1632 1632 returns newly created :class:`BaseCommit`. Updates repository's
1633 1633 attribute `commits`.
1634 1634
1635 1635 .. note::
1636 1636
1637 1637             When overriding this method, each backend should call
1638 1638             ``self.check_integrity(parents)`` first.
1639 1639
1640 1640 :param message: message of the commit
1641 1641 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1642 1642 :param parents: single parent or sequence of parents from which commit
1643 1643 would be derived
1644 1644 :param date: ``datetime.datetime`` instance. Defaults to
1645 1645 ``datetime.datetime.now()``.
1646 1646 :param branch: branch name, as string. If none given, default backend's
1647 1647 branch would be used.
1648 1648
1649 1649 :raises ``CommitError``: if any error occurs while committing
1650 1650 """
1651 1651 raise NotImplementedError
1652 1652
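    # A minimal workflow sketch; the FileNode constructor signature shown here
    # is an assumption and may differ between versions, and the author/message
    # values are illustrative only:
    #
    #   imc = repo.in_memory_commit
    #   imc.add(FileNode(b'docs/readme.rst', content=b'hello'))
    #   new_commit = imc.commit(
    #       message='Add readme', author='Joe Doe <joe.doe@example.com>')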
1653 1653
1654 1654 class BaseInMemoryChangesetClass(type):
1655 1655
1656 1656 def __instancecheck__(self, instance):
1657 1657 return isinstance(instance, BaseInMemoryCommit)
1658 1658
1659 1659
1660 1660 class BaseInMemoryChangeset(BaseInMemoryCommit, metaclass=BaseInMemoryChangesetClass):
1661 1661
1662 1662 def __new__(cls, *args, **kwargs):
1663 1663 warnings.warn(
1664 1664 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1665 1665 return super().__new__(cls, *args, **kwargs)
1666 1666
1667 1667
1668 1668 class EmptyCommit(BaseCommit):
1669 1669 """
1670 1670     A dummy empty commit. It's possible to pass a hash when creating
1671 1671     an EmptyCommit.
1672 1672 """
1673 1673
1674 1674 def __init__(
1675 1675 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1676 1676 message='', author='', date=None):
1677 1677 self._empty_commit_id = commit_id
1678 1678 # TODO: johbo: Solve idx parameter, default value does not make
1679 1679 # too much sense
1680 1680 self.idx = idx
1681 1681 self.message = message
1682 1682 self.author = author
1683 1683 self.date = date or datetime.datetime.fromtimestamp(0)
1684 1684 self.repository = repo
1685 1685 self.alias = alias
1686 1686
1687 1687 @LazyProperty
1688 1688 def raw_id(self):
1689 1689 """
1690 1690 Returns raw string identifying this commit, useful for web
1691 1691 representation.
1692 1692 """
1693 1693
1694 1694 return self._empty_commit_id
1695 1695
1696 1696 @LazyProperty
1697 1697 def branch(self):
1698 1698 if self.alias:
1699 1699 from rhodecode.lib.vcs.backends import get_backend
1700 1700 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1701 1701
1702 1702 @LazyProperty
1703 1703 def short_id(self):
1704 1704 return self.raw_id[:12]
1705 1705
1706 1706 @LazyProperty
1707 1707 def id(self):
1708 1708 return self.raw_id
1709 1709
1710 1710 def get_path_commit(self, path, pre_load=None):
1711 1711 return self
1712 1712
1713 1713 def get_file_content(self, path) -> bytes:
1714 1714 return b''
1715 1715
1716 1716 def get_file_content_streamed(self, path):
1717 1717 yield self.get_file_content(path)
1718 1718
1719 1719 def get_file_size(self, path):
1720 1720 return 0
1721 1721
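    # A minimal usage sketch:
    #
    #   empty = EmptyCommit(alias='git')
    #   empty.raw_id    # -> '0' * 40
    #   empty.short_id  # -> '000000000000'
    #   empty.branch    # -> default branch name of the git backend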
1722 1722
1723 1723 class EmptyChangesetClass(type):
1724 1724
1725 1725 def __instancecheck__(self, instance):
1726 1726 return isinstance(instance, EmptyCommit)
1727 1727
1728 1728
1729 1729 class EmptyChangeset(EmptyCommit, metaclass=EmptyChangesetClass):
1730 1730
1731 1731 def __new__(cls, *args, **kwargs):
1732 1732 warnings.warn(
1733 1733 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1734 1734 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1735 1735
1736 1736 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1737 1737 alias=None, revision=-1, message='', author='', date=None):
1738 1738 if requested_revision is not None:
1739 1739 warnings.warn(
1740 1740 "Parameter requested_revision not supported anymore",
1741 1741 DeprecationWarning)
1742 1742 super().__init__(
1743 1743 commit_id=cs, repo=repo, alias=alias, idx=revision,
1744 1744 message=message, author=author, date=date)
1745 1745
1746 1746 @property
1747 1747 def revision(self):
1748 1748 warnings.warn("Use idx instead", DeprecationWarning)
1749 1749 return self.idx
1750 1750
1751 1751 @revision.setter
1752 1752 def revision(self, value):
1753 1753 warnings.warn("Use idx instead", DeprecationWarning)
1754 1754 self.idx = value
1755 1755
1756 1756
1757 1757 class EmptyRepository(BaseRepository):
1758 1758 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1759 1759 pass
1760 1760
1761 1761 def get_diff(self, *args, **kwargs):
1762 1762 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1763 1763 return GitDiff(b'')
1764 1764
1765 1765
1766 1766 class CollectionGenerator(object):
1767 1767
1768 1768 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1769 1769 self.repo = repo
1770 1770 self.commit_ids = commit_ids
1771 1771 self.collection_size = collection_size
1772 1772 self.pre_load = pre_load
1773 1773 self.translate_tag = translate_tag
1774 1774
1775 1775 def __len__(self):
1776 1776 if self.collection_size is not None:
1777 1777 return self.collection_size
1778 1778 return self.commit_ids.__len__()
1779 1779
1780 1780 def __iter__(self):
1781 1781 for commit_id in self.commit_ids:
1782 1782 # TODO: johbo: Mercurial passes in commit indices or commit ids
1783 1783 yield self._commit_factory(commit_id)
1784 1784
1785 1785 def _commit_factory(self, commit_id):
1786 1786 """
1787 1787 Allows backends to override the way commits are generated.
1788 1788 """
1789 1789 return self.repo.get_commit(
1790 1790 commit_id=commit_id, pre_load=self.pre_load,
1791 1791 translate_tag=self.translate_tag)
1792 1792
1793 1793 def __getitem__(self, key):
1794 1794 """Return either a single element by index, or a sliced collection."""
1795 1795
1796 1796 if isinstance(key, slice):
1797 1797 commit_ids = self.commit_ids[key.start:key.stop]
1798 1798
1799 1799 else:
1800 1800 # single item
1801 1801 commit_ids = self.commit_ids[key]
1802 1802
1803 1803 return self.__class__(
1804 1804 self.repo, commit_ids, pre_load=self.pre_load,
1805 1805 translate_tag=self.translate_tag)
1806 1806
1807 1807 def __repr__(self):
1808 1808 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1809 1809
1810 1810
1811 1811 class Config(object):
1812 1812 """
1813 1813 Represents the configuration for a repository.
1814 1814
1815 1815 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1816 1816 standard library. It implements only the needed subset.
1817 1817 """
1818 1818
1819 1819 def __init__(self):
1820 1820 self._values = {}
1821 1821
1822 1822 def copy(self):
1823 1823 clone = Config()
1824 1824 for section, values in self._values.items():
1825 1825 clone._values[section] = values.copy()
1826 1826 return clone
1827 1827
1828 1828 def __repr__(self):
1829 1829 return '<Config({} sections) at {}>'.format(
1830 1830 len(self._values), hex(id(self)))
1831 1831
1832 1832 def items(self, section):
1833 1833 return self._values.get(section, {}).items()
1834 1834
1835 1835 def get(self, section, option):
1836 1836 return self._values.get(section, {}).get(option)
1837 1837
1838 1838 def set(self, section, option, value):
1839 1839 section_values = self._values.setdefault(section, {})
1840 1840 section_values[option] = value
1841 1841
1842 1842 def clear_section(self, section):
1843 1843 self._values[section] = {}
1844 1844
1845 def drop_option(self, section, option):
1846 if section not in self._values:
1847 raise ValueError(f'Section {section} does not exist')
1848 del self._values[section][option]
1849
1845 1850 def serialize(self):
1846 1851 """
1847 1852 Creates a list of three tuples (section, key, value) representing
1848 1853 this config object.
1849 1854 """
1850 1855 items = []
1851 1856 for section in self._values:
1852 1857 for option, value in self._values[section].items():
1853 1858 items.append(
1854 1859 (safe_str(section), safe_str(option), safe_str(value)))
1855 1860 return items
1856 1861
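A minimal sketch of how the Config object above can be used, including the newly added drop_option(). The import path is the one used by the Mercurial backend later in this changeset; the section and option values are purely illustrative.

from rhodecode.lib.vcs.backends.base import Config

config = Config()
config.set('extensions', 'largefiles', '')
config.set('extensions', 'hgsubversion', '')

# drop_option() removes a single option, mirroring the hgsubversion cleanup below
config.drop_option('extensions', 'hgsubversion')

assert config.get('extensions', 'hgsubversion') is None
assert config.serialize() == [('extensions', 'largefiles', '')]

clone = config.copy()               # each section dict is copied
clone.clear_section('extensions')
assert config.items('extensions')   # the original object is unaffected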
1857 1862
1858 1863 class Diff(object):
1859 1864 """
1860 1865 Represents a diff result from a repository backend.
1861 1866
1862 1867 Subclasses have to provide a backend specific value for
1863 1868 :attr:`_header_re` and :attr:`_meta_re`.
1864 1869 """
1865 1870 _meta_re = None
1866 1871 _header_re: bytes = re.compile(br"")
1867 1872
1868 1873 def __init__(self, raw_diff: bytes):
1869 1874 if not isinstance(raw_diff, bytes):
1870 1875 raise Exception(f'raw_diff must be bytes - got {type(raw_diff)}')
1871 1876
1872 1877 self.raw = memoryview(raw_diff)
1873 1878
1874 1879 def get_header_re(self):
1875 1880 return self._header_re
1876 1881
1877 1882 def chunks(self):
1878 1883 """
1879 1884 split the diff into chunks of separate `diff --git a/file b/file` sections
1880 1885 to keep the chunks consistent we must prepend each with \n, and make sure
1881 1886 we can detect the last chunk, as it is also subject to a special rule
1882 1887 """
1883 1888
1884 1889 diff_parts = (b'\n' + bytes(self.raw)).split(b'\ndiff --git')
1885 1890
1886 1891 chunks = diff_parts[1:]
1887 1892 total_chunks = len(chunks)
1888 1893
1889 1894 def diff_iter(_chunks):
1890 1895 for cur_chunk, chunk in enumerate(_chunks, start=1):
1891 1896 yield DiffChunk(chunk, self, cur_chunk == total_chunks)
1892 1897 return diff_iter(chunks)
1893 1898
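A standalone illustration of the splitting rule described in the chunks() docstring above; the raw bytes are a made-up two-file diff, not output from any real repository.

raw_diff = (
    b'diff --git a/a.txt b/a.txt\n'
    b'--- a/a.txt\n+++ b/a.txt\n@@ -1 +1 @@\n-old\n+new\n'
    b'diff --git a/b.txt b/b.txt\n'
    b'--- a/b.txt\n+++ b/b.txt\n@@ -1 +1 @@\n-foo\n+bar\n'
)

# prepend "\n" so the very first "diff --git" header is split off like all the others
parts = (b'\n' + raw_diff).split(b'\ndiff --git')
chunks = parts[1:]             # parts[0] is the empty prefix before the first header

assert len(chunks) == 2        # one chunk per file
assert chunks[0].startswith(b' a/a.txt')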
1894 1899
1895 1900 class DiffChunk(object):
1896 1901
1897 1902 def __init__(self, chunk: bytes, diff_obj: Diff, is_last_chunk: bool):
1898 1903 self.diff_obj = diff_obj
1899 1904
1900 1905 # splitting on \ndiff --git strips that part from the original diff,
1901 1906 # so we need to re-append it at the end, except for the last chunk
1902 1907 if not is_last_chunk:
1903 1908 chunk += b'\n'
1904 1909 header_re = self.diff_obj.get_header_re()
1905 1910 match = header_re.match(chunk)
1906 1911 self.header = match.groupdict()
1907 1912 self.diff = chunk[match.end():]
1908 1913 self.raw = chunk
1909 1914
1910 1915 @property
1911 1916 def header_as_str(self):
1912 1917 if self.header:
1913 1918 def safe_str_on_bytes(val):
1914 1919 if isinstance(val, bytes):
1915 1920 return safe_str(val)
1916 1921 return val
1917 1922 return {safe_str(k): safe_str_on_bytes(v) for k, v in self.header.items()}
1918 1923
1919 1924 def __repr__(self):
1920 1925 return f'DiffChunk({self.header_as_str})'
1921 1926
1922 1927
1923 1928 class BasePathPermissionChecker(object):
1924 1929
1925 1930 @staticmethod
1926 1931 def create_from_patterns(includes, excludes):
1927 1932 if includes and '*' in includes and not excludes:
1928 1933 return AllPathPermissionChecker()
1929 1934 elif excludes and '*' in excludes:
1930 1935 return NonePathPermissionChecker()
1931 1936 else:
1932 1937 return PatternPathPermissionChecker(includes, excludes)
1933 1938
1934 1939 @property
1935 1940 def has_full_access(self):
1936 1941 raise NotImplementedError()
1937 1942
1938 1943 def has_access(self, path):
1939 1944 raise NotImplementedError()
1940 1945
1941 1946
1942 1947 class AllPathPermissionChecker(BasePathPermissionChecker):
1943 1948
1944 1949 @property
1945 1950 def has_full_access(self):
1946 1951 return True
1947 1952
1948 1953 def has_access(self, path):
1949 1954 return True
1950 1955
1951 1956
1952 1957 class NonePathPermissionChecker(BasePathPermissionChecker):
1953 1958
1954 1959 @property
1955 1960 def has_full_access(self):
1956 1961 return False
1957 1962
1958 1963 def has_access(self, path):
1959 1964 return False
1960 1965
1961 1966
1962 1967 class PatternPathPermissionChecker(BasePathPermissionChecker):
1963 1968
1964 1969 def __init__(self, includes, excludes):
1965 1970 self.includes = includes
1966 1971 self.excludes = excludes
1967 1972 self.includes_re = [] if not includes else [
1968 1973 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1969 1974 self.excludes_re = [] if not excludes else [
1970 1975 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1971 1976
1972 1977 @property
1973 1978 def has_full_access(self):
1974 1979 return '*' in self.includes and not self.excludes
1975 1980
1976 1981 def has_access(self, path):
1977 1982 for regex in self.excludes_re:
1978 1983 if regex.match(path):
1979 1984 return False
1980 1985 for regex in self.includes_re:
1981 1986 if regex.match(path):
1982 1987 return True
1983 1988 return False
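A short sketch of the permission-checker factory above. The patterns and paths are invented; note that the include/exclude patterns use fnmatch semantics, so 'docs/*' also covers nested paths under docs/.

from rhodecode.lib.vcs.backends.base import (
    BasePathPermissionChecker, PatternPathPermissionChecker)

checker = BasePathPermissionChecker.create_from_patterns(
    includes=['docs/*', 'README.rst'],
    excludes=['docs/internal/*'],
)

assert isinstance(checker, PatternPathPermissionChecker)
assert checker.has_access('README.rst')
assert checker.has_access('docs/index.rst')
assert not checker.has_access('docs/internal/secret.rst')
assert not checker.has_access('setup.py')   # not matched by any include

# the special cases short-circuit to the cheap checkers
assert BasePathPermissionChecker.create_from_patterns(['*'], None).has_full_access
assert not BasePathPermissionChecker.create_from_patterns(None, ['*']).has_access('anything')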
@@ -1,1017 +1,1024 b''
1 1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 HG repository module
21 21 """
22 22 import os
23 23 import logging
24 24 import binascii
25 25 import configparser
26 26 import urllib.request
27 27 import urllib.parse
28 28 import urllib.error
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from collections import OrderedDict
33 33 from rhodecode.lib.datelib import (
34 34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 35 from rhodecode.lib.str_utils import safe_str
36 36 from rhodecode.lib.utils2 import CachedProperty
37 37 from rhodecode.lib.vcs import connection, exceptions
38 38 from rhodecode.lib.vcs.backends.base import (
39 39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47 47
48 48 hexlify = binascii.hexlify
49 49 nullid = "\0" * 20
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
60 60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 61 do_workspace_checkout=False, with_wire=None, bare=False):
62 62 """
63 63 Raises RepositoryError if the repository could not be found at the given
64 64 ``repo_path``.
65 65
66 66 :param repo_path: local path of the repository
67 67 :param config: config object containing the repo configuration
68 68 :param create=False: if set to True, try to create the repository if
69 69 it does not exist rather than raising an exception
70 70 :param src_url=None: if given, clone the repository from this location
71 71 :param do_workspace_checkout=False: update the working copy after
72 72 making a clone
73 73 :param bare: not used, compatible with other VCS
74 74 """
75 75
76 76 self.path = safe_str(os.path.abspath(repo_path))
77 77 # mercurial since 4.4.X requires certain configuration to be present
78 78 # because sometimes we init the repos with config we need to meet
79 79 # special requirements
80 80 self.config = config if config else self.get_default_config(
81 81 default=[('extensions', 'largefiles', '')])
82
83 # NOTE(marcink): hgsubversion is deprecated since the move to Python 3.
84 # Old installations might still have this extension enabled; we explicitly
85 # remove it here to make sure it won't propagate any further
86 if config and config.get('extensions', 'hgsubversion') is not None:
87 config.drop_option('extensions', 'hgsubversion')
88
82 89 self.with_wire = with_wire or {"cache": False} # default should not use cache
83 90
84 91 self._init_repo(create, src_url, do_workspace_checkout)
85 92
86 93 # caches
87 94 self._commit_ids = {}
88 95
89 96 @LazyProperty
90 97 def _remote(self):
91 98 repo_id = self.path
92 99 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93 100
94 101 @CachedProperty
95 102 def commit_ids(self):
96 103 """
97 104 Returns a list of commit ids, in ascending order. Being a lazy
98 105 attribute allows external tools to inject shas from cache.
99 106 """
100 107 commit_ids = self._get_all_commit_ids()
101 108 self._rebuild_cache(commit_ids)
102 109 return commit_ids
103 110
104 111 def _rebuild_cache(self, commit_ids):
105 112 self._commit_ids = {commit_id: index
106 113 for index, commit_id in enumerate(commit_ids)}
107 114
108 115 @CachedProperty
109 116 def branches(self):
110 117 return self._get_branches()
111 118
112 119 @CachedProperty
113 120 def branches_closed(self):
114 121 return self._get_branches(active=False, closed=True)
115 122
116 123 @CachedProperty
117 124 def branches_all(self):
118 125 all_branches = {}
119 126 all_branches.update(self.branches)
120 127 all_branches.update(self.branches_closed)
121 128 return all_branches
122 129
123 130 def _get_branches(self, active=True, closed=False):
124 131 """
125 132 Gets branches for this repository
126 133 Returns only active, not-closed branches by default
127 134
128 135 :param active: if True, include active branches
129 136 :param closed: if True, include closed branches
130 137
131 138 """
132 139 if self.is_empty():
133 140 return {}
134 141
135 142 def get_name(ctx):
136 143 return ctx[0]
137 144
138 145 _branches = [(n, h,) for n, h in
139 146 self._remote.branches(active, closed).items()]
140 147
141 148 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142 149
143 150 @CachedProperty
144 151 def tags(self):
145 152 """
146 153 Gets tags for this repository
147 154 """
148 155 return self._get_tags()
149 156
150 157 def _get_tags(self):
151 158 if self.is_empty():
152 159 return {}
153 160
154 161 def get_name(ctx):
155 162 return ctx[0]
156 163
157 164 _tags = [(n, h,) for n, h in
158 165 self._remote.tags().items()]
159 166
160 167 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161 168
162 169 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 170 """
164 171 Creates and returns a tag for the given ``commit_id``.
165 172
166 173 :param name: name for new tag
167 174 :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
168 175 :param commit_id: commit id for which new tag would be created
169 176 :param message: message of the tag's commit
170 177 :param date: date of tag's commit
171 178
172 179 :raises TagAlreadyExistError: if tag with same name already exists
173 180 """
174 181 if name in self.tags:
175 182 raise TagAlreadyExistError("Tag %s already exists" % name)
176 183
177 184 commit = self.get_commit(commit_id=commit_id)
178 185 local = kwargs.setdefault('local', False)
179 186
180 187 if message is None:
181 188 message = f"Added tag {name} for commit {commit.short_id}"
182 189
183 190 date, tz = date_to_timestamp_plus_offset(date)
184 191
185 192 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 193 self._remote.invalidate_vcs_cache()
187 194
188 195 # Reinitialize tags
189 196 self._invalidate_prop_cache('tags')
190 197 tag_id = self.tags[name]
191 198
192 199 return self.get_commit(commit_id=tag_id)
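A hedged usage sketch for tag() above; `repo` is an assumed, already-initialized MercurialRepository and the tag name and user are made up.

# illustrative only -- `repo` is an assumed MercurialRepository instance
tip = repo.get_commit()                  # defaults to the "tip" commit
tag_commit = repo.tag(
    name='v1.0.0',
    user='Joe Doe <joe.doe@example.com>',
    commit_id=tip.raw_id,
    message=None,                        # falls back to "Added tag v1.0.0 for commit <short_id>"
)
assert 'v1.0.0' in repo.tags
assert repo.tags['v1.0.0'] == tag_commit.raw_id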
193 200
194 201 def remove_tag(self, name, user, message=None, date=None):
195 202 """
196 203 Removes tag with the given `name`.
197 204
198 205 :param name: name of the tag to be removed
199 206 :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
200 207 :param message: message of the tag's removal commit
201 208 :param date: date of tag's removal commit
202 209
203 210 :raises TagDoesNotExistError: if tag with given name does not exist
204 211 """
205 212 if name not in self.tags:
206 213 raise TagDoesNotExistError("Tag %s does not exist" % name)
207 214
208 215 if message is None:
209 216 message = "Removed tag %s" % name
210 217 local = False
211 218
212 219 date, tz = date_to_timestamp_plus_offset(date)
213 220
214 221 self._remote.tag(name, nullid, message, local, user, date, tz)
215 222 self._remote.invalidate_vcs_cache()
216 223 self._invalidate_prop_cache('tags')
217 224
218 225 @LazyProperty
219 226 def bookmarks(self):
220 227 """
221 228 Gets bookmarks for this repository
222 229 """
223 230 return self._get_bookmarks()
224 231
225 232 def _get_bookmarks(self):
226 233 if self.is_empty():
227 234 return {}
228 235
229 236 def get_name(ctx):
230 237 return ctx[0]
231 238
232 239 _bookmarks = [
233 240 (n, h) for n, h in
234 241 self._remote.bookmarks().items()]
235 242
236 243 return OrderedDict(sorted(_bookmarks, key=get_name))
237 244
238 245 def _get_all_commit_ids(self):
239 246 return self._remote.get_all_commit_ids('visible')
240 247
241 248 def get_diff(
242 249 self, commit1, commit2, path='', ignore_whitespace=False,
243 250 context=3, path1=None):
244 251 """
245 252 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 253 `commit2` since `commit1`.
247 254
248 255 :param commit1: Entry point from which diff is shown. Can be
249 256 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 257 the changes since empty state of the repository until `commit2`
251 258 :param commit2: Until which commit changes should be shown.
252 259 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 260 changes. Defaults to ``False``.
254 261 :param context: How many lines before/after changed lines should be
255 262 shown. Defaults to ``3``.
256 263 """
257 264 self._validate_diff_commits(commit1, commit2)
258 265 if path1 is not None and path1 != path:
259 266 raise ValueError("Diff of two different paths not supported.")
260 267
261 268 if path:
262 269 file_filter = [self.path, path]
263 270 else:
264 271 file_filter = None
265 272
266 273 diff = self._remote.diff(
267 274 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 275 opt_git=True, opt_ignorews=ignore_whitespace,
269 276 context=context)
270 277 return MercurialDiff(diff)
271 278
272 279 def strip(self, commit_id, branch=None):
273 280 self._remote.strip(commit_id, update=False, backup=False)
274 281
275 282 self._remote.invalidate_vcs_cache()
276 283 # clear cache
277 284 self._invalidate_prop_cache('commit_ids')
278 285
279 286 return len(self.commit_ids)
280 287
281 288 def verify(self):
282 289 verify = self._remote.verify()
283 290
284 291 self._remote.invalidate_vcs_cache()
285 292 return verify
286 293
287 294 def hg_update_cache(self):
288 295 update_cache = self._remote.hg_update_cache()
289 296
290 297 self._remote.invalidate_vcs_cache()
291 298 return update_cache
292 299
293 300 def hg_rebuild_fn_cache(self):
294 301 update_cache = self._remote.hg_rebuild_fn_cache()
295 302
296 303 self._remote.invalidate_vcs_cache()
297 304 return update_cache
298 305
299 306 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 307 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 308 self, commit_id1, repo2, commit_id2)
302 309
303 310 if commit_id1 == commit_id2:
304 311 return commit_id1
305 312
306 313 ancestors = self._remote.revs_from_revspec(
307 314 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
308 315 other_path=repo2.path)
309 316
310 317 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311 318
312 319 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 320 return ancestor_id
314 321
315 322 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
316 323 if commit_id1 == commit_id2:
317 324 commits = []
318 325 else:
319 326 if merge:
320 327 indexes = self._remote.revs_from_revspec(
321 328 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
322 329 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
323 330 else:
324 331 indexes = self._remote.revs_from_revspec(
325 332 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
326 333 commit_id1, other_path=repo2.path)
327 334
328 335 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
329 336 for idx in indexes]
330 337
331 338 return commits
332 339
333 340 @staticmethod
334 341 def check_url(url, config):
335 342 """
336 343 Check the given url and try to verify that it is a valid
337 344 link. It may happen that Mercurial issues a basic
338 345 auth request, which can cause the whole API to hang when used from Python
339 346 or other external calls.
340 347
341 348 On failure it raises urllib.error.HTTPError; the exception is also raised
342 349 when the return code is not 200
343 350 """
344 351 # check first if it's not a local url
345 352 if os.path.isdir(url) or url.startswith('file:'):
346 353 return True
347 354
348 355 # Request the _remote to verify the url
349 356 return connection.Hg.check_url(url, config.serialize())
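A rough sketch of how check_url() behaves per the branches above; the URLs are placeholders and `config` is an assumed Config instance.

# an existing local directory or a file: url short-circuits to True without any network round-trip
MercurialRepository.check_url('/srv/repos/some-local-repo', config)
MercurialRepository.check_url('file:///srv/repos/some-local-repo', config)

# anything else is verified by the remote backend; a bad link or a non-200
# response surfaces as an exception instead of hanging the API
MercurialRepository.check_url('https://example.com/hg/project', config)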
350 357
351 358 @staticmethod
352 359 def is_valid_repository(path):
353 360 return os.path.isdir(os.path.join(path, '.hg'))
354 361
355 362 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
356 363 """
357 364 Check for a Mercurial repository in the given path. If there
358 365 is no repository in that path it will raise an exception unless
359 366 the `create` parameter is set to True - in that case the repository
360 367 would be created.
361 368
362 369 If `src_url` is given, the repository is cloned from that
363 370 location. Additionally, the working copy is updated
364 371 according to the `do_workspace_checkout` flag.
365 372 """
366 373 if create and os.path.exists(self.path):
367 374 raise RepositoryError(
368 375 f"Cannot create repository at {self.path}, location already exists")
369 376
370 377 if src_url:
371 378 url = str(self._get_url(src_url))
372 379 MercurialRepository.check_url(url, self.config)
373 380
374 381 self._remote.clone(url, self.path, do_workspace_checkout)
375 382
376 383 # Don't try to create if we've already cloned repo
377 384 create = False
378 385
379 386 if create:
380 387 os.makedirs(self.path, mode=0o755)
381 388
382 389 self._remote.localrepository(create)
383 390
384 391 @LazyProperty
385 392 def in_memory_commit(self):
386 393 return MercurialInMemoryCommit(self)
387 394
388 395 @LazyProperty
389 396 def description(self):
390 397 description = self._remote.get_config_value(
391 398 'web', 'description', untrusted=True)
392 399 return safe_str(description or self.DEFAULT_DESCRIPTION)
393 400
394 401 @LazyProperty
395 402 def contact(self):
396 403 contact = (
397 404 self._remote.get_config_value("web", "contact") or
398 405 self._remote.get_config_value("ui", "username"))
399 406 return safe_str(contact or self.DEFAULT_CONTACT)
400 407
401 408 @LazyProperty
402 409 def last_change(self):
403 410 """
404 411 Returns last change made on this repository as
405 412 `datetime.datetime` object.
406 413 """
407 414 try:
408 415 return self.get_commit().date
409 416 except RepositoryError:
410 417 tzoffset = makedate()[1]
411 418 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412 419
413 420 def _get_fs_mtime(self):
414 421 # fallback to filesystem
415 422 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
416 423 st_path = os.path.join(self.path, '.hg', "store")
417 424 if os.path.exists(cl_path):
418 425 return os.stat(cl_path).st_mtime
419 426 else:
420 427 return os.stat(st_path).st_mtime
421 428
422 429 def _get_url(self, url):
423 430 """
424 431 Returns a normalized url. If no scheme is given, it falls back
425 432 to the filesystem
426 433 (``file:``) scheme.
427 434 """
428 435 if url != 'default' and '://' not in url:
429 436 url = "file:" + urllib.request.pathname2url(url)
430 437 return url
431 438
432 439 def get_hook_location(self):
433 440 """
434 441 returns absolute path to location where hooks are stored
435 442 """
436 443 return os.path.join(self.path, '.hg', '.hgrc')
437 444
438 445 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
439 446 translate_tag=None, maybe_unreachable=False, reference_obj=None):
440 447 """
441 448 Returns ``MercurialCommit`` object representing repository's
442 449 commit at the given `commit_id` or `commit_idx`.
443 450 """
444 451 if self.is_empty():
445 452 raise EmptyRepositoryError("There are no commits yet")
446 453
447 454 if commit_id is not None:
448 455 self._validate_commit_id(commit_id)
449 456 try:
450 457 # we have cached idx, use it without contacting the remote
451 458 idx = self._commit_ids[commit_id]
452 459 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
453 460 except KeyError:
454 461 pass
455 462
456 463 elif commit_idx is not None:
457 464 self._validate_commit_idx(commit_idx)
458 465 try:
459 466 _commit_id = self.commit_ids[commit_idx]
460 467 if commit_idx < 0:
461 468 commit_idx = self.commit_ids.index(_commit_id)
462 469
463 470 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
464 471 except IndexError:
465 472 commit_id = commit_idx
466 473 else:
467 474 commit_id = "tip"
468 475
469 476 # case here is no cached version, do an actual lookup instead
470 477 try:
471 478 raw_id, idx = self._remote.lookup(commit_id, both=True)
472 479 except CommitDoesNotExistError:
473 480 msg = "Commit {} does not exist for `{}`".format(
474 481 *map(safe_str, [commit_id, self.name]))
475 482 raise CommitDoesNotExistError(msg)
476 483
477 484 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
478 485
479 486 def get_commits(
480 487 self, start_id=None, end_id=None, start_date=None, end_date=None,
481 488 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
482 489 """
483 490 Returns generator of ``MercurialCommit`` objects from start to end
484 491 (both are inclusive)
485 492
486 493 :param start_id: None, str(commit_id)
487 494 :param end_id: None, str(commit_id)
488 495 :param start_date: if specified, commits with commit date less than
489 496 ``start_date`` would be filtered out from returned set
490 497 :param end_date: if specified, commits with commit date greater than
491 498 ``end_date`` would be filtered out from returned set
492 499 :param branch_name: if specified, commits not reachable from given
493 500 branch would be filtered out from returned set
494 501 :param show_hidden: Show hidden commits such as obsolete or hidden from
495 502 Mercurial evolve
496 503 :raise BranchDoesNotExistError: If given ``branch_name`` does not
497 504 exist.
498 505 :raise CommitDoesNotExistError: If commit for given ``start`` or
499 506 ``end`` could not be found.
500 507 """
501 508 # check first that this is not an empty repo
502 509 if self.is_empty():
503 510 raise EmptyRepositoryError("There are no commits yet")
504 511 self._validate_branch_name(branch_name)
505 512
506 513 branch_ancestors = False
507 514 if start_id is not None:
508 515 self._validate_commit_id(start_id)
509 516 c_start = self.get_commit(commit_id=start_id)
510 517 start_pos = self._commit_ids[c_start.raw_id]
511 518 else:
512 519 start_pos = None
513 520
514 521 if end_id is not None:
515 522 self._validate_commit_id(end_id)
516 523 c_end = self.get_commit(commit_id=end_id)
517 524 end_pos = max(0, self._commit_ids[c_end.raw_id])
518 525 else:
519 526 end_pos = None
520 527
521 528 if None not in [start_id, end_id] and start_pos > end_pos:
522 529 raise RepositoryError(
523 530 "Start commit '%s' cannot be after end commit '%s'" %
524 531 (start_id, end_id))
525 532
526 533 if end_pos is not None:
527 534 end_pos += 1
528 535
529 536 commit_filter = []
530 537
531 538 if branch_name and not branch_ancestors:
532 539 commit_filter.append(f'branch("{branch_name}")')
533 540 elif branch_name and branch_ancestors:
534 541 commit_filter.append(f'ancestors(branch("{branch_name}"))')
535 542
536 543 if start_date and not end_date:
537 544 commit_filter.append(f'date(">{start_date}")')
538 545 if end_date and not start_date:
539 546 commit_filter.append(f'date("<{end_date}")')
540 547 if start_date and end_date:
541 548 commit_filter.append(
542 549 f'date(">{start_date}") and date("<{end_date}")')
543 550
544 551 if not show_hidden:
545 552 commit_filter.append('not obsolete()')
546 553 commit_filter.append('not hidden()')
547 554
548 555 # TODO: johbo: Figure out a simpler way for this solution
549 556 collection_generator = CollectionGenerator
550 557 if commit_filter:
551 558 commit_filter = ' and '.join(map(safe_str, commit_filter))
552 559 revisions = self._remote.rev_range([commit_filter])
553 560 collection_generator = MercurialIndexBasedCollectionGenerator
554 561 else:
555 562 revisions = self.commit_ids
556 563
557 564 if start_pos or end_pos:
558 565 revisions = revisions[start_pos:end_pos]
559 566
560 567 return collection_generator(self, revisions, pre_load=pre_load)
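The revset filter built by get_commits() above is just a string of and-ed conditions; this standalone sketch shows the composition with made-up values.

# made-up filter values, mirroring the composition in get_commits()
branch_name = 'default'
start_date = '2023-01-01'
end_date = '2023-12-31'
show_hidden = False

commit_filter = [
    f'branch("{branch_name}")',
    f'date(">{start_date}") and date("<{end_date}")',
]
if not show_hidden:
    commit_filter.append('not obsolete()')
    commit_filter.append('not hidden()')

revspec = ' and '.join(commit_filter)
# -> 'branch("default") and date(">2023-01-01") and date("<2023-12-31")
#     and not obsolete() and not hidden()'
# this string is what gets handed to self._remote.rev_range([revspec])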
561 568
562 569 def pull(self, url, commit_ids=None):
563 570 """
564 571 Pull changes from external location.
565 572
566 573 :param commit_ids: Optional. Can be set to a list of commit ids
567 574 which shall be pulled from the other repository.
568 575 """
569 576 url = self._get_url(url)
570 577 self._remote.pull(url, commit_ids=commit_ids)
571 578 self._remote.invalidate_vcs_cache()
572 579
573 580 def fetch(self, url, commit_ids=None):
574 581 """
575 582 Backward compatibility with GIT fetch==pull
576 583 """
577 584 return self.pull(url, commit_ids=commit_ids)
578 585
579 586 def push(self, url):
580 587 url = self._get_url(url)
581 588 self._remote.sync_push(url)
582 589
583 590 def _local_clone(self, clone_path):
584 591 """
585 592 Create a local clone of the current repo.
586 593 """
587 594 self._remote.clone(self.path, clone_path, update_after_clone=True,
588 595 hooks=False)
589 596
590 597 def _update(self, revision, clean=False):
591 598 """
592 599 Update the working copy to the specified revision.
593 600 """
594 601 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
595 602 self._remote.update(revision, clean=clean)
596 603
597 604 def _identify(self):
598 605 """
599 606 Return the current state of the working directory.
600 607 """
601 608 return self._remote.identify().strip().rstrip('+')
602 609
603 610 def _heads(self, branch=None):
604 611 """
605 612 Return the commit ids of the repository heads.
606 613 """
607 614 return self._remote.heads(branch=branch).strip().split(' ')
608 615
609 616 def _ancestor(self, revision1, revision2):
610 617 """
611 618 Return the common ancestor of the two revisions.
612 619 """
613 620 return self._remote.ancestor(revision1, revision2)
614 621
615 622 def _local_push(
616 623 self, revision, repository_path, push_branches=False,
617 624 enable_hooks=False):
618 625 """
619 626 Push the given revision to the specified repository.
620 627
621 628 :param push_branches: allow to create branches in the target repo.
622 629 """
623 630 self._remote.push(
624 631 [revision], repository_path, hooks=enable_hooks,
625 632 push_branches=push_branches)
626 633
627 634 def _local_merge(self, target_ref, merge_message, user_name, user_email,
628 635 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
629 636 """
630 637 Merge the given source_revision into the checked out revision.
631 638
632 639 Returns the commit id of the merge and a boolean indicating if the
633 640 commit needs to be pushed.
634 641 """
635 642
636 643 source_ref_commit_id = source_ref.commit_id
637 644 target_ref_commit_id = target_ref.commit_id
638 645
639 646 # update our workdir to target ref, for proper merge
640 647 self._update(target_ref_commit_id, clean=True)
641 648
642 649 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
643 650 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
644 651
645 652 if close_commit_id:
646 653 # NOTE(marcink): if we get the close commit, this is our new source
647 654 # which will include the close commit itself.
648 655 source_ref_commit_id = close_commit_id
649 656
650 657 if ancestor == source_ref_commit_id:
651 658 # Nothing to do, the changes were already integrated
652 659 return target_ref_commit_id, False
653 660
654 661 elif ancestor == target_ref_commit_id and is_the_same_branch:
655 662 # In this case we should force a commit message
656 663 return source_ref_commit_id, True
657 664
658 665 unresolved = None
659 666 if use_rebase:
660 667 try:
661 668 bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
662 669 self.bookmark(bookmark_name, revision=source_ref.commit_id)
663 670 self._remote.rebase(
664 671 source=source_ref_commit_id, dest=target_ref_commit_id)
665 672 self._remote.invalidate_vcs_cache()
666 673 self._update(bookmark_name, clean=True)
667 674 return self._identify(), True
668 675 except RepositoryError as e:
669 676 # The rebase-abort may raise another exception which 'hides'
670 677 # the original one, therefore we log it here.
671 678 log.exception('Error while rebasing shadow repo during merge.')
672 679 if 'unresolved conflicts' in safe_str(e):
673 680 unresolved = self._remote.get_unresolved_files()
674 681 log.debug('unresolved files: %s', unresolved)
675 682
676 683 # Cleanup any rebase leftovers
677 684 self._remote.invalidate_vcs_cache()
678 685 self._remote.rebase(abort=True)
679 686 self._remote.invalidate_vcs_cache()
680 687 self._remote.update(clean=True)
681 688 if unresolved:
682 689 raise UnresolvedFilesInRepo(unresolved)
683 690 else:
684 691 raise
685 692 else:
686 693 try:
687 694 self._remote.merge(source_ref_commit_id)
688 695 self._remote.invalidate_vcs_cache()
689 696 self._remote.commit(
690 697 message=safe_str(merge_message),
691 698 username=safe_str(f'{user_name} <{user_email}>'))
692 699 self._remote.invalidate_vcs_cache()
693 700 return self._identify(), True
694 701 except RepositoryError as e:
695 702 # The merge-abort may raise another exception which 'hides'
696 703 # the original one, therefore we log it here.
697 704 log.exception('Error while merging shadow repo during merge.')
698 705 if 'unresolved merge conflicts' in safe_str(e):
699 706 unresolved = self._remote.get_unresolved_files()
700 707 log.debug('unresolved files: %s', unresolved)
701 708
702 709 # Cleanup any merge leftovers
703 710 self._remote.update(clean=True)
704 711 if unresolved:
705 712 raise UnresolvedFilesInRepo(unresolved)
706 713 else:
707 714 raise
708 715
709 716 def _local_close(self, target_ref, user_name, user_email,
710 717 source_ref, close_message=''):
711 718 """
712 719 Close the branch of the given source_revision
713 720
714 721 Returns the commit id of the close and a boolean indicating if the
715 722 commit needs to be pushed.
716 723 """
717 724 self._update(source_ref.commit_id)
718 725 message = close_message or f"Closing branch: `{source_ref.name}`"
719 726 try:
720 727 self._remote.commit(
721 728 message=safe_str(message),
722 729 username=safe_str(f'{user_name} <{user_email}>'),
723 730 close_branch=True)
724 731 self._remote.invalidate_vcs_cache()
725 732 return self._identify(), True
726 733 except RepositoryError:
727 734 # Cleanup any commit leftovers
728 735 self._remote.update(clean=True)
729 736 raise
730 737
731 738 def _is_the_same_branch(self, target_ref, source_ref):
732 739 return (
733 740 self._get_branch_name(target_ref) ==
734 741 self._get_branch_name(source_ref))
735 742
736 743 def _get_branch_name(self, ref):
737 744 if ref.type == 'branch':
738 745 return ref.name
739 746 return self._remote.ctx_branch(ref.commit_id)
740 747
741 748 def _maybe_prepare_merge_workspace(
742 749 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
743 750 shadow_repository_path = self._get_shadow_repository_path(
744 751 self.path, repo_id, workspace_id)
745 752 if not os.path.exists(shadow_repository_path):
746 753 self._local_clone(shadow_repository_path)
747 754 log.debug(
748 755 'Prepared shadow repository in %s', shadow_repository_path)
749 756
750 757 return shadow_repository_path
751 758
752 759 def _merge_repo(self, repo_id, workspace_id, target_ref,
753 760 source_repo, source_ref, merge_message,
754 761 merger_name, merger_email, dry_run=False,
755 762 use_rebase=False, close_branch=False):
756 763
757 764 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
758 765 'rebase' if use_rebase else 'merge', dry_run)
759 766
760 767 if target_ref.commit_id not in self._heads():
761 768 return MergeResponse(
762 769 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
763 770 metadata={'target_ref': target_ref})
764 771
765 772 try:
766 773 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
767 774 heads_all = self._heads(target_ref.name)
768 775 max_heads = 10
769 776 if len(heads_all) > max_heads:
770 777 heads = '\n,'.join(
771 778 heads_all[:max_heads] +
772 779 [f'and {len(heads_all)-max_heads} more.'])
773 780 else:
774 781 heads = '\n,'.join(heads_all)
775 782 metadata = {
776 783 'target_ref': target_ref,
777 784 'source_ref': source_ref,
778 785 'heads': heads
779 786 }
780 787 return MergeResponse(
781 788 False, False, None,
782 789 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
783 790 metadata=metadata)
784 791 except CommitDoesNotExistError:
785 792 log.exception('Failure when looking up branch heads on hg target')
786 793 return MergeResponse(
787 794 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
788 795 metadata={'target_ref': target_ref})
789 796
790 797 shadow_repository_path = self._maybe_prepare_merge_workspace(
791 798 repo_id, workspace_id, target_ref, source_ref)
792 799 shadow_repo = self.get_shadow_instance(shadow_repository_path)
793 800
794 801 log.debug('Pulling in target reference %s', target_ref)
795 802 self._validate_pull_reference(target_ref)
796 803 shadow_repo._local_pull(self.path, target_ref)
797 804
798 805 try:
799 806 log.debug('Pulling in source reference %s', source_ref)
800 807 source_repo._validate_pull_reference(source_ref)
801 808 shadow_repo._local_pull(source_repo.path, source_ref)
802 809 except CommitDoesNotExistError:
803 810 log.exception('Failure when doing local pull on hg shadow repo')
804 811 return MergeResponse(
805 812 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
806 813 metadata={'source_ref': source_ref})
807 814
808 815 merge_ref = None
809 816 merge_commit_id = None
810 817 close_commit_id = None
811 818 merge_failure_reason = MergeFailureReason.NONE
812 819 metadata = {}
813 820
814 821 # enforce that close branch is used only when we source from
815 822 # an actual branch
816 823 close_branch = close_branch and source_ref.type == 'branch'
817 824
818 825 # don't allow closing the branch if source and target are the same
819 826 close_branch = close_branch and source_ref.name != target_ref.name
820 827
821 828 needs_push_on_close = False
822 829 if close_branch and not use_rebase and not dry_run:
823 830 try:
824 831 close_commit_id, needs_push_on_close = shadow_repo._local_close(
825 832 target_ref, merger_name, merger_email, source_ref)
826 833 merge_possible = True
827 834 except RepositoryError:
828 835 log.exception('Failure when doing close branch on '
829 836 'shadow repo: %s', shadow_repo)
830 837 merge_possible = False
831 838 merge_failure_reason = MergeFailureReason.MERGE_FAILED
832 839 else:
833 840 merge_possible = True
834 841
835 842 needs_push = False
836 843 if merge_possible:
837 844
838 845 try:
839 846 merge_commit_id, needs_push = shadow_repo._local_merge(
840 847 target_ref, merge_message, merger_name, merger_email,
841 848 source_ref, use_rebase=use_rebase,
842 849 close_commit_id=close_commit_id, dry_run=dry_run)
843 850 merge_possible = True
844 851
845 852 # read the state of the close action, as it
846 853 # may have required a push
847 854 needs_push = needs_push or needs_push_on_close
848 855
849 856 # Set a bookmark pointing to the merge commit. This bookmark
850 857 # may be used to easily identify the last successful merge
851 858 # commit in the shadow repository.
852 859 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
853 860 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
854 861 except SubrepoMergeError:
855 862 log.exception(
856 863 'Subrepo merge error during local merge on hg shadow repo.')
857 864 merge_possible = False
858 865 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
859 866 needs_push = False
860 867 except RepositoryError as e:
861 868 log.exception('Failure when doing local merge on hg shadow repo')
862 869 metadata['unresolved_files'] = 'no unresolved files found'
863 870
864 871 if isinstance(e, UnresolvedFilesInRepo):
865 872 all_conflicts = list(e.args[0])
866 873 max_conflicts = 20
867 874 if len(all_conflicts) > max_conflicts:
868 875 conflicts = all_conflicts[:max_conflicts] \
869 876 + [f'and {len(all_conflicts)-max_conflicts} more.']
870 877 else:
871 878 conflicts = all_conflicts
872 879 metadata['unresolved_files'] = \
873 880 '\n* conflict: ' + \
874 881 ('\n * conflict: '.join(conflicts))
875 882
876 883 merge_possible = False
877 884 merge_failure_reason = MergeFailureReason.MERGE_FAILED
878 885 needs_push = False
879 886
880 887 if merge_possible and not dry_run:
881 888 if needs_push:
882 889 # In case the target is a bookmark, update it, so after pushing
883 890 # the bookmark is also updated in the target.
884 891 if target_ref.type == 'book':
885 892 shadow_repo.bookmark(
886 893 target_ref.name, revision=merge_commit_id)
887 894 try:
888 895 shadow_repo_with_hooks = self.get_shadow_instance(
889 896 shadow_repository_path,
890 897 enable_hooks=True)
891 898 # This is the actual merge action, we push from shadow
892 899 # into origin.
893 900 # Note: the push_branches option will push any new branch
894 901 # defined in the source repository to the target. This may
895 902 # be dangerous as branches are permanent in Mercurial.
896 903 # This feature was requested in issue #441.
897 904 shadow_repo_with_hooks._local_push(
898 905 merge_commit_id, self.path, push_branches=True,
899 906 enable_hooks=True)
900 907
901 908 # maybe we also need to push the close_commit_id
902 909 if close_commit_id:
903 910 shadow_repo_with_hooks._local_push(
904 911 close_commit_id, self.path, push_branches=True,
905 912 enable_hooks=True)
906 913 merge_succeeded = True
907 914 except RepositoryError:
908 915 log.exception(
909 916 'Failure when doing local push from the shadow '
910 917 'repository to the target repository at %s.', self.path)
911 918 merge_succeeded = False
912 919 merge_failure_reason = MergeFailureReason.PUSH_FAILED
913 920 metadata['target'] = 'hg shadow repo'
914 921 metadata['merge_commit'] = merge_commit_id
915 922 else:
916 923 merge_succeeded = True
917 924 else:
918 925 merge_succeeded = False
919 926
920 927 return MergeResponse(
921 928 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
922 929 metadata=metadata)
923 930
924 931 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
925 932 config = self.config.copy()
926 933 if not enable_hooks:
927 934 config.clear_section('hooks')
928 935 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
929 936
930 937 def _validate_pull_reference(self, reference):
931 938 if not (reference.name in self.bookmarks or
932 939 reference.name in self.branches or
933 940 self.get_commit(reference.commit_id)):
934 941 raise CommitDoesNotExistError(
935 942 'Unknown branch, bookmark or commit id')
936 943
937 944 def _local_pull(self, repository_path, reference):
938 945 """
939 946 Fetch a branch, bookmark or commit from a local repository.
940 947 """
941 948 repository_path = os.path.abspath(repository_path)
942 949 if repository_path == self.path:
943 950 raise ValueError('Cannot pull from the same repository')
944 951
945 952 reference_type_to_option_name = {
946 953 'book': 'bookmark',
947 954 'branch': 'branch',
948 955 }
949 956 option_name = reference_type_to_option_name.get(
950 957 reference.type, 'revision')
951 958
952 959 if option_name == 'revision':
953 960 ref = reference.commit_id
954 961 else:
955 962 ref = reference.name
956 963
957 964 options = {option_name: [ref]}
958 965 self._remote.pull_cmd(repository_path, hooks=False, **options)
959 966 self._remote.invalidate_vcs_cache()
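How the reference type maps to pull_cmd options in _local_pull() above; the Reference values below are invented for illustration. Reference (imported at the top of this module) takes (type, name, commit_id).

reference_type_to_option_name = {'book': 'bookmark', 'branch': 'branch'}

for ref in [
    Reference('book', 'pr-merge', 'aaaa' * 10),
    Reference('branch', 'default', 'bbbb' * 10),
    Reference('tag', 'v1.0.0', 'cccc' * 10),    # anything else falls back to 'revision'
]:
    option_name = reference_type_to_option_name.get(ref.type, 'revision')
    value = ref.commit_id if option_name == 'revision' else ref.name
    print({option_name: [value]})
# -> {'bookmark': ['pr-merge']}, {'branch': ['default']}, {'revision': ['cccc...']}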
960 967
961 968 def bookmark(self, bookmark, revision=None):
962 969 if isinstance(bookmark, str):
963 970 bookmark = safe_str(bookmark)
964 971 self._remote.bookmark(bookmark, revision=revision)
965 972 self._remote.invalidate_vcs_cache()
966 973
967 974 def get_path_permissions(self, username):
968 975 hgacl_file = os.path.join(self.path, '.hg/hgacl')
969 976
970 977 def read_patterns(suffix):
971 978 svalue = None
972 979 for section, option in [
973 980 ('narrowacl', username + suffix),
974 981 ('narrowacl', 'default' + suffix),
975 982 ('narrowhgacl', username + suffix),
976 983 ('narrowhgacl', 'default' + suffix)
977 984 ]:
978 985 try:
979 986 svalue = hgacl.get(section, option)
980 987 break # stop at the first value we find
981 988 except configparser.NoOptionError:
982 989 pass
983 990 if not svalue:
984 991 return None
985 992 result = ['/']
986 993 for pattern in svalue.split():
987 994 result.append(pattern)
988 995 if '*' not in pattern and '?' not in pattern:
989 996 result.append(pattern + '/*')
990 997 return result
991 998
992 999 if os.path.exists(hgacl_file):
993 1000 try:
994 1001 hgacl = configparser.RawConfigParser()
995 1002 hgacl.read(hgacl_file)
996 1003
997 1004 includes = read_patterns('.includes')
998 1005 excludes = read_patterns('.excludes')
999 1006 return BasePathPermissionChecker.create_from_patterns(
1000 1007 includes, excludes)
1001 1008 except BaseException as e:
1002 1009 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
1003 1010 hgacl_file, self.name, e)
1004 1011 raise exceptions.RepositoryRequirementError(msg)
1005 1012 else:
1006 1013 return None
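A sketch of the .hg/hgacl layout that get_path_permissions() expects; the user name and patterns are invented. The read_patterns() helper above expands each bare directory name into the pair (name, name + '/*').

import configparser

# hypothetical .hg/hgacl content for a user called "jane"
HGACL_EXAMPLE = """
[narrowacl]
jane.includes = docs src/backend
jane.excludes = src/backend/secrets*
default.includes = README.rst
"""

hgacl = configparser.RawConfigParser()
hgacl.read_string(HGACL_EXAMPLE)

print(hgacl.get('narrowacl', 'jane.includes'))
# -> 'docs src/backend'
# read_patterns('.includes') would expand this for "jane" to
# ['/', 'docs', 'docs/*', 'src/backend', 'src/backend/*']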
1007 1014
1008 1015
1009 1016 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1010 1017
1011 1018 def _commit_factory(self, commit_id):
1012 1019 if isinstance(commit_id, int):
1013 1020 return self.repo.get_commit(
1014 1021 commit_idx=commit_id, pre_load=self.pre_load)
1015 1022 else:
1016 1023 return self.repo.get_commit(
1017 1024 commit_id=commit_id, pre_load=self.pre_load)
@@ -1,639 +1,638 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 these are the form validation classes
21 21 http://formencode.org/module-formencode.validators.html
22 22 for a list of all available validators
23 23
24 24 we can create our own validators
25 25
26 26 The table below outlines the options which can be used in a schema in addition to the validators themselves
27 27 pre_validators [] These validators will be applied before the schema
28 28 chained_validators [] These validators will be applied after the schema
29 29 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
30 30 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
31 31 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default value has been specified and therefore missing keys shouldn't take a default value.
32 32 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
33 33
34 34
35 35 <name> = formencode.validators.<name of validator>
36 36 <name> must equal form name
37 37 list=[1,2,3,4,5]
38 38 for SELECT use formencode.All(OneOf(list), Int())
39 39
40 40 """
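A minimal, self-contained schema illustrating the options listed in the docstring above (allow_extra_fields, filter_extra_fields, if_missing); the field names are invented and do not correspond to any real RhodeCode form.

import formencode
from formencode import validators

class _ExampleForm(formencode.Schema):
    allow_extra_fields = True      # unknown keys are not an error ...
    filter_extra_fields = True     # ... and are dropped from the result
    username = validators.UnicodeString(strip=True, min=1, not_empty=True)
    remember = validators.StringBoolean(if_missing=False)

result = _ExampleForm().to_python({'username': '  joe  ', 'unknown': 'ignored'})
# -> {'username': 'joe', 'remember': False}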
41 41
42 42 import deform
43 43 import logging
44 44 import formencode
45 45
46 46 from pkg_resources import resource_filename
47 47 from formencode import All, Pipe
48 48
49 49 from pyramid.threadlocal import get_current_request
50 50
51 51 from rhodecode import BACKENDS
52 52 from rhodecode.lib import helpers
53 53 from rhodecode.model import validators as v
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 deform_templates = resource_filename('deform', 'templates')
59 59 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
60 60 search_path = (rhodecode_templates, deform_templates)
61 61
62 62
63 63 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
64 64 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
65 65 def __call__(self, template_name, **kw):
66 66 kw['h'] = helpers
67 67 kw['request'] = get_current_request()
68 68 return self.load(template_name)(**kw)
69 69
70 70
71 71 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
72 72 deform.Form.set_default_renderer(form_renderer)
73 73
74 74
75 75 def LoginForm(localizer):
76 76 _ = localizer
77 77
78 78 class _LoginForm(formencode.Schema):
79 79 allow_extra_fields = True
80 80 filter_extra_fields = True
81 81 username = v.UnicodeString(
82 82 strip=True,
83 83 min=1,
84 84 not_empty=True,
85 85 messages={
86 86 'empty': _('Please enter a login'),
87 87 'tooShort': _('Enter a value %(min)i characters long or more')
88 88 }
89 89 )
90 90
91 91 password = v.UnicodeString(
92 92 strip=False,
93 93 min=3,
94 94 max=72,
95 95 not_empty=True,
96 96 messages={
97 97 'empty': _('Please enter a password'),
98 98 'tooShort': _('Enter %(min)i characters or more')}
99 99 )
100 100
101 101 remember = v.StringBoolean(if_missing=False)
102 102
103 103 chained_validators = [v.ValidAuth(localizer)]
104 104 return _LoginForm
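A hedged sketch of how such a form factory is typically consumed on the controller side; `request` and the POST payload are assumptions, and the chained ValidAuth validator will reject credentials that do not actually authenticate.

import formencode

# illustrative controller-side usage; `request` is an assumed Pyramid request
_ = request.translate
login_form = LoginForm(_)()

try:
    form_result = login_form.to_python({
        'username': 'joe',
        'password': 'secret123',
        'remember': 'on',
    })
except formencode.Invalid as errors:
    # e.g. a missing login message, or an authentication failure from ValidAuth
    form_defaults = errors.value
    form_errors = errors.unpack_errors()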
105 105
106 106
107 107 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
108 108 old_data = old_data or {}
109 109 available_languages = available_languages or []
110 110 _ = localizer
111 111
112 112 class _UserForm(formencode.Schema):
113 113 allow_extra_fields = True
114 114 filter_extra_fields = True
115 115 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
116 116 v.ValidUsername(localizer, edit, old_data))
117 117 if edit:
118 118 new_password = All(
119 119 v.ValidPassword(localizer),
120 120 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
121 121 )
122 122 password_confirmation = All(
123 123 v.ValidPassword(localizer),
124 124 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
125 125 )
126 126 admin = v.StringBoolean(if_missing=False)
127 127 else:
128 128 password = All(
129 129 v.ValidPassword(localizer),
130 130 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
131 131 )
132 132 password_confirmation = All(
133 133 v.ValidPassword(localizer),
134 134 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
135 135 )
136 136
137 137 password_change = v.StringBoolean(if_missing=False)
138 138 create_repo_group = v.StringBoolean(if_missing=False)
139 139
140 140 active = v.StringBoolean(if_missing=False)
141 141 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
142 142 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
143 143 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
144 144 description = v.UnicodeString(strip=True, min=1, max=250, not_empty=False,
145 145 if_missing='')
146 146 extern_name = v.UnicodeString(strip=True)
147 147 extern_type = v.UnicodeString(strip=True)
148 148 language = v.OneOf(available_languages, hideList=False,
149 149 testValueList=True, if_missing=None)
150 150 chained_validators = [v.ValidPasswordsMatch(localizer)]
151 151 return _UserForm
152 152
153 153
154 154 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
155 155 old_data = old_data or {}
156 156 _ = localizer
157 157
158 158 class _UserGroupForm(formencode.Schema):
159 159 allow_extra_fields = True
160 160 filter_extra_fields = True
161 161
162 162 users_group_name = All(
163 163 v.UnicodeString(strip=True, min=1, not_empty=True),
164 164 v.ValidUserGroup(localizer, edit, old_data)
165 165 )
166 166 user_group_description = v.UnicodeString(strip=True, min=1,
167 167 not_empty=False)
168 168
169 169 users_group_active = v.StringBoolean(if_missing=False)
170 170
171 171 if edit:
172 172 # this is user group owner
173 173 user = All(
174 174 v.UnicodeString(not_empty=True),
175 175 v.ValidRepoUser(localizer, allow_disabled))
176 176 return _UserGroupForm
177 177
178 178
179 179 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
180 180 can_create_in_root=False, allow_disabled=False):
181 181 _ = localizer
182 182 old_data = old_data or {}
183 183 available_groups = available_groups or []
184 184
185 185 class _RepoGroupForm(formencode.Schema):
186 186 allow_extra_fields = True
187 187 filter_extra_fields = False
188 188
189 189 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
190 190 v.SlugifyName(localizer),)
191 191 group_description = v.UnicodeString(strip=True, min=1,
192 192 not_empty=False)
193 193 group_copy_permissions = v.StringBoolean(if_missing=False)
194 194
195 195 group_parent_id = v.OneOf(available_groups, hideList=False,
196 196 testValueList=True, not_empty=True)
197 197 enable_locking = v.StringBoolean(if_missing=False)
198 198 chained_validators = [
199 199 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
200 200
201 201 if edit:
202 202 # this is repo group owner
203 203 user = All(
204 204 v.UnicodeString(not_empty=True),
205 205 v.ValidRepoUser(localizer, allow_disabled))
206 206 return _RepoGroupForm
207 207
208 208
209 209 def RegisterForm(localizer, edit=False, old_data=None):
210 210 _ = localizer
211 211 old_data = old_data or {}
212 212
213 213 class _RegisterForm(formencode.Schema):
214 214 allow_extra_fields = True
215 215 filter_extra_fields = True
216 216 username = All(
217 217 v.ValidUsername(localizer, edit, old_data),
218 218 v.UnicodeString(strip=True, min=1, not_empty=True)
219 219 )
220 220 password = All(
221 221 v.ValidPassword(localizer),
222 222 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
223 223 )
224 224 password_confirmation = All(
225 225 v.ValidPassword(localizer),
226 226 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
227 227 )
228 228 active = v.StringBoolean(if_missing=False)
229 229 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
230 230 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
231 231 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
232 232
233 233 chained_validators = [v.ValidPasswordsMatch(localizer)]
234 234 return _RegisterForm
235 235
236 236
237 237 def PasswordResetForm(localizer):
238 238 _ = localizer
239 239
240 240 class _PasswordResetForm(formencode.Schema):
241 241 allow_extra_fields = True
242 242 filter_extra_fields = True
243 243 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
244 244 return _PasswordResetForm
245 245
246 246
247 247 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None, allow_disabled=False):
248 248 _ = localizer
249 249 old_data = old_data or {}
250 250 repo_groups = repo_groups or []
251 251 supported_backends = BACKENDS.keys()
252 252
253 253 class _RepoForm(formencode.Schema):
254 254 allow_extra_fields = True
255 255 filter_extra_fields = False
256 256 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
257 257 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
258 258 repo_group = All(v.CanWriteGroup(localizer, old_data),
259 259 v.OneOf(repo_groups, hideList=True))
260 260 repo_type = v.OneOf(supported_backends, required=False,
261 261 if_missing=old_data.get('repo_type'))
262 262 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
263 263 repo_private = v.StringBoolean(if_missing=False)
264 264 repo_copy_permissions = v.StringBoolean(if_missing=False)
265 265 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
266 266
267 267 repo_enable_statistics = v.StringBoolean(if_missing=False)
268 268 repo_enable_downloads = v.StringBoolean(if_missing=False)
269 269 repo_enable_locking = v.StringBoolean(if_missing=False)
270 270
271 271 if edit:
272 272 # this is repo owner
273 273 user = All(
274 274 v.UnicodeString(not_empty=True),
275 275 v.ValidRepoUser(localizer, allow_disabled))
276 276 clone_uri_change = v.UnicodeString(
277 277 not_empty=False, if_missing=v.Missing)
278 278
279 279 chained_validators = [v.ValidCloneUri(localizer),
280 280 v.ValidRepoName(localizer, edit, old_data)]
281 281 return _RepoForm
282 282
283 283
284 284 def RepoPermsForm(localizer):
285 285 _ = localizer
286 286
287 287 class _RepoPermsForm(formencode.Schema):
288 288 allow_extra_fields = True
289 289 filter_extra_fields = False
290 290 chained_validators = [v.ValidPerms(localizer, type_='repo')]
291 291 return _RepoPermsForm
292 292
293 293
294 294 def RepoGroupPermsForm(localizer, valid_recursive_choices):
295 295 _ = localizer
296 296
297 297 class _RepoGroupPermsForm(formencode.Schema):
298 298 allow_extra_fields = True
299 299 filter_extra_fields = False
300 300 recursive = v.OneOf(valid_recursive_choices)
301 301 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
302 302 return _RepoGroupPermsForm
303 303
304 304
305 305 def UserGroupPermsForm(localizer):
306 306 _ = localizer
307 307
308 308 class _UserPermsForm(formencode.Schema):
309 309 allow_extra_fields = True
310 310 filter_extra_fields = False
311 311 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
312 312 return _UserPermsForm
313 313
314 314
315 315 def RepoFieldForm(localizer):
316 316 _ = localizer
317 317
318 318 class _RepoFieldForm(formencode.Schema):
319 319 filter_extra_fields = True
320 320 allow_extra_fields = True
321 321
322 322 new_field_key = All(v.FieldKey(localizer),
323 323 v.UnicodeString(strip=True, min=3, not_empty=True))
324 324 new_field_value = v.UnicodeString(not_empty=False, if_missing='')
325 325 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
326 326 if_missing='str')
327 327 new_field_label = v.UnicodeString(not_empty=False)
328 328 new_field_desc = v.UnicodeString(not_empty=False)
329 329 return _RepoFieldForm
330 330
331 331
332 332 def RepoForkForm(localizer, edit=False, old_data=None,
333 333 supported_backends=BACKENDS.keys(), repo_groups=None):
334 334 _ = localizer
335 335 old_data = old_data or {}
336 336 repo_groups = repo_groups or []
337 337
338 338 class _RepoForkForm(formencode.Schema):
339 339 allow_extra_fields = True
340 340 filter_extra_fields = False
341 341 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
342 342 v.SlugifyName(localizer))
343 343 repo_group = All(v.CanWriteGroup(localizer, ),
344 344 v.OneOf(repo_groups, hideList=True))
345 345 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
346 346 description = v.UnicodeString(strip=True, min=1, not_empty=True)
347 347 private = v.StringBoolean(if_missing=False)
348 348 copy_permissions = v.StringBoolean(if_missing=False)
349 349 fork_parent_id = v.UnicodeString()
350 350 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
351 351 return _RepoForkForm
352 352
353 353
354 354 def ApplicationSettingsForm(localizer):
355 355 _ = localizer
356 356
357 357 class _ApplicationSettingsForm(formencode.Schema):
358 358 allow_extra_fields = True
359 359 filter_extra_fields = False
360 360 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
361 361 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
362 362 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
363 363 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
364 364 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
365 365 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
366 366 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
367 367 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
368 368 return _ApplicationSettingsForm
369 369
370 370
371 371 def ApplicationVisualisationForm(localizer):
372 372 from rhodecode.model.db import Repository
373 373 _ = localizer
374 374
375 375 class _ApplicationVisualisationForm(formencode.Schema):
376 376 allow_extra_fields = True
377 377 filter_extra_fields = False
378 378 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
379 379 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
380 380 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
381 381
382 382 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
383 383 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
384 384 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
385 385 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
386 386 rhodecode_show_version = v.StringBoolean(if_missing=False)
387 387 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
388 388 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
389 389 rhodecode_gravatar_url = v.UnicodeString(min=3)
390 390 rhodecode_clone_uri_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI)
391 391 rhodecode_clone_uri_id_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_ID)
392 392 rhodecode_clone_uri_ssh_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_SSH)
393 393 rhodecode_support_url = v.UnicodeString()
394 394 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
395 395 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
396 396 return _ApplicationVisualisationForm
397 397
398 398
399 399 class _BaseVcsSettingsForm(formencode.Schema):
400 400
401 401 allow_extra_fields = True
402 402 filter_extra_fields = False
403 403 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
404 404 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
405 405 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
406 406
407 407 # PR/Code-review
408 408 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
409 409 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
410 410
411 411 # hg
412 412 extensions_largefiles = v.StringBoolean(if_missing=False)
413 413 extensions_evolve = v.StringBoolean(if_missing=False)
414 414 phases_publish = v.StringBoolean(if_missing=False)
415 415
416 416 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
417 417 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
418 418
419 419 # git
420 420 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
421 421 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
422 422 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
423 423
424 424 # svn
425 425 vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
426 426 vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
427 427
428 428 # cache
429 429 rhodecode_diff_cache = v.StringBoolean(if_missing=False)
430 430
431 431
432 432 def ApplicationUiSettingsForm(localizer):
433 433 _ = localizer
434 434
435 435 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
436 436 web_push_ssl = v.StringBoolean(if_missing=False)
437 437 paths_root_path = All(
438 438 v.ValidPath(localizer),
439 439 v.UnicodeString(strip=True, min=1, not_empty=True)
440 440 )
441 441 largefiles_usercache = All(
442 442 v.ValidPath(localizer),
443 443 v.UnicodeString(strip=True, min=2, not_empty=True))
444 444 vcs_git_lfs_store_location = All(
445 445 v.ValidPath(localizer),
446 446 v.UnicodeString(strip=True, min=2, not_empty=True))
447 extensions_hgsubversion = v.StringBoolean(if_missing=False)
448 447 extensions_hggit = v.StringBoolean(if_missing=False)
449 448 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
450 449 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
451 450 return _ApplicationUiSettingsForm
452 451
453 452
454 453 def RepoVcsSettingsForm(localizer, repo_name):
455 454 _ = localizer
456 455
457 456 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
458 457 inherit_global_settings = v.StringBoolean(if_missing=False)
459 458 new_svn_branch = v.ValidSvnPattern(localizer,
460 459 section='vcs_svn_branch', repo_name=repo_name)
461 460 new_svn_tag = v.ValidSvnPattern(localizer,
462 461 section='vcs_svn_tag', repo_name=repo_name)
463 462 return _RepoVcsSettingsForm
464 463
465 464
466 465 def LabsSettingsForm(localizer):
467 466 _ = localizer
468 467
469 468 class _LabSettingsForm(formencode.Schema):
470 469 allow_extra_fields = True
471 470 filter_extra_fields = False
472 471 return _LabSettingsForm
473 472
474 473
475 474 def ApplicationPermissionsForm(
476 475 localizer, register_choices, password_reset_choices,
477 476 extern_activate_choices):
478 477 _ = localizer
479 478
480 479 class _DefaultPermissionsForm(formencode.Schema):
481 480 allow_extra_fields = True
482 481 filter_extra_fields = True
483 482
484 483 anonymous = v.StringBoolean(if_missing=False)
485 484 default_register = v.OneOf(register_choices)
486 485 default_register_message = v.UnicodeString()
487 486 default_password_reset = v.OneOf(password_reset_choices)
488 487 default_extern_activate = v.OneOf(extern_activate_choices)
489 488 return _DefaultPermissionsForm
490 489
491 490
492 491 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
493 492 user_group_perms_choices):
494 493 _ = localizer
495 494
496 495 class _ObjectPermissionsForm(formencode.Schema):
497 496 allow_extra_fields = True
498 497 filter_extra_fields = True
499 498 overwrite_default_repo = v.StringBoolean(if_missing=False)
500 499 overwrite_default_group = v.StringBoolean(if_missing=False)
501 500 overwrite_default_user_group = v.StringBoolean(if_missing=False)
502 501
503 502 default_repo_perm = v.OneOf(repo_perms_choices)
504 503 default_group_perm = v.OneOf(group_perms_choices)
505 504 default_user_group_perm = v.OneOf(user_group_perms_choices)
506 505
507 506 return _ObjectPermissionsForm
508 507
509 508
510 509 def BranchPermissionsForm(localizer, branch_perms_choices):
511 510 _ = localizer
512 511
513 512 class _BranchPermissionsForm(formencode.Schema):
514 513 allow_extra_fields = True
515 514 filter_extra_fields = True
516 515 overwrite_default_branch = v.StringBoolean(if_missing=False)
517 516 default_branch_perm = v.OneOf(branch_perms_choices)
518 517
519 518 return _BranchPermissionsForm
520 519
521 520
522 521 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
523 522 repo_group_create_choices, user_group_create_choices,
524 523 fork_choices, inherit_default_permissions_choices):
525 524 _ = localizer
526 525
527 526 class _DefaultPermissionsForm(formencode.Schema):
528 527 allow_extra_fields = True
529 528 filter_extra_fields = True
530 529
531 530 anonymous = v.StringBoolean(if_missing=False)
532 531
533 532 default_repo_create = v.OneOf(create_choices)
534 533 default_repo_create_on_write = v.OneOf(create_on_write_choices)
535 534 default_user_group_create = v.OneOf(user_group_create_choices)
536 535 default_repo_group_create = v.OneOf(repo_group_create_choices)
537 536 default_fork_create = v.OneOf(fork_choices)
538 537 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
539 538 return _DefaultPermissionsForm
540 539
541 540
542 541 def UserIndividualPermissionsForm(localizer):
543 542 _ = localizer
544 543
545 544 class _DefaultPermissionsForm(formencode.Schema):
546 545 allow_extra_fields = True
547 546 filter_extra_fields = True
548 547
549 548 inherit_default_permissions = v.StringBoolean(if_missing=False)
550 549 return _DefaultPermissionsForm
551 550
552 551
553 552 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
554 553 _ = localizer
555 554 old_data = old_data or {}
556 555
557 556 class _DefaultsForm(formencode.Schema):
558 557 allow_extra_fields = True
559 558 filter_extra_fields = True
560 559 default_repo_type = v.OneOf(supported_backends)
561 560 default_repo_private = v.StringBoolean(if_missing=False)
562 561 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
563 562 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
564 563 default_repo_enable_locking = v.StringBoolean(if_missing=False)
565 564 return _DefaultsForm
566 565
567 566
568 567 def AuthSettingsForm(localizer):
569 568 _ = localizer
570 569
571 570 class _AuthSettingsForm(formencode.Schema):
572 571 allow_extra_fields = True
573 572 filter_extra_fields = True
574 573 auth_plugins = All(v.ValidAuthPlugins(localizer),
575 574 v.UniqueListFromString(localizer)(not_empty=True))
576 575 return _AuthSettingsForm
577 576
578 577
579 578 def UserExtraEmailForm(localizer):
580 579 _ = localizer
581 580
582 581 class _UserExtraEmailForm(formencode.Schema):
583 582 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
584 583 return _UserExtraEmailForm
585 584
586 585
587 586 def UserExtraIpForm(localizer):
588 587 _ = localizer
589 588
590 589 class _UserExtraIpForm(formencode.Schema):
591 590 ip = v.ValidIp(localizer)(not_empty=True)
592 591 return _UserExtraIpForm
593 592
594 593
595 594 def PullRequestForm(localizer, repo_id):
596 595 _ = localizer
597 596
598 597 class ReviewerForm(formencode.Schema):
599 598 user_id = v.Int(not_empty=True)
600 599 reasons = All()
601 600 rules = All(v.UniqueList(localizer, convert=int)())
602 601 mandatory = v.StringBoolean()
603 602 role = v.String(if_missing='reviewer')
604 603
605 604 class ObserverForm(formencode.Schema):
606 605 user_id = v.Int(not_empty=True)
607 606 reasons = All()
608 607 rules = All(v.UniqueList(localizer, convert=int)())
609 608 mandatory = v.StringBoolean()
610 609 role = v.String(if_missing='observer')
611 610
612 611 class _PullRequestForm(formencode.Schema):
613 612 allow_extra_fields = True
614 613 filter_extra_fields = True
615 614
616 615 common_ancestor = v.UnicodeString(strip=True, required=True)
617 616 source_repo = v.UnicodeString(strip=True, required=True)
618 617 source_ref = v.UnicodeString(strip=True, required=True)
619 618 target_repo = v.UnicodeString(strip=True, required=True)
620 619 target_ref = v.UnicodeString(strip=True, required=True)
621 620 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
622 621 v.UniqueList(localizer)(not_empty=True))
623 622 review_members = formencode.ForEach(ReviewerForm())
624 623 observer_members = formencode.ForEach(ObserverForm())
625 624 pullrequest_title = v.UnicodeString(strip=True, required=True, min=1, max=255)
626 625 pullrequest_desc = v.UnicodeString(strip=True, required=False)
627 626 description_renderer = v.UnicodeString(strip=True, required=False)
628 627
629 628 return _PullRequestForm
630 629
631 630
632 631 def IssueTrackerPatternsForm(localizer):
633 632 _ = localizer
634 633
635 634 class _IssueTrackerPatternsForm(formencode.Schema):
636 635 allow_extra_fields = True
637 636 filter_extra_fields = False
638 637 chained_validators = [v.ValidPattern(localizer)]
639 638 return _IssueTrackerPatternsForm
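The factories above return plain formencode.Schema subclasses, so a rough validation sketch looks like the following (illustrative only: the localizer argument and the error handling are assumptions, not code from this changeset).

import formencode

def validate_password_reset(post_data, localizer):
    # PasswordResetForm(localizer) builds the schema class; instantiate it and
    # run the submitted POST data through to_python() to validate and convert.
    schema = PasswordResetForm(localizer)()
    try:
        return schema.to_python(post_data)  # cleaned, converted values
    except formencode.Invalid as exc:
        # unpack_errors() yields a field-name -> error-message mapping
        raise ValueError(exc.unpack_errors())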
@@ -1,927 +1,925 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import re
21 21 import logging
22 22 import time
23 23 import functools
24 24 from collections import namedtuple
25 25
26 26 from pyramid.threadlocal import get_current_request
27 27
28 28 from rhodecode.lib import rc_cache
29 29 from rhodecode.lib.hash_utils import sha1_safe
30 30 from rhodecode.lib.html_filters import sanitize_html
31 31 from rhodecode.lib.utils2 import (
32 32 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
33 33 from rhodecode.lib.vcs.backends import base
34 34 from rhodecode.lib.statsd_client import StatsdClient
35 35 from rhodecode.model import BaseModel
36 36 from rhodecode.model.db import (
37 37 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting)
38 38 from rhodecode.model.meta import Session
39 39
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43
44 44 UiSetting = namedtuple(
45 45 'UiSetting', ['section', 'key', 'value', 'active'])
46 46
47 47 SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
48 48
49 49
50 50 class SettingNotFound(Exception):
51 51 def __init__(self, setting_id):
52 52 msg = f'Setting `{setting_id}` is not found'
53 53 super().__init__(msg)
54 54
55 55
56 56 class SettingsModel(BaseModel):
57 57 BUILTIN_HOOKS = (
58 58 RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
59 59 RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
60 60 RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
61 61 RhodeCodeUi.HOOK_PUSH_KEY,)
62 62 HOOKS_SECTION = 'hooks'
63 63
64 64 def __init__(self, sa=None, repo=None):
65 65 self.repo = repo
66 66 self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
67 67 self.SettingsDbModel = (
68 68 RepoRhodeCodeSetting if repo else RhodeCodeSetting)
69 69 super().__init__(sa)
70 70
71 71 def get_keyname(self, key_name, prefix='rhodecode_'):
72 72 return f'{prefix}{key_name}'
73 73
74 74 def get_ui_by_key(self, key):
75 75 q = self.UiDbModel.query()
76 76 q = q.filter(self.UiDbModel.ui_key == key)
77 77 q = self._filter_by_repo(RepoRhodeCodeUi, q)
78 78 return q.scalar()
79 79
80 80 def get_ui_by_section(self, section):
81 81 q = self.UiDbModel.query()
82 82 q = q.filter(self.UiDbModel.ui_section == section)
83 83 q = self._filter_by_repo(RepoRhodeCodeUi, q)
84 84 return q.all()
85 85
86 86 def get_ui_by_section_and_key(self, section, key):
87 87 q = self.UiDbModel.query()
88 88 q = q.filter(self.UiDbModel.ui_section == section)
89 89 q = q.filter(self.UiDbModel.ui_key == key)
90 90 q = self._filter_by_repo(RepoRhodeCodeUi, q)
91 91 return q.scalar()
92 92
93 93 def get_ui(self, section=None, key=None):
94 94 q = self.UiDbModel.query()
95 95 q = self._filter_by_repo(RepoRhodeCodeUi, q)
96 96
97 97 if section:
98 98 q = q.filter(self.UiDbModel.ui_section == section)
99 99 if key:
100 100 q = q.filter(self.UiDbModel.ui_key == key)
101 101
102 102 # TODO: mikhail: add caching
103 103 result = [
104 104 UiSetting(
105 105 section=safe_str(r.ui_section), key=safe_str(r.ui_key),
106 106 value=safe_str(r.ui_value), active=r.ui_active
107 107 )
108 108 for r in q.all()
109 109 ]
110 110 return result
111 111
112 112 def get_builtin_hooks(self):
113 113 q = self.UiDbModel.query()
114 114 q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
115 115 return self._get_hooks(q)
116 116
117 117 def get_custom_hooks(self):
118 118 q = self.UiDbModel.query()
119 119 q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
120 120 return self._get_hooks(q)
121 121
122 122 def create_ui_section_value(self, section, val, key=None, active=True):
123 123 new_ui = self.UiDbModel()
124 124 new_ui.ui_section = section
125 125 new_ui.ui_value = val
126 126 new_ui.ui_active = active
127 127
128 128 repository_id = ''
129 129 if self.repo:
130 130 repo = self._get_repo(self.repo)
131 131 repository_id = repo.repo_id
132 132 new_ui.repository_id = repository_id
133 133
134 134 if not key:
135 135 # keys must be unique, so append extra info to make them distinct
136 136 if self.repo:
137 137 key = sha1_safe(f'{section}{val}{repository_id}')
138 138 else:
139 139 key = sha1_safe(f'{section}{val}')
140 140
141 141 new_ui.ui_key = key
142 142
143 143 Session().add(new_ui)
144 144 return new_ui
145 145
146 146 def create_or_update_hook(self, key, value):
147 147 ui = (
148 148 self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
149 149 self.UiDbModel())
150 150 ui.ui_section = self.HOOKS_SECTION
151 151 ui.ui_active = True
152 152 ui.ui_key = key
153 153 ui.ui_value = value
154 154
155 155 if self.repo:
156 156 repo = self._get_repo(self.repo)
157 157 repository_id = repo.repo_id
158 158 ui.repository_id = repository_id
159 159
160 160 Session().add(ui)
161 161 return ui
162 162
163 163 def delete_ui(self, id_):
164 164 ui = self.UiDbModel.get(id_)
165 165 if not ui:
166 166 raise SettingNotFound(id_)
167 167 Session().delete(ui)
168 168
169 169 def get_setting_by_name(self, name):
170 170 q = self._get_settings_query()
171 171 q = q.filter(self.SettingsDbModel.app_settings_name == name)
172 172 return q.scalar()
173 173
174 174 def create_or_update_setting(
175 175 self, name, val: Optional | str = Optional(''), type_: Optional | str = Optional('unicode')):
176 176 """
177 177 Creates or updates a RhodeCode setting. When updating, only parameters
178 178 that are explicitly set are changed; any parameter left as an Optional
179 179 instance is skipped.
180 180
181 181 :param name:
182 182 :param val:
183 183 :param type_:
184 184 :return:
185 185 """
186 186
187 187 res = self.get_setting_by_name(name)
188 188 repo = self._get_repo(self.repo) if self.repo else None
189 189
190 190 if not res:
191 191 val = Optional.extract(val)
192 192 type_ = Optional.extract(type_)
193 193
194 194 args = (
195 195 (repo.repo_id, name, val, type_)
196 196 if repo else (name, val, type_))
197 197 res = self.SettingsDbModel(*args)
198 198
199 199 else:
200 200 if self.repo:
201 201 res.repository_id = repo.repo_id
202 202
203 203 res.app_settings_name = name
204 204 if not isinstance(type_, Optional):
205 205 # update if set
206 206 res.app_settings_type = type_
207 207 if not isinstance(val, Optional):
208 208 # update if set
209 209 res.app_settings_value = val
210 210
211 211 Session().add(res)
212 212 return res
213 213
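# Usage sketch for the method above (illustration only, not part of this
# changeset): arguments left as Optional are skipped on update, so the call
# below changes only the stored value and keeps the stored type as-is. The
# setting name and value are made-up examples.
def _example_update_value_only():
    SettingsModel().create_or_update_setting('title', val='My RhodeCode')
    Session().commit()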
214 214 def get_cache_region(self):
215 215 repo = self._get_repo(self.repo) if self.repo else None
216 216 cache_key = f"repo.v1.{repo.repo_id}" if repo else "repo.v1.ALL"
217 217 cache_namespace_uid = f'cache_settings.{cache_key}'
218 218 region = rc_cache.get_or_create_region('cache_general', cache_namespace_uid)
219 219 return region, cache_namespace_uid
220 220
221 221 def invalidate_settings_cache(self, hard=False):
222 222 region, namespace_key = self.get_cache_region()
223 223 log.debug('Invalidating cache [%s]: region %s for cache_key: %s',
224 224 'invalidate_settings_cache', region, namespace_key)
225 225
226 226 # we use hard cleanup if invalidation is sent
227 227 rc_cache.clear_cache_namespace(region, namespace_key, method=rc_cache.CLEAR_DELETE)
228 228
229 229 def get_cache_call_method(self, cache=True):
230 230 region, cache_key = self.get_cache_region()
231 231
232 232 @region.conditional_cache_on_arguments(condition=cache)
233 233 def _get_all_settings(name, key):
234 234 q = self._get_settings_query()
235 235 if not q:
236 236 raise Exception('Could not get application settings !')
237 237
238 238 settings = {
239 239 self.get_keyname(res.app_settings_name): res.app_settings_value
240 240 for res in q
241 241 }
242 242 return settings
243 243 return _get_all_settings
244 244
245 245 def get_all_settings(self, cache=False, from_request=True):
246 246 # defines if we use GLOBAL, or PER_REPO
247 247 repo = self._get_repo(self.repo) if self.repo else None
248 248
249 249 # initially try the request context; this is the fastest
250 250 # we only fetch global config, NOT for repo-specific
251 251 if from_request and not repo:
252 252 request = get_current_request()
253 253
254 254 if request and hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
255 255 rc_config = request.call_context.rc_config
256 256 if rc_config:
257 257 return rc_config
258 258
259 259 _region, cache_key = self.get_cache_region()
260 260 _get_all_settings = self.get_cache_call_method(cache=cache)
261 261
262 262 start = time.time()
263 263 result = _get_all_settings('rhodecode_settings', cache_key)
264 264 compute_time = time.time() - start
265 265 log.debug('cached method:%s took %.4fs', _get_all_settings.__name__, compute_time)
266 266
267 267 statsd = StatsdClient.statsd
268 268 if statsd:
269 269 elapsed_time_ms = round(1000.0 * compute_time) # use ms only
270 270 statsd.timing("rhodecode_settings_timing.histogram", elapsed_time_ms,
271 271 use_decimals=False)
272 272
273 273 log.debug('Fetching app settings for key: %s took: %.4fs: cache: %s', cache_key, compute_time, cache)
274 274
275 275 return result
276 276
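# Read-side sketch for get_all_settings() (illustration only, not part of this
# changeset): settings come back as a flat dict keyed with the 'rhodecode_'
# prefix; 'rhodecode_title' is used here purely as an example key.
def _example_read_title():
    all_settings = SettingsModel().get_all_settings(cache=True)
    return all_settings.get('rhodecode_title')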
277 277 def get_auth_settings(self):
278 278 q = self._get_settings_query()
279 279 q = q.filter(
280 280 self.SettingsDbModel.app_settings_name.startswith('auth_'))
281 281 rows = q.all()
282 282 auth_settings = {
283 283 row.app_settings_name: row.app_settings_value for row in rows}
284 284 return auth_settings
285 285
286 286 def get_auth_plugins(self):
287 287 auth_plugins = self.get_setting_by_name("auth_plugins")
288 288 return auth_plugins.app_settings_value
289 289
290 290 def get_default_repo_settings(self, strip_prefix=False):
291 291 q = self._get_settings_query()
292 292 q = q.filter(
293 293 self.SettingsDbModel.app_settings_name.startswith('default_'))
294 294 rows = q.all()
295 295
296 296 result = {}
297 297 for row in rows:
298 298 key = row.app_settings_name
299 299 if strip_prefix:
300 300 key = remove_prefix(key, prefix='default_')
301 301 result.update({key: row.app_settings_value})
302 302 return result
303 303
304 304 def get_repo(self):
305 305 repo = self._get_repo(self.repo)
306 306 if not repo:
307 307 raise Exception(
308 308 f'Repository `{self.repo}` cannot be found inside the database')
309 309 return repo
310 310
311 311 def _filter_by_repo(self, model, query):
312 312 if self.repo:
313 313 repo = self.get_repo()
314 314 query = query.filter(model.repository_id == repo.repo_id)
315 315 return query
316 316
317 317 def _get_hooks(self, query):
318 318 query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
319 319 query = self._filter_by_repo(RepoRhodeCodeUi, query)
320 320 return query.all()
321 321
322 322 def _get_settings_query(self):
323 323 q = self.SettingsDbModel.query()
324 324 return self._filter_by_repo(RepoRhodeCodeSetting, q)
325 325
326 326 def list_enabled_social_plugins(self, settings):
327 327 enabled = []
328 328 for plug in SOCIAL_PLUGINS_LIST:
329 329 if str2bool(settings.get(f'rhodecode_auth_{plug}_enabled')):
330 330 enabled.append(plug)
331 331 return enabled
332 332
333 333
334 334 def assert_repo_settings(func):
335 335 @functools.wraps(func)
336 336 def _wrapper(self, *args, **kwargs):
337 337 if not self.repo_settings:
338 338 raise Exception('Repository is not specified')
339 339 return func(self, *args, **kwargs)
340 340 return _wrapper
341 341
342 342
343 343 class IssueTrackerSettingsModel(object):
344 344 INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
345 345 SETTINGS_PREFIX = 'issuetracker_'
346 346
347 347 def __init__(self, sa=None, repo=None):
348 348 self.global_settings = SettingsModel(sa=sa)
349 349 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
350 350
351 351 @property
352 352 def inherit_global_settings(self):
353 353 if not self.repo_settings:
354 354 return True
355 355 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
356 356 return setting.app_settings_value if setting else True
357 357
358 358 @inherit_global_settings.setter
359 359 def inherit_global_settings(self, value):
360 360 if self.repo_settings:
361 361 settings = self.repo_settings.create_or_update_setting(
362 362 self.INHERIT_SETTINGS, value, type_='bool')
363 363 Session().add(settings)
364 364
365 365 def _get_keyname(self, key, uid, prefix='rhodecode_'):
366 366 return f'{prefix}{self.SETTINGS_PREFIX}{key}_{uid}'
367 367
368 368 def _make_dict_for_settings(self, qs):
369 369 prefix_match = self._get_keyname('pat', '',)
370 370
371 371 issuetracker_entries = {}
372 372 # create keys
373 373 for k, v in qs.items():
374 374 if k.startswith(prefix_match):
375 375 uid = k[len(prefix_match):]
376 376 issuetracker_entries[uid] = None
377 377
378 378 def url_cleaner(input_str):
379 379 input_str = input_str.replace('"', '').replace("'", '')
380 380 input_str = sanitize_html(input_str, strip=True)
381 381 return input_str
382 382
383 383 # populate
384 384 for uid in issuetracker_entries:
385 385 url_data = qs.get(self._get_keyname('url', uid))
386 386
387 387 pat = qs.get(self._get_keyname('pat', uid))
388 388 try:
389 389 pat_compiled = re.compile(r'%s' % pat)
390 390 except re.error:
391 391 pat_compiled = None
392 392
393 393 issuetracker_entries[uid] = AttributeDict({
394 394 'pat': pat,
395 395 'pat_compiled': pat_compiled,
396 396 'url': url_cleaner(
397 397 qs.get(self._get_keyname('url', uid)) or ''),
398 398 'pref': sanitize_html(
399 399 qs.get(self._get_keyname('pref', uid)) or ''),
400 400 'desc': qs.get(
401 401 self._get_keyname('desc', uid)),
402 402 })
403 403
404 404 return issuetracker_entries
405 405
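# Key-layout sketch for the helpers above (illustration only, not part of this
# changeset): one issue tracker entry with uid '1' is stored under flat keys
# such as rhodecode_issuetracker_pat_1 / _url_1 / _pref_1 / _desc_1, and
# _make_dict_for_settings() folds them back into one AttributeDict per uid.
# The uid '1' is made up.
def _example_key_layout():
    tracker = IssueTrackerSettingsModel()
    assert tracker._get_keyname('pat', '1') == 'rhodecode_issuetracker_pat_1'
    assert tracker._get_keyname('url', '1') == 'rhodecode_issuetracker_url_1'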
406 406 def get_global_settings(self, cache=False):
407 407 """
408 408 Returns a dict of global issue tracker settings, keyed by entry uid
409 409 """
410 410 defaults = self.global_settings.get_all_settings(cache=cache)
411 411 settings = self._make_dict_for_settings(defaults)
412 412 return settings
413 413
414 414 def get_repo_settings(self, cache=False):
415 415 """
416 416 Returns a dict of per-repository issue tracker settings, keyed by entry uid
417 417 """
418 418 if not self.repo_settings:
419 419 raise Exception('Repository is not specified')
420 420 all_settings = self.repo_settings.get_all_settings(cache=cache)
421 421 settings = self._make_dict_for_settings(all_settings)
422 422 return settings
423 423
424 424 def get_settings(self, cache=False):
425 425 if self.inherit_global_settings:
426 426 return self.get_global_settings(cache=cache)
427 427 else:
428 428 return self.get_repo_settings(cache=cache)
429 429
430 430 def delete_entries(self, uid):
431 431 if self.repo_settings:
432 432 all_patterns = self.get_repo_settings()
433 433 settings_model = self.repo_settings
434 434 else:
435 435 all_patterns = self.get_global_settings()
436 436 settings_model = self.global_settings
437 437 entries = all_patterns.get(uid, [])
438 438
439 439 for del_key in entries:
440 440 setting_name = self._get_keyname(del_key, uid, prefix='')
441 441 entry = settings_model.get_setting_by_name(setting_name)
442 442 if entry:
443 443 Session().delete(entry)
444 444
445 445 Session().commit()
446 446
447 447 def create_or_update_setting(
448 448 self, name, val=Optional(''), type_=Optional('unicode')):
449 449 if self.repo_settings:
450 450 setting = self.repo_settings.create_or_update_setting(
451 451 name, val, type_)
452 452 else:
453 453 setting = self.global_settings.create_or_update_setting(
454 454 name, val, type_)
455 455 return setting
456 456
457 457
458 458 class VcsSettingsModel(object):
459 459
460 460 INHERIT_SETTINGS = 'inherit_vcs_settings'
461 461 GENERAL_SETTINGS = (
462 462 'use_outdated_comments',
463 463 'pr_merge_enabled',
464 464 'hg_use_rebase_for_merging',
465 465 'hg_close_branch_before_merging',
466 466 'git_use_rebase_for_merging',
467 467 'git_close_branch_before_merging',
468 468 'diff_cache',
469 469 )
470 470
471 471 HOOKS_SETTINGS = (
472 472 ('hooks', 'changegroup.repo_size'),
473 473 ('hooks', 'changegroup.push_logger'),
474 474 ('hooks', 'outgoing.pull_logger'),
475 475 )
476 476 HG_SETTINGS = (
477 477 ('extensions', 'largefiles'),
478 478 ('phases', 'publish'),
479 479 ('extensions', 'evolve'),
480 480 ('extensions', 'topic'),
481 481 ('experimental', 'evolution'),
482 482 ('experimental', 'evolution.exchange'),
483 483 )
484 484 GIT_SETTINGS = (
485 485 ('vcs_git_lfs', 'enabled'),
486 486 )
487 487 GLOBAL_HG_SETTINGS = (
488 488 ('extensions', 'largefiles'),
489 489 ('largefiles', 'usercache'),
490 490 ('phases', 'publish'),
491 ('extensions', 'hgsubversion'),
492 491 ('extensions', 'evolve'),
493 492 ('extensions', 'topic'),
494 493 ('experimental', 'evolution'),
495 494 ('experimental', 'evolution.exchange'),
496 495 )
497 496
498 497 GLOBAL_GIT_SETTINGS = (
499 498 ('vcs_git_lfs', 'enabled'),
500 499 ('vcs_git_lfs', 'store_location')
501 500 )
502 501
503 502 GLOBAL_SVN_SETTINGS = (
504 503 ('vcs_svn_proxy', 'http_requests_enabled'),
505 504 ('vcs_svn_proxy', 'http_server_url')
506 505 )
507 506
508 507 SVN_BRANCH_SECTION = 'vcs_svn_branch'
509 508 SVN_TAG_SECTION = 'vcs_svn_tag'
510 509 SSL_SETTING = ('web', 'push_ssl')
511 510 PATH_SETTING = ('paths', '/')
512 511
513 512 def __init__(self, sa=None, repo=None):
514 513 self.global_settings = SettingsModel(sa=sa)
515 514 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
516 515 self._ui_settings = (
517 516 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
518 517 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
519 518
520 519 @property
521 520 @assert_repo_settings
522 521 def inherit_global_settings(self):
523 522 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
524 523 return setting.app_settings_value if setting else True
525 524
526 525 @inherit_global_settings.setter
527 526 @assert_repo_settings
528 527 def inherit_global_settings(self, value):
529 528 self.repo_settings.create_or_update_setting(
530 529 self.INHERIT_SETTINGS, value, type_='bool')
531 530
532 531 def get_keyname(self, key_name, prefix='rhodecode_'):
533 532 return f'{prefix}{key_name}'
534 533
535 534 def get_global_svn_branch_patterns(self):
536 535 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
537 536
538 537 @assert_repo_settings
539 538 def get_repo_svn_branch_patterns(self):
540 539 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
541 540
542 541 def get_global_svn_tag_patterns(self):
543 542 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
544 543
545 544 @assert_repo_settings
546 545 def get_repo_svn_tag_patterns(self):
547 546 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
548 547
549 548 def get_global_settings(self):
550 549 return self._collect_all_settings(global_=True)
551 550
552 551 @assert_repo_settings
553 552 def get_repo_settings(self):
554 553 return self._collect_all_settings(global_=False)
555 554
556 555 @assert_repo_settings
557 556 def get_repo_settings_inherited(self):
558 557 global_settings = self.get_global_settings()
559 558 global_settings.update(self.get_repo_settings())
560 559 return global_settings
561 560
562 561 @assert_repo_settings
563 562 def create_or_update_repo_settings(
564 563 self, data, inherit_global_settings=False):
565 564 from rhodecode.model.scm import ScmModel
566 565
567 566 self.inherit_global_settings = inherit_global_settings
568 567
569 568 repo = self.repo_settings.get_repo()
570 569 if not inherit_global_settings:
571 570 if repo.repo_type == 'svn':
572 571 self.create_repo_svn_settings(data)
573 572 else:
574 573 self.create_or_update_repo_hook_settings(data)
575 574 self.create_or_update_repo_pr_settings(data)
576 575
577 576 if repo.repo_type == 'hg':
578 577 self.create_or_update_repo_hg_settings(data)
579 578
580 579 if repo.repo_type == 'git':
581 580 self.create_or_update_repo_git_settings(data)
582 581
583 582 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
584 583
585 584 @assert_repo_settings
586 585 def create_or_update_repo_hook_settings(self, data):
587 586 for section, key in self.HOOKS_SETTINGS:
588 587 data_key = self._get_form_ui_key(section, key)
589 588 if data_key not in data:
590 589 raise ValueError(
591 590 f'The given data does not contain {data_key} key')
592 591
593 592 active = data.get(data_key)
594 593 repo_setting = self.repo_settings.get_ui_by_section_and_key(
595 594 section, key)
596 595 if not repo_setting:
597 596 global_setting = self.global_settings.\
598 597 get_ui_by_section_and_key(section, key)
599 598 self.repo_settings.create_ui_section_value(
600 599 section, global_setting.ui_value, key=key, active=active)
601 600 else:
602 601 repo_setting.ui_active = active
603 602 Session().add(repo_setting)
604 603
605 604 def update_global_hook_settings(self, data):
606 605 for section, key in self.HOOKS_SETTINGS:
607 606 data_key = self._get_form_ui_key(section, key)
608 607 if data_key not in data:
609 608 raise ValueError(
610 609 f'The given data does not contain {data_key} key')
611 610 active = data.get(data_key)
612 611 repo_setting = self.global_settings.get_ui_by_section_and_key(
613 612 section, key)
614 613 repo_setting.ui_active = active
615 614 Session().add(repo_setting)
616 615
617 616 @assert_repo_settings
618 617 def create_or_update_repo_pr_settings(self, data):
619 618 return self._create_or_update_general_settings(
620 619 self.repo_settings, data)
621 620
622 621 def create_or_update_global_pr_settings(self, data):
623 622 return self._create_or_update_general_settings(
624 623 self.global_settings, data)
625 624
626 625 @assert_repo_settings
627 626 def create_repo_svn_settings(self, data):
628 627 return self._create_svn_settings(self.repo_settings, data)
629 628
630 629 def _set_evolution(self, settings, is_enabled):
631 630 if is_enabled:
632 631 # if evolve is active set evolution=all
633 632
634 633 self._create_or_update_ui(
635 634 settings, *('experimental', 'evolution'), value='all',
636 635 active=True)
637 636 self._create_or_update_ui(
638 637 settings, *('experimental', 'evolution.exchange'), value='yes',
639 638 active=True)
640 639 # if evolve is active set topics server support
641 640 self._create_or_update_ui(
642 641 settings, *('extensions', 'topic'), value='',
643 642 active=True)
644 643
645 644 else:
646 645 self._create_or_update_ui(
647 646 settings, *('experimental', 'evolution'), value='',
648 647 active=False)
649 648 self._create_or_update_ui(
650 649 settings, *('experimental', 'evolution.exchange'), value='no',
651 650 active=False)
652 651 self._create_or_update_ui(
653 652 settings, *('extensions', 'topic'), value='',
654 653 active=False)
655 654
656 655 @assert_repo_settings
657 656 def create_or_update_repo_hg_settings(self, data):
658 657 largefiles, phases, evolve = \
659 658 self.HG_SETTINGS[:3]
660 659 largefiles_key, phases_key, evolve_key = \
661 660 self._get_settings_keys(self.HG_SETTINGS[:3], data)
662 661
663 662 self._create_or_update_ui(
664 663 self.repo_settings, *largefiles, value='',
665 664 active=data[largefiles_key])
666 665 self._create_or_update_ui(
667 666 self.repo_settings, *evolve, value='',
668 667 active=data[evolve_key])
669 668 self._set_evolution(self.repo_settings, is_enabled=data[evolve_key])
670 669
671 670 self._create_or_update_ui(
672 671 self.repo_settings, *phases, value=safe_str(data[phases_key]))
673 672
674 673 def create_or_update_global_hg_settings(self, data):
675 largefiles, largefiles_store, phases, hgsubversion, evolve \
676 = self.GLOBAL_HG_SETTINGS[:5]
677 largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \
678 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:5], data)
674 opts_len = 4
675 largefiles, largefiles_store, phases, evolve \
676 = self.GLOBAL_HG_SETTINGS[:opts_len]
677 largefiles_key, largefiles_store_key, phases_key, evolve_key \
678 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:opts_len], data)
679 679
680 680 self._create_or_update_ui(
681 681 self.global_settings, *largefiles, value='',
682 682 active=data[largefiles_key])
683 683 self._create_or_update_ui(
684 684 self.global_settings, *largefiles_store, value=data[largefiles_store_key])
685 685 self._create_or_update_ui(
686 686 self.global_settings, *phases, value=safe_str(data[phases_key]))
687 687 self._create_or_update_ui(
688 self.global_settings, *hgsubversion, active=data[subversion_key])
689 self._create_or_update_ui(
690 688 self.global_settings, *evolve, value='',
691 689 active=data[evolve_key])
692 690 self._set_evolution(self.global_settings, is_enabled=data[evolve_key])
693 691
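# Usage sketch for the method above (illustration only, not part of this
# changeset): with hgsubversion dropped, only the four form keys derived from
# GLOBAL_HG_SETTINGS[:4] via _get_form_ui_key() are read; the old
# 'extensions_hgsubversion' key is no longer expected. The values and the
# usercache path are made-up examples.
def _example_global_hg_update():
    VcsSettingsModel().create_or_update_global_hg_settings({
        'extensions_largefiles': True,
        'largefiles_usercache': '/example/store/largefiles',
        'phases_publish': True,
        'extensions_evolve': False,
    })
    Session().commit()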
694 692 def create_or_update_repo_git_settings(self, data):
695 693 # NOTE(marcink): # comma makes unpack work properly
696 694 lfs_enabled, \
697 695 = self.GIT_SETTINGS
698 696
699 697 lfs_enabled_key, \
700 698 = self._get_settings_keys(self.GIT_SETTINGS, data)
701 699
702 700 self._create_or_update_ui(
703 701 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
704 702 active=data[lfs_enabled_key])
705 703
706 704 def create_or_update_global_git_settings(self, data):
707 705 lfs_enabled, lfs_store_location \
708 706 = self.GLOBAL_GIT_SETTINGS
709 707 lfs_enabled_key, lfs_store_location_key \
710 708 = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)
711 709
712 710 self._create_or_update_ui(
713 711 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
714 712 active=data[lfs_enabled_key])
715 713 self._create_or_update_ui(
716 714 self.global_settings, *lfs_store_location,
717 715 value=data[lfs_store_location_key])
718 716
719 717 def create_or_update_global_svn_settings(self, data):
720 718 # branch/tags patterns
721 719 self._create_svn_settings(self.global_settings, data)
722 720
723 721 http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS
724 722 http_requests_enabled_key, http_server_url_key = self._get_settings_keys(
725 723 self.GLOBAL_SVN_SETTINGS, data)
726 724
727 725 self._create_or_update_ui(
728 726 self.global_settings, *http_requests_enabled,
729 727 value=safe_str(data[http_requests_enabled_key]))
730 728 self._create_or_update_ui(
731 729 self.global_settings, *http_server_url,
732 730 value=data[http_server_url_key])
733 731
734 732 def update_global_ssl_setting(self, value):
735 733 self._create_or_update_ui(
736 734 self.global_settings, *self.SSL_SETTING, value=value)
737 735
738 736 def update_global_path_setting(self, value):
739 737 self._create_or_update_ui(
740 738 self.global_settings, *self.PATH_SETTING, value=value)
741 739
742 740 @assert_repo_settings
743 741 def delete_repo_svn_pattern(self, id_):
744 742 ui = self.repo_settings.UiDbModel.get(id_)
745 743 if ui and ui.repository.repo_name == self.repo_settings.repo:
746 744 # only delete if it's the same repo as initialized settings
747 745 self.repo_settings.delete_ui(id_)
748 746 else:
749 747 # raise error as if we wouldn't find this option
750 748 self.repo_settings.delete_ui(-1)
751 749
752 750 def delete_global_svn_pattern(self, id_):
753 751 self.global_settings.delete_ui(id_)
754 752
755 753 @assert_repo_settings
756 754 def get_repo_ui_settings(self, section=None, key=None):
757 755 global_uis = self.global_settings.get_ui(section, key)
758 756 repo_uis = self.repo_settings.get_ui(section, key)
759 757
760 758 filtered_repo_uis = self._filter_ui_settings(repo_uis)
761 759 filtered_repo_uis_keys = [
762 760 (s.section, s.key) for s in filtered_repo_uis]
763 761
764 762 def _is_global_ui_filtered(ui):
765 763 return (
766 764 (ui.section, ui.key) in filtered_repo_uis_keys
767 765 or ui.section in self._svn_sections)
768 766
769 767 filtered_global_uis = [
770 768 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
771 769
772 770 return filtered_global_uis + filtered_repo_uis
773 771
774 772 def get_global_ui_settings(self, section=None, key=None):
775 773 return self.global_settings.get_ui(section, key)
776 774
777 775 def get_ui_settings_as_config_obj(self, section=None, key=None):
778 776 config = base.Config()
779 777
780 778 ui_settings = self.get_ui_settings(section=section, key=key)
781 779
782 780 for entry in ui_settings:
783 781 config.set(entry.section, entry.key, entry.value)
784 782
785 783 return config
786 784
787 785 def get_ui_settings(self, section=None, key=None):
788 786 if not self.repo_settings or self.inherit_global_settings:
789 787 return self.get_global_ui_settings(section, key)
790 788 else:
791 789 return self.get_repo_ui_settings(section, key)
792 790
793 791 def get_svn_patterns(self, section=None):
794 792 if not self.repo_settings:
795 793 return self.get_global_ui_settings(section)
796 794 else:
797 795 return self.get_repo_ui_settings(section)
798 796
799 797 @assert_repo_settings
800 798 def get_repo_general_settings(self):
801 799 global_settings = self.global_settings.get_all_settings()
802 800 repo_settings = self.repo_settings.get_all_settings()
803 801 filtered_repo_settings = self._filter_general_settings(repo_settings)
804 802 global_settings.update(filtered_repo_settings)
805 803 return global_settings
806 804
807 805 def get_global_general_settings(self):
808 806 return self.global_settings.get_all_settings()
809 807
810 808 def get_general_settings(self):
811 809 if not self.repo_settings or self.inherit_global_settings:
812 810 return self.get_global_general_settings()
813 811 else:
814 812 return self.get_repo_general_settings()
815 813
816 814 def get_repos_location(self):
817 815 return self.global_settings.get_ui_by_key('/').ui_value
818 816
819 817 def _filter_ui_settings(self, settings):
820 818 filtered_settings = [
821 819 s for s in settings if self._should_keep_setting(s)]
822 820 return filtered_settings
823 821
824 822 def _should_keep_setting(self, setting):
825 823 keep = (
826 824 (setting.section, setting.key) in self._ui_settings or
827 825 setting.section in self._svn_sections)
828 826 return keep
829 827
830 828 def _filter_general_settings(self, settings):
831 829 keys = [self.get_keyname(key) for key in self.GENERAL_SETTINGS]
832 830 return {
833 831 k: settings[k]
834 832 for k in settings if k in keys}
835 833
836 834 def _collect_all_settings(self, global_=False):
837 835 settings = self.global_settings if global_ else self.repo_settings
838 836 result = {}
839 837
840 838 for section, key in self._ui_settings:
841 839 ui = settings.get_ui_by_section_and_key(section, key)
842 840 result_key = self._get_form_ui_key(section, key)
843 841
844 842 if ui:
845 843 if section in ('hooks', 'extensions'):
846 844 result[result_key] = ui.ui_active
847 845 elif result_key in ['vcs_git_lfs_enabled']:
848 846 result[result_key] = ui.ui_active
849 847 else:
850 848 result[result_key] = ui.ui_value
851 849
852 850 for name in self.GENERAL_SETTINGS:
853 851 setting = settings.get_setting_by_name(name)
854 852 if setting:
855 853 result_key = self.get_keyname(name)
856 854 result[result_key] = setting.app_settings_value
857 855
858 856 return result
859 857
860 858 def _get_form_ui_key(self, section, key):
861 859 return '{section}_{key}'.format(
862 860 section=section, key=key.replace('.', '_'))
863 861
864 862 def _create_or_update_ui(
865 863 self, settings, section, key, value=None, active=None):
866 864 ui = settings.get_ui_by_section_and_key(section, key)
867 865 if not ui:
868 866 active = True if active is None else active
869 867 settings.create_ui_section_value(
870 868 section, value, key=key, active=active)
871 869 else:
872 870 if active is not None:
873 871 ui.ui_active = active
874 872 if value is not None:
875 873 ui.ui_value = value
876 874 Session().add(ui)
877 875
878 876 def _create_svn_settings(self, settings, data):
879 877 svn_settings = {
880 878 'new_svn_branch': self.SVN_BRANCH_SECTION,
881 879 'new_svn_tag': self.SVN_TAG_SECTION
882 880 }
883 881 for key in svn_settings:
884 882 if data.get(key):
885 883 settings.create_ui_section_value(svn_settings[key], data[key])
886 884
887 885 def _create_or_update_general_settings(self, settings, data):
888 886 for name in self.GENERAL_SETTINGS:
889 887 data_key = self.get_keyname(name)
890 888 if data_key not in data:
891 889 raise ValueError(
892 890 f'The given data does not contain {data_key} key')
893 891 setting = settings.create_or_update_setting(
894 892 name, data[data_key], 'bool')
895 893 Session().add(setting)
896 894
897 895 def _get_settings_keys(self, settings, data):
898 896 data_keys = [self._get_form_ui_key(*s) for s in settings]
899 897 for data_key in data_keys:
900 898 if data_key not in data:
901 899 raise ValueError(
902 900 f'The given data does not contain {data_key} key')
903 901 return data_keys
904 902
905 903 def create_largeobjects_dirs_if_needed(self, repo_store_path):
906 904 """
907 905 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
908 906 creates the largefiles and Git LFS store directories if they do not exist yet.
909 907 """
910 908
911 909 from rhodecode.lib.vcs.backends.hg import largefiles_store
912 910 from rhodecode.lib.vcs.backends.git import lfs_store
913 911
914 912 paths = [
915 913 largefiles_store(repo_store_path),
916 914 lfs_store(repo_store_path)]
917 915
918 916 for path in paths:
919 917 if os.path.isdir(path):
920 918 continue
921 919 if os.path.isfile(path):
922 920 continue
923 921 # neither a file nor a dir, so try to create it
924 922 try:
925 923 os.makedirs(path)
926 924 except Exception:
927 925 log.warning('Failed to create largefiles dir:%s', path)
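Because the change above only stops reading the `extensions`/`hgsubversion` ui entry, an existing installation may still carry that row in its database. A possible cleanup, sketched with the model methods shown above (whether RhodeCode ships such a migration step is not shown in this diff), could look like this:

from rhodecode.model.meta import Session
from rhodecode.model.settings import SettingsModel

def drop_leftover_hgsubversion_entry():
    # Look up the now-unused ui row and delete it if it is still present.
    model = SettingsModel()
    entry = model.get_ui_by_section_and_key('extensions', 'hgsubversion')
    if entry:
        model.delete_ui(entry.ui_id)
        Session().commit()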
@@ -1,384 +1,375 b''
1 1 ## snippet for displaying vcs settings
2 2 ## usage:
3 3 ## <%namespace name="vcss" file="/base/vcssettings.mako"/>
4 4 ## ${vcss.vcs_settings_fields()}
5 5
6 6 <%def name="vcs_settings_fields(suffix='', svn_branch_patterns=None, svn_tag_patterns=None, repo_type=None, display_globals=False, allow_repo_location_change=False, **kwargs)">
7 7 % if display_globals:
8 8 <div class="panel panel-default">
9 9 <div class="panel-heading" id="general">
10 10 <h3 class="panel-title">${_('General')}<a class="permalink" href="#general"> ¶</a></h3>
11 11 </div>
12 12 <div class="panel-body">
13 13 <div class="field">
14 14 <div class="checkbox">
15 15 ${h.checkbox('web_push_ssl' + suffix, 'True')}
16 16 <label for="web_push_ssl${suffix}">${_('Require SSL for vcs operations')}</label>
17 17 </div>
18 18 <div class="label">
19 19 <span class="help-block">${_('Activate to set RhodeCode to require SSL for pushing or pulling. If the SSL certificate is missing, it will return an HTTP Error 406: Not Acceptable.')}</span>
20 20 </div>
21 21 </div>
22 22 </div>
23 23 </div>
24 24 % endif
25 25
26 26 % if display_globals:
27 27 <div class="panel panel-default">
28 28 <div class="panel-heading" id="vcs-storage-options">
29 29 <h3 class="panel-title">${_('Main Storage Location')}<a class="permalink" href="#vcs-storage-options"> ¶</a></h3>
30 30 </div>
31 31 <div class="panel-body">
32 32 <div class="field">
33 33 <div class="inputx locked_input">
34 34 %if allow_repo_location_change:
35 35 ${h.text('paths_root_path',size=59,readonly="readonly", class_="disabled")}
36 36 <span id="path_unlock" class="tooltip"
37 37 title="${h.tooltip(_('Click to unlock. You must restart RhodeCode in order to make this setting take effect.'))}">
38 38 <div class="btn btn-default lock_input_button"><i id="path_unlock_icon" class="icon-lock"></i></div>
39 39 </span>
40 40 %else:
41 41 ${_('Repository location change is disabled. You can enable it by changing the `allow_repo_location_change` setting in the .ini file.')}
42 42 ## form still requires this but we cannot internally change it anyway
43 43 ${h.hidden('paths_root_path',size=30,readonly="readonly", class_="disabled")}
44 44 %endif
45 45 </div>
46 46 </div>
47 47 <div class="label">
48 48 <span class="help-block">${_('Filesystem location where repositories should be stored. After changing this value, a restart and a rescan of the repository folder are required.')}</span>
49 49 </div>
50 50 </div>
51 51 </div>
52 52 % endif
53 53
54 54 % if display_globals or repo_type in ['git', 'hg']:
55 55 <div class="panel panel-default">
56 56 <div class="panel-heading" id="vcs-hooks-options">
57 57 <h3 class="panel-title">${_('Internal Hooks')}<a class="permalink" href="#vcs-hooks-options"> ¶</a></h3>
58 58 </div>
59 59 <div class="panel-body">
60 60 <div class="field">
61 61 <div class="checkbox">
62 62 ${h.checkbox('hooks_changegroup_repo_size' + suffix, 'True', **kwargs)}
63 63 <label for="hooks_changegroup_repo_size${suffix}">${_('Show repository size after push')}</label>
64 64 </div>
65 65
66 66 <div class="label">
67 67 <span class="help-block">${_('Trigger a hook that calculates repository size after each push.')}</span>
68 68 </div>
69 69 <div class="checkbox">
70 70 ${h.checkbox('hooks_changegroup_push_logger' + suffix, 'True', **kwargs)}
71 71 <label for="hooks_changegroup_push_logger${suffix}">${_('Execute pre/post push hooks')}</label>
72 72 </div>
73 73 <div class="label">
74 74 <span class="help-block">${_('Execute built-in pre/post push hooks. This also executes rcextensions hooks.')}</span>
75 75 </div>
76 76 <div class="checkbox">
77 77 ${h.checkbox('hooks_outgoing_pull_logger' + suffix, 'True', **kwargs)}
78 78 <label for="hooks_outgoing_pull_logger${suffix}">${_('Execute pre/post pull hooks')}</label>
79 79 </div>
80 80 <div class="label">
81 81 <span class="help-block">${_('Execute built-in pre/post pull hooks. This also executes rcextensions hooks.')}</span>
82 82 </div>
83 83 </div>
84 84 </div>
85 85 </div>
86 86 % endif
87 87
88 88 % if display_globals or repo_type in ['hg']:
89 89 <div class="panel panel-default">
90 90 <div class="panel-heading" id="vcs-hg-options">
91 91 <h3 class="panel-title">${_('Mercurial Settings')}<a class="permalink" href="#vcs-hg-options"> ¶</a></h3>
92 92 </div>
93 93 <div class="panel-body">
94 94 <div class="checkbox">
95 95 ${h.checkbox('extensions_largefiles' + suffix, 'True', **kwargs)}
96 96 <label for="extensions_largefiles${suffix}">${_('Enable largefiles extension')}</label>
97 97 </div>
98 98 <div class="label">
99 99 % if display_globals:
100 100 <span class="help-block">${_('Enable Largefiles extensions for all repositories.')}</span>
101 101 % else:
102 102 <span class="help-block">${_('Enable Largefiles extensions for this repository.')}</span>
103 103 % endif
104 104 </div>
105 105
106 106 % if display_globals:
107 107 <div class="field">
108 108 <div class="input">
109 109 ${h.text('largefiles_usercache' + suffix, size=59)}
110 110 </div>
111 111 </div>
112 112 <div class="label">
113 113 <span class="help-block">${_('Filesystem location where Mercurial largefile objects should be stored.')}</span>
114 114 </div>
115 115 % endif
116 116
117 117 <div class="checkbox">
118 118 ${h.checkbox('phases_publish' + suffix, 'True', **kwargs)}
119 119 <label for="phases_publish${suffix}">${_('Set repositories as publishing') if display_globals else _('Set repository as publishing')}</label>
120 120 </div>
121 121 <div class="label">
122 122 <span class="help-block">${_('When this is enabled all commits in the repository are seen as public commits by clients.')}</span>
123 123 </div>
124 % if display_globals:
125 <div class="checkbox">
126 ${h.checkbox('extensions_hgsubversion' + suffix,'True')}
127 <label for="extensions_hgsubversion${suffix}">${_('Enable hgsubversion extension')}</label>
128 </div>
129 <div class="label">
130 <span class="help-block">${_('Requires hgsubversion library to be installed. Allows cloning remote SVN repositories and migrates them to Mercurial type.')}</span>
131 </div>
132 % endif
133 124
134 125 <div class="checkbox">
135 126 ${h.checkbox('extensions_evolve' + suffix, 'True', **kwargs)}
136 127 <label for="extensions_evolve${suffix}">${_('Enable Evolve and Topic extension')}</label>
137 128 </div>
138 129 <div class="label">
139 130 % if display_globals:
140 131 <span class="help-block">${_('Enable Evolve and Topic extensions for all repositories.')}</span>
141 132 % else:
142 133 <span class="help-block">${_('Enable Evolve and Topic extensions for this repository.')}</span>
143 134 % endif
144 135 </div>
145 136
146 137 </div>
147 138 </div>
148 139 % endif
149 140
150 141 % if display_globals or repo_type in ['git']:
151 142 <div class="panel panel-default">
152 143 <div class="panel-heading" id="vcs-git-options">
153 144 <h3 class="panel-title">${_('Git Settings')}<a class="permalink" href="#vcs-git-options"> ¶</a></h3>
154 145 </div>
155 146 <div class="panel-body">
156 147 <div class="checkbox">
157 148 ${h.checkbox('vcs_git_lfs_enabled' + suffix, 'True', **kwargs)}
158 149 <label for="vcs_git_lfs_enabled${suffix}">${_('Enable lfs extension')}</label>
159 150 </div>
160 151 <div class="label">
161 152 % if display_globals:
162 153 <span class="help-block">${_('Enable lfs extensions for all repositories.')}</span>
163 154 % else:
164 155 <span class="help-block">${_('Enable lfs extensions for this repository.')}</span>
165 156 % endif
166 157 </div>
167 158
168 159 % if display_globals:
169 160 <div class="field">
170 161 <div class="input">
171 162 ${h.text('vcs_git_lfs_store_location' + suffix, size=59)}
172 163 </div>
173 164 </div>
174 165 <div class="label">
175 166 <span class="help-block">${_('Filesystem location where Git lfs objects should be stored.')}</span>
176 167 </div>
177 168 % endif
178 169 </div>
179 170 </div>
180 171 % endif
181 172
182 173
183 174 % if display_globals:
184 175 <div class="panel panel-default">
185 176 <div class="panel-heading" id="vcs-global-svn-options">
186 177 <h3 class="panel-title">${_('Global Subversion Settings')}<a class="permalink" href="#vcs-global-svn-options"> ¶</a></h3>
187 178 </div>
188 179 <div class="panel-body">
189 180 <div class="field">
190 181 <div class="checkbox">
191 182 ${h.checkbox('vcs_svn_proxy_http_requests_enabled' + suffix, 'True', **kwargs)}
192 183 <label for="vcs_svn_proxy_http_requests_enabled${suffix}">${_('Proxy subversion HTTP requests')}</label>
193 184 </div>
194 185 <div class="label">
195 186 <span class="help-block">
196 187 ${_('Subversion HTTP Support. Enables communication with SVN over the HTTP protocol.')}
197 188 <a href="${h.route_url('enterprise_svn_setup')}" target="_blank">${_('SVN Protocol setup Documentation')}</a>.
198 189 </span>
199 190 </div>
200 191 </div>
201 192 <div class="field">
202 193 <div class="label">
203 194 <label for="vcs_svn_proxy_http_server_url">${_('Subversion HTTP Server URL')}</label><br/>
204 195 </div>
205 196 <div class="input">
206 197 ${h.text('vcs_svn_proxy_http_server_url',size=59)}
207 198 % if c.svn_proxy_generate_config:
208 199 <span class="buttons">
209 200 <button class="btn btn-primary" id="vcs_svn_generate_cfg">${_('Generate Apache Config')}</button>
210 201 </span>
211 202 % endif
212 203 </div>
213 204 </div>
214 205 </div>
215 206 </div>
216 207 % endif
217 208
218 209 % if display_globals or repo_type in ['svn']:
219 210 <div class="panel panel-default">
220 211 <div class="panel-heading" id="vcs-svn-options">
221 212                 <h3 class="panel-title">${_('Subversion Settings')}<a class="permalink" href="#vcs-svn-options"> ¶</a></h3>
222 213 </div>
223 214 <div class="panel-body">
224 215 <div class="field">
225 216 <div class="content" >
226 217 <label>${_('Repository patterns')}</label><br/>
227 218 </div>
228 219 </div>
229 220 <div class="label">
230 221                 <span class="help-block">${_('Patterns for identifying SVN branches and tags. For recursive search, use "*". E.g.: "/branches/*"')}</span>
231 222 </div>
232 223
233 224 <div class="field branch_patterns">
234 225 <div class="input" >
235 226 <label>${_('Branches')}:</label><br/>
236 227 </div>
237 228 % if svn_branch_patterns:
238 229 % for branch in svn_branch_patterns:
239 230 <div class="input adjacent" id="${'id%s' % branch.ui_id}">
240 231 ${h.hidden('branch_ui_key' + suffix, branch.ui_key)}
241 232 ${h.text('branch_value_%d' % branch.ui_id + suffix, branch.ui_value, size=59, readonly="readonly", class_='disabled')}
242 233 % if kwargs.get('disabled') != 'disabled':
243 234 <span class="btn btn-x" onclick="ajaxDeletePattern(${branch.ui_id},'${'id%s' % branch.ui_id}')">
244 235 ${_('Delete')}
245 236 </span>
246 237 % endif
247 238 </div>
248 239 % endfor
249 240 %endif
250 241 </div>
251 242 % if kwargs.get('disabled') != 'disabled':
252 243 <div class="field branch_patterns">
253 244 <div class="input" >
254 245 ${h.text('new_svn_branch',size=59,placeholder='New branch pattern')}
255 246 </div>
256 247 </div>
257 248 % endif
258 249 <div class="field tag_patterns">
259 250 <div class="input" >
260 251 <label>${_('Tags')}:</label><br/>
261 252 </div>
262 253 % if svn_tag_patterns:
263 254 % for tag in svn_tag_patterns:
264 255 <div class="input" id="${'id%s' % tag.ui_id + suffix}">
265 256 ${h.hidden('tag_ui_key' + suffix, tag.ui_key)}
266 257 ${h.text('tag_ui_value_new_%d' % tag.ui_id + suffix, tag.ui_value, size=59, readonly="readonly", class_='disabled tag_input')}
267 258 % if kwargs.get('disabled') != 'disabled':
268 259 <span class="btn btn-x" onclick="ajaxDeletePattern(${tag.ui_id},'${'id%s' % tag.ui_id}')">
269 260 ${_('Delete')}
270 261 </span>
271 262 %endif
272 263 </div>
273 264 % endfor
274 265 % endif
275 266 </div>
276 267 % if kwargs.get('disabled') != 'disabled':
277 268 <div class="field tag_patterns">
278 269 <div class="input" >
279 270 ${h.text('new_svn_tag' + suffix, size=59, placeholder='New tag pattern')}
280 271 </div>
281 272 </div>
282 273 %endif
283 274 </div>
284 275 </div>
285 276 % else:
286 277 ${h.hidden('new_svn_branch' + suffix, '')}
287 278 ${h.hidden('new_svn_tag' + suffix, '')}
288 279 % endif
289 280
290 281
291 282 % if display_globals or repo_type in ['hg', 'git']:
292 283 <div class="panel panel-default">
293 284 <div class="panel-heading" id="vcs-pull-requests-options">
294 285                 <h3 class="panel-title">${_('Pull Request Settings')}<a class="permalink" href="#vcs-pull-requests-options"> ¶</a></h3>
295 286 </div>
296 287 <div class="panel-body">
297 288 <div class="checkbox">
298 289 ${h.checkbox('rhodecode_pr_merge_enabled' + suffix, 'True', **kwargs)}
299 290 <label for="rhodecode_pr_merge_enabled${suffix}">${_('Enable server-side merge for pull requests')}</label>
300 291 </div>
301 292 <div class="label">
302 293 <span class="help-block">${_('Note: when this feature is enabled, it only runs hooks defined in the rcextension package. Custom hooks added on the Admin -> Settings -> Hooks page will not be run when pull requests are automatically merged from the web interface.')}</span>
303 294 </div>
304 295 <div class="checkbox">
305 296 ${h.checkbox('rhodecode_use_outdated_comments' + suffix, 'True', **kwargs)}
306 297 <label for="rhodecode_use_outdated_comments${suffix}">${_('Invalidate and relocate inline comments during update')}</label>
307 298 </div>
308 299 <div class="label">
309 300 <span class="help-block">${_('During the update of a pull request, the position of inline comments will be updated and outdated inline comments will be hidden.')}</span>
310 301 </div>
311 302 </div>
312 303 </div>
313 304 % endif
314 305
315 306 % if display_globals or repo_type in ['hg', 'git', 'svn']:
316 307 <div class="panel panel-default">
317 308 <div class="panel-heading" id="vcs-pull-requests-options">
318 309 <h3 class="panel-title">${_('Diff cache')}<a class="permalink" href="#vcs-pull-requests-options"> ΒΆ</a></h3>
319 310                     <h3 class="panel-title">${_('Diff cache')}<a class="permalink" href="#vcs-pull-requests-options"> ¶</a></h3>
320 311 <div class="panel-body">
321 312 <div class="checkbox">
322 313 ${h.checkbox('rhodecode_diff_cache' + suffix, 'True', **kwargs)}
323 314                         <label for="rhodecode_diff_cache${suffix}">${_('Enable caching of diffs for pull requests and commits')}</label>
324 315 </div>
325 316 </div>
326 317 </div>
327 318 % endif
328 319
329 320 % if display_globals or repo_type in ['hg',]:
330 321 <div class="panel panel-default">
331 322                 <div class="panel-heading" id="vcs-hg-pull-requests-options">
332 323                     <h3 class="panel-title">${_('Mercurial Pull Request Settings')}<a class="permalink" href="#vcs-hg-pull-requests-options"> ¶</a></h3>
333 324 </div>
334 325 <div class="panel-body">
335 326 ## Specific HG settings
336 327 <div class="checkbox">
337 328 ${h.checkbox('rhodecode_hg_use_rebase_for_merging' + suffix, 'True', **kwargs)}
338 329 <label for="rhodecode_hg_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
339 330 </div>
340 331 <div class="label">
341 332 <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
342 333 </div>
343 334
344 335 <div class="checkbox">
345 336 ${h.checkbox('rhodecode_hg_close_branch_before_merging' + suffix, 'True', **kwargs)}
346 337                         <label for="rhodecode_hg_close_branch_before_merging${suffix}">${_('Close branch before merging it')}</label>
347 338 </div>
348 339 <div class="label">
349 340                     <span class="help-block">${_('Close branch before merging it into the destination branch. No effect when rebase strategy is used.')}</span>
350 341 </div>
351 342
352 343
353 344 </div>
354 345 </div>
355 346 % endif
356 347
357 348 ## DISABLED FOR GIT FOR NOW as the rebase/close is not supported yet
358 349 ## % if display_globals or repo_type in ['git']:
359 350 ## <div class="panel panel-default">
360 351 ## <div class="panel-heading" id="vcs-pull-requests-options">
361 352 ##         <h3 class="panel-title">${_('Git Pull Request Settings')}<a class="permalink" href="#vcs-git-pull-requests-options"> ¶</a></h3>
362 353 ## </div>
363 354 ## <div class="panel-body">
364 355 ## <div class="checkbox">
365 356 ## ${h.checkbox('rhodecode_git_use_rebase_for_merging' + suffix, 'True', **kwargs)}
366 357 ## <label for="rhodecode_git_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
367 358 ## </div>
368 359 ## <div class="label">
369 360 ## <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
370 361 ## </div>
371 362 ##
372 363 ## <div class="checkbox">
373 364 ## ${h.checkbox('rhodecode_git_close_branch_before_merging' + suffix, 'True', **kwargs)}
374 365 ##             <label for="rhodecode_git_close_branch_before_merging${suffix}">${_('Delete branch after merging it')}</label>
375 366 ## </div>
376 367 ## <div class="label">
377 368 ##             <span class="help-block">${_('Delete branch after merging it into the destination branch. No effect when rebase strategy is used.')}</span>
378 369 ## </div>
379 370 ## </div>
380 371 ## </div>
381 372 ## % endif
382 373
383 374
384 375 </%def>
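
The SVN pattern inputs rendered above ('new_svn_branch' and 'new_svn_tag') correspond one-to-one to the form keys exercised by the VcsSettingsModel tests in the next file of this changeset. A minimal sketch of that round trip, assuming a repository named 'some-repo' (the repository name and the pattern values are illustrative; the method, key names, and Session handling are taken from the tests below):

    from rhodecode.model.meta import Session
    from rhodecode.model.settings import VcsSettingsModel

    # Keys match the h.text('new_svn_branch', ...) / h.text('new_svn_tag' + suffix, ...)
    # inputs in the template above and SVN_FORM_DATA in the tests below.
    svn_form_data = {
        'new_svn_branch': '/branches/*',  # example pattern, as suggested by the help text
        'new_svn_tag': '/tags/*',         # assumed tag pattern for illustration
    }

    model = VcsSettingsModel(repo='some-repo')  # repository name is assumed
    model.create_repo_svn_settings(svn_form_data)
    Session().commit()
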
@@ -1,1142 +1,1140 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import mock
21 21 import pytest
22 22
23 23 from rhodecode.lib.utils2 import str2bool
24 24 from rhodecode.model.meta import Session
25 25 from rhodecode.model.settings import VcsSettingsModel, UiSetting
26 26
27 27
28 28 HOOKS_FORM_DATA = {
29 29 'hooks_changegroup_repo_size': True,
30 30 'hooks_changegroup_push_logger': True,
31 31 'hooks_outgoing_pull_logger': True
32 32 }
33 33
34 34 SVN_FORM_DATA = {
35 35 'new_svn_branch': 'test-branch',
36 36 'new_svn_tag': 'test-tag'
37 37 }
38 38
39 39 GENERAL_FORM_DATA = {
40 40 'rhodecode_pr_merge_enabled': True,
41 41 'rhodecode_use_outdated_comments': True,
42 42 'rhodecode_hg_use_rebase_for_merging': True,
43 43 'rhodecode_hg_close_branch_before_merging': True,
44 44 'rhodecode_git_use_rebase_for_merging': True,
45 45 'rhodecode_git_close_branch_before_merging': True,
46 46 'rhodecode_diff_cache': True,
47 47 }
48 48
49 49
50 50 class TestInheritGlobalSettingsProperty(object):
51 51 def test_get_raises_exception_when_repository_not_specified(self):
52 52 model = VcsSettingsModel()
53 53 with pytest.raises(Exception) as exc_info:
54 54 model.inherit_global_settings
55 55 assert str(exc_info.value) == 'Repository is not specified'
56 56
57 57 def test_true_is_returned_when_value_is_not_found(self, repo_stub):
58 58 model = VcsSettingsModel(repo=repo_stub.repo_name)
59 59 assert model.inherit_global_settings is True
60 60
61 61 def test_value_is_returned(self, repo_stub, settings_util):
62 62 model = VcsSettingsModel(repo=repo_stub.repo_name)
63 63 settings_util.create_repo_rhodecode_setting(
64 64 repo_stub, VcsSettingsModel.INHERIT_SETTINGS, False, 'bool')
65 65 assert model.inherit_global_settings is False
66 66
67 67 def test_value_is_set(self, repo_stub):
68 68 model = VcsSettingsModel(repo=repo_stub.repo_name)
69 69 model.inherit_global_settings = False
70 70 setting = model.repo_settings.get_setting_by_name(
71 71 VcsSettingsModel.INHERIT_SETTINGS)
72 72 try:
73 73 assert setting.app_settings_type == 'bool'
74 74 assert setting.app_settings_value is False
75 75 finally:
76 76 Session().delete(setting)
77 77 Session().commit()
78 78
79 79 def test_set_raises_exception_when_repository_not_specified(self):
80 80 model = VcsSettingsModel()
81 81 with pytest.raises(Exception) as exc_info:
82 82 model.inherit_global_settings = False
83 83 assert str(exc_info.value) == 'Repository is not specified'
84 84
85 85
86 86 class TestVcsSettingsModel(object):
87 87 def test_global_svn_branch_patterns(self):
88 88 model = VcsSettingsModel()
89 89 expected_result = {'test': 'test'}
90 90 with mock.patch.object(model, 'global_settings') as settings_mock:
91 91 get_settings = settings_mock.get_ui_by_section
92 92 get_settings.return_value = expected_result
93 93 settings_mock.return_value = expected_result
94 94 result = model.get_global_svn_branch_patterns()
95 95
96 96 get_settings.assert_called_once_with(model.SVN_BRANCH_SECTION)
97 97 assert expected_result == result
98 98
99 99 def test_repo_svn_branch_patterns(self):
100 100 model = VcsSettingsModel()
101 101 expected_result = {'test': 'test'}
102 102 with mock.patch.object(model, 'repo_settings') as settings_mock:
103 103 get_settings = settings_mock.get_ui_by_section
104 104 get_settings.return_value = expected_result
105 105 settings_mock.return_value = expected_result
106 106 result = model.get_repo_svn_branch_patterns()
107 107
108 108 get_settings.assert_called_once_with(model.SVN_BRANCH_SECTION)
109 109 assert expected_result == result
110 110
111 111 def test_repo_svn_branch_patterns_raises_exception_when_repo_is_not_set(
112 112 self):
113 113 model = VcsSettingsModel()
114 114 with pytest.raises(Exception) as exc_info:
115 115 model.get_repo_svn_branch_patterns()
116 116 assert str(exc_info.value) == 'Repository is not specified'
117 117
118 118 def test_global_svn_tag_patterns(self):
119 119 model = VcsSettingsModel()
120 120 expected_result = {'test': 'test'}
121 121 with mock.patch.object(model, 'global_settings') as settings_mock:
122 122 get_settings = settings_mock.get_ui_by_section
123 123 get_settings.return_value = expected_result
124 124 settings_mock.return_value = expected_result
125 125 result = model.get_global_svn_tag_patterns()
126 126
127 127 get_settings.assert_called_once_with(model.SVN_TAG_SECTION)
128 128 assert expected_result == result
129 129
130 130 def test_repo_svn_tag_patterns(self):
131 131 model = VcsSettingsModel()
132 132 expected_result = {'test': 'test'}
133 133 with mock.patch.object(model, 'repo_settings') as settings_mock:
134 134 get_settings = settings_mock.get_ui_by_section
135 135 get_settings.return_value = expected_result
136 136 settings_mock.return_value = expected_result
137 137 result = model.get_repo_svn_tag_patterns()
138 138
139 139 get_settings.assert_called_once_with(model.SVN_TAG_SECTION)
140 140 assert expected_result == result
141 141
142 142 def test_repo_svn_tag_patterns_raises_exception_when_repo_is_not_set(self):
143 143 model = VcsSettingsModel()
144 144 with pytest.raises(Exception) as exc_info:
145 145 model.get_repo_svn_tag_patterns()
146 146 assert str(exc_info.value) == 'Repository is not specified'
147 147
148 148 def test_get_global_settings(self):
149 149 expected_result = {'test': 'test'}
150 150 model = VcsSettingsModel()
151 151 with mock.patch.object(model, '_collect_all_settings') as collect_mock:
152 152 collect_mock.return_value = expected_result
153 153 result = model.get_global_settings()
154 154
155 155 collect_mock.assert_called_once_with(global_=True)
156 156 assert result == expected_result
157 157
158 158 def test_get_repo_settings(self, repo_stub):
159 159 model = VcsSettingsModel(repo=repo_stub.repo_name)
160 160 expected_result = {'test': 'test'}
161 161 with mock.patch.object(model, '_collect_all_settings') as collect_mock:
162 162 collect_mock.return_value = expected_result
163 163 result = model.get_repo_settings()
164 164
165 165 collect_mock.assert_called_once_with(global_=False)
166 166 assert result == expected_result
167 167
168 168 @pytest.mark.parametrize('settings, global_', [
169 169 ('global_settings', True),
170 170 ('repo_settings', False)
171 171 ])
172 172 def test_collect_all_settings(self, settings, global_):
173 173 model = VcsSettingsModel()
174 174 result_mock = self._mock_result()
175 175
176 176 settings_patch = mock.patch.object(model, settings)
177 177 with settings_patch as settings_mock:
178 178 settings_mock.get_ui_by_section_and_key.return_value = result_mock
179 179 settings_mock.get_setting_by_name.return_value = result_mock
180 180 result = model._collect_all_settings(global_=global_)
181 181
182 182 ui_settings = model.HG_SETTINGS + model.GIT_SETTINGS + model.HOOKS_SETTINGS
183 183 self._assert_get_settings_calls(
184 184 settings_mock, ui_settings, model.GENERAL_SETTINGS)
185 185 self._assert_collect_all_settings_result(
186 186 ui_settings, model.GENERAL_SETTINGS, result)
187 187
188 188 @pytest.mark.parametrize('settings, global_', [
189 189 ('global_settings', True),
190 190 ('repo_settings', False)
191 191 ])
192 192 def test_collect_all_settings_without_empty_value(self, settings, global_):
193 193 model = VcsSettingsModel()
194 194
195 195 settings_patch = mock.patch.object(model, settings)
196 196 with settings_patch as settings_mock:
197 197 settings_mock.get_ui_by_section_and_key.return_value = None
198 198 settings_mock.get_setting_by_name.return_value = None
199 199 result = model._collect_all_settings(global_=global_)
200 200
201 201 assert result == {}
202 202
203 203 def _mock_result(self):
204 204 result_mock = mock.Mock()
205 205 result_mock.ui_value = 'ui_value'
206 206 result_mock.ui_active = True
207 207 result_mock.app_settings_value = 'setting_value'
208 208 return result_mock
209 209
210 210 def _assert_get_settings_calls(
211 211 self, settings_mock, ui_settings, general_settings):
212 212 assert (
213 213 settings_mock.get_ui_by_section_and_key.call_count ==
214 214 len(ui_settings))
215 215 assert (
216 216 settings_mock.get_setting_by_name.call_count ==
217 217 len(general_settings))
218 218
219 219 for section, key in ui_settings:
220 220 expected_call = mock.call(section, key)
221 221 assert (
222 222 expected_call in
223 223 settings_mock.get_ui_by_section_and_key.call_args_list)
224 224
225 225 for name in general_settings:
226 226 expected_call = mock.call(name)
227 227 assert (
228 228 expected_call in
229 229 settings_mock.get_setting_by_name.call_args_list)
230 230
231 231 def _assert_collect_all_settings_result(
232 232 self, ui_settings, general_settings, result):
233 233 expected_result = {}
234 234 for section, key in ui_settings:
235 235 key = '{}_{}'.format(section, key.replace('.', '_'))
236 236
237 237 if section in ('extensions', 'hooks'):
238 238 value = True
239 239 elif key in ['vcs_git_lfs_enabled']:
240 240 value = True
241 241 else:
242 242 value = 'ui_value'
243 243 expected_result[key] = value
244 244
245 245 for name in general_settings:
246 246 key = 'rhodecode_' + name
247 247 expected_result[key] = 'setting_value'
248 248
249 249 assert expected_result == result
250 250
251 251
252 252 class TestCreateOrUpdateRepoHookSettings(object):
253 253 def test_create_when_no_repo_object_found(self, repo_stub):
254 254 model = VcsSettingsModel(repo=repo_stub.repo_name)
255 255
256 256 self._create_settings(model, HOOKS_FORM_DATA)
257 257
258 258 cleanup = []
259 259 try:
260 260 for section, key in model.HOOKS_SETTINGS:
261 261 ui = model.repo_settings.get_ui_by_section_and_key(
262 262 section, key)
263 263 assert ui.ui_active is True
264 264 cleanup.append(ui)
265 265 finally:
266 266 for ui in cleanup:
267 267 Session().delete(ui)
268 268 Session().commit()
269 269
270 270 def test_create_raises_exception_when_data_incomplete(self, repo_stub):
271 271 model = VcsSettingsModel(repo=repo_stub.repo_name)
272 272
273 273 deleted_key = 'hooks_changegroup_repo_size'
274 274 data = HOOKS_FORM_DATA.copy()
275 275 data.pop(deleted_key)
276 276
277 277 with pytest.raises(ValueError) as exc_info:
278 278 model.create_or_update_repo_hook_settings(data)
279 279 Session().commit()
280 280
281 281 msg = 'The given data does not contain {} key'.format(deleted_key)
282 282 assert str(exc_info.value) == msg
283 283
284 284 def test_update_when_repo_object_found(self, repo_stub, settings_util):
285 285 model = VcsSettingsModel(repo=repo_stub.repo_name)
286 286 for section, key in model.HOOKS_SETTINGS:
287 287 settings_util.create_repo_rhodecode_ui(
288 288 repo_stub, section, None, key=key, active=False)
289 289 model.create_or_update_repo_hook_settings(HOOKS_FORM_DATA)
290 290 Session().commit()
291 291
292 292 for section, key in model.HOOKS_SETTINGS:
293 293 ui = model.repo_settings.get_ui_by_section_and_key(section, key)
294 294 assert ui.ui_active is True
295 295
296 296 def _create_settings(self, model, data):
297 297 global_patch = mock.patch.object(model, 'global_settings')
298 298 global_setting = mock.Mock()
299 299 global_setting.ui_value = 'Test value'
300 300 with global_patch as global_mock:
301 301 global_mock.get_ui_by_section_and_key.return_value = global_setting
302 302 model.create_or_update_repo_hook_settings(HOOKS_FORM_DATA)
303 303 Session().commit()
304 304
305 305
306 306 class TestUpdateGlobalHookSettings(object):
307 307 def test_update_raises_exception_when_data_incomplete(self):
308 308 model = VcsSettingsModel()
309 309
310 310 deleted_key = 'hooks_changegroup_repo_size'
311 311 data = HOOKS_FORM_DATA.copy()
312 312 data.pop(deleted_key)
313 313
314 314 with pytest.raises(ValueError) as exc_info:
315 315 model.update_global_hook_settings(data)
316 316 Session().commit()
317 317
318 318 msg = 'The given data does not contain {} key'.format(deleted_key)
319 319 assert str(exc_info.value) == msg
320 320
321 321 def test_update_global_hook_settings(self, settings_util):
322 322 model = VcsSettingsModel()
323 323 setting_mock = mock.MagicMock()
324 324 setting_mock.ui_active = False
325 325 get_settings_patcher = mock.patch.object(
326 326 model.global_settings, 'get_ui_by_section_and_key',
327 327 return_value=setting_mock)
328 328 session_patcher = mock.patch('rhodecode.model.settings.Session')
329 329 with get_settings_patcher as get_settings_mock, session_patcher:
330 330 model.update_global_hook_settings(HOOKS_FORM_DATA)
331 331 Session().commit()
332 332
333 333 assert setting_mock.ui_active is True
334 334 assert get_settings_mock.call_count == 3
335 335
336 336
337 337 class TestCreateOrUpdateRepoGeneralSettings(object):
338 338 def test_calls_create_or_update_general_settings(self, repo_stub):
339 339 model = VcsSettingsModel(repo=repo_stub.repo_name)
340 340 create_patch = mock.patch.object(
341 341 model, '_create_or_update_general_settings')
342 342 with create_patch as create_mock:
343 343 model.create_or_update_repo_pr_settings(GENERAL_FORM_DATA)
344 344 Session().commit()
345 345
346 346 create_mock.assert_called_once_with(
347 347 model.repo_settings, GENERAL_FORM_DATA)
348 348
349 349 def test_raises_exception_when_repository_is_not_specified(self):
350 350 model = VcsSettingsModel()
351 351 with pytest.raises(Exception) as exc_info:
352 352 model.create_or_update_repo_pr_settings(GENERAL_FORM_DATA)
353 353 assert str(exc_info.value) == 'Repository is not specified'
354 354
355 355
356 356 class TestCreateOrUpdateGlobalGeneralSettings(object):
357 357 def test_calls_create_or_update_general_settings(self):
358 358 model = VcsSettingsModel()
359 359 create_patch = mock.patch.object(
360 360 model, '_create_or_update_general_settings')
361 361 with create_patch as create_mock:
362 362 model.create_or_update_global_pr_settings(GENERAL_FORM_DATA)
363 363 create_mock.assert_called_once_with(
364 364 model.global_settings, GENERAL_FORM_DATA)
365 365
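The two test classes above cover the thin wrappers create_or_update_repo_pr_settings and create_or_update_global_pr_settings, both of which delegate to _create_or_update_general_settings; as the next class shows, that helper expects every GENERAL_SETTINGS key to be present and raises ValueError otherwise. A minimal usage sketch reusing the GENERAL_FORM_DATA keys from the top of this file (the repository name is assumed):

    from rhodecode.model.meta import Session
    from rhodecode.model.settings import VcsSettingsModel

    pr_form_data = {
        'rhodecode_pr_merge_enabled': True,
        'rhodecode_use_outdated_comments': True,
        'rhodecode_hg_use_rebase_for_merging': False,
        'rhodecode_hg_close_branch_before_merging': False,
        'rhodecode_git_use_rebase_for_merging': False,
        'rhodecode_git_close_branch_before_merging': False,
        'rhodecode_diff_cache': True,
    }

    model = VcsSettingsModel(repo='some-repo')             # repository name is assumed
    model.create_or_update_repo_pr_settings(pr_form_data)  # a missing key raises ValueError
    Session().commit()
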
366 366
367 367 class TestCreateOrUpdateGeneralSettings(object):
368 368 def test_create_when_no_repo_settings_found(self, repo_stub):
369 369 model = VcsSettingsModel(repo=repo_stub.repo_name)
370 370 model._create_or_update_general_settings(
371 371 model.repo_settings, GENERAL_FORM_DATA)
372 372
373 373 cleanup = []
374 374 try:
375 375 for name in model.GENERAL_SETTINGS:
376 376 setting = model.repo_settings.get_setting_by_name(name)
377 377 assert setting.app_settings_value is True
378 378 cleanup.append(setting)
379 379 finally:
380 380 for setting in cleanup:
381 381 Session().delete(setting)
382 382 Session().commit()
383 383
384 384 def test_create_raises_exception_when_data_incomplete(self, repo_stub):
385 385 model = VcsSettingsModel(repo=repo_stub.repo_name)
386 386
387 387 deleted_key = 'rhodecode_pr_merge_enabled'
388 388 data = GENERAL_FORM_DATA.copy()
389 389 data.pop(deleted_key)
390 390
391 391 with pytest.raises(ValueError) as exc_info:
392 392 model._create_or_update_general_settings(model.repo_settings, data)
393 393 Session().commit()
394 394
395 395 msg = 'The given data does not contain {} key'.format(deleted_key)
396 396 assert str(exc_info.value) == msg
397 397
398 398 def test_update_when_repo_setting_found(self, repo_stub, settings_util):
399 399 model = VcsSettingsModel(repo=repo_stub.repo_name)
400 400 for name in model.GENERAL_SETTINGS:
401 401 settings_util.create_repo_rhodecode_setting(
402 402 repo_stub, name, False, 'bool')
403 403
404 404 model._create_or_update_general_settings(
405 405 model.repo_settings, GENERAL_FORM_DATA)
406 406 Session().commit()
407 407
408 408 for name in model.GENERAL_SETTINGS:
409 409 setting = model.repo_settings.get_setting_by_name(name)
410 410 assert setting.app_settings_value is True
411 411
412 412
413 413 class TestCreateRepoSvnSettings(object):
414 414 def test_calls_create_svn_settings(self, repo_stub):
415 415 model = VcsSettingsModel(repo=repo_stub.repo_name)
416 416 with mock.patch.object(model, '_create_svn_settings') as create_mock:
417 417 model.create_repo_svn_settings(SVN_FORM_DATA)
418 418 Session().commit()
419 419
420 420 create_mock.assert_called_once_with(model.repo_settings, SVN_FORM_DATA)
421 421
422 422 def test_raises_exception_when_repository_is_not_specified(self):
423 423 model = VcsSettingsModel()
424 424 with pytest.raises(Exception) as exc_info:
425 425 model.create_repo_svn_settings(SVN_FORM_DATA)
426 426 Session().commit()
427 427
428 428 assert str(exc_info.value) == 'Repository is not specified'
429 429
430 430
431 431 class TestCreateSvnSettings(object):
432 432 def test_create(self, repo_stub):
433 433 model = VcsSettingsModel(repo=repo_stub.repo_name)
434 434 model._create_svn_settings(model.repo_settings, SVN_FORM_DATA)
435 435 Session().commit()
436 436
437 437 branch_ui = model.repo_settings.get_ui_by_section(
438 438 model.SVN_BRANCH_SECTION)
439 439 tag_ui = model.repo_settings.get_ui_by_section(
440 440 model.SVN_TAG_SECTION)
441 441
442 442 try:
443 443 assert len(branch_ui) == 1
444 444 assert len(tag_ui) == 1
445 445 finally:
446 446 Session().delete(branch_ui[0])
447 447 Session().delete(tag_ui[0])
448 448 Session().commit()
449 449
450 450 def test_create_tag(self, repo_stub):
451 451 model = VcsSettingsModel(repo=repo_stub.repo_name)
452 452 data = SVN_FORM_DATA.copy()
453 453 data.pop('new_svn_branch')
454 454 model._create_svn_settings(model.repo_settings, data)
455 455 Session().commit()
456 456
457 457 branch_ui = model.repo_settings.get_ui_by_section(
458 458 model.SVN_BRANCH_SECTION)
459 459 tag_ui = model.repo_settings.get_ui_by_section(
460 460 model.SVN_TAG_SECTION)
461 461
462 462 try:
463 463 assert len(branch_ui) == 0
464 464 assert len(tag_ui) == 1
465 465 finally:
466 466 Session().delete(tag_ui[0])
467 467 Session().commit()
468 468
469 469 def test_create_nothing_when_no_svn_settings_specified(self, repo_stub):
470 470 model = VcsSettingsModel(repo=repo_stub.repo_name)
471 471 model._create_svn_settings(model.repo_settings, {})
472 472 Session().commit()
473 473
474 474 branch_ui = model.repo_settings.get_ui_by_section(
475 475 model.SVN_BRANCH_SECTION)
476 476 tag_ui = model.repo_settings.get_ui_by_section(
477 477 model.SVN_TAG_SECTION)
478 478
479 479 assert len(branch_ui) == 0
480 480 assert len(tag_ui) == 0
481 481
482 482 def test_create_nothing_when_empty_settings_specified(self, repo_stub):
483 483 model = VcsSettingsModel(repo=repo_stub.repo_name)
484 484 data = {
485 485 'new_svn_branch': '',
486 486 'new_svn_tag': ''
487 487 }
488 488 model._create_svn_settings(model.repo_settings, data)
489 489 Session().commit()
490 490
491 491 branch_ui = model.repo_settings.get_ui_by_section(
492 492 model.SVN_BRANCH_SECTION)
493 493 tag_ui = model.repo_settings.get_ui_by_section(
494 494 model.SVN_TAG_SECTION)
495 495
496 496 assert len(branch_ui) == 0
497 497 assert len(tag_ui) == 0
498 498
499 499
500 500 class TestCreateOrUpdateUi(object):
501 501 def test_create(self, repo_stub):
502 502 model = VcsSettingsModel(repo=repo_stub.repo_name)
503 503 model._create_or_update_ui(
504 504 model.repo_settings, 'test-section', 'test-key', active=False,
505 505 value='False')
506 506 Session().commit()
507 507
508 508 created_ui = model.repo_settings.get_ui_by_section_and_key(
509 509 'test-section', 'test-key')
510 510
511 511 try:
512 512 assert created_ui.ui_active is False
513 513 assert str2bool(created_ui.ui_value) is False
514 514 finally:
515 515 Session().delete(created_ui)
516 516 Session().commit()
517 517
518 518 def test_update(self, repo_stub, settings_util):
519 519 model = VcsSettingsModel(repo=repo_stub.repo_name)
520 520         # we only care about the first 3 settings
521 521 largefiles, phases, evolve = model.HG_SETTINGS[:3]
522 522
523 523 section = 'test-section'
524 524 key = 'test-key'
525 525 settings_util.create_repo_rhodecode_ui(
526 526 repo_stub, section, 'True', key=key, active=True)
527 527
528 528 model._create_or_update_ui(
529 529 model.repo_settings, section, key, active=False, value='False')
530 530 Session().commit()
531 531
532 532 created_ui = model.repo_settings.get_ui_by_section_and_key(
533 533 section, key)
534 534 assert created_ui.ui_active is False
535 535 assert str2bool(created_ui.ui_value) is False
536 536
537 537
538 538 class TestCreateOrUpdateRepoHgSettings(object):
539 539 FORM_DATA = {
540 540 'extensions_largefiles': False,
541 541 'extensions_evolve': False,
542 542 'phases_publish': False
543 543 }
544 544
545 545 def test_creates_repo_hg_settings_when_data_is_correct(self, repo_stub):
546 546 model = VcsSettingsModel(repo=repo_stub.repo_name)
547 547 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
548 548 model.create_or_update_repo_hg_settings(self.FORM_DATA)
549 549 expected_calls = [
550 550 mock.call(model.repo_settings, 'extensions', 'largefiles', active=False, value=''),
551 551 mock.call(model.repo_settings, 'extensions', 'evolve', active=False, value=''),
552 552 mock.call(model.repo_settings, 'experimental', 'evolution', active=False, value=''),
553 553 mock.call(model.repo_settings, 'experimental', 'evolution.exchange', active=False, value='no'),
554 554 mock.call(model.repo_settings, 'extensions', 'topic', active=False, value=''),
555 555 mock.call(model.repo_settings, 'phases', 'publish', value='False'),
556 556 ]
557 557 assert expected_calls == create_mock.call_args_list
558 558
559 559 @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys())
560 560 def test_key_is_not_found(self, repo_stub, field_to_remove):
561 561 model = VcsSettingsModel(repo=repo_stub.repo_name)
562 562 data = self.FORM_DATA.copy()
563 563 data.pop(field_to_remove)
564 564 with pytest.raises(ValueError) as exc_info:
565 565 model.create_or_update_repo_hg_settings(data)
566 566 Session().commit()
567 567
568 568 expected_message = 'The given data does not contain {} key'.format(
569 569 field_to_remove)
570 570 assert str(exc_info.value) == expected_message
571 571
572 572 def test_create_raises_exception_when_repository_not_specified(self):
573 573 model = VcsSettingsModel()
574 574 with pytest.raises(Exception) as exc_info:
575 575 model.create_or_update_repo_hg_settings(self.FORM_DATA)
576 576 Session().commit()
577 577
578 578 assert str(exc_info.value) == 'Repository is not specified'
579 579
580 580
581 581 class TestUpdateGlobalSslSetting(object):
582 582 def test_updates_global_hg_settings(self):
583 583 model = VcsSettingsModel()
584 584 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
585 585 model.update_global_ssl_setting('False')
586 586 Session().commit()
587 587
588 588 create_mock.assert_called_once_with(
589 589 model.global_settings, 'web', 'push_ssl', value='False')
590 590
591 591
592 592 class TestUpdateGlobalPathSetting(object):
593 593 def test_updates_global_path_settings(self):
594 594 model = VcsSettingsModel()
595 595 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
596 596 model.update_global_path_setting('False')
597 597 Session().commit()
598 598
599 599 create_mock.assert_called_once_with(
600 600 model.global_settings, 'paths', '/', value='False')
601 601
602 602
603 603 class TestCreateOrUpdateGlobalHgSettings(object):
604 604 FORM_DATA = {
605 605 'extensions_largefiles': False,
606 606 'largefiles_usercache': '/example/largefiles-store',
607 607 'phases_publish': False,
608 'extensions_hgsubversion': False,
609 608 'extensions_evolve': False
610 609 }
611 610
612 611 def test_creates_repo_hg_settings_when_data_is_correct(self):
613 612 model = VcsSettingsModel()
614 613 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
615 614 model.create_or_update_global_hg_settings(self.FORM_DATA)
616 615 Session().commit()
617 616
618 617 expected_calls = [
619 618 mock.call(model.global_settings, 'extensions', 'largefiles', active=False, value=''),
620 619 mock.call(model.global_settings, 'largefiles', 'usercache', value='/example/largefiles-store'),
621 620 mock.call(model.global_settings, 'phases', 'publish', value='False'),
622 mock.call(model.global_settings, 'extensions', 'hgsubversion', active=False),
623 621 mock.call(model.global_settings, 'extensions', 'evolve', active=False, value=''),
624 622 mock.call(model.global_settings, 'experimental', 'evolution', active=False, value=''),
625 623 mock.call(model.global_settings, 'experimental', 'evolution.exchange', active=False, value='no'),
626 624 mock.call(model.global_settings, 'extensions', 'topic', active=False, value=''),
627 625 ]
628 626
629 627 assert expected_calls == create_mock.call_args_list
630 628
631 629 @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys())
632 630 def test_key_is_not_found(self, repo_stub, field_to_remove):
633 631 model = VcsSettingsModel(repo=repo_stub.repo_name)
634 632 data = self.FORM_DATA.copy()
635 633 data.pop(field_to_remove)
636 634 with pytest.raises(Exception) as exc_info:
637 635 model.create_or_update_global_hg_settings(data)
638 636 Session().commit()
639 637
640 638 expected_message = 'The given data does not contain {} key'.format(
641 639 field_to_remove)
642 640 assert str(exc_info.value) == expected_message
643 641
644 642
645 643 class TestCreateOrUpdateGlobalGitSettings(object):
646 644 FORM_DATA = {
647 645 'vcs_git_lfs_enabled': False,
648 646 'vcs_git_lfs_store_location': '/example/lfs-store',
649 647 }
650 648
651 649 def test_creates_repo_hg_settings_when_data_is_correct(self):
652 650 model = VcsSettingsModel()
653 651 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
654 652 model.create_or_update_global_git_settings(self.FORM_DATA)
655 653 Session().commit()
656 654
657 655 expected_calls = [
658 656 mock.call(model.global_settings, 'vcs_git_lfs', 'enabled', active=False, value=False),
659 657 mock.call(model.global_settings, 'vcs_git_lfs', 'store_location', value='/example/lfs-store'),
660 658 ]
661 659 assert expected_calls == create_mock.call_args_list
662 660
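The expected calls above show how the two Git form keys map onto the 'vcs_git_lfs' UI section. A minimal sketch of applying them through the model (the key names, store path, and method come from the test above; the Session handling mirrors the other tests):

    from rhodecode.model.meta import Session
    from rhodecode.model.settings import VcsSettingsModel

    git_form_data = {
        'vcs_git_lfs_enabled': True,                          # -> ('vcs_git_lfs', 'enabled')
        'vcs_git_lfs_store_location': '/example/lfs-store',   # -> ('vcs_git_lfs', 'store_location')
    }

    VcsSettingsModel().create_or_update_global_git_settings(git_form_data)
    Session().commit()
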
663 661
664 662 class TestDeleteRepoSvnPattern(object):
665 663 def test_success_when_repo_is_set(self, backend_svn, settings_util):
666 664 repo = backend_svn.create_repo()
667 665 repo_name = repo.repo_name
668 666
669 667 model = VcsSettingsModel(repo=repo_name)
670 668 entry = settings_util.create_repo_rhodecode_ui(
671 669 repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'svn-branch')
672 670 Session().commit()
673 671
674 672 model.delete_repo_svn_pattern(entry.ui_id)
675 673
676 674 def test_fail_when_delete_id_from_other_repo(self, backend_svn):
677 675 repo_name = backend_svn.repo_name
678 676 model = VcsSettingsModel(repo=repo_name)
679 677 delete_ui_patch = mock.patch.object(model.repo_settings, 'delete_ui')
680 678 with delete_ui_patch as delete_ui_mock:
681 679 model.delete_repo_svn_pattern(123)
682 680 Session().commit()
683 681
684 682 delete_ui_mock.assert_called_once_with(-1)
685 683
686 684 def test_raises_exception_when_repository_is_not_specified(self):
687 685 model = VcsSettingsModel()
688 686 with pytest.raises(Exception) as exc_info:
689 687 model.delete_repo_svn_pattern(123)
690 688 assert str(exc_info.value) == 'Repository is not specified'
691 689
692 690
693 691 class TestDeleteGlobalSvnPattern(object):
694 692 def test_delete_global_svn_pattern_calls_delete_ui(self):
695 693 model = VcsSettingsModel()
696 694 delete_ui_patch = mock.patch.object(model.global_settings, 'delete_ui')
697 695 with delete_ui_patch as delete_ui_mock:
698 696 model.delete_global_svn_pattern(123)
699 697 delete_ui_mock.assert_called_once_with(123)
700 698
701 699
702 700 class TestFilterUiSettings(object):
703 701 def test_settings_are_filtered(self):
704 702 model = VcsSettingsModel()
705 703 repo_settings = [
706 704 UiSetting('extensions', 'largefiles', '', True),
707 705 UiSetting('phases', 'publish', 'True', True),
708 706 UiSetting('hooks', 'changegroup.repo_size', 'hook', True),
709 707 UiSetting('hooks', 'changegroup.push_logger', 'hook', True),
710 708 UiSetting('hooks', 'outgoing.pull_logger', 'hook', True),
711 709 UiSetting(
712 710 'vcs_svn_branch', '84223c972204fa545ca1b22dac7bef5b68d7442d',
713 711 'test_branch', True),
714 712 UiSetting(
715 713 'vcs_svn_tag', '84229c972204fa545ca1b22dac7bef5b68d7442d',
716 714 'test_tag', True),
717 715 ]
718 716 non_repo_settings = [
719 717 UiSetting('largefiles', 'usercache', '/example/largefiles-store', True),
720 718 UiSetting('test', 'outgoing.pull_logger', 'hook', True),
721 719 UiSetting('hooks', 'test2', 'hook', True),
722 720 UiSetting(
723 721 'vcs_svn_repo', '84229c972204fa545ca1b22dac7bef5b68d7442d',
724 722 'test_tag', True),
725 723 ]
726 724 settings = repo_settings + non_repo_settings
727 725 filtered_settings = model._filter_ui_settings(settings)
728 726 assert sorted(filtered_settings) == sorted(repo_settings)
729 727
730 728
731 729 class TestFilterGeneralSettings(object):
732 730 def test_settings_are_filtered(self):
733 731 model = VcsSettingsModel()
734 732 settings = {
735 733 'rhodecode_abcde': 'value1',
736 734 'rhodecode_vwxyz': 'value2',
737 735 }
738 736 general_settings = {
739 737 'rhodecode_{}'.format(key): 'value'
740 738 for key in VcsSettingsModel.GENERAL_SETTINGS
741 739 }
742 740 settings.update(general_settings)
743 741
744 742 filtered_settings = model._filter_general_settings(general_settings)
745 743 assert sorted(filtered_settings) == sorted(general_settings)
746 744
747 745
748 746 class TestGetRepoUiSettings(object):
749 747 def test_global_uis_are_returned_when_no_repo_uis_found(
750 748 self, repo_stub):
751 749 model = VcsSettingsModel(repo=repo_stub.repo_name)
752 750 result = model.get_repo_ui_settings()
753 751 svn_sections = (
754 752 VcsSettingsModel.SVN_TAG_SECTION,
755 753 VcsSettingsModel.SVN_BRANCH_SECTION)
756 754 expected_result = [
757 755 s for s in model.global_settings.get_ui()
758 756 if s.section not in svn_sections]
759 757 assert sorted(result) == sorted(expected_result)
760 758
761 759 def test_repo_uis_are_overriding_global_uis(
762 760 self, repo_stub, settings_util):
763 761 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
764 762 settings_util.create_repo_rhodecode_ui(
765 763 repo_stub, section, 'repo', key=key, active=False)
766 764 model = VcsSettingsModel(repo=repo_stub.repo_name)
767 765 result = model.get_repo_ui_settings()
768 766 for setting in result:
769 767 locator = (setting.section, setting.key)
770 768 if locator in VcsSettingsModel.HOOKS_SETTINGS:
771 769 assert setting.value == 'repo'
772 770
773 771 assert setting.active is False
774 772
775 773 def test_global_svn_patterns_are_not_in_list(
776 774 self, repo_stub, settings_util):
777 775 svn_sections = (
778 776 VcsSettingsModel.SVN_TAG_SECTION,
779 777 VcsSettingsModel.SVN_BRANCH_SECTION)
780 778 for section in svn_sections:
781 779 settings_util.create_rhodecode_ui(
782 780 section, 'repo', key='deadbeef' + section, active=False)
783 781 Session().commit()
784 782
785 783 model = VcsSettingsModel(repo=repo_stub.repo_name)
786 784 result = model.get_repo_ui_settings()
787 785 for setting in result:
788 786 assert setting.section not in svn_sections
789 787
790 788 def test_repo_uis_filtered_by_section_are_returned(
791 789 self, repo_stub, settings_util):
792 790 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
793 791 settings_util.create_repo_rhodecode_ui(
794 792 repo_stub, section, 'repo', key=key, active=False)
795 793 model = VcsSettingsModel(repo=repo_stub.repo_name)
796 794 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
797 795 result = model.get_repo_ui_settings(section=section)
798 796 for setting in result:
799 797 assert setting.section == section
800 798
801 799 def test_repo_uis_filtered_by_key_are_returned(
802 800 self, repo_stub, settings_util):
803 801 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
804 802 settings_util.create_repo_rhodecode_ui(
805 803 repo_stub, section, 'repo', key=key, active=False)
806 804 model = VcsSettingsModel(repo=repo_stub.repo_name)
807 805 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
808 806 result = model.get_repo_ui_settings(key=key)
809 807 for setting in result:
810 808 assert setting.key == key
811 809
812 810 def test_raises_exception_when_repository_is_not_specified(self):
813 811 model = VcsSettingsModel()
814 812 with pytest.raises(Exception) as exc_info:
815 813 model.get_repo_ui_settings()
816 814 assert str(exc_info.value) == 'Repository is not specified'
817 815
818 816
819 817 class TestGetRepoGeneralSettings(object):
820 818 def test_global_settings_are_returned_when_no_repo_settings_found(
821 819 self, repo_stub):
822 820 model = VcsSettingsModel(repo=repo_stub.repo_name)
823 821 result = model.get_repo_general_settings()
824 822 expected_result = model.global_settings.get_all_settings()
825 823 assert sorted(result) == sorted(expected_result)
826 824
827 825 def test_repo_uis_are_overriding_global_uis(
828 826 self, repo_stub, settings_util):
829 827 for key in VcsSettingsModel.GENERAL_SETTINGS:
830 828 settings_util.create_repo_rhodecode_setting(
831 829 repo_stub, key, 'abcde', type_='unicode')
832 830 Session().commit()
833 831
834 832 model = VcsSettingsModel(repo=repo_stub.repo_name)
835 833 result = model.get_repo_ui_settings()
836 834 for key in result:
837 835 if key in VcsSettingsModel.GENERAL_SETTINGS:
838 836 assert result[key] == 'abcde'
839 837
840 838 def test_raises_exception_when_repository_is_not_specified(self):
841 839 model = VcsSettingsModel()
842 840 with pytest.raises(Exception) as exc_info:
843 841 model.get_repo_general_settings()
844 842 assert str(exc_info.value) == 'Repository is not specified'
845 843
846 844
847 845 class TestGetGlobalGeneralSettings(object):
848 846 def test_global_settings_are_returned(self, repo_stub):
849 847 model = VcsSettingsModel()
850 848 result = model.get_global_general_settings()
851 849 expected_result = model.global_settings.get_all_settings()
852 850 assert sorted(result) == sorted(expected_result)
853 851
854 852 def test_repo_uis_are_not_overriding_global_uis(
855 853 self, repo_stub, settings_util):
856 854 for key in VcsSettingsModel.GENERAL_SETTINGS:
857 855 settings_util.create_repo_rhodecode_setting(
858 856 repo_stub, key, 'abcde', type_='unicode')
859 857 Session().commit()
860 858
861 859 model = VcsSettingsModel(repo=repo_stub.repo_name)
862 860 result = model.get_global_general_settings()
863 861 expected_result = model.global_settings.get_all_settings()
864 862 assert sorted(result) == sorted(expected_result)
865 863
866 864
867 865 class TestGetGlobalUiSettings(object):
868 866 def test_global_uis_are_returned(self, repo_stub):
869 867 model = VcsSettingsModel()
870 868 result = model.get_global_ui_settings()
871 869 expected_result = model.global_settings.get_ui()
872 870 assert sorted(result) == sorted(expected_result)
873 871
874 872 def test_repo_uis_are_not_overriding_global_uis(
875 873 self, repo_stub, settings_util):
876 874 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
877 875 settings_util.create_repo_rhodecode_ui(
878 876 repo_stub, section, 'repo', key=key, active=False)
879 877 Session().commit()
880 878
881 879 model = VcsSettingsModel(repo=repo_stub.repo_name)
882 880 result = model.get_global_ui_settings()
883 881 expected_result = model.global_settings.get_ui()
884 882 assert sorted(result) == sorted(expected_result)
885 883
886 884 def test_ui_settings_filtered_by_section(
887 885 self, repo_stub, settings_util):
888 886 model = VcsSettingsModel(repo=repo_stub.repo_name)
889 887 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
890 888 result = model.get_global_ui_settings(section=section)
891 889 expected_result = model.global_settings.get_ui(section=section)
892 890 assert sorted(result) == sorted(expected_result)
893 891
894 892 def test_ui_settings_filtered_by_key(
895 893 self, repo_stub, settings_util):
896 894 model = VcsSettingsModel(repo=repo_stub.repo_name)
897 895 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
898 896 result = model.get_global_ui_settings(key=key)
899 897 expected_result = model.global_settings.get_ui(key=key)
900 898 assert sorted(result) == sorted(expected_result)
901 899
902 900
903 901 class TestGetGeneralSettings(object):
904 902 def test_global_settings_are_returned_when_inherited_is_true(
905 903 self, repo_stub, settings_util):
906 904 model = VcsSettingsModel(repo=repo_stub.repo_name)
907 905 model.inherit_global_settings = True
908 906 for key in VcsSettingsModel.GENERAL_SETTINGS:
909 907 settings_util.create_repo_rhodecode_setting(
910 908 repo_stub, key, 'abcde', type_='unicode')
911 909 Session().commit()
912 910
913 911 result = model.get_general_settings()
914 912 expected_result = model.get_global_general_settings()
915 913 assert sorted(result) == sorted(expected_result)
916 914
917 915 def test_repo_settings_are_returned_when_inherited_is_false(
918 916 self, repo_stub, settings_util):
919 917 model = VcsSettingsModel(repo=repo_stub.repo_name)
920 918 model.inherit_global_settings = False
921 919 for key in VcsSettingsModel.GENERAL_SETTINGS:
922 920 settings_util.create_repo_rhodecode_setting(
923 921 repo_stub, key, 'abcde', type_='unicode')
924 922 Session().commit()
925 923
926 924 result = model.get_general_settings()
927 925 expected_result = model.get_repo_general_settings()
928 926 assert sorted(result) == sorted(expected_result)
929 927
930 928 def test_global_settings_are_returned_when_no_repository_specified(self):
931 929 model = VcsSettingsModel()
932 930 result = model.get_general_settings()
933 931 expected_result = model.get_global_general_settings()
934 932 assert sorted(result) == sorted(expected_result)
935 933
936 934
937 935 class TestGetUiSettings(object):
938 936 def test_global_settings_are_returned_when_inherited_is_true(
939 937 self, repo_stub, settings_util):
940 938 model = VcsSettingsModel(repo=repo_stub.repo_name)
941 939 model.inherit_global_settings = True
942 940 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
943 941 settings_util.create_repo_rhodecode_ui(
944 942 repo_stub, section, 'repo', key=key, active=True)
945 943 Session().commit()
946 944
947 945 result = model.get_ui_settings()
948 946 expected_result = model.get_global_ui_settings()
949 947 assert sorted(result) == sorted(expected_result)
950 948
951 949 def test_repo_settings_are_returned_when_inherited_is_false(
952 950 self, repo_stub, settings_util):
953 951 model = VcsSettingsModel(repo=repo_stub.repo_name)
954 952 model.inherit_global_settings = False
955 953 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
956 954 settings_util.create_repo_rhodecode_ui(
957 955 repo_stub, section, 'repo', key=key, active=True)
958 956 Session().commit()
959 957
960 958 result = model.get_ui_settings()
961 959 expected_result = model.get_repo_ui_settings()
962 960 assert sorted(result) == sorted(expected_result)
963 961
964 962 def test_repo_settings_filtered_by_section_and_key(self, repo_stub):
965 963 model = VcsSettingsModel(repo=repo_stub.repo_name)
966 964 model.inherit_global_settings = False
967 965
968 966 args = ('section', 'key')
969 967 with mock.patch.object(model, 'get_repo_ui_settings') as settings_mock:
970 968 model.get_ui_settings(*args)
971 969 Session().commit()
972 970
973 971 settings_mock.assert_called_once_with(*args)
974 972
975 973 def test_global_settings_filtered_by_section_and_key(self):
976 974 model = VcsSettingsModel()
977 975 args = ('section', 'key')
978 976 with mock.patch.object(model, 'get_global_ui_settings') as (
979 977 settings_mock):
980 978 model.get_ui_settings(*args)
981 979 settings_mock.assert_called_once_with(*args)
982 980
983 981 def test_global_settings_are_returned_when_no_repository_specified(self):
984 982 model = VcsSettingsModel()
985 983 result = model.get_ui_settings()
986 984 expected_result = model.get_global_ui_settings()
987 985 assert sorted(result) == sorted(expected_result)
988 986
989 987
990 988 class TestGetSvnPatterns(object):
991 989 def test_repo_settings_filtered_by_section_and_key(self, repo_stub):
992 990 model = VcsSettingsModel(repo=repo_stub.repo_name)
993 991 args = ('section', )
994 992 with mock.patch.object(model, 'get_repo_ui_settings') as settings_mock:
995 993 model.get_svn_patterns(*args)
996 994
997 995 Session().commit()
998 996 settings_mock.assert_called_once_with(*args)
999 997
1000 998 def test_global_settings_filtered_by_section_and_key(self):
1001 999 model = VcsSettingsModel()
1002 1000 args = ('section', )
1003 1001 with mock.patch.object(model, 'get_global_ui_settings') as (
1004 1002 settings_mock):
1005 1003 model.get_svn_patterns(*args)
1006 1004 settings_mock.assert_called_once_with(*args)
1007 1005
1008 1006
1009 1007 class TestGetReposLocation(object):
1010 1008 def test_returns_repos_location(self, repo_stub):
1011 1009 model = VcsSettingsModel()
1012 1010
1013 1011 result_mock = mock.Mock()
1014 1012 result_mock.ui_value = '/tmp'
1015 1013
1016 1014 with mock.patch.object(model, 'global_settings') as settings_mock:
1017 1015 settings_mock.get_ui_by_key.return_value = result_mock
1018 1016 result = model.get_repos_location()
1019 1017
1020 1018 settings_mock.get_ui_by_key.assert_called_once_with('/')
1021 1019 assert result == '/tmp'
1022 1020
1023 1021
1024 1022 class TestCreateOrUpdateRepoSettings(object):
1025 1023 FORM_DATA = {
1026 1024 'inherit_global_settings': False,
1027 1025 'hooks_changegroup_repo_size': False,
1028 1026 'hooks_changegroup_push_logger': False,
1029 1027 'hooks_outgoing_pull_logger': False,
1030 1028 'extensions_largefiles': False,
1031 1029 'extensions_evolve': False,
1032 1030 'largefiles_usercache': '/example/largefiles-store',
1033 1031 'vcs_git_lfs_enabled': False,
1034 1032 'vcs_git_lfs_store_location': '/',
1035 1033 'phases_publish': 'False',
1036 1034 'rhodecode_pr_merge_enabled': False,
1037 1035 'rhodecode_use_outdated_comments': False,
1038 1036 'new_svn_branch': '',
1039 1037 'new_svn_tag': ''
1040 1038 }
1041 1039
1042 1040 def test_get_raises_exception_when_repository_not_specified(self):
1043 1041 model = VcsSettingsModel()
1044 1042 with pytest.raises(Exception) as exc_info:
1045 1043 model.create_or_update_repo_settings(data=self.FORM_DATA)
1046 1044 Session().commit()
1047 1045
1048 1046 assert str(exc_info.value) == 'Repository is not specified'
1049 1047
1050 1048 def test_only_svn_settings_are_updated_when_type_is_svn(self, backend_svn):
1051 1049 repo = backend_svn.create_repo()
1052 1050 model = VcsSettingsModel(repo=repo)
1053 1051 with self._patch_model(model) as mocks:
1054 1052 model.create_or_update_repo_settings(
1055 1053 data=self.FORM_DATA, inherit_global_settings=False)
1056 1054 Session().commit()
1057 1055
1058 1056 mocks['create_repo_svn_settings'].assert_called_once_with(
1059 1057 self.FORM_DATA)
1060 1058 non_called_methods = (
1061 1059 'create_or_update_repo_hook_settings',
1062 1060 'create_or_update_repo_pr_settings',
1063 1061 'create_or_update_repo_hg_settings')
1064 1062 for method in non_called_methods:
1065 1063 assert mocks[method].call_count == 0
1066 1064
1067 1065 def test_non_svn_settings_are_updated_when_type_is_hg(self, backend_hg):
1068 1066 repo = backend_hg.create_repo()
1069 1067 model = VcsSettingsModel(repo=repo)
1070 1068 with self._patch_model(model) as mocks:
1071 1069 model.create_or_update_repo_settings(
1072 1070 data=self.FORM_DATA, inherit_global_settings=False)
1073 1071 Session().commit()
1074 1072
1075 1073 assert mocks['create_repo_svn_settings'].call_count == 0
1076 1074 called_methods = (
1077 1075 'create_or_update_repo_hook_settings',
1078 1076 'create_or_update_repo_pr_settings',
1079 1077 'create_or_update_repo_hg_settings')
1080 1078 for method in called_methods:
1081 1079 mocks[method].assert_called_once_with(self.FORM_DATA)
1082 1080
1083 1081 def test_non_svn_and_hg_settings_are_updated_when_type_is_git(
1084 1082 self, backend_git):
1085 1083 repo = backend_git.create_repo()
1086 1084 model = VcsSettingsModel(repo=repo)
1087 1085 with self._patch_model(model) as mocks:
1088 1086 model.create_or_update_repo_settings(
1089 1087 data=self.FORM_DATA, inherit_global_settings=False)
1090 1088
1091 1089 assert mocks['create_repo_svn_settings'].call_count == 0
1092 1090 called_methods = (
1093 1091 'create_or_update_repo_hook_settings',
1094 1092 'create_or_update_repo_pr_settings')
1095 1093 non_called_methods = (
1096 1094 'create_repo_svn_settings',
1097 1095 'create_or_update_repo_hg_settings'
1098 1096 )
1099 1097 for method in called_methods:
1100 1098 mocks[method].assert_called_once_with(self.FORM_DATA)
1101 1099 for method in non_called_methods:
1102 1100 assert mocks[method].call_count == 0
1103 1101
1104 1102 def test_no_methods_are_called_when_settings_are_inherited(
1105 1103 self, backend):
1106 1104 repo = backend.create_repo()
1107 1105 model = VcsSettingsModel(repo=repo)
1108 1106 with self._patch_model(model) as mocks:
1109 1107 model.create_or_update_repo_settings(
1110 1108 data=self.FORM_DATA, inherit_global_settings=True)
1111 1109 for method_name in mocks:
1112 1110 assert mocks[method_name].call_count == 0
1113 1111
1114 1112 def test_cache_is_marked_for_invalidation(self, repo_stub):
1115 1113 model = VcsSettingsModel(repo=repo_stub)
1116 1114 invalidation_patcher = mock.patch(
1117 1115 'rhodecode.model.scm.ScmModel.mark_for_invalidation')
1118 1116 with invalidation_patcher as invalidation_mock:
1119 1117 model.create_or_update_repo_settings(
1120 1118 data=self.FORM_DATA, inherit_global_settings=True)
1121 1119 Session().commit()
1122 1120
1123 1121 invalidation_mock.assert_called_once_with(
1124 1122 repo_stub.repo_name, delete=True)
1125 1123
1126 1124 def test_inherit_flag_is_saved(self, repo_stub):
1127 1125 model = VcsSettingsModel(repo=repo_stub)
1128 1126 model.inherit_global_settings = True
1129 1127 with self._patch_model(model):
1130 1128 model.create_or_update_repo_settings(
1131 1129 data=self.FORM_DATA, inherit_global_settings=False)
1132 1130 Session().commit()
1133 1131
1134 1132 assert model.inherit_global_settings is False
1135 1133
1136 1134 def _patch_model(self, model):
1137 1135 return mock.patch.multiple(
1138 1136 model,
1139 1137 create_repo_svn_settings=mock.DEFAULT,
1140 1138 create_or_update_repo_hook_settings=mock.DEFAULT,
1141 1139 create_or_update_repo_pr_settings=mock.DEFAULT,
1142 1140 create_or_update_repo_hg_settings=mock.DEFAULT)
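
The _patch_model helper above relies on mock.patch.multiple with mock.DEFAULT, which replaces several attributes at once and, when used as a context manager, yields a dict mapping each attribute name to its MagicMock. A self-contained sketch of that pattern outside the test suite (the Service class is invented purely for illustration):

    import mock

    class Service(object):
        def ping(self):
            return 'pong'

        def sync(self):
            return 'done'

    svc = Service()
    # Both methods are replaced for the duration of the block; 'mocks' maps names to mocks.
    with mock.patch.multiple(svc, ping=mock.DEFAULT, sync=mock.DEFAULT) as mocks:
        svc.ping()
        mocks['ping'].assert_called_once_with()
        assert mocks['sync'].call_count == 0
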