##// END OF EJS Templates
python3: more unicode fixes
super-admin -
r4961:f77d0ff9 default
parent child Browse files
Show More
@@ -1,580 +1,580 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import urllib.parse
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.tests import (
27 27 assert_session_flash, HG_REPO, TEST_USER_ADMIN_LOGIN,
28 28 no_newline_id_generator)
29 29 from rhodecode.tests.fixture import Fixture
30 30 from rhodecode.lib.auth import check_password
31 31 from rhodecode.lib import helpers as h
32 32 from rhodecode.model.auth_token import AuthTokenModel
33 33 from rhodecode.model.db import User, Notification, UserApiKeys
34 34 from rhodecode.model.meta import Session
35 35
# shared factory for creating/destroying test users and repo fixtures
fixture = Fixture()

# view names that the auth-token whitelist tests temporarily allow
whitelist_view = ['RepoCommitsView:repo_commit_raw']
39 39
40 40
def route_path(name, params=None, **kwargs):
    """Resolve a named route to its URL.

    :param name: key into the local route table below.
    :param params: optional mapping appended as a URL-encoded query string.
    :param kwargs: interpolated into the URL pattern (e.g. ``repo_name``).
    :raises KeyError: when `name` is not in the route table.

    NOTE: the previous ``import urllib.request, urllib.parse, urllib.error``
    was a py2->py3 conversion leftover; only ``urllib.parse`` is used and it
    is already imported at module level.
    """
    from rhodecode.apps._base import ADMIN_PREFIX

    base_url = {
        'login': ADMIN_PREFIX + '/login',
        'logout': ADMIN_PREFIX + '/logout',
        'register': ADMIN_PREFIX + '/register',
        'reset_password':
            ADMIN_PREFIX + '/password_reset',
        'reset_password_confirmation':
            ADMIN_PREFIX + '/password_reset_confirmation',

        'admin_permissions_application':
            ADMIN_PREFIX + '/permissions/application',
        'admin_permissions_application_update':
            ADMIN_PREFIX + '/permissions/application/update',

        'repo_commit_raw': '/{repo_name}/raw-changeset/{commit_id}'

    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
    return base_url
66 66
67 67
@pytest.mark.usefixtures('app')
class TestLoginController(object):
    """Functional tests for login, registration, password reset and
    auth-token (api_key) based page access.

    py3 fix: ``test_redirect_to_login_with_get_args`` called
    ``urllib.parse.urlparse.parse_qsl`` which raises AttributeError —
    ``parse_qsl`` lives directly on ``urllib.parse``.
    """

    # usernames created during tests; destroyed once in teardown_class
    destroy_users = set()

    @classmethod
    def teardown_class(cls):
        fixture.destroy_users(cls.destroy_users)

    def teardown_method(self, method):
        # registration / password-reset flows create notifications;
        # remove them so tests remain independent of ordering
        for n in Notification.query().all():
            Session().delete(n)

        Session().commit()
        assert Notification.query().all() == []

    def test_index(self):
        response = self.app.get(route_path('login'))
        assert response.status == '200 OK'
        # Test response...

    def test_login_admin_ok(self):
        response = self.app.post(route_path('login'),
                                 {'username': 'test_admin',
                                  'password': 'test12'}, status=302)
        response = response.follow()
        session = response.get_session_from_response()
        username = session['rhodecode_user'].get('username')
        assert username == 'test_admin'
        response.mustcontain('logout')

    def test_login_regular_ok(self):
        response = self.app.post(route_path('login'),
                                 {'username': 'test_regular',
                                  'password': 'test12'}, status=302)

        response = response.follow()
        session = response.get_session_from_response()
        username = session['rhodecode_user'].get('username')
        assert username == 'test_regular'
        response.mustcontain('logout')

    def test_login_regular_forbidden_when_super_admin_restriction(self):
        from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin
        with fixture.auth_restriction(self.app._pyramid_registry,
                                      RhodeCodeAuthPlugin.AUTH_RESTRICTION_SUPER_ADMIN):
            response = self.app.post(route_path('login'),
                                     {'username': 'test_regular',
                                      'password': 'test12'})

            response.mustcontain('invalid user name')
            response.mustcontain('invalid password')

    def test_login_regular_forbidden_when_scope_restriction(self):
        from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin
        with fixture.scope_restriction(self.app._pyramid_registry,
                                       RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_VCS):
            response = self.app.post(route_path('login'),
                                     {'username': 'test_regular',
                                      'password': 'test12'})

            response.mustcontain('invalid user name')
            response.mustcontain('invalid password')

    def test_login_ok_came_from(self):
        test_came_from = '/_admin/users?branch=stable'
        _url = '{}?came_from={}'.format(route_path('login'), test_came_from)
        response = self.app.post(
            _url, {'username': 'test_admin', 'password': 'test12'}, status=302)

        assert 'branch=stable' in response.location
        response = response.follow()

        assert response.status == '200 OK'
        response.mustcontain('Users administration')

    def test_redirect_to_login_with_get_args(self):
        with fixture.anon_access(False):
            kwargs = {'branch': 'stable'}
            response = self.app.get(
                h.route_path('repo_summary', repo_name=HG_REPO, _query=kwargs),
                status=302)

            # FIX: parse_qsl is a function on urllib.parse itself;
            # urllib.parse.urlparse.parse_qsl was a py2 `urlparse` leftover
            response_query = urllib.parse.parse_qsl(response.location)
            assert 'branch=stable' in response_query[0][1]

    def test_login_form_with_get_args(self):
        _url = '{}?came_from=/_admin/users,branch=stable'.format(route_path('login'))
        response = self.app.get(_url)
        assert 'branch%3Dstable' in response.form.action

    @pytest.mark.parametrize("url_came_from", [
        'data:text/html,<script>window.alert("xss")</script>',
        'mailto:test@rhodecode.org',
        'file:///etc/passwd',
        'ftp://some.ftp.server',
        'http://other.domain',
        '/\r\nX-Forwarded-Host: http://example.org',
    ], ids=no_newline_id_generator)
    def test_login_bad_came_froms(self, url_came_from):
        # unsafe came_from targets must be ignored and redirect to '/'
        _url = '{}?came_from={}'.format(route_path('login'), url_came_from)
        response = self.app.post(
            _url,
            {'username': 'test_admin', 'password': 'test12'})
        assert response.status == '302 Found'
        response = response.follow()
        assert response.status == '200 OK'
        assert response.request.path == '/'

    def test_login_short_password(self):
        response = self.app.post(route_path('login'),
                                 {'username': 'test_admin',
                                  'password': 'as'})
        assert response.status == '200 OK'

        response.mustcontain('Enter 3 characters or more')

    def test_login_wrong_non_ascii_password(self, user_regular):
        response = self.app.post(
            route_path('login'),
            {'username': user_regular.username,
             'password': u'invalid-non-asci\xe4'.encode('utf8')})

        response.mustcontain('invalid user name')
        response.mustcontain('invalid password')

    def test_login_with_non_ascii_password(self, user_util):
        password = u'valid-non-ascii\xe4'
        user = user_util.create_user(password=password)
        response = self.app.post(
            route_path('login'),
            {'username': user.username,
             'password': password})
        assert response.status_code == 302

    def test_login_wrong_username_password(self):
        response = self.app.post(route_path('login'),
                                 {'username': 'error',
                                  'password': 'test12'})

        response.mustcontain('invalid user name')
        response.mustcontain('invalid password')

    def test_login_admin_ok_password_migration(self, real_crypto_backend):
        from rhodecode.lib import auth

        # create new user, with sha256 password
        temp_user = 'test_admin_sha256'
        user = fixture.create_user(temp_user)
        user.password = auth._RhodeCodeCryptoSha256().hash_create(
            b'test123')
        Session().add(user)
        Session().commit()
        self.destroy_users.add(temp_user)
        response = self.app.post(route_path('login'),
                                 {'username': temp_user,
                                  'password': 'test123'}, status=302)

        response = response.follow()
        session = response.get_session_from_response()
        username = session['rhodecode_user'].get('username')
        assert username == temp_user
        response.mustcontain('logout')

        # new password should be bcrypted, after log-in and transfer
        user = User.get_by_username(temp_user)
        assert user.password.startswith('$')

    # REGISTRATIONS
    def test_register(self):
        response = self.app.get(route_path('register'))
        response.mustcontain('Create an Account')

    def test_register_err_same_username(self):
        uname = 'test_admin'
        response = self.app.post(
            route_path('register'),
            {
                'username': uname,
                'password': 'test12',
                'password_confirmation': 'test12',
                'email': 'goodmail@domain.com',
                'firstname': 'test',
                'lastname': 'test'
            }
        )

        assertr = response.assert_response()
        msg = 'Username "%(username)s" already exists'
        msg = msg % {'username': uname}
        assertr.element_contains('#username+.error-message', msg)

    def test_register_err_same_email(self):
        response = self.app.post(
            route_path('register'),
            {
                'username': 'test_admin_0',
                'password': 'test12',
                'password_confirmation': 'test12',
                'email': 'test_admin@mail.com',
                'firstname': 'test',
                'lastname': 'test'
            }
        )

        assertr = response.assert_response()
        msg = u'This e-mail address is already taken'
        assertr.element_contains('#email+.error-message', msg)

    def test_register_err_same_email_case_sensitive(self):
        response = self.app.post(
            route_path('register'),
            {
                'username': 'test_admin_1',
                'password': 'test12',
                'password_confirmation': 'test12',
                'email': 'TesT_Admin@mail.COM',
                'firstname': 'test',
                'lastname': 'test'
            }
        )
        assertr = response.assert_response()
        msg = u'This e-mail address is already taken'
        assertr.element_contains('#email+.error-message', msg)

    def test_register_err_wrong_data(self):
        response = self.app.post(
            route_path('register'),
            {
                'username': 'xs',
                'password': 'test',
                'password_confirmation': 'test',
                'email': 'goodmailm',
                'firstname': 'test',
                'lastname': 'test'
            }
        )
        assert response.status == '200 OK'
        response.mustcontain('An email address must contain a single @')
        response.mustcontain('Enter a value 6 characters long or more')

    def test_register_err_username(self):
        response = self.app.post(
            route_path('register'),
            {
                'username': 'error user',
                'password': 'test12',
                'password_confirmation': 'test12',
                'email': 'goodmailm',
                'firstname': 'test',
                'lastname': 'test'
            }
        )

        response.mustcontain('An email address must contain a single @')
        response.mustcontain(
            'Username may only contain '
            'alphanumeric characters underscores, '
            'periods or dashes and must begin with '
            'alphanumeric character')

    def test_register_err_case_sensitive(self):
        usr = 'Test_Admin'
        response = self.app.post(
            route_path('register'),
            {
                'username': usr,
                'password': 'test12',
                'password_confirmation': 'test12',
                'email': 'goodmailm',
                'firstname': 'test',
                'lastname': 'test'
            }
        )

        assertr = response.assert_response()
        msg = u'Username "%(username)s" already exists'
        msg = msg % {'username': usr}
        assertr.element_contains('#username+.error-message', msg)

    def test_register_special_chars(self):
        response = self.app.post(
            route_path('register'),
            {
                'username': 'xxxaxn',
                'password': 'Δ…Δ‡ΕΊΕΌΔ…Ε›Ε›Ε›Ε›',
                'password_confirmation': 'Δ…Δ‡ΕΊΕΌΔ…Ε›Ε›Ε›Ε›',
                'email': 'goodmailm@test.plx',
                'firstname': 'test',
                'lastname': 'test'
            }
        )

        msg = u'Invalid characters (non-ascii) in password'
        response.mustcontain(msg)

    def test_register_password_mismatch(self):
        response = self.app.post(
            route_path('register'),
            {
                'username': 'xs',
                'password': '123qwe',
                'password_confirmation': 'qwe123',
                'email': 'goodmailm@test.plxa',
                'firstname': 'test',
                'lastname': 'test'
            }
        )
        msg = u'Passwords do not match'
        response.mustcontain(msg)

    def test_register_ok(self):
        username = 'test_regular4'
        password = 'qweqwe'
        email = 'marcin@test.com'
        name = 'testname'
        lastname = 'testlastname'

        # this initializes a session
        response = self.app.get(route_path('register'))
        response.mustcontain('Create an Account')

        response = self.app.post(
            route_path('register'),
            {
                'username': username,
                'password': password,
                'password_confirmation': password,
                'email': email,
                'firstname': name,
                'lastname': lastname,
                'admin': True
            },
            status=302
        )  # This should be overridden

        assert_session_flash(
            response, 'You have successfully registered with RhodeCode. You can log-in now.')

        ret = Session().query(User).filter(
            User.username == 'test_regular4').one()
        assert ret.username == username
        assert check_password(password, ret.password)
        assert ret.email == email
        assert ret.name == name
        assert ret.lastname == lastname
        assert ret.auth_tokens is not None
        # the submitted 'admin': True must have been ignored by the view
        assert not ret.admin

    def test_forgot_password_wrong_mail(self):
        bad_email = 'marcin@wrongmail.org'
        # this initializes a session
        self.app.get(route_path('reset_password'))

        response = self.app.post(
            route_path('reset_password'), {'email': bad_email, }
        )
        assert_session_flash(response,
            'If such email exists, a password reset link was sent to it.')

    def test_forgot_password(self, user_util):
        # this initializes a session
        self.app.get(route_path('reset_password'))

        user = user_util.create_user()
        user_id = user.user_id
        email = user.email

        response = self.app.post(route_path('reset_password'), {'email': email, })

        assert_session_flash(response,
            'If such email exists, a password reset link was sent to it.')

        # BAD KEY
        confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), 'badkey')
        response = self.app.get(confirm_url, status=302)
        assert response.location.endswith(route_path('reset_password'))
        assert_session_flash(response, 'Given reset token is invalid')

        response.follow()  # cleanup flash

        # GOOD KEY
        key = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == user_id)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_PASSWORD_RESET)\
            .first()

        assert key

        confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), key.api_key)
        response = self.app.get(confirm_url)
        assert response.status == '302 Found'
        assert response.location.endswith(route_path('login'))

        assert_session_flash(
            response,
            'Your password reset was successful, '
            'a new password has been sent to your email')

        response.follow()

    def _get_api_whitelist(self, values=None):
        # build a minimal CONFIG dict for mock.patch.dict in the tests below
        config = {'api_access_controllers_whitelist': values or []}
        return config

    @pytest.mark.parametrize("test_name, auth_token", [
        ('none', None),
        ('empty_string', ''),
        ('fake_number', '123456'),
        ('proper_auth_token', None)
    ])
    def test_access_not_whitelisted_page_via_auth_token(
            self, test_name, auth_token, user_admin):

        whitelist = self._get_api_whitelist([])
        with mock.patch.dict('rhodecode.CONFIG', whitelist):
            assert [] == whitelist['api_access_controllers_whitelist']
            if test_name == 'proper_auth_token':
                # use builtin if api_key is None
                auth_token = user_admin.api_key

            with fixture.anon_access(False):
                self.app.get(
                    route_path('repo_commit_raw',
                               repo_name=HG_REPO, commit_id='tip',
                               params=dict(api_key=auth_token)),
                    status=302)

    @pytest.mark.parametrize("test_name, auth_token, code", [
        ('none', None, 302),
        ('empty_string', '', 302),
        ('fake_number', '123456', 302),
        ('proper_auth_token', None, 200)
    ])
    def test_access_whitelisted_page_via_auth_token(
            self, test_name, auth_token, code, user_admin):

        whitelist = self._get_api_whitelist(whitelist_view)

        with mock.patch.dict('rhodecode.CONFIG', whitelist):
            assert whitelist_view == whitelist['api_access_controllers_whitelist']

            if test_name == 'proper_auth_token':
                auth_token = user_admin.api_key
                assert auth_token

            with fixture.anon_access(False):
                self.app.get(
                    route_path('repo_commit_raw',
                               repo_name=HG_REPO, commit_id='tip',
                               params=dict(api_key=auth_token)),
                    status=code)

    @pytest.mark.parametrize("test_name, auth_token, code", [
        ('proper_auth_token', None, 200),
        ('wrong_auth_token', '123456', 302),
    ])
    def test_access_whitelisted_page_via_auth_token_bound_to_token(
            self, test_name, auth_token, code, user_admin):

        expected_token = auth_token
        if test_name == 'proper_auth_token':
            auth_token = user_admin.api_key
            expected_token = auth_token
            assert auth_token

        # whitelist entry bound to a specific token via the @token suffix
        whitelist = self._get_api_whitelist([
            'RepoCommitsView:repo_commit_raw@{}'.format(expected_token)])

        with mock.patch.dict('rhodecode.CONFIG', whitelist):

            with fixture.anon_access(False):
                self.app.get(
                    route_path('repo_commit_raw',
                               repo_name=HG_REPO, commit_id='tip',
                               params=dict(api_key=auth_token)),
                    status=code)

    def test_access_page_via_extra_auth_token(self):
        whitelist = self._get_api_whitelist(whitelist_view)
        with mock.patch.dict('rhodecode.CONFIG', whitelist):
            assert whitelist_view == \
                whitelist['api_access_controllers_whitelist']

            new_auth_token = AuthTokenModel().create(
                TEST_USER_ADMIN_LOGIN, 'test')
            Session().commit()
            with fixture.anon_access(False):
                self.app.get(
                    route_path('repo_commit_raw',
                               repo_name=HG_REPO, commit_id='tip',
                               params=dict(api_key=new_auth_token.api_key)),
                    status=200)

    def test_access_page_via_expired_auth_token(self):
        whitelist = self._get_api_whitelist(whitelist_view)
        with mock.patch.dict('rhodecode.CONFIG', whitelist):
            assert whitelist_view == \
                whitelist['api_access_controllers_whitelist']

            new_auth_token = AuthTokenModel().create(
                TEST_USER_ADMIN_LOGIN, 'test')
            Session().commit()
            # patch the api key and make it expired
            new_auth_token.expires = 0
            Session().add(new_auth_token)
            Session().commit()
            with fixture.anon_access(False):
                self.app.get(
                    route_path('repo_commit_raw',
                               repo_name=HG_REPO, commit_id='tip',
                               params=dict(api_key=new_auth_token.api_key)),
                    status=302)
@@ -1,496 +1,494 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT commit module
23 23 """
24 24
25 25 import re
26 26 import stat
27 27 import configparser
28 28 from itertools import chain
29 29 from io import StringIO
30 30
31 31 from zope.cachedescriptors.property import Lazy as LazyProperty
32 32
33 33 from rhodecode.lib.datelib import utcdate_fromtimestamp
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import safe_int
36 36 from rhodecode.lib.vcs.conf import settings
37 37 from rhodecode.lib.vcs.backends import base
38 38 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
39 39 from rhodecode.lib.vcs.nodes import (
40 40 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
41 41 ChangedFileNodesGenerator, AddedFileNodesGenerator,
42 42 RemovedFileNodesGenerator, LargeFileNode)
43 43
44 44
45 45 class GitCommit(base.BaseCommit):
46 46 """
47 47 Represents state of the repository at single commit id.
48 48 """
49 49
    # Attributes that must never be requested through the bulk remote call:
    # each one is either computed locally (tree walks / subprocess) or is a
    # Mercurial-only property with no git equivalent.
    _filter_pre_load = [
        # done through a more complex tree walk on parents
        "affected_files",
        # done through subprocess not remote call
        "children",
        # done through a more complex tree walk on parents
        "status",
        # mercurial specific property not supported here
        "_file_paths",
        # mercurial specific property not supported here
        'obsolete',
        # mercurial specific property not supported here
        'phase',
        # mercurial specific property not supported here
        'hidden'
    ]
66 66
    def __init__(self, repository, raw_id, idx, pre_load=None):
        """
        :param repository: owning repository object (provides ``_remote``)
        :param raw_id: full commit sha
        :param idx: numeric index of this commit in the repository
        :param pre_load: optional list of attribute names to fetch eagerly
            in one bulk remote call (filtered by ``_filter_pre_load``)
        """
        self.repository = repository
        self._remote = repository._remote
        # TODO: johbo: Tweak of raw_id should not be necessary
        self.raw_id = safe_str(raw_id)
        self.idx = idx

        self._set_bulk_properties(pre_load)

        # caches
        self._stat_modes = {}  # stat info for paths
        self._paths = {}  # path processed with parse_tree
        self.nodes = {}
        self._submodules = None
81 81
    def _set_bulk_properties(self, pre_load):
        """Fetch the requested attributes in a single remote call and write
        them straight into ``__dict__`` so the LazyProperty descriptors are
        bypassed on later access."""
        if not pre_load:
            return
        # drop attributes that cannot be served by the bulk remote call
        pre_load = [entry for entry in pre_load
                    if entry not in self._filter_pre_load]
        if not pre_load:
            return

        result = self._remote.bulk_request(self.raw_id, pre_load)
        for attr, value in result.items():
            if attr in ["author", "message"]:
                if value:
                    value = safe_unicode(value)
            elif attr == "date":
                # remote returns (unix_ts, tz) pair
                value = utcdate_fromtimestamp(*value)
            elif attr == "parents":
                value = self._make_commits(value)
            elif attr == "branch":
                value = self._set_branch(value)
            self.__dict__[attr] = value
103 103
    @LazyProperty
    def _commit(self):
        # raw commit object fetched from the vcsserver
        return self._remote[self.raw_id]

    @LazyProperty
    def _tree_id(self):
        # id of the root tree of this commit
        return self._remote[self._commit['tree']]['id']

    @LazyProperty
    def id(self):
        # alias of the full sha
        return self.raw_id

    @LazyProperty
    def short_id(self):
        # abbreviated 12-character sha
        return self.raw_id[:12]

    @LazyProperty
    def message(self):
        return safe_unicode(self._remote.message(self.id))

    @LazyProperty
    def committer(self):
        # NOTE: reported as the author value, same as ``author`` below
        return safe_unicode(self._remote.author(self.id))

    @LazyProperty
    def author(self):
        return safe_unicode(self._remote.author(self.id))

    @LazyProperty
    def date(self):
        unix_ts, tz = self._remote.date(self.raw_id)
        return utcdate_fromtimestamp(unix_ts, tz)
136 136
    @LazyProperty
    def status(self):
        """
        Returns modified, added, removed, deleted files for current commit
        """
        return self.changed, self.added, self.removed

    @LazyProperty
    def tags(self):
        # names of all repository tags pointing at this commit
        tags = [safe_unicode(name) for name,
                commit_id in self.repository.tags.items()
                if commit_id == self.raw_id]
        return tags

    @LazyProperty
    def commit_branches(self):
        # names of all branches whose head is this commit
        branches = []
        for name, commit_id in self.repository.branches.items():
            if commit_id == self.raw_id:
                branches.append(name)
        return branches
158 158
    def _set_branch(self, branches):
        # returns the first branch name, or implicitly None when empty
        if branches:
            # actually commit can have multiple branches in git
            return safe_unicode(branches[0])

    @LazyProperty
    def branch(self):
        branches = self._remote.branch(self.raw_id)
        return self._set_branch(branches)
168 168
169 169 def _get_tree_id_for_path(self, path):
170 170 path = safe_str(path)
171 171 if path in self._paths:
172 172 return self._paths[path]
173 173
174 174 tree_id = self._tree_id
175 175
176 176 path = path.strip('/')
177 177 if path == '':
178 178 data = [tree_id, "tree"]
179 179 self._paths[''] = data
180 180 return data
181 181
182 182 tree_id, tree_type, tree_mode = \
183 183 self._remote.tree_and_type_for_path(self.raw_id, path)
184 184 if tree_id is None:
185 185 raise self.no_node_at_path(path)
186 186
187 187 self._paths[path] = [tree_id, tree_type]
188 188 self._stat_modes[path] = tree_mode
189 189
190 190 if path not in self._paths:
191 191 raise self.no_node_at_path(path)
192 192
193 193 return self._paths[path]
194 194
195 195 def _get_kind(self, path):
196 196 tree_id, type_ = self._get_tree_id_for_path(path)
197 197 if type_ == 'blob':
198 198 return NodeKind.FILE
199 199 elif type_ == 'tree':
200 200 return NodeKind.DIR
201 201 elif type_ == 'link':
202 202 return NodeKind.SUBMODULE
203 203 return None
204 204
205 205 def _get_filectx(self, path):
206 206 path = self._fix_path(path)
207 207 if self._get_kind(path) != NodeKind.FILE:
208 208 raise CommitError(
209 209 "File does not exist for commit %s at '%s'" % (self.raw_id, path))
210 210 return path
211 211
212 212 def _get_file_nodes(self):
213 213 return chain(*(t[2] for t in self.walk()))
214 214
    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        parent_ids = self._remote.parents(self.id)
        return self._make_commits(parent_ids)

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """

        children = self._remote.children(self.raw_id)
        return self._make_commits(children)
231 231
232 232 def _make_commits(self, commit_ids):
233 233 def commit_maker(_commit_id):
234 234 return self.repository.get_commit(commit_id=commit_id)
235 235
236 236 return [commit_maker(commit_id) for commit_id in commit_ids]
237 237
    def get_file_mode(self, path):
        """
        Returns stat mode of the file at the given `path`.
        """
        path = safe_str(path)
        # ensure path is traversed so self._stat_modes is populated
        self._get_tree_id_for_path(path)
        return self._stat_modes[path]
246 246
    def is_link(self, path):
        # symlink check based on the recorded stat mode
        return stat.S_ISLNK(self.get_file_mode(path))

    def is_node_binary(self, path):
        # binary detection is delegated to the vcsserver
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.is_binary(tree_id)
253 253
    def get_file_content(self, path):
        """
        Returns content of the file at given `path`.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.blob_as_pretty_string(tree_id)

    def get_file_content_streamed(self, path):
        # streaming variant of get_file_content; returns a remote stream
        tree_id, _ = self._get_tree_id_for_path(path)
        stream_method = getattr(self._remote, 'stream:blob_as_pretty_string')
        return stream_method(tree_id)

    def get_file_size(self, path):
        """
        Returns size of the file at given `path`.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.blob_raw_length(tree_id)
272 272
    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of `GitCommit` objects for
        which file at given `path` has been modified.

        :param limit: optional maximum number of history entries
        :param pre_load: forwarded to ``get_commit`` for bulk loading
        """

        # raises CommitError if `path` is not a file in this commit
        path = self._get_filectx(path)
        hist = self._remote.node_history(self.raw_id, path, limit)
        return [
            self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in hist]
284 284
285 285 def get_file_annotate(self, path, pre_load=None):
286 286 """
287 287 Returns a generator of four element tuples with
288 288 lineno, commit_id, commit lazy loader and line
289 289 """
290 290
291 291 result = self._remote.node_annotate(self.raw_id, path)
292 292
293 293 for ln_no, commit_id, content in result:
294 294 yield (
295 295 ln_no, commit_id,
296 296 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
297 297 content)
298 298
    def get_nodes(self, path):
        """Return sorted directory and file nodes for the tree at `path`.

        Raises CommitError when `path` is not a directory in this commit.
        Nodes are also cached into ``self.nodes`` by path.
        """
        if self._get_kind(path) != NodeKind.DIR:
            raise CommitError(
                "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
        path = self._fix_path(path)

        tree_id, _ = self._get_tree_id_for_path(path)

        dirnodes = []
        filenodes = []

        # extracted tree ID gives us our files...
        bytes_path = safe_str(path)  # libgit operates on bytes
        for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
            if type_ == 'link':
                # submodules are represented as dir-like SubModuleNodes
                url = self._get_submodule_url('/'.join((bytes_path, name)))
                dirnodes.append(SubModuleNode(
                    name, url=url, commit=id_, alias=self.repository.alias))
                continue

            if bytes_path != '':
                obj_path = '/'.join((bytes_path, name))
            else:
                obj_path = name
            # remember stat mode for later get_file_mode()/is_link() calls
            if obj_path not in self._stat_modes:
                self._stat_modes[obj_path] = stat_

            if type_ == 'tree':
                dirnodes.append(DirNode(obj_path, commit=self))
            elif type_ == 'blob':
                filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
            else:
                # NOTE(review): comma instead of %, so "%s" is never
                # interpolated and the exception receives two args —
                # confirm whether formatting was intended
                raise CommitError(
                    "Requested object should be Tree or Blob, is %s", type_)

        nodes = dirnodes + filenodes
        for node in nodes:
            # populate the per-commit node cache used by get_node()
            if node.path not in self.nodes:
                self.nodes[node.path] = node
        nodes.sort()
        return nodes
341 341
342 342 def get_node(self, path, pre_load=None):
343 if isinstance(path, unicode):
344 path = path.encode('utf-8')
345 343 path = self._fix_path(path)
346 344 if path not in self.nodes:
347 345 try:
348 346 tree_id, type_ = self._get_tree_id_for_path(path)
349 347 except CommitError:
350 348 raise NodeDoesNotExistError(
351 349 "Cannot find one of parents' directories for a given "
352 350 "path: %s" % path)
353 351
354 352 if type_ in ['link', 'commit']:
355 353 url = self._get_submodule_url(path)
356 354 node = SubModuleNode(path, url=url, commit=tree_id,
357 355 alias=self.repository.alias)
358 356 elif type_ == 'tree':
359 357 if path == '':
360 358 node = RootNode(commit=self)
361 359 else:
362 360 node = DirNode(path, commit=self)
363 361 elif type_ == 'blob':
364 362 node = FileNode(path, commit=self, pre_load=pre_load)
365 363 self._stat_modes[path] = node.mode
366 364 else:
367 365 raise self.no_node_at_path(path)
368 366
369 367 # cache node
370 368 self.nodes[path] = node
371 369
372 370 return self.nodes[path]
373 371
374 372 def get_largefile_node(self, path):
375 373 tree_id, _ = self._get_tree_id_for_path(path)
376 374 pointer_spec = self._remote.is_large_file(tree_id)
377 375
378 376 if pointer_spec:
379 377 # content of that file regular FileNode is the hash of largefile
380 378 file_id = pointer_spec.get('oid_hash')
381 379 if self._remote.in_largefiles_store(file_id):
382 380 lf_path = self._remote.store_path(file_id)
383 381 return LargeFileNode(lf_path, commit=self, org_path=path)
384 382
385 383 @LazyProperty
386 384 def affected_files(self):
387 385 """
388 386 Gets a fast accessible file changes for given commit
389 387 """
390 388 added, modified, deleted = self._changes_cache
391 389 return list(added.union(modified).union(deleted))
392 390
393 391 @LazyProperty
394 392 def _changes_cache(self):
395 393 added = set()
396 394 modified = set()
397 395 deleted = set()
398 396 _r = self._remote
399 397
400 398 parents = self.parents
401 399 if not self.parents:
402 400 parents = [base.EmptyCommit()]
403 401 for parent in parents:
404 402 if isinstance(parent, base.EmptyCommit):
405 403 oid = None
406 404 else:
407 405 oid = parent.raw_id
408 406 changes = _r.tree_changes(oid, self.raw_id)
409 407 for (oldpath, newpath), (_, _), (_, _) in changes:
410 408 if newpath and oldpath:
411 409 modified.add(newpath)
412 410 elif newpath and not oldpath:
413 411 added.add(newpath)
414 412 elif not newpath and oldpath:
415 413 deleted.add(oldpath)
416 414 return added, modified, deleted
417 415
418 416 def _get_paths_for_status(self, status):
419 417 """
420 418 Returns sorted list of paths for given ``status``.
421 419
422 420 :param status: one of: *added*, *modified* or *deleted*
423 421 """
424 422 added, modified, deleted = self._changes_cache
425 423 return sorted({
426 424 'added': list(added),
427 425 'modified': list(modified),
428 426 'deleted': list(deleted)}[status]
429 427 )
430 428
431 429 @LazyProperty
432 430 def added(self):
433 431 """
434 432 Returns list of added ``FileNode`` objects.
435 433 """
436 434 if not self.parents:
437 435 return list(self._get_file_nodes())
438 436 return AddedFileNodesGenerator(self.added_paths, self)
439 437
440 438 @LazyProperty
441 439 def added_paths(self):
442 440 return [n for n in self._get_paths_for_status('added')]
443 441
444 442 @LazyProperty
445 443 def changed(self):
446 444 """
447 445 Returns list of modified ``FileNode`` objects.
448 446 """
449 447 if not self.parents:
450 448 return []
451 449 return ChangedFileNodesGenerator(self.changed_paths, self)
452 450
453 451 @LazyProperty
454 452 def changed_paths(self):
455 453 return [n for n in self._get_paths_for_status('modified')]
456 454
457 455 @LazyProperty
458 456 def removed(self):
459 457 """
460 458 Returns list of removed ``FileNode`` objects.
461 459 """
462 460 if not self.parents:
463 461 return []
464 462 return RemovedFileNodesGenerator(self.removed_paths, self)
465 463
466 464 @LazyProperty
467 465 def removed_paths(self):
468 466 return [n for n in self._get_paths_for_status('deleted')]
469 467
470 468 def _get_submodule_url(self, submodule_path):
471 469 git_modules_path = '.gitmodules'
472 470
473 471 if self._submodules is None:
474 472 self._submodules = {}
475 473
476 474 try:
477 475 submodules_node = self.get_node(git_modules_path)
478 476 except NodeDoesNotExistError:
479 477 return None
480 478
481 479 # ConfigParser fails if there are whitespaces, also it needs an iterable
482 480 # file like content
483 481 def iter_content(_content):
484 482 for line in _content.splitlines():
485 483 yield line
486 484
487 485 parser = configparser.RawConfigParser()
488 486 parser.read_file(iter_content(submodules_node.content))
489 487
490 488 for section in parser.sections():
491 489 path = parser.get(section, 'path')
492 490 url = parser.get(section, 'url')
493 491 if path and url:
494 492 self._submodules[path.strip('/')] = url
495 493
496 494 return self._submodules.get(submodule_path.strip('/'))
@@ -1,95 +1,96 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG inmemory module
23 23 """
24 24
25 25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
26 26 from rhodecode.lib.utils import safe_str
27 27 from rhodecode.lib.vcs.backends.base import BaseInMemoryCommit
28 28 from rhodecode.lib.vcs.exceptions import RepositoryError
29 29
30 30
class MercurialInMemoryCommit(BaseInMemoryCommit):

    def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created `MercurialCommit`. Updates repository's
        `commit_ids`.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param date: `datetime.datetime` instance. Defaults to
            ``datetime.datetime.now()``.
        :param branch: Optional. Branch name as unicode. Will use the backend's
            default if not given.

        :raises `RepositoryError`: if any error occurs while committing
        """
        self.check_integrity(parents)

        if not isinstance(message, str) or not isinstance(author, str):
            # TODO: johbo: Should be a TypeError
            raise RepositoryError(
                f'Given message and author needs to be '
                f'an <str> instance got {type(message)} & {type(author)} instead'
            )

        if branch is None:
            branch = self.repository.DEFAULT_BRANCH_NAME
        kwargs['branch'] = safe_str(branch)

        message = safe_str(message)
        author = safe_str(author)

        parent_ids = [p.raw_id if p else None for p in self.parents]

        ENCODING = "UTF-8"

        # binary nodes keep their raw content; text is encoded on the way out
        updated = [{
            'path': node.path,
            'content': node.content if node.is_binary else node.content.encode(ENCODING),
            'mode': node.mode,
        } for node in self.added + self.changed]

        removed = [node.path for node in self.removed]

        date, tz = date_to_timestamp_plus_offset(date)

        commit_id = self.repository._remote.commitctx(
            message=message, parents=parent_ids,
            commit_time=date, commit_timezone=tz, user=author,
            files=self.get_paths(), extra=kwargs, removed=removed,
            updated=updated)
        self.repository.append_commit_id(commit_id)

        # refresh branch map, fetch the new tip and clear the in-memory state
        self.repository.branches = self.repository._get_branches()
        tip = self.repository.get_commit(commit_id)
        self.reset()
        return tip
@@ -1,1013 +1,1013 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import configparser
28 28 import urllib.request, urllib.parse, urllib.error
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from collections import OrderedDict
33 33 from rhodecode.lib.datelib import (
34 34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 35 from rhodecode.lib.utils import safe_unicode, safe_str
36 36 from rhodecode.lib.utils2 import CachedProperty
37 37 from rhodecode.lib.vcs import connection, exceptions
38 38 from rhodecode.lib.vcs.backends.base import (
39 39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47 47
# shortcut for hex-encoding the binary hashes returned by the vcsserver
hexlify = binascii.hexlify
# Mercurial's all-zero "null" changeset id; tagging with it removes a tag.
# NOTE(review): kept as a str of NUL chars here, not bytes -- confirm the
# remote tag() call expects text on python3
nullid = "\0" * 20

log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Raises RepositoryError if repository could not be find at the given
        ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
            it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param do_workspace_checkout=False: sets update of working copy after
            making a clone
        :param with_wire: optional dict of remote-wire options; defaults to
            disabling the remote-side cache
        :param bare: not used, compatible with other VCS
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present
        # because sometimes we init the repos with config we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '1')])
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        # may create or clone the repository on disk, depending on the flags
        self._init_repo(create, src_url, do_workspace_checkout)

        # caches
        self._commit_ids = {}
88 88
89 89 @LazyProperty
90 90 def _remote(self):
91 91 repo_id = self.path
92 92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93 93
94 94 @CachedProperty
95 95 def commit_ids(self):
96 96 """
97 97 Returns list of commit ids, in ascending order. Being lazy
98 98 attribute allows external tools to inject shas from cache.
99 99 """
100 100 commit_ids = self._get_all_commit_ids()
101 101 self._rebuild_cache(commit_ids)
102 102 return commit_ids
103 103
104 104 def _rebuild_cache(self, commit_ids):
105 105 self._commit_ids = dict((commit_id, index)
106 106 for index, commit_id in enumerate(commit_ids))
107 107
108 108 @CachedProperty
109 109 def branches(self):
110 110 return self._get_branches()
111 111
112 112 @CachedProperty
113 113 def branches_closed(self):
114 114 return self._get_branches(active=False, closed=True)
115 115
116 116 @CachedProperty
117 117 def branches_all(self):
118 118 all_branches = {}
119 119 all_branches.update(self.branches)
120 120 all_branches.update(self.branches_closed)
121 121 return all_branches
122 122
123 123 def _get_branches(self, active=True, closed=False):
124 124 """
125 125 Gets branches for this repository
126 126 Returns only not closed active branches by default
127 127
128 128 :param active: return also active branches
129 129 :param closed: return also closed branches
130 130
131 131 """
132 132 if self.is_empty():
133 133 return {}
134 134
135 135 def get_name(ctx):
136 136 return ctx[0]
137 137
138 138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 139 self._remote.branches(active, closed).items()]
140 140
141 141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142 142
143 143 @CachedProperty
144 144 def tags(self):
145 145 """
146 146 Gets tags for this repository
147 147 """
148 148 return self._get_tags()
149 149
150 150 def _get_tags(self):
151 151 if self.is_empty():
152 152 return {}
153 153
154 154 def get_name(ctx):
155 155 return ctx[0]
156 156
157 157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 158 self._remote.tags().items()]
159 159
160 160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161 161
    def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)

        commit = self.get_commit(commit_id=commit_id)
        local = kwargs.setdefault('local', False)

        if message is None:
            message = "Added tag %s for commit %s" % (name, commit.short_id)

        date, tz = date_to_timestamp_plus_offset(date)

        # order matters: tag on the remote first, then drop the remote's vcs
        # cache so the re-read of `tags` below sees the new tag
        self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self._invalidate_prop_cache('tags')
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)
193 193
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        if message is None:
            message = "Removed tag %s" % name
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        # tagging a name with the null changeset id removes that tag;
        # invalidate both the remote vcs cache and the cached `tags` prop
        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        self._invalidate_prop_cache('tags')
217 217
218 218 @LazyProperty
219 219 def bookmarks(self):
220 220 """
221 221 Gets bookmarks for this repository
222 222 """
223 223 return self._get_bookmarks()
224 224
225 225 def _get_bookmarks(self):
226 226 if self.is_empty():
227 227 return {}
228 228
229 229 def get_name(ctx):
230 230 return ctx[0]
231 231
232 232 _bookmarks = [
233 233 (safe_unicode(n), hexlify(h)) for n, h in
234 234 self._remote.bookmarks().items()]
235 235
236 236 return OrderedDict(sorted(_bookmarks, key=get_name))
237 237
    def _get_all_commit_ids(self):
        """Return ids of all changesets, ascending."""
        # only 'visible' (non-hidden, non-obsolete) changesets are listed
        return self._remote.get_all_commit_ids('visible')
240 240
241 241 def get_diff(
242 242 self, commit1, commit2, path='', ignore_whitespace=False,
243 243 context=3, path1=None):
244 244 """
245 245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 246 `commit2` since `commit1`.
247 247
248 248 :param commit1: Entry point from which diff is shown. Can be
249 249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 250 the changes since empty state of the repository until `commit2`
251 251 :param commit2: Until which commit changes should be shown.
252 252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 253 changes. Defaults to ``False``.
254 254 :param context: How many lines before/after changed lines should be
255 255 shown. Defaults to ``3``.
256 256 """
257 257 self._validate_diff_commits(commit1, commit2)
258 258 if path1 is not None and path1 != path:
259 259 raise ValueError("Diff of two different paths not supported.")
260 260
261 261 if path:
262 262 file_filter = [self.path, path]
263 263 else:
264 264 file_filter = None
265 265
266 266 diff = self._remote.diff(
267 267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 268 opt_git=True, opt_ignorews=ignore_whitespace,
269 269 context=context)
270 270 return MercurialDiff(diff)
271 271
    def strip(self, commit_id, branch=None):
        """Strip ``commit_id`` (and its descendants) and return the new commit count."""
        # `branch` is accepted for API compatibility but unused here
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        # clear cache
        self._invalidate_prop_cache('commit_ids')

        return len(self.commit_ids)
280 280
281 281 def verify(self):
282 282 verify = self._remote.verify()
283 283
284 284 self._remote.invalidate_vcs_cache()
285 285 return verify
286 286
287 287 def hg_update_cache(self):
288 288 update_cache = self._remote.hg_update_cache()
289 289
290 290 self._remote.invalidate_vcs_cache()
291 291 return update_cache
292 292
293 293 def hg_rebuild_fn_cache(self):
294 294 update_cache = self._remote.hg_rebuild_fn_cache()
295 295
296 296 self._remote.invalidate_vcs_cache()
297 297 return update_cache
298 298
299 299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 301 self, commit_id1, repo2, commit_id2)
302 302
303 303 if commit_id1 == commit_id2:
304 304 return commit_id1
305 305
306 306 ancestors = self._remote.revs_from_revspec(
307 307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
308 308 other_path=repo2.path)
309 309
310 310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311 311
312 312 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 313 return ancestor_id
314 314
315 315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
316 316 if commit_id1 == commit_id2:
317 317 commits = []
318 318 else:
319 319 if merge:
320 320 indexes = self._remote.revs_from_revspec(
321 321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
322 322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
323 323 else:
324 324 indexes = self._remote.revs_from_revspec(
325 325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
326 326 commit_id1, other_path=repo2.path)
327 327
328 328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
329 329 for idx in indexes]
330 330
331 331 return commits
332 332
333 333 @staticmethod
334 334 def check_url(url, config):
335 335 """
336 336 Function will check given url and try to verify if it's a valid
337 337 link. Sometimes it may happened that mercurial will issue basic
338 338 auth request that can cause whole API to hang when used from python
339 339 or other external calls.
340 340
341 341 On failures it'll raise urllib2.HTTPError, exception is also thrown
342 342 when the return code is non 200
343 343 """
344 344 # check first if it's not an local url
345 345 if os.path.isdir(url) or url.startswith('file:'):
346 346 return True
347 347
348 348 # Request the _remote to verify the url
349 349 return connection.Hg.check_url(url, config.serialize())
350 350
351 351 @staticmethod
352 352 def is_valid_repository(path):
353 353 return os.path.isdir(os.path.join(path, '.hg'))
354 354
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `do_workspace_checkout` flag.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            # validate the source before cloning; check_url raises on bad urls
            url = str(self._get_url(src_url))
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)
            self._remote.localrepository(create)
383 383
384 384 @LazyProperty
385 385 def in_memory_commit(self):
386 386 return MercurialInMemoryCommit(self)
387 387
388 388 @LazyProperty
389 389 def description(self):
390 390 description = self._remote.get_config_value(
391 391 'web', 'description', untrusted=True)
392 392 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
393 393
394 394 @LazyProperty
395 395 def contact(self):
396 396 contact = (
397 397 self._remote.get_config_value("web", "contact") or
398 398 self._remote.get_config_value("ui", "username"))
399 399 return safe_unicode(contact or self.DEFAULT_CONTACT)
400 400
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # empty repository: fall back to the control dir's mtime on disk
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412 412
413 413 def _get_fs_mtime(self):
414 414 # fallback to filesystem
415 415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
416 416 st_path = os.path.join(self.path, '.hg', "store")
417 417 if os.path.exists(cl_path):
418 418 return os.stat(cl_path).st_mtime
419 419 else:
420 420 return os.stat(st_path).st_mtime
421 421
422 422 def _get_url(self, url):
423 423 """
424 424 Returns normalized url. If schema is not given, would fall
425 425 to filesystem
426 426 (``file:///``) schema.
427 427 """
428 428 url = url.encode('utf8')
429 429 if url != 'default' and '://' not in url:
430 430 url = "file:" + urllib.request.pathname2url(url)
431 431 return url
432 432
433 433 def get_hook_location(self):
434 434 """
435 435 returns absolute path to location where hooks are stored
436 436 """
437 437 return os.path.join(self.path, '.hg', '.hgrc')
438 438
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False, reference_obj=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        When neither is given, resolves to "tip".

        :raises EmptyRepositoryError: when the repository has no commits
        :raises CommitDoesNotExistError: when the remote lookup fails

        NOTE(review): translate_tag, maybe_unreachable and reference_obj are
        accepted for API compatibility with other backends but unused here.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                # not cached: fall through to the remote lookup below
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize a negative index to its positive position
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                # out of range: let the remote lookup raise a proper error
                commit_id = commit_idx
        else:
            commit_id = "tip"

        #TODO: decide if we pass bytes or str into lookup ?
        # if isinstance(commit_id, unicode):
        #     commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit {} does not exist for `{}`".format(
                *map(safe_str, [commit_id, self.name]))
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
482 482
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
          ``end`` could not be found.

        NOTE(review): translate_tags is accepted for API compatibility with
        other backends but unused here.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        # currently always False, so the plain branch() revspec is used
        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end position inclusive for the slice below
            end_pos += 1

        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            # a revspec filter needs a remote evaluation; the result is a
            # list of indexes, hence the index-based generator
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
565 565
    def pull(self, url, commit_ids=None):
        """
        Pull changes from external location.

        :param commit_ids: Optional. Can be set to a list of commit ids
            which shall be pulled from the other repository.
        """
        url = self._get_url(url)
        self._remote.pull(url, commit_ids=commit_ids)
        # pulled changesets invalidate the remote-side vcs cache
        self._remote.invalidate_vcs_cache()
576 576
577 577 def fetch(self, url, commit_ids=None):
578 578 """
579 579 Backward compatibility with GIT fetch==pull
580 580 """
581 581 return self.pull(url, commit_ids=commit_ids)
582 582
583 583 def push(self, url):
584 584 url = self._get_url(url)
585 585 self._remote.sync_push(url)
586 586
587 587 def _local_clone(self, clone_path):
588 588 """
589 589 Create a local clone of the current repo.
590 590 """
591 591 self._remote.clone(self.path, clone_path, update_after_clone=True,
592 592 hooks=False)
593 593
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param clean: discard uncommitted changes during the checkout
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)
600 600
601 601 def _identify(self):
602 602 """
603 603 Return the current state of the working directory.
604 604 """
605 605 return self._remote.identify().strip().rstrip('+')
606 606
607 607 def _heads(self, branch=None):
608 608 """
609 609 Return the commit ids of the repository heads.
610 610 """
611 611 return self._remote.heads(branch=branch).strip().split(' ')
612 612
    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.

        Thin wrapper over the remote ``ancestor`` call.
        """
        return self._remote.ancestor(revision1, revision2)
618 618
619 619 def _local_push(
620 620 self, revision, repository_path, push_branches=False,
621 621 enable_hooks=False):
622 622 """
623 623 Push the given revision to the specified repository.
624 624
625 625 :param push_branches: allow to create branches in the target repo.
626 626 """
627 627 self._remote.push(
628 628 [revision], repository_path, hooks=enable_hooks,
629 629 push_branches=push_branches)
630 630
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param target_ref: reference the working dir is updated to first.
        :param merge_message: message used for a real merge commit.
        :param source_ref: reference being merged in.
        :param use_rebase: rebase source onto target instead of merging.
        :param close_commit_id: optional branch-close commit; when given it
            replaces the source tip as the commit to integrate.
        :raises UnresolvedFilesInRepo: when merge/rebase leaves conflicts.
        """
        source_ref_commit_id = source_ref.commit_id
        target_ref_commit_id = target_ref.commit_id

        # update our workdir to target ref, for proper merge
        self._update(target_ref_commit_id, clean=True)

        ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if close_commit_id:
            # NOTE(marcink): if we get the close commit, this is our new source
            # which will include the close commit itself.
            source_ref_commit_id = close_commit_id

        if ancestor == source_ref_commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref_commit_id, False

        elif ancestor == target_ref_commit_id and is_the_same_branch:
            # Fast-forward: target is an ancestor of source on the same
            # branch, so the source tip itself is the result.
            # In this case we should force a commit message
            return source_ref_commit_id, True

        unresolved = None
        if use_rebase:
            try:
                # bookmark the source tip so we can update to the rebased head
                bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref_commit_id, dest=target_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
711 711
712 712 def _local_close(self, target_ref, user_name, user_email,
713 713 source_ref, close_message=''):
714 714 """
715 715 Close the branch of the given source_revision
716 716
717 717 Returns the commit id of the close and a boolean indicating if the
718 718 commit needs to be pushed.
719 719 """
720 720 self._update(source_ref.commit_id)
721 721 message = close_message or "Closing branch: `{}`".format(source_ref.name)
722 722 try:
723 723 self._remote.commit(
724 724 message=safe_str(message),
725 725 username=safe_str('%s <%s>' % (user_name, user_email)),
726 726 close_branch=True)
727 727 self._remote.invalidate_vcs_cache()
728 728 return self._identify(), True
729 729 except RepositoryError:
730 730 # Cleanup any commit leftovers
731 731 self._remote.update(clean=True)
732 732 raise
733 733
734 734 def _is_the_same_branch(self, target_ref, source_ref):
735 735 return (
736 736 self._get_branch_name(target_ref) ==
737 737 self._get_branch_name(source_ref))
738 738
739 739 def _get_branch_name(self, ref):
740 740 if ref.type == 'branch':
741 741 return ref.name
742 742 return self._remote.ctx_branch(ref.commit_id)
743 743
744 744 def _maybe_prepare_merge_workspace(
745 745 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
746 746 shadow_repository_path = self._get_shadow_repository_path(
747 747 self.path, repo_id, workspace_id)
748 748 if not os.path.exists(shadow_repository_path):
749 749 self._local_clone(shadow_repository_path)
750 750 log.debug(
751 751 'Prepared shadow repository in %s', shadow_repository_path)
752 752
753 753 return shadow_repository_path
754 754
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` from ``source_repo`` into ``target_ref`` of this
        repository, going through a dedicated shadow repository.

        :return: ``MergeResponse`` describing whether the merge is possible,
            whether it succeeded, the resulting reference and a failure
            reason plus metadata for UI display.
        """

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # The target commit must be a repository head, otherwise pushing the
        # merge would create an extra head.
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            # Mercurial branches may have multiple heads; refuse to merge into
            # an ambiguous target and report a (capped) list of the heads.
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                heads_all = self._heads(target_ref.name)
                max_heads = 10
                if len(heads_all) > max_heads:
                    heads = '\n,'.join(
                        heads_all[:max_heads] +
                        ['and {} more.'.format(len(heads_all)-max_heads)])
                else:
                    heads = '\n,'.join(heads_all)
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        # clone-on-demand shadow repo where the merge actually happens
        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:

            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase,
                    close_commit_id=close_commit_id, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                if isinstance(e, UnresolvedFilesInRepo):
                    # cap the conflict list so the metadata stays readable
                    all_conflicts = list(e.args[0])
                    max_conflicts = 20
                    if len(all_conflicts) > max_conflicts:
                        conflicts = all_conflicts[:max_conflicts] \
                            + ['and {} more.'.format(len(all_conflicts)-max_conflicts)]
                    else:
                        conflicts = all_conflicts
                    metadata['unresolved_files'] = \
                        '\n* conflict: ' + \
                        ('\n * conflict: '.join(conflicts))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    # hooks must run for the real push into origin
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
923 923
924 924 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
925 925 config = self.config.copy()
926 926 if not enable_hooks:
927 927 config.clear_section('hooks')
928 928 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
929 929
930 930 def _validate_pull_reference(self, reference):
931 931 if not (reference.name in self.bookmarks or
932 932 reference.name in self.branches or
933 933 self.get_commit(reference.commit_id)):
934 934 raise CommitDoesNotExistError(
935 935 'Unknown branch, bookmark or commit id')
936 936
937 937 def _local_pull(self, repository_path, reference):
938 938 """
939 939 Fetch a branch, bookmark or commit from a local repository.
940 940 """
941 941 repository_path = os.path.abspath(repository_path)
942 942 if repository_path == self.path:
943 943 raise ValueError('Cannot pull from the same repository')
944 944
945 945 reference_type_to_option_name = {
946 946 'book': 'bookmark',
947 947 'branch': 'branch',
948 948 }
949 949 option_name = reference_type_to_option_name.get(
950 950 reference.type, 'revision')
951 951
952 952 if option_name == 'revision':
953 953 ref = reference.commit_id
954 954 else:
955 955 ref = reference.name
956 956
957 957 options = {option_name: [ref]}
958 958 self._remote.pull_cmd(repository_path, hooks=False, **options)
959 959 self._remote.invalidate_vcs_cache()
960 960
961 961 def bookmark(self, bookmark, revision=None):
962 if isinstance(bookmark, unicode):
962 if isinstance(bookmark, str):
963 963 bookmark = safe_str(bookmark)
964 964 self._remote.bookmark(bookmark, revision=revision)
965 965 self._remote.invalidate_vcs_cache()
966 966
    def get_path_permissions(self, username):
        """
        Build a path permission checker for ``username`` from the optional
        ``.hg/hgacl`` file inside this repository.

        :return: a checker created by
            ``BasePathPermissionChecker.create_from_patterns`` or ``None``
            when no hgacl file exists.
        :raises exceptions.RepositoryRequirementError: when the hgacl file
            exists but cannot be read or parsed.
        """
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            # NOTE: closes over ``hgacl``, which is bound below before this
            # helper is first called.
            svalue = None
            # user-specific entries win over 'default'; the legacy
            # 'narrowacl' section is checked before 'narrowhgacl'
            for section, option in [
                ('narrowacl', username + suffix),
                ('narrowacl', 'default' + suffix),
                ('narrowhgacl', username + suffix),
                ('narrowhgacl', 'default' + suffix)
            ]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                # a literal (non-glob) directory also covers everything below it
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None
1007 1007
1008 1008
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """CollectionGenerator that resolves commits by index rather than id."""

    def _commit_factory(self, commit_id):
        """Return the commit at index ``commit_id`` with the configured pre_load."""
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,253 +1,253 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SVN commit module
23 23 """
24 24
25 25
26 26 import dateutil.parser
27 27 from zope.cachedescriptors.property import Lazy as LazyProperty
28 28
29 29 from rhodecode.lib.utils import safe_str, safe_unicode
30 30 from rhodecode.lib.vcs import nodes, path as vcspath
31 31 from rhodecode.lib.vcs.backends import base
32 32 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
33 33
34 34
35 35 _SVN_PROP_TRUE = '*'
36 36
37 37
class SubversionCommit(base.BaseCommit):
    """
    Subversion specific implementation of commits

    .. attribute:: branch

       The Subversion backend does not support to assign branches to
       specific commits. This attribute has always the value `None`.

    """

    def __init__(self, repository, commit_id):
        self.repository = repository
        # zero-based position of this commit in the repository commit list
        self.idx = self.repository._get_commit_idx(commit_id)
        # svn revision numbers are 1-based
        self._svn_rev = self.idx + 1
        self._remote = repository._remote
        # TODO: handling of raw_id should be a method on repository itself,
        # which knows how to translate commit index and commit id
        self.raw_id = commit_id
        self.short_id = commit_id
        # NOTE(review): this instance attribute shadows the ``id``
        # LazyProperty defined below, so ``id`` is 'r<rev>' here rather than
        # raw_id — confirm which is intended.
        self.id = 'r%s' % (commit_id, )

        # TODO: Implement the following placeholder attributes
        self.nodes = {}
        self.tags = []

    @property
    def author(self):
        # svn:author revision property, decoded to unicode
        return safe_unicode(self._properties.get('svn:author'))

    @property
    def date(self):
        # parsed from the svn:date revision property; naive datetime
        return _date_from_svn_properties(self._properties)

    @property
    def message(self):
        # svn:log revision property, decoded to unicode
        return safe_unicode(self._properties.get('svn:log'))

    @LazyProperty
    def _properties(self):
        # all revision properties of this commit, fetched once and cached
        return self._remote.revision_properties(self._svn_rev)

    @LazyProperty
    def parents(self):
        # svn history is linear: the parent is the previous revision (if any)
        parent_idx = self.idx - 1
        if parent_idx >= 0:
            parent = self.repository.get_commit(commit_idx=parent_idx)
            return [parent]
        return []

    @LazyProperty
    def children(self):
        # svn history is linear: the child is the next revision (if any)
        child_idx = self.idx + 1
        if child_idx < len(self.repository.commit_ids):
            child = self.repository.get_commit(commit_idx=child_idx)
            return [child]
        return []

    def get_file_mode(self, path):
        # Note: Subversion flags files which are executable with a special
        # property `svn:executable` which is set to the value ``"*"``.
        if self._get_file_property(path, 'svn:executable') == _SVN_PROP_TRUE:
            return base.FILEMODE_EXECUTABLE
        else:
            return base.FILEMODE_DEFAULT

    def is_link(self, path):
        # Note: Subversion has a flag for special files, the content of the
        # file contains the type of that file.
        if self._get_file_property(path, 'svn:special') == _SVN_PROP_TRUE:
            return self.get_file_content(path).startswith('link')
        return False

    def is_node_binary(self, path):
        # binary detection is delegated to the remote layer
        path = self._fix_path(path)
        return self._remote.is_binary(self._svn_rev, safe_str(path))

    def _get_file_property(self, path, name):
        # fetch a single node property of ``path`` at this revision
        file_properties = self._remote.node_properties(
            safe_str(path), self._svn_rev)
        return file_properties.get(name)

    def get_file_content(self, path):
        # full file content at this revision
        path = self._fix_path(path)
        return self._remote.get_file_content(safe_str(path), self._svn_rev)

    def get_file_content_streamed(self, path):
        # streaming variant of get_file_content for large files
        path = self._fix_path(path)
        stream_method = getattr(self._remote, 'stream:get_file_content')
        return stream_method(safe_str(path), self._svn_rev)

    def get_file_size(self, path):
        # size in bytes of ``path`` at this revision
        path = self._fix_path(path)
        return self._remote.get_file_size(safe_str(path), self._svn_rev)

    def get_path_history(self, path, limit=None, pre_load=None):
        # commits (newest history entries from the remote) that touched ``path``
        path = safe_str(self._fix_path(path))
        history = self._remote.node_history(path, self._svn_rev, limit)
        return [
            self.repository.get_commit(commit_id=str(svn_rev))
            for svn_rev in history]

    def get_file_annotate(self, path, pre_load=None):
        # yields (line_no, commit_id, commit_loader, line_content) per line
        result = self._remote.file_annotate(safe_str(path), self._svn_rev)

        for zero_based_line_no, svn_rev, content in result:
            commit_id = str(svn_rev)
            line_no = zero_based_line_no + 1
            yield (
                line_no,
                commit_id,
                lambda: self.repository.get_commit(commit_id=commit_id),
                content)

    def get_node(self, path, pre_load=None):
        """Return (and cache) the file/dir node at ``path`` for this commit."""
        path = self._fix_path(path)
        if path not in self.nodes:

            if path == '':
                node = nodes.RootNode(commit=self)
            else:
                node_type = self._remote.get_node_type(
                    safe_str(path), self._svn_rev)
                if node_type == 'dir':
                    node = nodes.DirNode(path, commit=self)
                elif node_type == 'file':
                    node = nodes.FileNode(path, commit=self, pre_load=pre_load)
                else:
                    raise self.no_node_at_path(path)

            self.nodes[path] = node
        return self.nodes[path]

    def get_nodes(self, path):
        """Return the direct child nodes of directory ``path``."""
        if self._get_kind(path) != nodes.NodeKind.DIR:
            raise CommitError(
                "Directory does not exist for commit %s at "
                " '%s'" % (self.raw_id, path))
        path = safe_str(self._fix_path(path))

        path_nodes = []
        for name, kind in self._remote.get_nodes(path, revision=self._svn_rev):
            node_path = vcspath.join(path, name)
            if kind == 'dir':
                node = nodes.DirNode(node_path, commit=self)
            elif kind == 'file':
                node = nodes.FileNode(node_path, commit=self)
            else:
                raise ValueError("Node kind %s not supported." % (kind, ))
            self.nodes[node_path] = node
            path_nodes.append(node)

        return path_nodes

    def _get_kind(self, path):
        # map the remote node type onto NodeKind, or fail for missing paths
        path = self._fix_path(path)
        kind = self._remote.get_node_type(path, self._svn_rev)
        if kind == 'file':
            return nodes.NodeKind.FILE
        elif kind == 'dir':
            return nodes.NodeKind.DIR
        else:
            raise CommitError(
                "Node does not exist at the given path '%s'" % (path, ))

    @LazyProperty
    def _changes_cache(self):
        # dict of change kind ('added'/'changed'/'removed') -> paths,
        # fetched once per commit
        return self._remote.revision_changes(self._svn_rev)

    @LazyProperty
    def affected_files(self):
        # union of all paths touched by this commit, regardless of kind
        changed_files = set()
        for files in self._changes_cache.values():
            changed_files.update(files)
        return list(changed_files)

    @LazyProperty
    def id(self):
        # NOTE(review): shadowed by the ``self.id`` assignment in __init__;
        # see the note there.
        return self.raw_id

    @property
    def added(self):
        return nodes.AddedFileNodesGenerator(self.added_paths, self)

    @LazyProperty
    def added_paths(self):
        # paths added by this commit
        return [n for n in self._changes_cache['added']]

    @property
    def changed(self):
        return nodes.ChangedFileNodesGenerator(self.changed_paths, self)

    @LazyProperty
    def changed_paths(self):
        # paths modified by this commit
        return [n for n in self._changes_cache['changed']]

    @property
    def removed(self):
        return nodes.RemovedFileNodesGenerator(self.removed_paths, self)

    @LazyProperty
    def removed_paths(self):
        # paths removed by this commit
        return [n for n in self._changes_cache['removed']]
241 241
242 242
def _date_from_svn_properties(properties):
    """
    Parses the date out of given svn properties.

    :return: :class:`datetime.datetime` instance. The object is naive.
    """

    aware_date = dateutil.parser.parse(properties.get('svn:date'))
    # timezone conversion is deliberately disabled; strip tzinfo and return
    # the parsed moment as a naive datetime
    # final_date = aware_date.astimezone(dateutil.tz.tzlocal())
    return aware_date.replace(tzinfo=None)
@@ -1,717 +1,716 b''
1 1
2 2 ; #########################################
3 3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 4 ; #########################################
5 5
6 6 [DEFAULT]
7 7 ; Debug flag sets all loggers to debug, and enables request tracking
8 8 debug = true
9 9
10 10 ; ########################################################################
11 11 ; EMAIL CONFIGURATION
12 12 ; These settings will be used by the RhodeCode mailing system
13 13 ; ########################################################################
14 14
15 15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 16 #email_prefix = [RhodeCode]
17 17
18 18 ; email FROM address all mails will be sent
19 19 #app_email_from = rhodecode-noreply@localhost
20 20
21 21 #smtp_server = mail.server.com
22 22 #smtp_username =
23 23 #smtp_password =
24 24 #smtp_port =
25 25 #smtp_use_tls = false
26 26 #smtp_use_ssl = true
27 27
28 28 [server:main]
29 29 ; COMMON HOST/IP CONFIG
30 30 host = 0.0.0.0
31 31 port = 5000
32 32
33 33
34 34 ; ###########################
35 35 ; GUNICORN APPLICATION SERVER
36 36 ; ###########################
37 37
38 38 ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
39 39
40 40 ; Module to use, this setting shouldn't be changed
41 41 use = egg:gunicorn#main
42 42
43 43 ; Sets the number of process workers. More workers means more concurrent connections
44 44 ; RhodeCode can handle at the same time. Each additional worker also increases
45 45 ; memory usage as each has its own set of caches.
46 46 ; Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more
47 47 ; than 8-10 unless for really big deployments, e.g. 700-1000 users.
48 48 ; `instance_id = *` must be set in the [app:main] section below (which is the default)
49 49 ; when using more than 1 worker.
50 50 #workers = 2
51 51
52 52 ; Gunicorn access log level
53 53 #loglevel = info
54 54
55 55 ; Process name visible in process list
56 56 #proc_name = rhodecode
57 57
58 58 ; Type of worker class, one of `sync`, `gevent`
59 59 ; Recommended type is `gevent`
60 60 #worker_class = gevent
61 61
62 62 ; The maximum number of simultaneous clients per worker. Valid only for gevent
63 63 #worker_connections = 10
64 64
65 65 ; Max number of requests that worker will handle before being gracefully restarted.
66 66 ; Prevents memory leaks, jitter adds variability so not all workers are restarted at once.
67 67 #max_requests = 1000
68 68 #max_requests_jitter = 30
69 69
70 70 ; Amount of time a worker can spend with handling a request before it
71 71 ; gets killed and restarted. By default set to 21600 (6hrs)
72 72 ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
73 73 #timeout = 21600
74 74
75 75 ; The maximum size of HTTP request line in bytes.
76 76 ; 0 for unlimited
77 77 #limit_request_line = 0
78 78
79 79
80 80 ; Prefix middleware for RhodeCode.
81 81 ; recommended when using proxy setup.
82 82 ; allows to set RhodeCode under a prefix in server.
83 83 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
84 84 ; And set your prefix like: `prefix = /custom_prefix`
85 85 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
86 86 ; to make your cookies only work on prefix url
87 87 [filter:proxy-prefix]
88 88 use = egg:PasteDeploy#prefix
89 89 prefix = /
90 90
91 91 [app:main]
92 92 ; The %(here)s variable will be replaced with the absolute path of parent directory
93 93 ; of this file
94 94 ; Each option in the app:main can be override by an environmental variable
95 95 ;
96 96 ;To override an option:
97 97 ;
98 98 ;RC_<KeyName>
99 99 ;Everything should be uppercase, . and - should be replaced by _.
100 100 ;For example, if you have these configuration settings:
101 101 ;rc_cache.repo_object.backend = foo
102 102 ;can be overridden by
103 103 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
104 104
105 105 is_test = True
106 106 use = egg:rhodecode-enterprise-ce
107 107
108 108 ; enable proxy prefix middleware, defined above
109 109 #filter-with = proxy-prefix
110 110
111 111
112 112 ## RHODECODE PLUGINS ##
113 113 rhodecode.includes = rhodecode.api
114 114
115 115 # api prefix url
116 116 rhodecode.api.url = /_admin/api
117 117
118 118
119 119 ## END RHODECODE PLUGINS ##
120 120
121 121 ## encryption key used to encrypt social plugin tokens,
122 122 ## remote_urls with credentials etc, if not set it defaults to
123 123 ## `beaker.session.secret`
124 124 #rhodecode.encrypted_values.secret =
125 125
126 126 ; decryption strict mode (enabled by default). It controls if decryption raises
127 127 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
128 128 #rhodecode.encrypted_values.strict = false
129 129
130 130 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
131 131 ; fernet is safer, and we strongly recommend switching to it.
132 132 ; Due to backward compatibility aes is used as default.
133 133 #rhodecode.encrypted_values.algorithm = fernet
134 134
135 135 ; Return gzipped responses from RhodeCode (static files/application)
136 136 gzip_responses = false
137 137
138 138 ; Auto-generate javascript routes file on startup
139 139 generate_js_files = false
140 140
141 141 ; System global default language.
142 142 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
143 143 lang = en
144 144
145 145 ; Perform a full repository scan and import on each server start.
146 146 ; Setting this to true could lead to very long startup time.
147 147 startup.import_repos = true
148 148
149 149 ; Uncomment and set this path to use archive download cache.
150 150 ; Once enabled, generated archives will be cached at this location
151 151 ; and served from the cache during subsequent requests for the same archive of
152 152 ; the repository.
153 153 #archive_cache_dir = /tmp/tarballcache
154 154
155 155 ; URL at which the application is running. This is used for Bootstrapping
156 156 ; requests in context when no web request is available. Used in ishell, or
157 157 ; SSH calls. Set this for events to receive proper url for SSH calls.
158 158 app.base_url = http://rhodecode.local
159 159
160 160 ; Unique application ID. Should be a random unique string for security.
161 161 app_instance_uuid = rc-production
162 162
163 163 ## cut off limit for large diffs (size in bytes)
164 164 cut_off_limit_diff = 1024000
165 165 cut_off_limit_file = 256000
166 166
167 167 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
168 168 vcs_full_cache = false
169 169
170 170 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
171 171 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
172 172 force_https = false
173 173
174 174 ; use Strict-Transport-Security headers
175 175 use_htsts = false
176 176
177 177 ; Set to true if your repos are exposed using the dumb protocol
178 178 git_update_server_info = false
179 179
180 180 ; RSS/ATOM feed options
181 181 rss_cut_off_limit = 256000
182 182 rss_items_per_page = 10
183 183 rss_include_diff = false
184 184
185 185 ; gist URL alias, used to create nicer urls for gist. This should be an
186 186 ; url that does rewrites to _admin/gists/{gistid}.
187 187 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
188 188 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
189 189 gist_alias_url =
190 190
191 191 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
192 192 ; used for access.
193 193 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
194 194 ; came from the logged-in user who owns this authentication token.
195 195 ; Additionally @TOKEN syntax can be used to bound the view to specific
196 196 ; authentication token. Such view would be only accessible when used together
197 197 ; with this authentication token
198 198 ; list of all views can be found under `/_admin/permissions/auth_token_access`
199 199 ; The list should be "," separated and on a single line.
200 200 ; Most common views to enable:
201 201
202 202 # RepoCommitsView:repo_commit_download
203 203 # RepoCommitsView:repo_commit_patch
204 204 # RepoCommitsView:repo_commit_raw
205 205 # RepoCommitsView:repo_commit_raw@TOKEN
206 206 # RepoFilesView:repo_files_diff
207 207 # RepoFilesView:repo_archivefile
208 208 # RepoFilesView:repo_file_raw
209 209 # GistView:*
210 210 api_access_controllers_whitelist =
211 211
212 212 ; Default encoding used to convert from and to unicode
213 213 ; can be also a comma separated list of encoding in case of mixed encodings
214 214 default_encoding = UTF-8
215 215
216 216 ; instance-id prefix
217 217 ; a prefix key for this instance used for cache invalidation when running
218 218 ; multiple instances of RhodeCode, make sure it's globally unique for
219 219 ; all running RhodeCode instances. Leave empty if you don't use it
220 220 instance_id =
221 221
222 222 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
223 223 ; of an authentication plugin even if it is disabled by its settings.
224 224 ; This could be useful if you are unable to log in to the system due to broken
225 225 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
226 226 ; module to log in again and fix the settings.
227 227 ; Available builtin plugin IDs (hash is part of the ID):
228 228 ; egg:rhodecode-enterprise-ce#rhodecode
229 229 ; egg:rhodecode-enterprise-ce#pam
230 230 ; egg:rhodecode-enterprise-ce#ldap
231 231 ; egg:rhodecode-enterprise-ce#jasig_cas
232 232 ; egg:rhodecode-enterprise-ce#headers
233 233 ; egg:rhodecode-enterprise-ce#crowd
234 234
235 235 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
236 236
237 237 ; Flag to control loading of legacy plugins in py:/path format
238 238 auth_plugin.import_legacy_plugins = true
239 239
240 240 ; alternative return HTTP header for failed authentication. Default HTTP
241 241 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
242 242 ; handling that causing a series of failed authentication calls.
243 243 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
244 244 ; This will be served instead of default 401 on bad authentication
245 245 auth_ret_code =
246 246
247 247 ; use special detection method when serving auth_ret_code, instead of serving
248 248 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
249 249 ; and then serve auth_ret_code to clients
250 250 auth_ret_code_detection = false
251 251
252 252 ; locking return code. When repository is locked return this HTTP code. 2XX
253 253 ; codes don't break the transactions while 4XX codes do
254 254 lock_ret_code = 423
255 255
256 256 ; allows to change the repository location in settings page
257 257 allow_repo_location_change = true
258 258
259 259 ; allows to setup custom hooks in settings page
260 260 allow_custom_hooks_settings = true
261 261
262 262 ## generated license token, goto license page in RhodeCode settings to obtain
263 263 ## new token
264 264 license_token = abra-cada-bra1-rce3
265 265
266 266 ## supervisor connection uri, for managing supervisor and logs.
267 267 supervisor.uri =
268 268 ## supervisord group name/id we only want this RC instance to handle
269 269 supervisor.group_id = dev
270 270
271 271 ## Display extended labs settings
272 272 labs_settings_active = true
273 273
274 274 ; Custom exception store path, defaults to TMPDIR
275 275 ; This is used to store exception from RhodeCode in shared directory
276 276 #exception_tracker.store_path =
277 277
278 278 ; Send email with exception details when it happens
279 279 #exception_tracker.send_email = false
280 280
281 281 ; Comma separated list of recipients for exception emails,
282 282 ; e.g admin@rhodecode.com,devops@rhodecode.com
283 283 ; Can be left empty, then emails will be sent to ALL super-admins
284 284 #exception_tracker.send_email_recipients =
285 285
286 286 ; optional prefix to Add to email Subject
287 287 #exception_tracker.email_prefix = [RHODECODE ERROR]
288 288
289 289 ; File store configuration. This is used to store and serve uploaded files
290 290 file_store.enabled = true
291 291
292 292 ; Storage backend, available options are: local
293 293 file_store.backend = local
294 294
295 295 ; path to store the uploaded binaries
296 296 file_store.storage_path = %(here)s/data/file_store
297 297
298 298
299 299 ; #############
300 300 ; CELERY CONFIG
301 301 ; #############
302 302
303 303 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
304 304
305 305 use_celery = false
306 306
307 307 ; path to store schedule database
308 308 #celerybeat-schedule.path =
309 309
310 310 ; connection url to the message broker (default redis)
311 311 celery.broker_url = redis://localhost:6379/8
312 312
313 313 ; rabbitmq example
314 314 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
315 315
316 316 ; maximum tasks to execute before worker restart
317 317 celery.max_tasks_per_child = 20
318 318
319 319 ; tasks will never be sent to the queue, but executed locally instead.
320 320 celery.task_always_eager = false
321 321
322 322 ; #############
323 323 ; DOGPILE CACHE
324 324 ; #############
325 325
326 326 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
327 327 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
328 328 cache_dir = %(here)s/data
329 329
330 330 ## locking and default file storage for Beaker. Putting this into a ramdisk
331 331 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
332 332 beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data
333 333 beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock
334 334
335 335 beaker.cache.regions = long_term
336 336
337 337 beaker.cache.long_term.type = memory
338 338 beaker.cache.long_term.expire = 36000
339 339 beaker.cache.long_term.key_length = 256
340 340
341 341
342 342 #####################################
343 343 ### DOGPILE CACHE ####
344 344 #####################################
345 345
346 346 ## permission tree cache settings
347 347 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
348 348 rc_cache.cache_perms.expiration_time = 0
349 349 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
350 350
351 351
352 352 ## cache settings for SQL queries
353 353 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
354 354 rc_cache.sql_cache_short.expiration_time = 0
355 355
356 356
357 357 ; ##############
358 358 ; BEAKER SESSION
359 359 ; ##############
360 360
361 361 ; beaker.session.type is type of storage options for the logged users sessions. Current allowed
362 362 ; types are file, ext:redis, ext:database, ext:memcached, and memory (default if not specified).
363 363 ; Fastest ones are Redis and ext:database
364 364 beaker.session.type = file
365 365 beaker.session.data_dir = %(here)s/rc/data/sessions/data
366 366
367 367 ; Redis based sessions
368 368 #beaker.session.type = ext:redis
369 369 #beaker.session.url = redis://127.0.0.1:6379/2
370 370
371 371 ; DB based session, fast, and allows easy management over logged in users
372 372 #beaker.session.type = ext:database
373 373 #beaker.session.table_name = db_session
374 374 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
375 375 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
376 376 #beaker.session.sa.pool_recycle = 3600
377 377 #beaker.session.sa.echo = false
378 378
379 379 beaker.session.key = rhodecode
380 380 beaker.session.secret = test-rc-uytcxaz
381 381 beaker.session.lock_dir = %(here)s/rc/data/sessions/lock
382 382
383 383 ; Secure encrypted cookie. Requires AES and AES python libraries
384 384 ; you must disable beaker.session.secret to use this
385 385 #beaker.session.encrypt_key = key_for_encryption
386 386 #beaker.session.validate_key = validation_key
387 387
388 388 ; Sets session as invalid (also logging out the user) if it has not been
389 389 ; accessed for given amount of time in seconds
390 390 beaker.session.timeout = 2592000
391 391 beaker.session.httponly = true
392 392
393 393 ; Path to use for the cookie. Set to prefix if you use prefix middleware
394 394 #beaker.session.cookie_path = /custom_prefix
395 395
396 396 ; Set https secure cookie
397 397 beaker.session.secure = false
398 398
399 399 ## auto-save the session so that explicit .save() calls are not needed
400 400 beaker.session.auto = false
401 401
402 402 ; default cookie expiration time in seconds, set to `true` to set expire
403 403 ; at browser close
404 404 #beaker.session.cookie_expires = 3600
405 405
406 406 ; #############################
407 407 ; SEARCH INDEXING CONFIGURATION
408 408 ; #############################
409 409
410 410 ; Full text search indexer is available in rhodecode-tools under
411 411 ; `rhodecode-tools index` command
412 412
413 413 ; WHOOSH Backend, doesn't require additional services to run
414 414 ; it works well with a few dozen repos
415 415 search.module = rhodecode.lib.index.whoosh
416 416 search.location = %(here)s/data/index
417 417
418 418 ; ####################
419 419 ; CHANNELSTREAM CONFIG
420 420 ; ####################
421 421
422 422 ; channelstream enables persistent connections and live notification
423 423 ; in the system. It's also used by the chat system
424 424
425 425 channelstream.enabled = false
426 426
427 427 ; server address for channelstream server on the backend
428 428 channelstream.server = 127.0.0.1:9800
429 429
430 430 ; location of the channelstream server from the outside world
431 431 ; use ws:// for http or wss:// for https. This address needs to be handled
432 432 ; by external HTTP server such as Nginx or Apache
433 433 ; see Nginx/Apache configuration examples in our docs
434 434 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
435 435 channelstream.secret = secret
436 436 channelstream.history.location = %(here)s/channelstream_history
437 437
438 438 ; Internal application path that Javascript uses to connect into.
439 439 ; If you use proxy-prefix the prefix should be added before /_channelstream
440 440 channelstream.proxy_path = /_channelstream
441 441
442 442
443 443 ; ##############################
444 444 ; MAIN RHODECODE DATABASE CONFIG
445 445 ; ##############################
446 446
447 447 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
448 448 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
449 449 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
450 450 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
451 451 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
452 452
453 453 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
454 454
455 455 ; see sqlalchemy docs for other advanced settings
456 456 ; print the sql statements to output
457 457 sqlalchemy.db1.echo = false
458 458
459 459 ; recycle the connections after this amount of seconds
460 460 sqlalchemy.db1.pool_recycle = 3600
461 sqlalchemy.db1.convert_unicode = true
462 461
463 462 ; the number of connections to keep open inside the connection pool.
464 463 ; 0 indicates no limit
465 464 #sqlalchemy.db1.pool_size = 5
466 465
467 466 ; The number of connections to allow in connection pool "overflow", that is
468 467 ; connections that can be opened above and beyond the pool_size setting,
469 468 ; which defaults to five.
470 469 #sqlalchemy.db1.max_overflow = 10
471 470
472 471 ; Connection check ping, used to detect broken database connections
473 472 ; could be enabled to better handle cases if MySQL has gone away errors
474 473 #sqlalchemy.db1.ping_connection = true
475 474
476 475 ; ##########
477 476 ; VCS CONFIG
478 477 ; ##########
479 478 vcs.server.enable = true
480 479 vcs.server = localhost:9901
481 480
482 481 ; Web server connectivity protocol, responsible for web based VCS operations
483 482 ; Available protocols are:
484 483 ; `http` - use http-rpc backend (default)
485 484 vcs.server.protocol = http
486 485
487 486 ; Push/Pull operations protocol, available options are:
488 487 ; `http` - use http-rpc backend (default)
489 488 vcs.scm_app_implementation = http
490 489
491 490 ; Push/Pull operations hooks protocol, available options are:
492 491 ; `http` - use http-rpc backend (default)
493 492 vcs.hooks.protocol = http
494 493
495 494 ; Host on which this instance is listening for hooks. If vcsserver is in other location
496 495 ; this should be adjusted.
497 496 vcs.hooks.host = *
498 497
499 498 ; Start VCSServer with this instance as a subprocess, useful for development
500 499 vcs.start_server = false
501 500
502 501 ; List of enabled VCS backends, available options are:
503 502 ; `hg` - mercurial
504 503 ; `git` - git
505 504 ; `svn` - subversion
506 505 vcs.backends = hg, git, svn
507 506
508 507 ; Wait this number of seconds before killing connection to the vcsserver
509 508 vcs.connection_timeout = 3600
510 509
511 510 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
512 511 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
513 512 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
514 513 #vcs.svn.compatible_version = 1.8
515 514
516 515 ; Cache flag to cache vcsserver remote calls locally
517 516 ; It uses cache_region `cache_repo`
518 517 vcs.methods.cache = false
519 518
520 519 ; ####################################################
521 520 ; Subversion proxy support (mod_dav_svn)
522 521 ; Maps RhodeCode repo groups into SVN paths for Apache
523 522 ; ####################################################
524 523
525 524 ; Enable or disable the config file generation.
526 525 svn.proxy.generate_config = false
527 526
528 527 ; Generate config file with `SVNListParentPath` set to `On`.
529 528 svn.proxy.list_parent_path = true
530 529
531 530 ; Set location and file name of generated config file.
532 531 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
533 532
534 533 ; alternative mod_dav config template. This needs to be a valid mako template
535 534 ; Example template can be found in the source code:
536 535 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
537 536 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
538 537
539 538 ; Used as a prefix to the `Location` block in the generated config file.
540 539 ; In most cases it should be set to `/`.
541 540 svn.proxy.location_root = /
542 541
543 542 ; Command to reload the mod dav svn configuration on change.
544 543 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
545 544 ; Make sure user who runs RhodeCode process is allowed to reload Apache
546 545 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
547 546
548 547 ; If the timeout expires before the reload command finishes, the command will
549 548 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
550 549 #svn.proxy.reload_timeout = 10
551 550
552 551 ; ####################
553 552 ; SSH Support Settings
554 553 ; ####################
555 554
556 555 ; Defines if a custom authorized_keys file should be created and written on
557 556 ; any change user ssh keys. Setting this to false also disables possibility
558 557 ; of adding SSH keys by users from web interface. Super admins can still
559 558 ; manage SSH Keys.
560 559 ssh.generate_authorized_keyfile = true
561 560
562 561 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
563 562 # ssh.authorized_keys_ssh_opts =
564 563
565 564 ; Path to the authorized_keys file where the generate entries are placed.
566 565 ; It is possible to have multiple key files specified in `sshd_config` e.g.
567 566 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
568 567 ssh.authorized_keys_file_path = %(here)s/rc/authorized_keys_rhodecode
569 568
570 569 ; Command to execute the SSH wrapper. The binary is available in the
571 570 ; RhodeCode installation directory.
572 571 ; e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
573 572 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
574 573
575 574 ; Allow shell when executing the ssh-wrapper command
576 575 ssh.wrapper_cmd_allow_shell = false
577 576
578 577 ; Enables logging, and detailed output send back to the client during SSH
579 578 ; operations. Useful for debugging, shouldn't be used in production.
580 579 ssh.enable_debug_logging = false
581 580
582 581 ; Paths to binary executable, by default they are the names, but we can
583 582 ; override them if we want to use a custom one
584 583 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
585 584 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
586 585 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
587 586
588 587 ; Enables SSH key generator web interface. Disabling this still allows users
589 588 ; to add their own keys.
590 589 ssh.enable_ui_key_generator = true
591 590
592 591 ; Statsd client config, this is used to send metrics to statsd
593 593 ; We recommend setting up statsd_exporter and scraping the metrics using Prometheus
594 593 #statsd.enabled = false
595 594 #statsd.statsd_host = 0.0.0.0
596 595 #statsd.statsd_port = 8125
597 596 #statsd.statsd_prefix =
598 597 #statsd.statsd_ipv6 = false
599 598
600 599 ; configure logging automatically at server startup. Set to false
601 600 ; to use the below custom logging config.
602 601 ; RC_LOGGING_FORMATTER
603 602 ; RC_LOGGING_LEVEL
604 603 ; env variables can control the settings for logging in case of autoconfigure
605 604
606 605 logging.autoconfigure = false
607 606
608 607 ; specify your own custom logging config file to configure logging
609 608 #logging.logging_conf_file = /path/to/custom_logging.ini
610 609
611 610 ; Dummy marker to add new entries after.
612 611 ; Add any custom entries below. Please don't remove this marker.
613 612 custom.conf = 1
614 613
615 614
616 615 ; #####################
617 616 ; LOGGING CONFIGURATION
618 617 ; #####################
619 618
620 619 [loggers]
621 620 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
622 621
623 622 [handlers]
624 623 keys = console, console_sql
625 624
626 625 [formatters]
627 626 keys = generic, json, color_formatter, color_formatter_sql
628 627
629 628 ; #######
630 629 ; LOGGERS
631 630 ; #######
632 631 [logger_root]
633 632 level = NOTSET
634 633 handlers = console
635 634
636 635 [logger_routes]
637 636 level = DEBUG
638 637 handlers =
639 638 qualname = routes.middleware
640 639 ## "level = DEBUG" logs the route matched and routing variables.
641 640 propagate = 1
642 641
643 642 [logger_sqlalchemy]
644 643 level = INFO
645 644 handlers = console_sql
646 645 qualname = sqlalchemy.engine
647 646 propagate = 0
648 647
649 648 [logger_beaker]
650 649 level = DEBUG
651 650 handlers =
652 651 qualname = beaker.container
653 652 propagate = 1
654 653
655 654 [logger_rhodecode]
656 655 level = DEBUG
657 656 handlers =
658 657 qualname = rhodecode
659 658 propagate = 1
660 659
661 660 [logger_ssh_wrapper]
662 661 level = DEBUG
663 662 handlers =
664 663 qualname = ssh_wrapper
665 664 propagate = 1
666 665
667 666 [logger_celery]
668 667 level = DEBUG
669 668 handlers =
670 669 qualname = celery
671 670
672 671
673 672 ; ########
674 673 ; HANDLERS
675 674 ; ########
676 675
677 676 [handler_console]
678 677 class = StreamHandler
679 678 args = (sys.stderr, )
680 679 level = DEBUG
681 680 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
682 681 ; This allows sending properly formatted logs to grafana loki or elasticsearch
683 682 formatter = generic
684 683
685 684 [handler_console_sql]
686 685 ; "level = DEBUG" logs SQL queries and results.
687 686 ; "level = INFO" logs SQL queries.
688 687 ; "level = WARN" logs neither. (Recommended for production systems.)
689 688 class = StreamHandler
690 689 args = (sys.stderr, )
691 690 level = WARN
692 691 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
693 692 ; This allows sending properly formatted logs to grafana loki or elasticsearch
694 693 formatter = generic
695 694
696 695 ; ##########
697 696 ; FORMATTERS
698 697 ; ##########
699 698
700 699 [formatter_generic]
701 700 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
702 701 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
703 702 datefmt = %Y-%m-%d %H:%M:%S
704 703
705 704 [formatter_color_formatter]
706 705 class = rhodecode.lib.logging_formatter.ColorFormatter
707 706 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
708 707 datefmt = %Y-%m-%d %H:%M:%S
709 708
710 709 [formatter_color_formatter_sql]
711 710 class = rhodecode.lib.logging_formatter.ColorFormatterSql
712 711 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
713 712 datefmt = %Y-%m-%d %H:%M:%S
714 713
715 714 [formatter_json]
716 715 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
717 716 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
General Comments 0
You need to be logged in to leave comments. Login now