release: merged default into stable
Author: marcink
Commit: r3012:22b5bff9 (merge of default into stable)
@@ -0,0 +1,43 b''
+|RCE| 4.13.1 |RNS|
+------------------
+
+Release Date
+^^^^^^^^^^^^
+
+- 2018-08-06
+
+
+New Features
+^^^^^^^^^^^^
+
+
+
+General
+^^^^^^^
+
+- core: added an option to prefix cache keys for usage in a cluster.
+- exception-tracker: store exceptions raised while sending events, for easier debugging of event failures.
+- maintenance: added repack and fsck to the git maintenance execution list.
+
+
+Security
+^^^^^^^^
+
+
+
+Performance
+^^^^^^^^^^^
+
+
+
+Fixes
+^^^^^
+
+- caches: use a single default cache dir for all backends.
+- caches: don't lowercase cache settings, to support uppercase paths.
+
+
+Upgrade notes
+^^^^^^^^^^^^^
+
+- Unscheduled release addressing reported problems and improving stability.
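
Note: the second caches fix is about string sanitization lowercasing values that are
case-sensitive, such as filesystem paths. A minimal sketch of the failure mode, with a
hypothetical path::

    configured = '/Volumes/RC_Data/rc_cache'                      # hypothetical case-sensitive path
    assert configured.lower() == '/volumes/rc_data/rc_cache'      # old behaviour mangled it

    # With 4.13.1 the cache settings are sanitized with lower=False
    # (see the _sanitize_cache_settings hunks below), so the configured value is kept as-is.
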
@@ -9,6 +9,7 b' Release Notes'
 .. toctree::
    :maxdepth: 1

+   release-notes-4.13.1.rst
    release-notes-4.13.0.rst
    release-notes-4.12.4.rst
    release-notes-4.12.3.rst
@@ -1653,6 +1653,8 b' self: super: {'
       self."setuptools-scm"
       self."amqp"
       self."authomatic"
+      self."atomicwrites"
+      self."attrs"
       self."babel"
       self."beaker"
       self."celery"
@@ -4,6 +4,8 b' setuptools-scm==2.1.0'
 amqp==2.3.1
 # not released authomatic that has updated some oauth providers
 https://code.rhodecode.com/upstream/authomatic/archive/90a9ce60cc405ae8a2bf5c3713acd5d78579a04e.tar.gz?md5=3c68720a1322b25254009518d1ff6801#egg=authomatic==0.1.0.post1
+atomicwrites==1.1.5
+attrs==18.1.0
 babel==1.3
 beaker==1.9.1
 celery==4.1.1
@@ -32,8 +32,6 b' log = logging.getLogger(__name__)'
 class RepoMaintenanceView(RepoAppView):
     def load_default_context(self):
         c = self._get_local_tmpl_context()
-
-
         return c

     @LoginRequired()
@@ -436,13 +436,23 b' def _sanitize_vcs_settings(settings):'


 def _sanitize_cache_settings(settings):
-    _string_setting(settings, 'cache_dir',
-                    os.path.join(tempfile.gettempdir(), 'rc_cache'))
+    default_cache_dir = os.path.join(tempfile.gettempdir(), 'rc_cache')
+
+    # save the default cache dir, and use it for all backends later
+    default_cache_dir = _string_setting(
+        settings,
+        'cache_dir',
+        default_cache_dir, lower=False, default_when_empty=True)
+
+    # ensure we have our dir created
+    if not os.path.isdir(default_cache_dir):
+        os.makedirs(default_cache_dir, mode=0755)
+
     # cache_perms
     _string_setting(
         settings,
         'rc_cache.cache_perms.backend',
-        'dogpile.cache.rc.file_namespace')
+        'dogpile.cache.rc.file_namespace', lower=False)
     _int_setting(
         settings,
         'rc_cache.cache_perms.expiration_time',
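
Note: this hunk introduces a single base directory for all file-based cache backends and
creates it up front. A standalone sketch of the same idea, using an assumed helper name
and only the standard library::

    import os
    import tempfile

    def ensure_default_cache_dir(settings):
        # fall back to <tmpdir>/rc_cache when 'cache_dir' is missing or empty
        cache_dir = settings.get('cache_dir') or os.path.join(
            tempfile.gettempdir(), 'rc_cache')
        # create the directory once, before any backend writes to it
        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir, mode=0o755)
        settings['cache_dir'] = cache_dir
        return cache_dir
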
@@ -450,13 +460,13 b' def _sanitize_cache_settings(settings):'
     _string_setting(
         settings,
         'rc_cache.cache_perms.arguments.filename',
-        os.path.join(tempfile.gettempdir(), 'rc_cache_1'))
+        os.path.join(default_cache_dir, 'rc_cache_1'), lower=False)

     # cache_repo
     _string_setting(
         settings,
         'rc_cache.cache_repo.backend',
-        'dogpile.cache.rc.file_namespace')
+        'dogpile.cache.rc.file_namespace', lower=False)
     _int_setting(
         settings,
         'rc_cache.cache_repo.expiration_time',
@@ -464,13 +474,13 b' def _sanitize_cache_settings(settings):'
     _string_setting(
         settings,
         'rc_cache.cache_repo.arguments.filename',
-        os.path.join(tempfile.gettempdir(), 'rc_cache_2'))
+        os.path.join(default_cache_dir, 'rc_cache_2'), lower=False)

     # cache_license
     _string_setting(
         settings,
         'rc_cache.cache_license.backend',
-        'dogpile.cache.rc.file_namespace')
+        'dogpile.cache.rc.file_namespace', lower=False)
     _int_setting(
         settings,
         'rc_cache.cache_license.expiration_time',
@@ -478,13 +488,13 b' def _sanitize_cache_settings(settings):'
     _string_setting(
         settings,
         'rc_cache.cache_license.arguments.filename',
-        os.path.join(tempfile.gettempdir(), 'rc_cache_3'))
+        os.path.join(default_cache_dir, 'rc_cache_3'), lower=False)

     # cache_repo_longterm memory, 96H
     _string_setting(
         settings,
         'rc_cache.cache_repo_longterm.backend',
-        'dogpile.cache.rc.memory_lru')
+        'dogpile.cache.rc.memory_lru', lower=False)
     _int_setting(
         settings,
         'rc_cache.cache_repo_longterm.expiration_time',
@@ -498,7 +508,7 b' def _sanitize_cache_settings(settings):'
     _string_setting(
         settings,
         'rc_cache.sql_cache_short.backend',
-        'dogpile.cache.rc.memory_lru')
+        'dogpile.cache.rc.memory_lru', lower=False)
     _int_setting(
         settings,
         'rc_cache.sql_cache_short.expiration_time',
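
Note: taken together, the backend hunks mean that, with no overrides, the file-based
regions now resolve under the shared cache_dir instead of separate temp paths. Roughly,
assuming a default /tmp tempdir::

    settings = {}
    _sanitize_cache_settings(settings)

    settings['cache_dir']                                  # '/tmp/rc_cache'
    settings['rc_cache.cache_perms.arguments.filename']    # '/tmp/rc_cache/rc_cache_1'
    settings['rc_cache.cache_repo.arguments.filename']     # '/tmp/rc_cache/rc_cache_2'
    settings['rc_cache.cache_license.arguments.filename']  # '/tmp/rc_cache/rc_cache_3'
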
@@ -511,6 +521,7 b' def _sanitize_cache_settings(settings):'

 def _int_setting(settings, name, default):
     settings[name] = int(settings.get(name, default))
+    return settings[name]


 def _bool_setting(settings, name, default):
@@ -518,6 +529,7 b' def _bool_setting(settings, name, defaul'
     if isinstance(input_val, unicode):
         input_val = input_val.encode('utf8')
     settings[name] = asbool(input_val)
+    return settings[name]


 def _list_setting(settings, name, default):
@@ -530,13 +542,20 b' def _list_setting(settings, name, defaul'
     else:
         # Otherwise we assume it uses pyramids space/newline separation.
         settings[name] = aslist(raw_value)
+    return settings[name]


-def _string_setting(settings, name, default, lower=True):
+def _string_setting(settings, name, default, lower=True, default_when_empty=False):
     value = settings.get(name, default)
+
+    if default_when_empty and not value:
+        # use default value when value is empty
+        value = default
+
     if lower:
         value = value.lower()
     settings[name] = value
+    return settings[name]


 def _substitute_values(mapping, substitutions):
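
Note: the setting helpers now return the sanitized value, and _string_setting grows two
keyword arguments. A self-contained sketch of the new signature and how the flags
interact, with an illustrative path (not the RhodeCode module itself)::

    def _string_setting(settings, name, default, lower=True, default_when_empty=False):
        value = settings.get(name, default)
        if default_when_empty and not value:
            # treat an explicitly empty value (e.g. '' in the .ini) as "not set"
            value = default
        if lower:
            value = value.lower()
        settings[name] = value
        return settings[name]

    settings = {'cache_dir': ''}
    # the empty value falls back to the default, and lower=False preserves case
    _string_setting(settings, 'cache_dir', '/Data/rc_cache',
                    lower=False, default_when_empty=True)    # -> '/Data/rc_cache'
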
@@ -81,9 +81,14 b' def get_vcs_server_protocol(config):'


 def set_instance_id(config):
-    """ Sets a dynamic generated config['instance_id'] if missing or '*' """
+    """
+    Sets a dynamically generated config['instance_id'] if missing or '*'.
+    E.g. instance_id = *cluster-1 or instance_id = *
+    """

     config['instance_id'] = config.get('instance_id') or ''
-    if config['instance_id'] == '*' or not config['instance_id']:
+    instance_id = config['instance_id']
+    if instance_id.startswith('*') or not instance_id:
+        prefix = instance_id.lstrip('*')
         _platform_id = platform.uname()[1] or 'instance'
-        config['instance_id'] = '%s-%s' % (_platform_id, os.getpid())
+        config['instance_id'] = '%s%s-%s' % (prefix, _platform_id, os.getpid())
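
Note: this is the "prefix cache keys for usage in a cluster" feature from the release
notes: a leading * keeps the dynamically generated host-pid id, and anything after the *
becomes a stable per-node prefix. A small standalone sketch of the resulting values,
with a hypothetical host name and pid::

    import os
    import platform

    def generated_instance_id(configured):
        prefix = configured.lstrip('*')
        host = platform.uname()[1] or 'instance'
        return '%s%s-%s' % (prefix, host, os.getpid())

    # instance_id = *            ->  'node1-2301'            (as before, no prefix)
    # instance_id = *cluster-1   ->  'cluster-1node1-2301'   (new: per-node prefix)
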
@@ -17,11 +17,13 b''
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
-
+import sys
 import logging

 from rhodecode.integrations.registry import IntegrationTypeRegistry
 from rhodecode.integrations.types import webhook, slack, hipchat, email, base
+from rhodecode.lib.exc_tracking import store_exception
+
 log = logging.getLogger(__name__)


@@ -61,6 +63,8 b' def integrations_event_handler(event):'
     try:
         integration_model.send_event(integration, event)
     except Exception:
+        exc_info = sys.exc_info()
+        store_exception(id(exc_info), exc_info)
         log.exception(
             'failure occurred when sending event %s to integration %s' % (
                 event, integration))
@@ -87,6 +87,13 b' def _store_exception(exc_id, exc_info, p'


 def store_exception(exc_id, exc_info, prefix=global_prefix):
+    """
+    Example usage::
+
+        exc_info = sys.exc_info()
+        store_exception(id(exc_info), exc_info)
+    """
+
     try:
         _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix)
     except Exception:
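
Note: the docstring example mirrors what the integrations handler above now does. As a
free-standing sketch, with a placeholder callable standing in for the integration send::

    import sys
    from rhodecode.lib.exc_tracking import store_exception

    def send_safely(send_event):
        try:
            send_event()    # placeholder for e.g. integration_model.send_event(...)
        except Exception:
            exc_info = sys.exc_info()
            # persist the traceback under an id so the failure can be inspected later
            store_exception(id(exc_info), exc_info)
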
@@ -51,25 +51,81 b' class GitGC(MaintenanceTask):'
         output = []
         instance = self.db_repo.scm_instance()

-        objects = self._count_objects(instance)
-        output.append(objects)
-        log.debug('GIT objects:%s', objects)
-
-        stdout, stderr = instance.run_git_command(
-            ['gc', '--aggressive'], fail_on_stderr=False)
-
-        out = 'executed git gc --aggressive'
-        if stderr:
-            out = ''.join(stderr.splitlines())
-
-        elif stdout:
-            out = ''.join(stdout.splitlines())
-
+        objects_before = self._count_objects(instance)
+
+        log.debug('GIT objects:%s', objects_before)
+        cmd = ['gc', '--aggressive']
+        stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
+
+        out = 'executed {}'.format(' '.join(cmd))
         output.append(out)

-        objects = self._count_objects(instance)
-        log.debug('GIT objects:%s', objects)
-        output.append(objects)
+        out = ''
+        if stderr:
+            out += ''.join(stderr.splitlines())
+
+        if stdout:
+            out += ''.join(stdout.splitlines())
+
+        if out:
+            output.append(out)
+
+        objects_after = self._count_objects(instance)
+        log.debug('GIT objects:%s', objects_after)
+        output.append('objects before :' + objects_before)
+        output.append('objects after :' + objects_after)
+
+        return '\n'.join(output)
+
+
+class GitFSCK(MaintenanceTask):
+    human_name = 'GIT FSCK'
+
+    def run(self):
+        output = []
+        instance = self.db_repo.scm_instance()
+
+        cmd = ['fsck', '--full']
+        stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
+
+        out = 'executed {}'.format(' '.join(cmd))
+        output.append(out)
+
+        out = ''
+        if stderr:
+            out += ''.join(stderr.splitlines())
+
+        if stdout:
+            out += ''.join(stdout.splitlines())
+
+        if out:
+            output.append(out)
+
+        return '\n'.join(output)
+
+
+class GitRepack(MaintenanceTask):
+    human_name = 'GIT Repack'
+
+    def run(self):
+        output = []
+        instance = self.db_repo.scm_instance()
+        cmd = ['repack', '-a', '-d',
+               '--window-memory', '10m', '--max-pack-size', '100m']
+        stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
+
+        out = 'executed {}'.format(' '.join(cmd))
+        output.append(out)
+        out = ''
+
+        if stderr:
+            out += ''.join(stderr.splitlines())
+
+        if stdout:
+            out += ''.join(stdout.splitlines())
+
+        if out:
+            output.append(out)

         return '\n'.join(output)

@@ -98,7 +154,7 b' class RepoMaintenance(object):'
     """
     tasks = {
         'hg': [HGVerify],
-        'git': [GitGC],
+        'git': [GitFSCK, GitGC, GitRepack],
         'svn': [SVNVerify],
     }

@@ -114,5 +170,6 b' class RepoMaintenance(object):'
     def execute(self, db_repo):
         executed_tasks = []
         for task in self.tasks[db_repo.repo_type]:
-            executed_tasks.append(task(db_repo).run())
+            output = task.human_name + ':\n' + task(db_repo).run() + '\n--\n'
+            executed_tasks.append(output)
         return executed_tasks
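
Note: the three git tasks boil down to running fsck, gc and repack against the
repository and collecting their output. A rough standalone equivalent using plain
subprocess, outside the MaintenanceTask machinery and with an assumed repository path::

    import subprocess

    GIT_MAINTENANCE_CMDS = [
        ['git', 'fsck', '--full'],
        ['git', 'gc', '--aggressive'],
        ['git', 'repack', '-a', '-d',
         '--window-memory', '10m', '--max-pack-size', '100m'],
    ]

    def run_maintenance(repo_path):
        report = []
        for cmd in GIT_MAINTENANCE_CMDS:
            proc = subprocess.Popen(
                cmd, cwd=repo_path,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            stdout, stderr = proc.communicate()
            report.append('executed {}'.format(' '.join(cmd)))
            out = ''.join((stderr + stdout).decode('utf8', 'replace').splitlines())
            if out:
                report.append(out)
        return '\n'.join(report)

    # print(run_maintenance('/path/to/repo.git'))   # placeholder path
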