##// END OF EJS Templates
code: small fixes/whitespace/logging
super-admin -
r4872:72f064ef default
parent child Browse files
Show More
@@ -1,98 +1,99 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import codecs
21 import codecs
22 import logging
22 import logging
23 import os
23 import os
24 from pyramid.renderers import render
24 from pyramid.renderers import render
25
25
26 from rhodecode.events import trigger
26 from rhodecode.events import trigger
27 from rhodecode.lib.utils import get_rhodecode_realm, get_rhodecode_base_path
27 from rhodecode.lib.utils import get_rhodecode_realm, get_rhodecode_base_path
28 from rhodecode.lib.utils2 import str2bool
28 from rhodecode.lib.utils2 import str2bool
29 from rhodecode.model.db import RepoGroup
29 from rhodecode.model.db import RepoGroup
30
30
31 from . import config_keys
31 from . import config_keys
32 from .events import ModDavSvnConfigChange
32 from .events import ModDavSvnConfigChange
33
33
34
34
35 log = logging.getLogger(__name__)
35 log = logging.getLogger(__name__)
36
36
37
37
38 def write_mod_dav_svn_config(settings):
38 def write_mod_dav_svn_config(settings):
39 use_ssl = str2bool(settings['force_https'])
39 use_ssl = str2bool(settings['force_https'])
40 file_path = settings[config_keys.config_file_path]
40 file_path = settings[config_keys.config_file_path]
41 config = _render_mod_dav_svn_config(
41 config = _render_mod_dav_svn_config(
42 use_ssl=use_ssl,
42 use_ssl=use_ssl,
43 parent_path_root=get_rhodecode_base_path(),
43 parent_path_root=get_rhodecode_base_path(),
44 list_parent_path=settings[config_keys.list_parent_path],
44 list_parent_path=settings[config_keys.list_parent_path],
45 location_root=settings[config_keys.location_root],
45 location_root=settings[config_keys.location_root],
46 repo_groups=RepoGroup.get_all_repo_groups(),
46 repo_groups=RepoGroup.get_all_repo_groups(),
47 realm=get_rhodecode_realm(), template=settings[config_keys.template])
47 realm=get_rhodecode_realm(), template=settings[config_keys.template])
48 _write_mod_dav_svn_config(config, file_path)
48 _write_mod_dav_svn_config(config, file_path)
49 return file_path
49 return file_path
50
50
51
51
52 def generate_mod_dav_svn_config(registry):
52 def generate_mod_dav_svn_config(registry):
53 """
53 """
54 Generate the configuration file for use with subversion's mod_dav_svn
54 Generate the configuration file for use with subversion's mod_dav_svn
55 module. The configuration has to contain a <Location> block for each
55 module. The configuration has to contain a <Location> block for each
56 available repository group because the mod_dav_svn module does not support
56 available repository group because the mod_dav_svn module does not support
57 repositories organized in sub folders.
57 repositories organized in sub folders.
58 """
58 """
59 settings = registry.settings
59 settings = registry.settings
60 file_path = write_mod_dav_svn_config(settings)
60 file_path = write_mod_dav_svn_config(settings)
61
61
62 # Trigger an event on mod dav svn configuration change.
62 # Trigger an event on mod dav svn configuration change.
63 trigger(ModDavSvnConfigChange(), registry)
63 trigger(ModDavSvnConfigChange(), registry)
64 return file_path
64 return file_path
65
65
66
66 def _render_mod_dav_svn_config(
67 def _render_mod_dav_svn_config(
67 parent_path_root, list_parent_path, location_root, repo_groups, realm,
68 parent_path_root, list_parent_path, location_root, repo_groups, realm,
68 use_ssl, template):
69 use_ssl, template):
69 """
70 """
70 Render mod_dav_svn configuration to string.
71 Render mod_dav_svn configuration to string.
71 """
72 """
72 repo_group_paths = []
73 repo_group_paths = []
73 for repo_group in repo_groups:
74 for repo_group in repo_groups:
74 group_path = repo_group.full_path_splitted
75 group_path = repo_group.full_path_splitted
75 location = os.path.join(location_root, *group_path)
76 location = os.path.join(location_root, *group_path)
76 parent_path = os.path.join(parent_path_root, *group_path)
77 parent_path = os.path.join(parent_path_root, *group_path)
77 repo_group_paths.append((location, parent_path))
78 repo_group_paths.append((location, parent_path))
78
79
79 context = {
80 context = {
80 'location_root': location_root,
81 'location_root': location_root,
81 'parent_path_root': parent_path_root,
82 'parent_path_root': parent_path_root,
82 'repo_group_paths': repo_group_paths,
83 'repo_group_paths': repo_group_paths,
83 'svn_list_parent_path': list_parent_path,
84 'svn_list_parent_path': list_parent_path,
84 'rhodecode_realm': realm,
85 'rhodecode_realm': realm,
85 'use_https': use_ssl,
86 'use_https': use_ssl,
86 }
87 }
87 template = template or \
88 template = template or \
88 'rhodecode:apps/svn_support/templates/mod-dav-svn.conf.mako'
89 'rhodecode:apps/svn_support/templates/mod-dav-svn.conf.mako'
89 # Render the configuration template to string.
90 # Render the configuration template to string.
90 return render(template, context)
91 return render(template, context)
91
92
92
93
93 def _write_mod_dav_svn_config(config, filepath):
94 def _write_mod_dav_svn_config(config, filepath):
94 """
95 """
95 Write mod_dav_svn config to file.
96 Write mod_dav_svn config to file.
96 """
97 """
97 with codecs.open(filepath, 'w', encoding='utf-8') as f:
98 with codecs.open(filepath, 'w', encoding='utf-8') as f:
98 f.write(config)
99 f.write(config)
@@ -1,323 +1,329 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 """
20 """
21 Celery loader, run with::
21 Celery loader, run with::
22
22
23 celery worker \
23 celery worker \
24 --task-events \
24 --task-events \
25 --beat \
25 --beat \
26 --autoscale=20,2 \
27 --max-tasks-per-child 1 \
26 --app rhodecode.lib.celerylib.loader \
28 --app rhodecode.lib.celerylib.loader \
27 --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
29 --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
28 --loglevel DEBUG --ini=.dev/dev.ini
30 --loglevel DEBUG --ini=.dev/dev.ini
29 """
31 """
30 import os
32 import os
31 import logging
33 import logging
32 import importlib
34 import importlib
33
35
34 from celery import Celery
36 from celery import Celery
35 from celery import signals
37 from celery import signals
36 from celery import Task
38 from celery import Task
37 from celery import exceptions # pragma: no cover
39 from celery import exceptions # pragma: no cover
38 from kombu.serialization import register
40 from kombu.serialization import register
39 from pyramid.threadlocal import get_current_request
41 from pyramid.threadlocal import get_current_request
40
42
41 import rhodecode
43 import rhodecode
42
44
43 from rhodecode.lib.auth import AuthUser
45 from rhodecode.lib.auth import AuthUser
44 from rhodecode.lib.celerylib.utils import parse_ini_vars, ping_db
46 from rhodecode.lib.celerylib.utils import parse_ini_vars, ping_db
45 from rhodecode.lib.ext_json import json
47 from rhodecode.lib.ext_json import json
46 from rhodecode.lib.pyramid_utils import bootstrap, setup_logging, prepare_request
48 from rhodecode.lib.pyramid_utils import bootstrap, setup_logging, prepare_request
47 from rhodecode.lib.utils2 import str2bool
49 from rhodecode.lib.utils2 import str2bool
48 from rhodecode.model import meta
50 from rhodecode.model import meta
49
51
50
52
51 register('json_ext', json.dumps, json.loads,
53 register('json_ext', json.dumps, json.loads,
52 content_type='application/x-json-ext',
54 content_type='application/x-json-ext',
53 content_encoding='utf-8')
55 content_encoding='utf-8')
54
56
55 log = logging.getLogger('celery.rhodecode.loader')
57 log = logging.getLogger('celery.rhodecode.loader')
56
58
57
59
58 def add_preload_arguments(parser):
60 def add_preload_arguments(parser):
59 parser.add_argument(
61 parser.add_argument(
60 '--ini', default=None,
62 '--ini', default=None,
61 help='Path to ini configuration file.'
63 help='Path to ini configuration file.'
62 )
64 )
63 parser.add_argument(
65 parser.add_argument(
64 '--ini-var', default=None,
66 '--ini-var', default=None,
65 help='Comma separated list of key=value to pass to ini.'
67 help='Comma separated list of key=value to pass to ini.'
66 )
68 )
67
69
68
70
69 def get_logger(obj):
71 def get_logger(obj):
70 custom_log = logging.getLogger(
72 custom_log = logging.getLogger(
71 'rhodecode.task.{}'.format(obj.__class__.__name__))
73 'rhodecode.task.{}'.format(obj.__class__.__name__))
72
74
73 if rhodecode.CELERY_ENABLED:
75 if rhodecode.CELERY_ENABLED:
74 try:
76 try:
75 custom_log = obj.get_logger()
77 custom_log = obj.get_logger()
76 except Exception:
78 except Exception:
77 pass
79 pass
78
80
79 return custom_log
81 return custom_log
80
82
81
83
82 imports = ['rhodecode.lib.celerylib.tasks']
84 imports = ['rhodecode.lib.celerylib.tasks']
83
85
84 try:
86 try:
85 # try if we have EE tasks available
87 # try if we have EE tasks available
86 importlib.import_module('rc_ee')
88 importlib.import_module('rc_ee')
87 imports.append('rc_ee.lib.celerylib.tasks')
89 imports.append('rc_ee.lib.celerylib.tasks')
88 except ImportError:
90 except ImportError:
89 pass
91 pass
90
92
91
93
92 base_celery_config = {
94 base_celery_config = {
93 'result_backend': 'rpc://',
95 'result_backend': 'rpc://',
94 'result_expires': 60 * 60 * 24,
96 'result_expires': 60 * 60 * 24,
95 'result_persistent': True,
97 'result_persistent': True,
96 'imports': imports,
98 'imports': imports,
97 'worker_max_tasks_per_child': 100,
99 'worker_max_tasks_per_child': 100,
98 'accept_content': ['json_ext'],
100 'accept_content': ['json_ext'],
99 'task_serializer': 'json_ext',
101 'task_serializer': 'json_ext',
100 'result_serializer': 'json_ext',
102 'result_serializer': 'json_ext',
101 'worker_hijack_root_logger': False,
103 'worker_hijack_root_logger': False,
102 'database_table_names': {
104 'database_table_names': {
103 'task': 'beat_taskmeta',
105 'task': 'beat_taskmeta',
104 'group': 'beat_groupmeta',
106 'group': 'beat_groupmeta',
105 }
107 }
106 }
108 }
107 # init main celery app
109 # init main celery app
108 celery_app = Celery()
110 celery_app = Celery()
109 celery_app.user_options['preload'].add(add_preload_arguments)
111 celery_app.user_options['preload'].add(add_preload_arguments)
110 ini_file_glob = None
112 ini_file_glob = None
111
113
112
114
113 @signals.setup_logging.connect
115 @signals.setup_logging.connect
114 def setup_logging_callback(**kwargs):
116 def setup_logging_callback(**kwargs):
115 setup_logging(ini_file_glob)
117 setup_logging(ini_file_glob)
116
118
117
119
118 @signals.user_preload_options.connect
120 @signals.user_preload_options.connect
119 def on_preload_parsed(options, **kwargs):
121 def on_preload_parsed(options, **kwargs):
120 from rhodecode.config.middleware import get_celery_config
122 from rhodecode.config.middleware import get_celery_config
121
123
122 ini_location = options['ini']
124 ini_location = options['ini']
123 ini_vars = options['ini_var']
125 ini_vars = options['ini_var']
124 celery_app.conf['INI_PYRAMID'] = options['ini']
126 celery_app.conf['INI_PYRAMID'] = options['ini']
125
127
126 if ini_location is None:
128 if ini_location is None:
127 print('You must provide the paste --ini argument')
129 print('You must provide the paste --ini argument')
128 exit(-1)
130 exit(-1)
129
131
130 options = None
132 options = None
131 if ini_vars is not None:
133 if ini_vars is not None:
132 options = parse_ini_vars(ini_vars)
134 options = parse_ini_vars(ini_vars)
133
135
134 global ini_file_glob
136 global ini_file_glob
135 ini_file_glob = ini_location
137 ini_file_glob = ini_location
136
138
137 log.debug('Bootstrapping RhodeCode application...')
139 log.debug('Bootstrapping RhodeCode application...')
140
141 env = {}
138 try:
142 try:
139 env = bootstrap(ini_location, options=options)
143 env = bootstrap(ini_location, options=options)
140 except Exception:
144 except Exception:
141 log.exception('Failed to bootstrap RhodeCode APP')
145 log.exception('Failed to bootstrap RhodeCode APP')
142
146
147 log.debug('Got Pyramid ENV: %s', env)
143 celery_settings = get_celery_config(env['registry'].settings)
148 celery_settings = get_celery_config(env['registry'].settings)
149
144 setup_celery_app(
150 setup_celery_app(
145 app=env['app'], root=env['root'], request=env['request'],
151 app=env['app'], root=env['root'], request=env['request'],
146 registry=env['registry'], closer=env['closer'],
152 registry=env['registry'], closer=env['closer'],
147 celery_settings=celery_settings)
153 celery_settings=celery_settings)
148
154
149 # fix the global flag even if it's disabled via .ini file because this
155 # fix the global flag even if it's disabled via .ini file because this
150 # is a worker code that doesn't need this to be disabled.
156 # is a worker code that doesn't need this to be disabled.
151 rhodecode.CELERY_ENABLED = True
157 rhodecode.CELERY_ENABLED = True
152
158
153
159
154 @signals.task_prerun.connect
160 @signals.task_prerun.connect
155 def task_prerun_signal(task_id, task, args, **kwargs):
161 def task_prerun_signal(task_id, task, args, **kwargs):
156 ping_db()
162 ping_db()
157
163
158
164
159 @signals.task_success.connect
165 @signals.task_success.connect
160 def task_success_signal(result, **kwargs):
166 def task_success_signal(result, **kwargs):
161 meta.Session.commit()
167 meta.Session.commit()
162 closer = celery_app.conf['PYRAMID_CLOSER']
168 closer = celery_app.conf['PYRAMID_CLOSER']
163 if closer:
169 if closer:
164 closer()
170 closer()
165
171
166
172
167 @signals.task_retry.connect
173 @signals.task_retry.connect
168 def task_retry_signal(
174 def task_retry_signal(
169 request, reason, einfo, **kwargs):
175 request, reason, einfo, **kwargs):
170 meta.Session.remove()
176 meta.Session.remove()
171 closer = celery_app.conf['PYRAMID_CLOSER']
177 closer = celery_app.conf['PYRAMID_CLOSER']
172 if closer:
178 if closer:
173 closer()
179 closer()
174
180
175
181
176 @signals.task_failure.connect
182 @signals.task_failure.connect
177 def task_failure_signal(
183 def task_failure_signal(
178 task_id, exception, args, kwargs, traceback, einfo, **kargs):
184 task_id, exception, args, kwargs, traceback, einfo, **kargs):
179 from rhodecode.lib.exc_tracking import store_exception
185 from rhodecode.lib.exc_tracking import store_exception
180 from rhodecode.lib.statsd_client import StatsdClient
186 from rhodecode.lib.statsd_client import StatsdClient
181
187
182 meta.Session.remove()
188 meta.Session.remove()
183
189
184 # simulate sys.exc_info()
190 # simulate sys.exc_info()
185 exc_info = (einfo.type, einfo.exception, einfo.tb)
191 exc_info = (einfo.type, einfo.exception, einfo.tb)
186 store_exception(id(exc_info), exc_info, prefix='rhodecode-celery')
192 store_exception(id(exc_info), exc_info, prefix='rhodecode-celery')
187 statsd = StatsdClient.statsd
193 statsd = StatsdClient.statsd
188 if statsd:
194 if statsd:
189 exc_type = "{}.{}".format(einfo.__class__.__module__, einfo.__class__.__name__)
195 exc_type = "{}.{}".format(einfo.__class__.__module__, einfo.__class__.__name__)
190 statsd.incr('rhodecode_exception_total',
196 statsd.incr('rhodecode_exception_total',
191 tags=["exc_source:celery", "type:{}".format(exc_type)])
197 tags=["exc_source:celery", "type:{}".format(exc_type)])
192
198
193 closer = celery_app.conf['PYRAMID_CLOSER']
199 closer = celery_app.conf['PYRAMID_CLOSER']
194 if closer:
200 if closer:
195 closer()
201 closer()
196
202
197
203
198 @signals.task_revoked.connect
204 @signals.task_revoked.connect
199 def task_revoked_signal(
205 def task_revoked_signal(
200 request, terminated, signum, expired, **kwargs):
206 request, terminated, signum, expired, **kwargs):
201 closer = celery_app.conf['PYRAMID_CLOSER']
207 closer = celery_app.conf['PYRAMID_CLOSER']
202 if closer:
208 if closer:
203 closer()
209 closer()
204
210
205
211
206 def setup_celery_app(app, root, request, registry, closer, celery_settings):
212 def setup_celery_app(app, root, request, registry, closer, celery_settings):
207 log.debug('Got custom celery conf: %s', celery_settings)
213 log.debug('Got custom celery conf: %s', celery_settings)
208 celery_config = base_celery_config
214 celery_config = base_celery_config
209 celery_config.update({
215 celery_config.update({
210 # store celerybeat scheduler db where the .ini file is
216 # store celerybeat scheduler db where the .ini file is
211 'beat_schedule_filename': registry.settings['celerybeat-schedule.path'],
217 'beat_schedule_filename': registry.settings['celerybeat-schedule.path'],
212 })
218 })
213
219
214 celery_config.update(celery_settings)
220 celery_config.update(celery_settings)
215 celery_app.config_from_object(celery_config)
221 celery_app.config_from_object(celery_config)
216
222
217 celery_app.conf.update({'PYRAMID_APP': app})
223 celery_app.conf.update({'PYRAMID_APP': app})
218 celery_app.conf.update({'PYRAMID_ROOT': root})
224 celery_app.conf.update({'PYRAMID_ROOT': root})
219 celery_app.conf.update({'PYRAMID_REQUEST': request})
225 celery_app.conf.update({'PYRAMID_REQUEST': request})
220 celery_app.conf.update({'PYRAMID_REGISTRY': registry})
226 celery_app.conf.update({'PYRAMID_REGISTRY': registry})
221 celery_app.conf.update({'PYRAMID_CLOSER': closer})
227 celery_app.conf.update({'PYRAMID_CLOSER': closer})
222
228
223
229
224 def configure_celery(config, celery_settings):
230 def configure_celery(config, celery_settings):
225 """
231 """
226 Helper that is called from our application creation logic. It gives
232 Helper that is called from our application creation logic. It gives
227 connection info into running webapp and allows execution of tasks from
233 connection info into running webapp and allows execution of tasks from
228 RhodeCode itself
234 RhodeCode itself
229 """
235 """
230 # store some globals into rhodecode
236 # store some globals into rhodecode
231 rhodecode.CELERY_ENABLED = str2bool(
237 rhodecode.CELERY_ENABLED = str2bool(
232 config.registry.settings.get('use_celery'))
238 config.registry.settings.get('use_celery'))
233 if rhodecode.CELERY_ENABLED:
239 if rhodecode.CELERY_ENABLED:
234 log.info('Configuring celery based on `%s` settings', celery_settings)
240 log.info('Configuring celery based on `%s` settings', celery_settings)
235 setup_celery_app(
241 setup_celery_app(
236 app=None, root=None, request=None, registry=config.registry,
242 app=None, root=None, request=None, registry=config.registry,
237 closer=None, celery_settings=celery_settings)
243 closer=None, celery_settings=celery_settings)
238
244
239
245
240 def maybe_prepare_env(req):
246 def maybe_prepare_env(req):
241 environ = {}
247 environ = {}
242 try:
248 try:
243 environ.update({
249 environ.update({
244 'PATH_INFO': req.environ['PATH_INFO'],
250 'PATH_INFO': req.environ['PATH_INFO'],
245 'SCRIPT_NAME': req.environ['SCRIPT_NAME'],
251 'SCRIPT_NAME': req.environ['SCRIPT_NAME'],
246 'HTTP_HOST': req.environ.get('HTTP_HOST', req.environ['SERVER_NAME']),
252 'HTTP_HOST': req.environ.get('HTTP_HOST', req.environ['SERVER_NAME']),
247 'SERVER_NAME': req.environ['SERVER_NAME'],
253 'SERVER_NAME': req.environ['SERVER_NAME'],
248 'SERVER_PORT': req.environ['SERVER_PORT'],
254 'SERVER_PORT': req.environ['SERVER_PORT'],
249 'wsgi.url_scheme': req.environ['wsgi.url_scheme'],
255 'wsgi.url_scheme': req.environ['wsgi.url_scheme'],
250 })
256 })
251 except Exception:
257 except Exception:
252 pass
258 pass
253
259
254 return environ
260 return environ
255
261
256
262
257 class RequestContextTask(Task):
263 class RequestContextTask(Task):
258 """
264 """
259 This is a celery task which will create a rhodecode app instance context
265 This is a celery task which will create a rhodecode app instance context
260 for the task, patch pyramid with the original request
266 for the task, patch pyramid with the original request
261 that created the task and also add the user to the context.
267 that created the task and also add the user to the context.
262 """
268 """
263
269
264 def apply_async(self, args=None, kwargs=None, task_id=None, producer=None,
270 def apply_async(self, args=None, kwargs=None, task_id=None, producer=None,
265 link=None, link_error=None, shadow=None, **options):
271 link=None, link_error=None, shadow=None, **options):
266 """ queue the job to run (we are in web request context here) """
272 """ queue the job to run (we are in web request context here) """
267
273
268 req = get_current_request()
274 req = get_current_request()
269 log.debug('Running Task with class: %s. Request Class: %s',
275 log.debug('Running Task with class: %s. Request Class: %s',
270 self.__class__, req.__class__)
276 self.__class__, req.__class__)
271
277
272 # web case
278 # web case
273 if hasattr(req, 'user'):
279 if hasattr(req, 'user'):
274 ip_addr = req.user.ip_addr
280 ip_addr = req.user.ip_addr
275 user_id = req.user.user_id
281 user_id = req.user.user_id
276
282
277 # api case
283 # api case
278 elif hasattr(req, 'rpc_user'):
284 elif hasattr(req, 'rpc_user'):
279 ip_addr = req.rpc_user.ip_addr
285 ip_addr = req.rpc_user.ip_addr
280 user_id = req.rpc_user.user_id
286 user_id = req.rpc_user.user_id
281 else:
287 else:
282 raise Exception(
288 raise Exception(
283 'Unable to fetch required data from request: {}. \n'
289 'Unable to fetch required data from request: {}. \n'
284 'This task is required to be executed from context of '
290 'This task is required to be executed from context of '
285 'request in a webapp. Task: {}'.format(
291 'request in a webapp. Task: {}'.format(
286 repr(req),
292 repr(req),
287 self
293 self
288 )
294 )
289 )
295 )
290
296
291 if req:
297 if req:
292 # we hook into kwargs since it is the only way to pass our data to
298 # we hook into kwargs since it is the only way to pass our data to
293 # the celery worker
299 # the celery worker
294 environ = maybe_prepare_env(req)
300 environ = maybe_prepare_env(req)
295 options['headers'] = options.get('headers', {})
301 options['headers'] = options.get('headers', {})
296 options['headers'].update({
302 options['headers'].update({
297 'rhodecode_proxy_data': {
303 'rhodecode_proxy_data': {
298 'environ': environ,
304 'environ': environ,
299 'auth_user': {
305 'auth_user': {
300 'ip_addr': ip_addr,
306 'ip_addr': ip_addr,
301 'user_id': user_id
307 'user_id': user_id
302 },
308 },
303 }
309 }
304 })
310 })
305
311
306 return super(RequestContextTask, self).apply_async(
312 return super(RequestContextTask, self).apply_async(
307 args, kwargs, task_id, producer, link, link_error, shadow, **options)
313 args, kwargs, task_id, producer, link, link_error, shadow, **options)
308
314
309 def __call__(self, *args, **kwargs):
315 def __call__(self, *args, **kwargs):
310 """ rebuild the context and then run task on celery worker """
316 """ rebuild the context and then run task on celery worker """
311
317
312 proxy_data = getattr(self.request, 'rhodecode_proxy_data', None)
318 proxy_data = getattr(self.request, 'rhodecode_proxy_data', None)
313 if not proxy_data:
319 if not proxy_data:
314 return super(RequestContextTask, self).__call__(*args, **kwargs)
320 return super(RequestContextTask, self).__call__(*args, **kwargs)
315
321
316 log.debug('using celery proxy data to run task: %r', proxy_data)
322 log.debug('using celery proxy data to run task: %r', proxy_data)
317 # re-inject and register threadlocals for proper routing support
323 # re-inject and register threadlocals for proper routing support
318 request = prepare_request(proxy_data['environ'])
324 request = prepare_request(proxy_data['environ'])
319 request.user = AuthUser(user_id=proxy_data['auth_user']['user_id'],
325 request.user = AuthUser(user_id=proxy_data['auth_user']['user_id'],
320 ip_addr=proxy_data['auth_user']['ip_addr'])
326 ip_addr=proxy_data['auth_user']['ip_addr'])
321
327
322 return super(RequestContextTask, self).__call__(*args, **kwargs)
328 return super(RequestContextTask, self).__call__(*args, **kwargs)
323
329
@@ -1,800 +1,799 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Utilities library for RhodeCode
22 Utilities library for RhodeCode
23 """
23 """
24
24
25 import datetime
25 import datetime
26 import decorator
26 import decorator
27 import json
27 import json
28 import logging
28 import logging
29 import os
29 import os
30 import re
30 import re
31 import sys
31 import sys
32 import shutil
32 import shutil
33 import socket
33 import socket
34 import tempfile
34 import tempfile
35 import traceback
35 import traceback
36 import tarfile
36 import tarfile
37 import warnings
37 import warnings
38 import hashlib
38 import hashlib
39 from os.path import join as jn
39 from os.path import join as jn
40
40
41 import paste
41 import paste
42 import pkg_resources
42 import pkg_resources
43 from webhelpers2.text import collapse, remove_formatting
43 from webhelpers2.text import collapse, remove_formatting
44 from mako import exceptions
44 from mako import exceptions
45 from pyramid.threadlocal import get_current_registry
45 from pyramid.threadlocal import get_current_registry
46 from rhodecode.lib.request import Request
47
46
48 from rhodecode.lib.vcs.backends.base import Config
47 from rhodecode.lib.vcs.backends.base import Config
49 from rhodecode.lib.vcs.exceptions import VCSError
48 from rhodecode.lib.vcs.exceptions import VCSError
50 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
49 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
51 from rhodecode.lib.utils2 import (
50 from rhodecode.lib.utils2 import (
52 safe_str, safe_unicode, get_current_rhodecode_user, md5, sha1)
51 safe_str, safe_unicode, get_current_rhodecode_user, md5, sha1)
53 from rhodecode.model import meta
52 from rhodecode.model import meta
54 from rhodecode.model.db import (
53 from rhodecode.model.db import (
55 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
56 from rhodecode.model.meta import Session
55 from rhodecode.model.meta import Session
57
56
58
57
59 log = logging.getLogger(__name__)
58 log = logging.getLogger(__name__)
60
59
61 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
62
61
63 # String which contains characters that are not allowed in slug names for
62 # String which contains characters that are not allowed in slug names for
64 # repositories or repository groups. It is properly escaped to use it in
63 # repositories or repository groups. It is properly escaped to use it in
65 # regular expressions.
64 # regular expressions.
66 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
65 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
67
66
68 # Regex that matches forbidden characters in repo/group slugs.
67 # Regex that matches forbidden characters in repo/group slugs.
69 SLUG_BAD_CHAR_RE = re.compile('[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
68 SLUG_BAD_CHAR_RE = re.compile('[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
70
69
71 # Regex that matches allowed characters in repo/group slugs.
70 # Regex that matches allowed characters in repo/group slugs.
72 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
71 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
73
72
74 # Regex that matches whole repo/group slugs.
73 # Regex that matches whole repo/group slugs.
75 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
74 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
76
75
77 _license_cache = None
76 _license_cache = None
78
77
79
78
80 def repo_name_slug(value):
79 def repo_name_slug(value):
81 """
80 """
82 Return slug of name of repository
81 Return slug of name of repository
83 This function is called on each creation/modification
82 This function is called on each creation/modification
84 of repository to prevent bad names in repo
83 of repository to prevent bad names in repo
85 """
84 """
86 replacement_char = '-'
85 replacement_char = '-'
87
86
88 slug = remove_formatting(value)
87 slug = remove_formatting(value)
89 slug = SLUG_BAD_CHAR_RE.sub('', slug)
88 slug = SLUG_BAD_CHAR_RE.sub('', slug)
90 slug = re.sub('[\s]+', '-', slug)
89 slug = re.sub('[\s]+', '-', slug)
91 slug = collapse(slug, replacement_char)
90 slug = collapse(slug, replacement_char)
92 return slug
91 return slug
93
92
94
93
95 #==============================================================================
94 #==============================================================================
96 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
95 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
97 #==============================================================================
96 #==============================================================================
98 def get_repo_slug(request):
97 def get_repo_slug(request):
99 _repo = ''
98 _repo = ''
100
99
101 if hasattr(request, 'db_repo'):
100 if hasattr(request, 'db_repo'):
102 # if our requests has set db reference use it for name, this
101 # if our requests has set db reference use it for name, this
103 # translates the example.com/_<id> into proper repo names
102 # translates the example.com/_<id> into proper repo names
104 _repo = request.db_repo.repo_name
103 _repo = request.db_repo.repo_name
105 elif getattr(request, 'matchdict', None):
104 elif getattr(request, 'matchdict', None):
106 # pyramid
105 # pyramid
107 _repo = request.matchdict.get('repo_name')
106 _repo = request.matchdict.get('repo_name')
108
107
109 if _repo:
108 if _repo:
110 _repo = _repo.rstrip('/')
109 _repo = _repo.rstrip('/')
111 return _repo
110 return _repo
112
111
113
112
114 def get_repo_group_slug(request):
113 def get_repo_group_slug(request):
115 _group = ''
114 _group = ''
116 if hasattr(request, 'db_repo_group'):
115 if hasattr(request, 'db_repo_group'):
117 # if our requests has set db reference use it for name, this
116 # if our requests has set db reference use it for name, this
118 # translates the example.com/_<id> into proper repo group names
117 # translates the example.com/_<id> into proper repo group names
119 _group = request.db_repo_group.group_name
118 _group = request.db_repo_group.group_name
120 elif getattr(request, 'matchdict', None):
119 elif getattr(request, 'matchdict', None):
121 # pyramid
120 # pyramid
122 _group = request.matchdict.get('repo_group_name')
121 _group = request.matchdict.get('repo_group_name')
123
122
124 if _group:
123 if _group:
125 _group = _group.rstrip('/')
124 _group = _group.rstrip('/')
126 return _group
125 return _group
127
126
128
127
129 def get_user_group_slug(request):
128 def get_user_group_slug(request):
130 _user_group = ''
129 _user_group = ''
131
130
132 if hasattr(request, 'db_user_group'):
131 if hasattr(request, 'db_user_group'):
133 _user_group = request.db_user_group.users_group_name
132 _user_group = request.db_user_group.users_group_name
134 elif getattr(request, 'matchdict', None):
133 elif getattr(request, 'matchdict', None):
135 # pyramid
134 # pyramid
136 _user_group = request.matchdict.get('user_group_id')
135 _user_group = request.matchdict.get('user_group_id')
137 _user_group_name = request.matchdict.get('user_group_name')
136 _user_group_name = request.matchdict.get('user_group_name')
138 try:
137 try:
139 if _user_group:
138 if _user_group:
140 _user_group = UserGroup.get(_user_group)
139 _user_group = UserGroup.get(_user_group)
141 elif _user_group_name:
140 elif _user_group_name:
142 _user_group = UserGroup.get_by_group_name(_user_group_name)
141 _user_group = UserGroup.get_by_group_name(_user_group_name)
143
142
144 if _user_group:
143 if _user_group:
145 _user_group = _user_group.users_group_name
144 _user_group = _user_group.users_group_name
146 except Exception:
145 except Exception:
147 log.exception('Failed to get user group by id and name')
146 log.exception('Failed to get user group by id and name')
148 # catch all failures here
147 # catch all failures here
149 return None
148 return None
150
149
151 return _user_group
150 return _user_group
152
151
153
152
154 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
153 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
155 """
154 """
156 Scans given path for repos and return (name,(type,path)) tuple
155 Scans given path for repos and return (name,(type,path)) tuple
157
156
158 :param path: path to scan for repositories
157 :param path: path to scan for repositories
159 :param recursive: recursive search and return names with subdirs in front
158 :param recursive: recursive search and return names with subdirs in front
160 """
159 """
161
160
162 # remove ending slash for better results
161 # remove ending slash for better results
163 path = path.rstrip(os.sep)
162 path = path.rstrip(os.sep)
164 log.debug('now scanning in %s location recursive:%s...', path, recursive)
163 log.debug('now scanning in %s location recursive:%s...', path, recursive)
165
164
166 def _get_repos(p):
165 def _get_repos(p):
167 dirpaths = _get_dirpaths(p)
166 dirpaths = _get_dirpaths(p)
168 if not _is_dir_writable(p):
167 if not _is_dir_writable(p):
169 log.warning('repo path without write access: %s', p)
168 log.warning('repo path without write access: %s', p)
170
169
171 for dirpath in dirpaths:
170 for dirpath in dirpaths:
172 if os.path.isfile(os.path.join(p, dirpath)):
171 if os.path.isfile(os.path.join(p, dirpath)):
173 continue
172 continue
174 cur_path = os.path.join(p, dirpath)
173 cur_path = os.path.join(p, dirpath)
175
174
176 # skip removed repos
175 # skip removed repos
177 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
176 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
178 continue
177 continue
179
178
180 #skip .<somethin> dirs
179 #skip .<somethin> dirs
181 if dirpath.startswith('.'):
180 if dirpath.startswith('.'):
182 continue
181 continue
183
182
184 try:
183 try:
185 scm_info = get_scm(cur_path)
184 scm_info = get_scm(cur_path)
186 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
185 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
187 except VCSError:
186 except VCSError:
188 if not recursive:
187 if not recursive:
189 continue
188 continue
190 #check if this dir containts other repos for recursive scan
189 #check if this dir containts other repos for recursive scan
191 rec_path = os.path.join(p, dirpath)
190 rec_path = os.path.join(p, dirpath)
192 if os.path.isdir(rec_path):
191 if os.path.isdir(rec_path):
193 for inner_scm in _get_repos(rec_path):
192 for inner_scm in _get_repos(rec_path):
194 yield inner_scm
193 yield inner_scm
195
194
196 return _get_repos(path)
195 return _get_repos(path)
197
196
198
197
199 def _get_dirpaths(p):
198 def _get_dirpaths(p):
200 try:
199 try:
201 # OS-independable way of checking if we have at least read-only
200 # OS-independable way of checking if we have at least read-only
202 # access or not.
201 # access or not.
203 dirpaths = os.listdir(p)
202 dirpaths = os.listdir(p)
204 except OSError:
203 except OSError:
205 log.warning('ignoring repo path without read access: %s', p)
204 log.warning('ignoring repo path without read access: %s', p)
206 return []
205 return []
207
206
208 # os.listpath has a tweak: If a unicode is passed into it, then it tries to
207 # os.listpath has a tweak: If a unicode is passed into it, then it tries to
209 # decode paths and suddenly returns unicode objects itself. The items it
208 # decode paths and suddenly returns unicode objects itself. The items it
210 # cannot decode are returned as strings and cause issues.
209 # cannot decode are returned as strings and cause issues.
211 #
210 #
212 # Those paths are ignored here until a solid solution for path handling has
211 # Those paths are ignored here until a solid solution for path handling has
213 # been built.
212 # been built.
214 expected_type = type(p)
213 expected_type = type(p)
215
214
216 def _has_correct_type(item):
215 def _has_correct_type(item):
217 if type(item) is not expected_type:
216 if type(item) is not expected_type:
218 log.error(
217 log.error(
219 u"Ignoring path %s since it cannot be decoded into unicode.",
218 u"Ignoring path %s since it cannot be decoded into unicode.",
220 # Using "repr" to make sure that we see the byte value in case
219 # Using "repr" to make sure that we see the byte value in case
221 # of support.
220 # of support.
222 repr(item))
221 repr(item))
223 return False
222 return False
224 return True
223 return True
225
224
226 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
225 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
227
226
228 return dirpaths
227 return dirpaths
229
228
230
229
231 def _is_dir_writable(path):
230 def _is_dir_writable(path):
232 """
231 """
233 Probe if `path` is writable.
232 Probe if `path` is writable.
234
233
235 Due to trouble on Cygwin / Windows, this is actually probing if it is
234 Due to trouble on Cygwin / Windows, this is actually probing if it is
236 possible to create a file inside of `path`, stat does not produce reliable
235 possible to create a file inside of `path`, stat does not produce reliable
237 results in this case.
236 results in this case.
238 """
237 """
239 try:
238 try:
240 with tempfile.TemporaryFile(dir=path):
239 with tempfile.TemporaryFile(dir=path):
241 pass
240 pass
242 except OSError:
241 except OSError:
243 return False
242 return False
244 return True
243 return True
245
244
246
245
247 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
246 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
248 """
247 """
249 Returns True if given path is a valid repository False otherwise.
248 Returns True if given path is a valid repository False otherwise.
250 If expect_scm param is given also, compare if given scm is the same
249 If expect_scm param is given also, compare if given scm is the same
251 as expected from scm parameter. If explicit_scm is given don't try to
250 as expected from scm parameter. If explicit_scm is given don't try to
252 detect the scm, just use the given one to check if repo is valid
251 detect the scm, just use the given one to check if repo is valid
253
252
254 :param repo_name:
253 :param repo_name:
255 :param base_path:
254 :param base_path:
256 :param expect_scm:
255 :param expect_scm:
257 :param explicit_scm:
256 :param explicit_scm:
258 :param config:
257 :param config:
259
258
260 :return True: if given path is a valid repository
259 :return True: if given path is a valid repository
261 """
260 """
262 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
261 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
263 log.debug('Checking if `%s` is a valid path for repository. '
262 log.debug('Checking if `%s` is a valid path for repository. '
264 'Explicit type: %s', repo_name, explicit_scm)
263 'Explicit type: %s', repo_name, explicit_scm)
265
264
266 try:
265 try:
267 if explicit_scm:
266 if explicit_scm:
268 detected_scms = [get_scm_backend(explicit_scm)(
267 detected_scms = [get_scm_backend(explicit_scm)(
269 full_path, config=config).alias]
268 full_path, config=config).alias]
270 else:
269 else:
271 detected_scms = get_scm(full_path)
270 detected_scms = get_scm(full_path)
272
271
273 if expect_scm:
272 if expect_scm:
274 return detected_scms[0] == expect_scm
273 return detected_scms[0] == expect_scm
275 log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
274 log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
276 return True
275 return True
277 except VCSError:
276 except VCSError:
278 log.debug('path: %s is not a valid repo !', full_path)
277 log.debug('path: %s is not a valid repo !', full_path)
279 return False
278 return False
280
279
281
280
282 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
281 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
283 """
282 """
284 Returns True if given path is a repository group, False otherwise
283 Returns True if given path is a repository group, False otherwise
285
284
286 :param repo_name:
285 :param repo_name:
287 :param base_path:
286 :param base_path:
288 """
287 """
289 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
288 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
290 log.debug('Checking if `%s` is a valid path for repository group',
289 log.debug('Checking if `%s` is a valid path for repository group',
291 repo_group_name)
290 repo_group_name)
292
291
293 # check if it's not a repo
292 # check if it's not a repo
294 if is_valid_repo(repo_group_name, base_path):
293 if is_valid_repo(repo_group_name, base_path):
295 log.debug('Repo called %s exist, it is not a valid repo group', repo_group_name)
294 log.debug('Repo called %s exist, it is not a valid repo group', repo_group_name)
296 return False
295 return False
297
296
298 try:
297 try:
299 # we need to check bare git repos at higher level
298 # we need to check bare git repos at higher level
300 # since we might match branches/hooks/info/objects or possible
299 # since we might match branches/hooks/info/objects or possible
301 # other things inside bare git repo
300 # other things inside bare git repo
302 maybe_repo = os.path.dirname(full_path)
301 maybe_repo = os.path.dirname(full_path)
303 if maybe_repo == base_path:
302 if maybe_repo == base_path:
304 # skip root level repo check, we know root location CANNOT BE a repo group
303 # skip root level repo check, we know root location CANNOT BE a repo group
305 return False
304 return False
306
305
307 scm_ = get_scm(maybe_repo)
306 scm_ = get_scm(maybe_repo)
308 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
307 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
309 return False
308 return False
310 except VCSError:
309 except VCSError:
311 pass
310 pass
312
311
313 # check if it's a valid path
312 # check if it's a valid path
314 if skip_path_check or os.path.isdir(full_path):
313 if skip_path_check or os.path.isdir(full_path):
315 log.debug('path: %s is a valid repo group !', full_path)
314 log.debug('path: %s is a valid repo group !', full_path)
316 return True
315 return True
317
316
318 log.debug('path: %s is not a valid repo group !', full_path)
317 log.debug('path: %s is not a valid repo group !', full_path)
319 return False
318 return False
320
319
321
320
322 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
321 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
323 while True:
322 while True:
324 ok = raw_input(prompt)
323 ok = raw_input(prompt)
325 if ok.lower() in ('y', 'ye', 'yes'):
324 if ok.lower() in ('y', 'ye', 'yes'):
326 return True
325 return True
327 if ok.lower() in ('n', 'no', 'nop', 'nope'):
326 if ok.lower() in ('n', 'no', 'nop', 'nope'):
328 return False
327 return False
329 retries = retries - 1
328 retries = retries - 1
330 if retries < 0:
329 if retries < 0:
331 raise IOError
330 raise IOError
332 print(complaint)
331 print(complaint)
333
332
334 # propagated from mercurial documentation
333 # propagated from mercurial documentation
335 ui_sections = [
334 ui_sections = [
336 'alias', 'auth',
335 'alias', 'auth',
337 'decode/encode', 'defaults',
336 'decode/encode', 'defaults',
338 'diff', 'email',
337 'diff', 'email',
339 'extensions', 'format',
338 'extensions', 'format',
340 'merge-patterns', 'merge-tools',
339 'merge-patterns', 'merge-tools',
341 'hooks', 'http_proxy',
340 'hooks', 'http_proxy',
342 'smtp', 'patch',
341 'smtp', 'patch',
343 'paths', 'profiling',
342 'paths', 'profiling',
344 'server', 'trusted',
343 'server', 'trusted',
345 'ui', 'web', ]
344 'ui', 'web', ]
346
345
347
346
348 def config_data_from_db(clear_session=True, repo=None):
347 def config_data_from_db(clear_session=True, repo=None):
349 """
348 """
350 Read the configuration data from the database and return configuration
349 Read the configuration data from the database and return configuration
351 tuples.
350 tuples.
352 """
351 """
353 from rhodecode.model.settings import VcsSettingsModel
352 from rhodecode.model.settings import VcsSettingsModel
354
353
355 config = []
354 config = []
356
355
357 sa = meta.Session()
356 sa = meta.Session()
358 settings_model = VcsSettingsModel(repo=repo, sa=sa)
357 settings_model = VcsSettingsModel(repo=repo, sa=sa)
359
358
360 ui_settings = settings_model.get_ui_settings()
359 ui_settings = settings_model.get_ui_settings()
361
360
362 ui_data = []
361 ui_data = []
363 for setting in ui_settings:
362 for setting in ui_settings:
364 if setting.active:
363 if setting.active:
365 ui_data.append((setting.section, setting.key, setting.value))
364 ui_data.append((setting.section, setting.key, setting.value))
366 config.append((
365 config.append((
367 safe_str(setting.section), safe_str(setting.key),
366 safe_str(setting.section), safe_str(setting.key),
368 safe_str(setting.value)))
367 safe_str(setting.value)))
369 if setting.key == 'push_ssl':
368 if setting.key == 'push_ssl':
370 # force set push_ssl requirement to False, rhodecode
369 # force set push_ssl requirement to False, rhodecode
371 # handles that
370 # handles that
372 config.append((
371 config.append((
373 safe_str(setting.section), safe_str(setting.key), False))
372 safe_str(setting.section), safe_str(setting.key), False))
374 log.debug(
373 log.debug(
375 'settings ui from db@repo[%s]: %s',
374 'settings ui from db@repo[%s]: %s',
376 repo,
375 repo,
377 ','.join(map(lambda s: '[{}] {}={}'.format(*s), ui_data)))
376 ','.join(map(lambda s: '[{}] {}={}'.format(*s), ui_data)))
378 if clear_session:
377 if clear_session:
379 meta.Session.remove()
378 meta.Session.remove()
380
379
381 # TODO: mikhail: probably it makes no sense to re-read hooks information.
380 # TODO: mikhail: probably it makes no sense to re-read hooks information.
382 # It's already there and activated/deactivated
381 # It's already there and activated/deactivated
383 skip_entries = []
382 skip_entries = []
384 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
383 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
385 if 'pull' not in enabled_hook_classes:
384 if 'pull' not in enabled_hook_classes:
386 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
385 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
387 if 'push' not in enabled_hook_classes:
386 if 'push' not in enabled_hook_classes:
388 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
387 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
389 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
388 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
390 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
389 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
391
390
392 config = [entry for entry in config if entry[:2] not in skip_entries]
391 config = [entry for entry in config if entry[:2] not in skip_entries]
393
392
394 return config
393 return config
395
394
396
395
397 def make_db_config(clear_session=True, repo=None):
396 def make_db_config(clear_session=True, repo=None):
398 """
397 """
399 Create a :class:`Config` instance based on the values in the database.
398 Create a :class:`Config` instance based on the values in the database.
400 """
399 """
401 config = Config()
400 config = Config()
402 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
401 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
403 for section, option, value in config_data:
402 for section, option, value in config_data:
404 config.set(section, option, value)
403 config.set(section, option, value)
405 return config
404 return config
406
405
407
406
408 def get_enabled_hook_classes(ui_settings):
407 def get_enabled_hook_classes(ui_settings):
409 """
408 """
410 Return the enabled hook classes.
409 Return the enabled hook classes.
411
410
412 :param ui_settings: List of ui_settings as returned
411 :param ui_settings: List of ui_settings as returned
413 by :meth:`VcsSettingsModel.get_ui_settings`
412 by :meth:`VcsSettingsModel.get_ui_settings`
414
413
415 :return: a list with the enabled hook classes. The order is not guaranteed.
414 :return: a list with the enabled hook classes. The order is not guaranteed.
416 :rtype: list
415 :rtype: list
417 """
416 """
418 enabled_hooks = []
417 enabled_hooks = []
419 active_hook_keys = [
418 active_hook_keys = [
420 key for section, key, value, active in ui_settings
419 key for section, key, value, active in ui_settings
421 if section == 'hooks' and active]
420 if section == 'hooks' and active]
422
421
423 hook_names = {
422 hook_names = {
424 RhodeCodeUi.HOOK_PUSH: 'push',
423 RhodeCodeUi.HOOK_PUSH: 'push',
425 RhodeCodeUi.HOOK_PULL: 'pull',
424 RhodeCodeUi.HOOK_PULL: 'pull',
426 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
425 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
427 }
426 }
428
427
429 for key in active_hook_keys:
428 for key in active_hook_keys:
430 hook = hook_names.get(key)
429 hook = hook_names.get(key)
431 if hook:
430 if hook:
432 enabled_hooks.append(hook)
431 enabled_hooks.append(hook)
433
432
434 return enabled_hooks
433 return enabled_hooks
435
434
436
435
437 def set_rhodecode_config(config):
436 def set_rhodecode_config(config):
438 """
437 """
439 Updates pyramid config with new settings from database
438 Updates pyramid config with new settings from database
440
439
441 :param config:
440 :param config:
442 """
441 """
443 from rhodecode.model.settings import SettingsModel
442 from rhodecode.model.settings import SettingsModel
444 app_settings = SettingsModel().get_all_settings()
443 app_settings = SettingsModel().get_all_settings()
445
444
446 for k, v in app_settings.items():
445 for k, v in app_settings.items():
447 config[k] = v
446 config[k] = v
448
447
449
448
450 def get_rhodecode_realm():
449 def get_rhodecode_realm():
451 """
450 """
452 Return the rhodecode realm from database.
451 Return the rhodecode realm from database.
453 """
452 """
454 from rhodecode.model.settings import SettingsModel
453 from rhodecode.model.settings import SettingsModel
455 realm = SettingsModel().get_setting_by_name('realm')
454 realm = SettingsModel().get_setting_by_name('realm')
456 return safe_str(realm.app_settings_value)
455 return safe_str(realm.app_settings_value)
457
456
458
457
459 def get_rhodecode_base_path():
458 def get_rhodecode_base_path():
460 """
459 """
461 Returns the base path. The base path is the filesystem path which points
460 Returns the base path. The base path is the filesystem path which points
462 to the repository store.
461 to the repository store.
463 """
462 """
464 from rhodecode.model.settings import SettingsModel
463 from rhodecode.model.settings import SettingsModel
465 paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
464 paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
466 return safe_str(paths_ui.ui_value)
465 return safe_str(paths_ui.ui_value)
467
466
468
467
469 def map_groups(path):
468 def map_groups(path):
470 """
469 """
471 Given a full path to a repository, create all nested groups that this
470 Given a full path to a repository, create all nested groups that this
472 repo is inside. This function creates parent-child relationships between
471 repo is inside. This function creates parent-child relationships between
473 groups and creates default perms for all new groups.
472 groups and creates default perms for all new groups.
474
473
475 :param paths: full path to repository
474 :param paths: full path to repository
476 """
475 """
477 from rhodecode.model.repo_group import RepoGroupModel
476 from rhodecode.model.repo_group import RepoGroupModel
478 sa = meta.Session()
477 sa = meta.Session()
479 groups = path.split(Repository.NAME_SEP)
478 groups = path.split(Repository.NAME_SEP)
480 parent = None
479 parent = None
481 group = None
480 group = None
482
481
483 # last element is repo in nested groups structure
482 # last element is repo in nested groups structure
484 groups = groups[:-1]
483 groups = groups[:-1]
485 rgm = RepoGroupModel(sa)
484 rgm = RepoGroupModel(sa)
486 owner = User.get_first_super_admin()
485 owner = User.get_first_super_admin()
487 for lvl, group_name in enumerate(groups):
486 for lvl, group_name in enumerate(groups):
488 group_name = '/'.join(groups[:lvl] + [group_name])
487 group_name = '/'.join(groups[:lvl] + [group_name])
489 group = RepoGroup.get_by_group_name(group_name)
488 group = RepoGroup.get_by_group_name(group_name)
490 desc = '%s group' % group_name
489 desc = '%s group' % group_name
491
490
492 # skip folders that are now removed repos
491 # skip folders that are now removed repos
493 if REMOVED_REPO_PAT.match(group_name):
492 if REMOVED_REPO_PAT.match(group_name):
494 break
493 break
495
494
496 if group is None:
495 if group is None:
497 log.debug('creating group level: %s group_name: %s',
496 log.debug('creating group level: %s group_name: %s',
498 lvl, group_name)
497 lvl, group_name)
499 group = RepoGroup(group_name, parent)
498 group = RepoGroup(group_name, parent)
500 group.group_description = desc
499 group.group_description = desc
501 group.user = owner
500 group.user = owner
502 sa.add(group)
501 sa.add(group)
503 perm_obj = rgm._create_default_perms(group)
502 perm_obj = rgm._create_default_perms(group)
504 sa.add(perm_obj)
503 sa.add(perm_obj)
505 sa.flush()
504 sa.flush()
506
505
507 parent = group
506 parent = group
508 return group
507 return group
509
508
510
509
def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """
    maps all repos given in initial_repo_list, non existing repositories
    are created, if remove_obsolete is True it also checks for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    :returns: tuple of (added, removed) repository/group names
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.repo_group import RepoGroupModel
    from rhodecode.model.settings import SettingsModel

    sa = meta.Session()
    repo_model = RepoModel()
    # new repositories found on disk are owned by the first super-admin
    user = User.get_first_super_admin()
    added = []

    # creation defaults
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        # ensure the whole parent group chain exists in the DB
        group = map_groups(name)
        unicode_name = safe_unicode(name)
        db_repo = repo_model.get_by_repo_name(unicode_name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now', name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            db_repo = repo_model._create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repo_group=getattr(group, 'group_id', None),
                owner=user,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private,
                state=Repository.STATE_CREATED
            )
            sa.commit()
            # we added that repo just now, and make sure we updated server info
            if db_repo.repo_type == 'git':
                git_repo = db_repo.scm_instance()
                # update repository server-info
                log.debug('Running update server info')
                git_repo._update_server_info()

            db_repo.update_commit_cache()

        # (re)install VCS hooks for every scanned repo, not only new ones
        config = db_repo._config
        config.set('extensions', 'largefiles', '')
        repo = db_repo.scm_instance(config=config)
        repo.install_hooks()

    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`",
                          repo.repo_name)
                try:
                    RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
                    sa.commit()
                    removed.append(repo.repo_name)
                except Exception:
                    # don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()

        def splitter(full_repo_name):
            # returns the parent group path of a repo name, or None at top level
            _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
            gr_name = None
            if len(_parts) == 2:
                gr_name = _parts[0]
            return gr_name

        initial_repo_group_list = [splitter(x) for x in
                                   initial_repo_list.keys() if splitter(x)]

        # remove from database those repository groups that are not in the
        # filesystem due to parent child relationships we need to delete them
        # in a specific order of most nested first
        all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
        nested_sort = lambda gr: len(gr.split('/'))
        for group_name in sorted(all_groups, key=nested_sort, reverse=True):
            if group_name not in initial_repo_group_list:
                repo_group = RepoGroup.get_by_group_name(group_name)
                # skip groups that still have children or that do exist on disk
                if (repo_group.children.all() or
                    not RepoGroupModel().check_exist_filesystem(
                        group_name=group_name, exc_on_failure=False)):
                    continue

                log.info(
                    'Removing non-existing repository group found in db `%s`',
                    group_name)
                try:
                    RepoGroupModel(sa).delete(group_name, fs_remove=False)
                    sa.commit()
                    removed.append(group_name)
                except Exception:
                    # don't hold further removals on error
                    log.exception(
                        'Unable to remove repository group `%s`',
                        group_name)
                    sa.rollback()
                    raise

    return added, removed
630
629
631
630
def load_rcextensions(root_path):
    """
    Load the optional ``rcextensions`` package found under *root_path* and
    register it as ``rhodecode.EXTENSIONS``.

    Also merges any ``EXTRA_MAPPINGS`` the extensions define into the
    language-extension map used for syntax highlighting.

    :param root_path: directory that may contain an ``rcextensions`` package
    """
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path)
    sys.path.append(path)

    try:
        rcextensions = __import__('rcextensions')
    except ImportError:
        # only warn when the directory exists but is not importable,
        # i.e. a broken rcextensions package; a missing one is fine.
        # NOTE: log.warn is a deprecated alias of log.warning
        if os.path.isdir(os.path.join(path, 'rcextensions')):
            log.warning('Unable to load rcextensions from %s', path)
        rcextensions = None

    if rcextensions:
        log.info('Loaded rcextensions from %s...', rcextensions)
        rhodecode.EXTENSIONS = rcextensions

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(
            getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
653
652
654
653
def get_custom_lexer(extension):
    """
    returns a custom lexer if it is defined in rcextensions module, or None
    if there's no custom lexer defined
    """
    import rhodecode
    from pygments import lexers

    # RhodeCode-specific override for mako templates
    if extension == 'mako':
        return lexers.get_lexer_by_name('html+mako')

    # rcextensions may re-map this extension to another lexer name
    extra_lexers = rhodecode.EXTENSIONS and getattr(
        rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
    if extra_lexers and extension in extra_lexers:
        return lexers.get_lexer_by_name(extra_lexers[extension])
672
671
673
672
674 #==============================================================================
673 #==============================================================================
675 # TEST FUNCTIONS AND CREATORS
674 # TEST FUNCTIONS AND CREATORS
676 #==============================================================================
675 #==============================================================================
def create_test_index(repo_location, config):
    """
    Makes default test index.
    """
    import rc_testdata

    # the search index is unpacked next to the configured index location
    index_dir = os.path.dirname(config['search.location'])
    rc_testdata.extract_search_index('vcs_search_index', index_dir)
685
684
686
685
def create_test_directory(test_path):
    """
    Create test directory if it doesn't exist.
    """
    # nothing to do when the directory is already present
    if os.path.isdir(test_path):
        return
    log.debug('Creating testdir %s', test_path)
    os.makedirs(test_path)
694
693
695
694
def create_test_database(test_path, config):
    """
    Makes a fresh database.

    :param test_path: base path used to derive test root paths in settings
    :param config: app config providing 'sqlalchemy.db1.url' and 'here'
    """
    from rhodecode.lib.db_manage import DbManage

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s', dbconf)

    # force_ask=True skips interactive prompts during table creation
    dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
                        tests=True, cli_args={'force_ask': True})
    dbmanage.create_tables(override=True)
    dbmanage.set_db_version()
    # for tests dynamically set new root paths based on generated content
    dbmanage.create_settings(dbmanage.config_prompt(test_path))
    dbmanage.create_default_user()
    dbmanage.create_test_admin_and_users()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()
717
716
718
717
def create_test_repositories(test_path, config):
    """
    Creates test repositories in the temporary directory. Repositories are
    extracted from archives within the rc_testdata package.

    :param test_path: directory the test repositories are extracted into
    :param config: app config providing 'search.location' and 'cache_dir'
    """
    import rc_testdata
    from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO

    log.debug('making test vcs repositories')

    idx_path = config['search.location']
    data_path = config['cache_dir']

    # clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s', idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s', data_path)
        shutil.rmtree(data_path)

    rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
    rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))

    # Note: Subversion is in the process of being integrated with the system,
    # until we have a properly packed version of the test svn repository, this
    # tries to copy over the repo from a package "rc_testdata"
    svn_repo_path = rc_testdata.get_svn_repo_archive()
    with tarfile.open(svn_repo_path) as tar:
        # NOTE(review): extractall on the bundled, trusted archive; would be
        # unsafe (path traversal) for untrusted tarballs
        tar.extractall(jn(test_path, SVN_REPO))
750
749
751
750
def password_changed(auth_user, session):
    """
    Return True when the user's current password hash no longer matches the
    one stored in the web session (i.e. the password changed elsewhere).
    """
    # Never report password change in case of default user or anonymous user.
    if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
        return False

    current_hash = md5(auth_user.password) if auth_user.password else None
    session_hash = session.get('rhodecode_user', {}).get('password', '')
    return current_hash != session_hash
761
760
762
761
def read_opensource_licenses():
    """
    Return the bundled open-source license metadata, cached per process
    in the module-level ``_license_cache``.
    """
    global _license_cache

    if _license_cache:
        return _license_cache

    raw = pkg_resources.resource_string(
        'rhodecode', 'config/licenses.json')
    _license_cache = json.loads(raw)
    return _license_cache
772
771
773
772
def generate_platform_uuid():
    """
    Generates platform UUID based on its name.

    :returns: stable sha256 hex digest derived from the platform string,
        or the literal 'UNDEFINED' when it cannot be computed
    """
    import platform

    try:
        uuid_list = [platform.platform()]
        # encode explicitly: hashlib digests require bytes; for the ascii
        # platform string this produces the same digest as before
        return hashlib.sha256(':'.join(uuid_list).encode('utf-8')).hexdigest()
    except Exception as e:
        log.error('Failed to generate host uuid: %s', e)
        return 'UNDEFINED'
786
785
787
786
def send_test_email(recipients, email_body='TEST EMAIL'):
    """
    Simple code for generating test emails.
    Usage::

        from rhodecode.lib import utils
        utils.send_test_email()
    """
    from rhodecode.lib.celerylib import tasks, run_task

    # the same body serves as both the plaintext and the html part
    email_body_plaintext = email_body
    subject = 'SUBJECT FROM: {}'.format(socket.gethostname())
    tasks.send_email(recipients, subject, email_body_plaintext, email_body)
@@ -1,1208 +1,1207 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Some simple helper functions
23 Some simple helper functions
24 """
24 """
25
25
26 import collections
26 import collections
27 import datetime
27 import datetime
28 import dateutil.relativedelta
28 import dateutil.relativedelta
29 import hashlib
29 import hashlib
30 import logging
30 import logging
31 import re
31 import re
32 import sys
32 import sys
33 import time
33 import time
34 import urllib
34 import urllib
35 import urlobject
35 import urlobject
36 import uuid
36 import uuid
37 import getpass
37 import getpass
38 import socket
38 import socket
39 import random
39 import random
40 from functools import update_wrapper, partial, wraps
40 from functools import update_wrapper, partial, wraps
41
41
42 import pygments.lexers
42 import pygments.lexers
43 import sqlalchemy
43 import sqlalchemy
44 import sqlalchemy.engine.url
44 import sqlalchemy.engine.url
45 import sqlalchemy.exc
45 import sqlalchemy.exc
46 import sqlalchemy.sql
46 import sqlalchemy.sql
47 import webob
47 import webob
48 import pyramid.threadlocal
48 import pyramid.threadlocal
49 from pyramid import compat
49 from pyramid import compat
50 from pyramid.settings import asbool
50 from pyramid.settings import asbool
51
51
52 import rhodecode
52 import rhodecode
53 from rhodecode.translation import _, _pluralize
53 from rhodecode.translation import _, _pluralize
54
54
55
55
def md5(s):
    """Return the hexadecimal MD5 digest of ``s``."""
    digest = hashlib.md5(s)
    return digest.hexdigest()
58
58
59
59
def md5_safe(s):
    """Return the hex MD5 digest of ``s`` after coercing it to ``str``."""
    as_str = safe_str(s)
    return md5(as_str)
62
62
63
63
def sha1(s):
    """Return the hexadecimal SHA-1 digest of ``s``."""
    digest = hashlib.sha1(s)
    return digest.hexdigest()
66
66
67
67
def sha1_safe(s):
    """Return the hex SHA-1 digest of ``s`` after coercing it to ``str``."""
    as_str = safe_str(s)
    return sha1(as_str)
70
70
71
71
def __get_lem(extra_mapping=None):
    """
    Get language extension map based on what's inside pygments lexers

    :param extra_mapping: optional dict of extension -> lexer name merged in
        for extensions pygments does not already know about
    :return: dict mapping a lowercased file extension to a list of lexer
        descriptions
    """
    d = collections.defaultdict(lambda: [])

    def __clean(s):
        # pygments filename patterns look like '*.py' or '*.php[345]'
        s = s.lstrip('*')
        s = s.lstrip('.')

        if s.find('[') != -1:
            # expand '[345]'-style suffix alternatives into concrete extensions
            exts = []
            start, stop = s.find('['), s.find(']')

            for suffix in s[start + 1:stop]:
                exts.append(s[:s.find('[')] + suffix)
            return [e.lower() for e in exts]
        else:
            return [s.lower()]

    for lx, t in sorted(pygments.lexers.LEXERS.items()):
        # t[-2] holds the lexer's filename patterns
        m = map(__clean, t[-2])
        if m:
            # NOTE: relies on py2 semantics - map() returns a list here and
            # reduce() is a builtin; flattens the per-pattern extension lists
            m = reduce(lambda x, y: x + y, m)
            for ext in m:
                desc = lx.replace('Lexer', '')
                d[ext].append(desc)

    data = dict(d)

    extra_mapping = extra_mapping or {}
    if extra_mapping:
        for k, v in extra_mapping.items():
            if k not in data:
                # register new mapping2lexer
                data[k] = [v]

    return data
110
110
111
111
def str2bool(_str):
    """
    returns True/False value from given string, it tries to translate the
    string into boolean

    :param _str: string value to translate into boolean
    :rtype: boolean
    :returns: boolean from given string
    """
    if _str is None:
        return False
    # pass booleans (and 0/1, which compare equal to them) through unchanged
    if _str in (True, False):
        return _str
    return str(_str).strip().lower() in ('t', 'true', 'y', 'yes', 'on', '1')
127
127
128
128
def aslist(obj, sep=None, strip=True):
    """
    Returns given string separated by sep as list

    :param obj: string, list/tuple, None, or any other object
    :param sep: separator passed to ``str.split`` for string input
    :param strip: strip whitespace from each split element
    """
    if obj is None:
        return []
    if isinstance(obj, (list, tuple)):
        return obj
    if isinstance(obj, (basestring,)):
        parts = obj.split(sep)
        if strip:
            parts = [part.strip() for part in parts]
        return parts
    # any other object becomes a single-element list
    return [obj]
148
148
149
149
def convert_line_endings(line, mode):
    """
    Converts a given line "line end" accordingly to given mode

    Available modes are::
        0 - Unix
        1 - Mac
        2 - DOS

    Any other mode leaves the line untouched.

    :param line: given line to convert
    :param mode: mode to convert to
    :rtype: str
    :return: converted line according to mode
    """
    if mode == 0:
        # CRLF first so lone CR replacement cannot double-convert
        return line.replace('\r\n', '\n').replace('\r', '\n')
    if mode == 1:
        return line.replace('\r\n', '\r').replace('\n', '\r')
    if mode == 2:
        # lone CR or lone LF -> CRLF; existing CRLF pairs untouched
        return re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
    return line
173
173
174
174
def detect_mode(line, default):
    """
    Detects line break for given line, if line break couldn't be found
    given default value is returned

    :param line: str line
    :param default: default
    :rtype: int
    :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS
    """
    # CRLF must be tested before LF and CR alone
    for suffix, mode in (('\r\n', 2), ('\n', 0), ('\r', 1)):
        if line.endswith(suffix):
            return mode
    return default
193
193
194
194
def safe_int(val, default=None):
    """
    Returns int() of val if val is not convertable to int use default
    instead

    :param val: value to convert
    :param default: fallback returned when conversion fails
    """
    try:
        return int(val)
    except (ValueError, TypeError):
        return default
210
210
211
211
def safe_unicode(str_, from_encoding=None, use_chardet=False):
    """
    safe unicode function. Does few trick to turn str_ into unicode

    In case of UnicodeDecode error, we try to return it with encoding detected
    by chardet library if it fails fallback to unicode with errors replaced

    :param str_: string to decode
    :param from_encoding: encoding (or list of encodings) to try first;
        defaults to the configured ``default_encoding`` list
    :param use_chardet: fall back to chardet-based encoding detection
    :rtype: unicode
    :returns: unicode object
    """
    if isinstance(str_, unicode):
        return str_

    if not from_encoding:
        # configured encodings, e.g. 'utf8,iso-8859-1' -> ['utf8', 'iso-8859-1']
        DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
                                                        'utf8'), sep=',')
        from_encoding = DEFAULT_ENCODINGS

    if not isinstance(from_encoding, (list, tuple)):
        from_encoding = [from_encoding]

    # cheap attempt first: the default (ascii) codec
    try:
        return unicode(str_)
    except UnicodeDecodeError:
        pass

    # then each configured encoding, in order
    for enc in from_encoding:
        try:
            return unicode(str_, enc)
        except UnicodeDecodeError:
            pass

    if use_chardet:
        try:
            import chardet
            encoding = chardet.detect(str_)['encoding']
            if encoding is None:
                raise Exception()
            return str_.decode(encoding)
        except (ImportError, UnicodeDecodeError, Exception):
            # chardet missing or detection/decoding failed - replace errors
            return unicode(str_, from_encoding[0], 'replace')
    else:
        return unicode(str_, from_encoding[0], 'replace')
256
256
def safe_str(unicode_, to_encoding=None, use_chardet=False):
    """
    safe str function. Does few trick to turn unicode_ into string

    In case of UnicodeEncodeError, we try to return it with encoding detected
    by chardet library if it fails fallback to string with errors replaced

    :param unicode_: unicode to encode
    :param to_encoding: optional encoding (or list of encodings) to try first
    :param use_chardet: detect a usable encoding with chardet as last resort
    :rtype: str
    :returns: str object
    """

    # if it's not basestr cast to str
    if not isinstance(unicode_, compat.string_types):
        return str(unicode_)

    if isinstance(unicode_, str):
        return unicode_

    if not to_encoding:
        DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
                                                        'utf8'), sep=',')
        to_encoding = DEFAULT_ENCODINGS

    if not isinstance(to_encoding, (list, tuple)):
        to_encoding = [to_encoding]

    for enc in to_encoding:
        try:
            return unicode_.encode(enc)
        except UnicodeEncodeError:
            pass

    if use_chardet:
        try:
            import chardet
            encoding = chardet.detect(unicode_)['encoding']
            if encoding is None:
                # NOTE: mirror safe_unicode() here. A bare
                # ``raise UnicodeEncodeError()`` is a TypeError (the class
                # requires 5 constructor arguments) and would escape the
                # except clause below instead of triggering the fallback.
                raise Exception()

            return unicode_.encode(encoding)
        except (ImportError, UnicodeEncodeError, Exception):
            return unicode_.encode(to_encoding[0], 'replace')
    else:
        return unicode_.encode(to_encoding[0], 'replace')
302
302
303
303
def remove_suffix(s, suffix):
    """
    Return `s` with `suffix` stripped from its end, if present.

    Guards against an empty suffix: ``s.endswith('')`` is always True and
    ``s[:-0]`` slices to the empty string, which would wipe the input.

    :param s: input string
    :param suffix: suffix to remove once from the end of `s`
    """
    if suffix and s.endswith(suffix):
        s = s[:-len(suffix)]
    return s
308
308
309
309
def remove_prefix(s, prefix):
    """
    Return `s` with `prefix` stripped from its start, if present.

    :param s: input string
    :param prefix: prefix to remove once from the start of `s`
    """
    return s[len(prefix):] if s.startswith(prefix) else s
314
314
315
315
def find_calling_context(ignore_modules=None):
    """
    Look through the calling stack and return the frame which called
    this function and is part of core module ( ie. rhodecode.* )

    :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
    :return: a frame object, or None when no matching caller is found

    usage::
        from rhodecode.lib.utils2 import find_calling_context

        calling_context = find_calling_context(ignore_modules=[
            'rhodecode.lib.caching_query',
            'rhodecode.model.settings',
        ])

        if calling_context:
            cc_str = 'call context %s:%s' % (
                calling_context.f_code.co_filename,
                calling_context.f_lineno,
            )
            print(cc_str)
    """

    ignore_modules = ignore_modules or []

    # start two frames up: skip this function's own frame and its
    # immediate caller
    f = sys._getframe(2)
    while f.f_back is not None:
        name = f.f_globals.get('__name__')
        # only frames from our own top-level package (e.g. 'rhodecode.*')
        # count as a "calling context"
        if name and name.startswith(__name__.split('.')[0]):
            if name not in ignore_modules:
                return f
        f = f.f_back
    return None
349
349
350
350
def ping_connection(connection, branch):
    """
    SQLAlchemy ``engine_connect`` event listener: validate a pooled
    connection with ``SELECT 1`` and transparently reconnect when the
    DBAPI reports the connection as invalidated.

    :param connection: the SQLAlchemy Connection being checked out
    :param branch: True when this is a sub-connection of an existing one
    """
    if branch:
        # "branch" refers to a sub-connection of a connection,
        # we don't want to bother pinging on these.
        return

    # turn off "close with result". This flag is only used with
    # "connectionless" execution, otherwise will be False in any case
    save_should_close_with_result = connection.should_close_with_result
    connection.should_close_with_result = False

    try:
        # run a SELECT 1. use a core select() so that
        # the SELECT of a scalar value without a table is
        # appropriately formatted for the backend
        connection.scalar(sqlalchemy.sql.select([1]))
    except sqlalchemy.exc.DBAPIError as err:
        # catch SQLAlchemy's DBAPIError, which is a wrapper
        # for the DBAPI's exception. It includes a .connection_invalidated
        # attribute which specifies if this connection is a "disconnect"
        # condition, which is based on inspection of the original exception
        # by the dialect in use.
        if err.connection_invalidated:
            # run the same SELECT again - the connection will re-validate
            # itself and establish a new connection. The disconnect detection
            # here also causes the whole connection pool to be invalidated
            # so that all stale connections are discarded.
            connection.scalar(sqlalchemy.sql.select([1]))
        else:
            raise
    finally:
        # restore "close with result"
        connection.should_close_with_result = save_should_close_with_result
384
384
385
385
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
    """
    Custom engine_from_config functions.

    Wraps ``sqlalchemy.engine_from_config`` and optionally wires in
    connection pinging (`sqlalchemy.db1.ping_connection`) and query debug
    logging (`sqlalchemy.db1.debug_query`) based on configuration flags.

    :param configuration: dict-like config; rhodecode-specific keys are
        popped so sqlalchemy never sees them
    :param prefix: key prefix passed through to sqlalchemy
    :return: configured sqlalchemy Engine
    """
    log = logging.getLogger('sqlalchemy.engine')
    # pop our custom flags before handing the config to sqlalchemy
    use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
    debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))

    engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)

    def color_sql(sql):
        # wrap text in ANSI escape codes for terminal readability
        color_seq = '\033[1;33m'  # This is yellow: code 33
        normal = '\x1b[0m'
        return ''.join([color_seq, sql, normal])

    if use_ping_connection:
        log.debug('Adding ping_connection on the engine config.')
        sqlalchemy.event.listen(engine, "engine_connect", ping_connection)

    if debug:
        # attach events only for debug configuration
        def before_cursor_execute(conn, cursor, statement,
                                  parameters, context, executemany):
            setattr(conn, 'query_start_time', time.time())
            log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
            calling_context = find_calling_context(ignore_modules=[
                'rhodecode.lib.caching_query',
                'rhodecode.model.settings',
            ])
            if calling_context:
                log.info(color_sql('call context %s:%s' % (
                    calling_context.f_code.co_filename,
                    calling_context.f_lineno,
                )))

        def after_cursor_execute(conn, cursor, statement,
                                 parameters, context, executemany):
            # drop the marker set by before_cursor_execute
            delattr(conn, 'query_start_time')

        sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
        sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)

    return engine
427
427
428
428
def get_encryption_key(config):
    """
    Return the secret used for encrypted values, falling back to the
    beaker session secret when no explicit secret is configured.

    :param config: dict-like configuration; must contain
        'beaker.session.secret'
    """
    explicit_secret = config.get('rhodecode.encrypted_values.secret')
    session_secret = config['beaker.session.secret']
    if explicit_secret:
        return explicit_secret
    return session_secret
433
433
434
434
def age(prevdate, now=None, show_short_version=False, show_suffix=True,
        short_format=False):
    """
    Turns a datetime into an age string.
    If show_short_version is True, this generates a shorter string with
    an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.

    * IMPORTANT*
    Code of this function is written in special way so it's easier to
    backport it to javascript. If you mean to update it, please also update
    `jquery.timeago-extension.js` file

    :param prevdate: datetime object
    :param now: get current time, if not define we use
        `datetime.datetime.now()`
    :param show_short_version: if it should approximate the date and
        return a shorter string
    :param show_suffix: append 'ago' / prepend 'in' to the result
    :param short_format: show short format, eg 2D instead of 2 days
    :rtype: unicode
    :returns: unicode words describing age
    """

    def _get_relative_delta(now, prevdate):
        # split the difference into calendar components
        base = dateutil.relativedelta.relativedelta(now, prevdate)
        return {
            'year': base.years,
            'month': base.months,
            'day': base.days,
            'hour': base.hours,
            'minute': base.minutes,
            'second': base.seconds,
        }

    def _is_leap_year(year):
        return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

    def get_month(prevdate):
        return prevdate.month

    def get_year(prevdate):
        return prevdate.year

    now = now or datetime.datetime.now()
    order = ['year', 'month', 'day', 'hour', 'minute', 'second']
    deltas = {}
    future = False

    # for future dates, swap the operands so the delta stays positive and
    # remember the direction for the final phrasing
    if prevdate > now:
        now_old = now
        now = prevdate
        prevdate = now_old
        future = True
    if future:
        prevdate = prevdate.replace(microsecond=0)
    # Get date parts deltas
    for part in order:
        rel_delta = _get_relative_delta(now, prevdate)
        deltas[part] = rel_delta[part]

    # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
    # not 1 hour, -59 minutes and -59 seconds)
    offsets = [[5, 60], [4, 60], [3, 24]]
    for element in offsets:  # seconds, minutes, hours
        num = element[0]
        length = element[1]

        part = order[num]
        carry_part = order[num - 1]

        if deltas[part] < 0:
            deltas[part] += length
            deltas[carry_part] -= 1

    # Same thing for days except that the increment depends on the (variable)
    # number of days in the month
    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    if deltas['day'] < 0:
        if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
            deltas['day'] += 29
        else:
            deltas['day'] += month_lengths[get_month(prevdate) - 1]

        deltas['month'] -= 1

    if deltas['month'] < 0:
        deltas['month'] += 12
        deltas['year'] -= 1

    # Format the result
    if short_format:
        fmt_funcs = {
            'year': lambda d: u'%dy' % d,
            'month': lambda d: u'%dm' % d,
            'day': lambda d: u'%dd' % d,
            'hour': lambda d: u'%dh' % d,
            'minute': lambda d: u'%dmin' % d,
            'second': lambda d: u'%dsec' % d,
        }
    else:
        fmt_funcs = {
            'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(),
            'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(),
            'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(),
            'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(),
            'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(),
            'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(),
        }

    # walk from the largest unit down; the first non-zero unit (optionally
    # with the next smaller one as detail) determines the output
    i = 0
    for part in order:
        value = deltas[part]
        if value != 0:

            if i < 5:
                sub_part = order[i + 1]
                sub_value = deltas[sub_part]
            else:
                sub_value = 0

            if sub_value == 0 or show_short_version:
                _val = fmt_funcs[part](value)
                if future:
                    if show_suffix:
                        return _(u'in ${ago}', mapping={'ago': _val})
                    else:
                        return _(_val)

                else:
                    if show_suffix:
                        return _(u'${ago} ago', mapping={'ago': _val})
                    else:
                        return _(_val)

            val = fmt_funcs[part](value)
            val_detail = fmt_funcs[sub_part](sub_value)
            mapping = {'val': val, 'detail': val_detail}

            if short_format:
                datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping)
            else:
                datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping)

            return datetime_tmpl
        i += 1
    return _(u'just now')
589
589
590
590
def age_from_seconds(seconds):
    """
    Human-readable short age string for a time delta given in seconds.

    :param seconds: delta in seconds; non-numeric input counts as 0
    """
    delta = safe_int(seconds) or 0
    reference_date = time_to_datetime(time.time() + delta)
    return age(reference_date, show_suffix=False, show_short_version=True)
595
595
596
596
def cleaned_uri(uri):
    """
    Quotes '[' and ']' from uri if there is only one of them.
    according to RFC3986 we cannot use such chars in uri

    :param uri:
    :return: uri without this chars
    """
    # percent-encode everything except the characters listed in `safe`
    return urllib.quote(uri, safe='@$:/')
605
605
606
606
def credentials_filter(uri):
    """
    Returns a url with removed credentials

    :param uri: url to strip the username/password from
    """
    import urlobject
    if isinstance(uri, rhodecode.lib.encrypt.InvalidDecryptedValue):
        return 'InvalidDecryptionKey'

    sanitized = urlobject.URLObject(cleaned_uri(uri))
    sanitized = sanitized.without_password()
    return sanitized.without_username()
621
621
622
622
def get_host_info(request):
    """
    Generate host info, to obtain full url e.g https://server.com
    use this
    `{scheme}://{netloc}`
    """
    if not request:
        return {}

    home_url = request.route_url('home')
    url_obj = urlobject.URLObject(home_url)
    path = safe_unicode(urllib.unquote(url_obj.path.rstrip('/')))

    return {
        'scheme': url_obj.scheme,
        'netloc': url_obj.netloc + path,
        'hostname': url_obj.hostname,
    }
641
641
642
642
def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
    """
    Render a clone url template for the given repository by substituting
    `{placeholder}` tokens in `uri_tmpl`.

    :param request: pyramid request used to resolve the home route
    :param uri_tmpl: template string with `{scheme}`, `{user}`, ... tokens
    :param repo_name: repository name substituted for `{repo}`
    :param repo_id: repository id substituted for `{repoid}`
    :param repo_type: vcs backend type; 'svn' gets svn+ssh treatment
    :param override: extra/replacement template values
    """
    home_url = request.route_url('home')
    url_obj = urlobject.URLObject(home_url)
    prefix = safe_unicode(urllib.unquote(url_obj.path.rstrip('/')))

    replacements = {
        'scheme': url_obj.scheme,
        'user': '',
        'sys_user': getpass.getuser(),
        # path if we use proxy-prefix
        'netloc': url_obj.netloc + prefix,
        'hostname': url_obj.hostname,
        'prefix': prefix,
        'repo': repo_name,
        'repoid': str(repo_id),
        'repo_type': repo_type
    }
    replacements.update(override)
    replacements['user'] = urllib.quote(safe_str(replacements['user']))

    for key, value in replacements.items():
        uri_tmpl = uri_tmpl.replace('{%s}' % key, value)

    # special case for SVN clone url
    if repo_type == 'svn':
        uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')

    # remove leading @ sign if it's present. Case of empty user
    rendered = urlobject.URLObject(uri_tmpl)
    rendered = rendered.with_netloc(rendered.netloc.lstrip('@'))

    return safe_unicode(rendered)
675
675
676
676
def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
                    maybe_unreachable=False, reference_obj=None):
    """
    Safe version of get_commit if this commit doesn't exists for a
    repository it returns a Dummy one instead

    :param repo: repository instance
    :param commit_id: commit id as str
    :param commit_idx: numeric commit index
    :param pre_load: optional list of commit attributes to load
    :param maybe_unreachable: translate unreachable commits on git repos
    :param reference_obj: explicitly search via a reference obj in git. E.g "branch:123" would mean branch "123"
    """
    # TODO(skreft): remove these circular imports
    from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
    from rhodecode.lib.vcs.exceptions import RepositoryError
    if not isinstance(repo, BaseRepository):
        # interpolate eagerly: unlike the logging module, Exception does not
        # apply %-formatting to extra arguments, so the previous
        # ``Exception(msg, type(repo))`` carried an unformatted tuple
        raise Exception('You must pass an Repository '
                        'object as first argument got %s' % type(repo))

    try:
        commit = repo.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
            maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
    except (RepositoryError, LookupError):
        # fall back to a dummy commit rather than propagating lookup errors
        commit = EmptyCommit()
    return commit
704
704
705
705
def datetime_to_time(dt):
    """
    Convert a datetime into a unix timestamp (local time); returns None
    for falsy input.
    """
    if not dt:
        return None
    return time.mktime(dt.timetuple())
709
709
710
710
def time_to_datetime(tm):
    """
    Convert a unix timestamp (number or numeric string) into a local
    datetime; returns None for falsy or unparsable input.
    """
    if not tm:
        return None
    if isinstance(tm, compat.string_types):
        try:
            tm = float(tm)
        except ValueError:
            return None
    return datetime.datetime.fromtimestamp(tm)
719
719
720
720
def time_to_utcdatetime(tm):
    """
    Convert a unix timestamp (number or numeric string) into a naive UTC
    datetime; returns None for falsy or unparsable input.
    """
    if not tm:
        return None
    if isinstance(tm, compat.string_types):
        try:
            tm = float(tm)
        except ValueError:
            return None
    return datetime.datetime.utcfromtimestamp(tm)
729
729
730
730
MENTIONS_REGEX = re.compile(
    # ^@ or @ without any special chars in front
    r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
    # main body starts with letter, then can be . - _
    r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
    re.VERBOSE | re.MULTILINE)


def extract_mentioned_users(s):
    """
    Return the unique usernames @mentioned in `s`, sorted
    case-insensitively.

    :param s: string to get mentions
    """
    unique_names = set(MENTIONS_REGEX.findall(s))
    return sorted(unique_names, key=lambda name: name.lower())
750
750
751
751
class AttributeDictBase(dict):
    """
    Base dict subclass that routes attribute assignment and deletion to
    dict item access (``obj.x = 1`` stores ``obj['x']``).
    """
    def __getstate__(self):
        # pickle support: hand over the instance attribute dict explicitly
        odict = self.__dict__  # get attribute dictionary
        return odict

    def __setstate__(self, dict):
        self.__dict__ = dict

    # attribute writes/deletes operate on dict items
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
762
762
763
763
class StrictAttributeDict(AttributeDictBase):
    """
    Strict Version of Attribute dict which raises an Attribute error when
    requested attribute is not set
    """
    def __getattr__(self, attr):
        if attr in self:
            return self[attr]
        raise AttributeError('%s object has no attribute %s' % (
            self.__class__, attr))
775
775
776
776
class AttributeDict(AttributeDictBase):
    """
    Attribute dict that yields None instead of raising for missing
    attributes.
    """
    def __getattr__(self, attr):
        try:
            return self[attr]
        except KeyError:
            return None
780
780
781
781
782
class OrderedDefaultDict(collections.OrderedDict, collections.defaultdict):
    """
    Insertion-ordered dict that also honours a ``default_factory`` for
    missing keys, combining OrderedDict and defaultdict behaviour.
    """
    def __init__(self, default_factory=None, *args, **kwargs):
        # in python3 you can omit the args to super
        super(OrderedDefaultDict, self).__init__(*args, **kwargs)
        self.default_factory = default_factory
788
787
789
788
def fix_PATH(os_=None):
    """
    Get current active python path, and append it to PATH variable to fix
    issues of subprocess calls and different python versions
    """
    # allow injecting an os-like object (used by tests)
    if os_ is None:
        import os
    else:
        os = os_

    python_dir = os.path.split(sys.executable)[0]
    path_var = os.environ['PATH']
    if not path_var.startswith(python_dir):
        # prepend the interpreter's directory so subprocesses pick up
        # the same python
        os.environ['PATH'] = '%s:%s' % (python_dir, path_var)
803
802
804
803
def obfuscate_url_pw(engine):
    """
    Return the given SQLAlchemy engine URL as text with any password
    masked out; falls back to the raw value if parsing fails.
    """
    url_obj = engine or ''
    try:
        url_obj = sqlalchemy.engine.url.make_url(engine)
        if url_obj.password:
            url_obj.password = 'XXXXX'
    except Exception:
        # best-effort masking: any parse problem leaves the raw value
        pass
    return unicode(url_obj)
814
813
815
814
def get_server_url(environ):
    """Build the full server URL (host + script name) from a WSGI environ."""
    request = webob.Request(environ)
    return request.host_url + request.script_name
819
818
820
819
def unique_id(hexlen=32):
    """Return a random, URL-safe identifier of up to ``hexlen`` characters."""
    # alphabet excludes visually ambiguous characters (0/O, 1/I/l)
    alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
    return suuid(truncate_to=hexlen, alphabet=alphabet)
824
823
825
824
def suuid(url=None, truncate_to=22, alphabet=None):
    """
    Generate and return a short URL safe UUID.

    If the url parameter is provided, set the namespace to the provided
    URL and generate a UUID (deterministic via uuid3); otherwise a random
    uuid4 is used.

    :param url: url to get the uuid for
    :param truncate_to: truncate the basic 22 UUID to shorter version
    :param alphabet: optional custom alphabet used to encode the UUID

    The IDs won't be universally unique any longer, but the probability of
    a collision will still be very low.
    """
    # Define our alphabet.
    _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"

    # If no URL is given, generate a random UUID.
    if url is None:
        unique_id = uuid.uuid4().int
    else:
        unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int

    # base-N encode the 128-bit integer into the alphabet
    alphabet_length = len(_ALPHABET)
    output = []
    while unique_id > 0:
        # divmod keeps full integer precision; the previous
        # `int(unique_id / alphabet_length)` performed true division on
        # Python 3, losing precision for 128-bit UUID integers
        unique_id, digit = divmod(unique_id, alphabet_length)
        output.append(_ALPHABET[digit])
    return "".join(output)[:truncate_to]
855
854
856
855
def get_current_rhodecode_user(request=None):
    """
    Gets rhodecode user from request
    """
    pyramid_request = request or pyramid.threadlocal.get_current_request()

    if pyramid_request:
        # web case
        if hasattr(pyramid_request, 'user'):
            return pyramid_request.user
        # api case
        if hasattr(pyramid_request, 'rpc_user'):
            return pyramid_request.rpc_user

    return None
872
871
873
872
def action_logger_generic(action, namespace=''):
    """
    A generic logger for actions useful to the system overview, tries to find
    an acting user for the context of the call otherwise reports unknown user

    :param action: logging message eg 'comment 5 deleted'
    :type action: string

    :param namespace: namespace of the logging message eg. 'repo.comments'
    :type namespace: string
    """
    logger_name = 'rhodecode.actions'
    if namespace:
        logger_name = logger_name + '.' + namespace

    log = logging.getLogger(logger_name)

    # get a user if we can
    user = get_current_rhodecode_user()

    if user:
        logfunc = log.info
    else:
        # no user context available: report as unknown and raise severity
        user = '<unknown user>'
        logfunc = log.warning

    logfunc('Logging action by {}: {}'.format(user, action))
904
903
905
904
def escape_split(text, sep=',', maxsplit=-1):
    r"""
    Allows for escaping of the separator: e.g. arg='foo\, bar'

    It should be noted that the way bash et. al. do command line parsing, those
    single quotes are required.
    """
    escaped_sep = r'\%s' % sep

    # fast path: nothing escaped, plain split is enough
    if escaped_sep not in text:
        return text.split(sep, maxsplit)

    head, _mid, tail = text.partition(escaped_sep)
    pieces = head.split(sep, maxsplit)  # a regular split is fine here
    pending = pieces.pop()  # last piece continues past the escaped sep

    # recurse because there may be more escaped separators
    tail_pieces = escape_split(tail, sep, maxsplit)

    # tail_pieces[0] is the remainder of the escaped value sent into the
    # recursion; glue it onto the pending piece with a literal separator
    pending = pending + sep + tail_pieces[0]

    return pieces + [pending] + tail_pieces[1:]
931
930
932
931
class OptionalAttr(object):
    """
    Special Optional Option that defines other attribute. Example::

        def test(apiuser, userid=Optional(OAttr('apiuser')):
            user = Optional.extract(userid)
            # calls

    """

    def __init__(self, attr_name):
        # name of the parameter this marker resolves to
        self.attr_name = attr_name

    def __repr__(self):
        return '<OptionalAttr:%s>' % self.attr_name

    def __call__(self):
        # calling the marker is a no-op that yields the marker itself
        return self


# alias
OAttr = OptionalAttr
955
954
956
955
class Optional(object):
    """
    Defines an optional parameter::

        param = param.getval() if isinstance(param, Optional) else param
        param = param() if isinstance(param, Optional) else param

    is equivalent of::

        param = Optional.extract(param)

    """

    def __init__(self, type_):
        self.type_ = type_

    def __repr__(self):
        return '<Optional:%s>' % self.type_.__repr__()

    def __call__(self):
        return self.getval()

    def getval(self):
        """
        returns value from this Optional instance
        """
        wrapped = self.type_
        if isinstance(wrapped, OAttr):
            # OAttr markers resolve to the name of the attribute they
            # point at (use params name)
            return wrapped.attr_name
        return wrapped

    @classmethod
    def extract(cls, val):
        """
        Extracts value from Optional() instance

        :param val:
        :return: original value if it's not Optional instance else
            value of instance
        """
        return val.getval() if isinstance(val, cls) else val
1000
999
1001
1000
def glob2re(pat):
    """
    Translate a shell PATTERN to a regular expression.

    There is no way to quote meta-characters.

    Unlike :func:`fnmatch.translate`, ``*`` and ``?`` deliberately do NOT
    match ``/``, so patterns behave like path globs.
    """

    # manual scan over the pattern, building the regex in `res`
    i, n = 0, len(pat)
    res = ''
    while i < n:
        c = pat[i]
        i = i+1
        if c == '*':
            #res = res + '.*'
            # `*` matches any run of characters except the path separator
            res = res + '[^/]*'
        elif c == '?':
            #res = res + '.'
            # `?` matches exactly one non-separator character
            res = res + '[^/]'
        elif c == '[':
            # character class: scan ahead for the closing `]`;
            # a leading `!` (negation) or literal `]` is skipped first
            j = i
            if j < n and pat[j] == '!':
                j = j+1
            if j < n and pat[j] == ']':
                j = j+1
            while j < n and pat[j] != ']':
                j = j+1
            if j >= n:
                # unterminated class: treat the `[` as a literal
                res = res + '\\['
            else:
                # escape backslashes inside the class body
                stuff = pat[i:j].replace('\\','\\\\')
                i = j+1
                if stuff[0] == '!':
                    # glob negation `[!...]` becomes regex `[^...]`
                    stuff = '^' + stuff[1:]
                elif stuff[0] == '^':
                    # a literal leading `^` must be escaped in regex
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        else:
            # any other character is matched literally
            res = res + re.escape(c)
    # anchor at end-of-string; NOTE(review): trailing inline flags like
    # `(?ms)` are rejected by Python 3.11+ (flags must lead the pattern) —
    # confirm target runtime before reusing this helper there
    return res + '\Z(?ms)'
1041
1040
1042
1041
def parse_byte_string(size_str):
    """
    Parse a human-readable size string like ``'64MB'`` or ``'512kb'``
    into a number of bytes.

    :param size_str: string in ``<num>(MB|KB)`` format (case insensitive)
    :return: size in bytes as an integer
    :raises ValueError: when the input does not match the expected format
    """
    match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
    if not match:
        raise ValueError('Given size:%s is invalid, please make sure '
                         'to use format of <num>(MB|KB)' % size_str)

    _parts = match.groups()
    num, type_ = _parts
    # int() replaces the Python2-only long(); py2 ints auto-promote anyway
    return int(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
1052
1051
1053
1052
class CachedProperty(object):
    """
    Lazy attribute descriptor with explicit cache invalidation.

    The first access computes the value and stores it on the instance
    (shadowing the descriptor), so subsequent reads are plain attribute
    lookups. Calling ``obj._invalidate_prop_cache('name')`` drops the
    cached value so the next access re-computes it.

    >>> class Foo(object):
    ...
    ...     @CachedProperty
    ...     def heavy_func(self):
    ...         return 'super-calculation'
    ...
    ... foo = Foo()
    ... foo.heavy_func  # first computation
    ... foo.heavy_func  # fetch from cache
    ... foo._invalidate_prop_cache('heavy_func')

    # at this point accessing foo.heavy_func will be re-computed
    """

    def __init__(self, func, func_name=None):
        name = func_name if func_name is not None else func.__name__
        self.data = (func, name)
        # copy __name__/__doc__ etc. from the wrapped function
        update_wrapper(self, func)

    def __get__(self, inst, class_):
        # class-level access returns the descriptor itself
        if inst is None:
            return self

        func, name = self.data
        result = func(inst)
        # cache on the instance: shadows this (non-data) descriptor
        inst.__dict__[name] = result
        # lazily attach the invalidation hook the first time any
        # cached property on this instance is computed
        if '_invalidate_prop_cache' not in inst.__dict__:
            inst.__dict__['_invalidate_prop_cache'] = partial(
                self._invalidate_prop_cache, inst)
        return result

    def _invalidate_prop_cache(self, inst, name):
        # silently ignore names that were never cached
        inst.__dict__.pop(name, None)
1093
1092
1094
1093
def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True):
    """
    Retry decorator with exponential backoff.

    :param func: callable to decorate; ``None`` enables the
        ``@retry(...)`` parameterized form via :func:`functools.partial`
    :param exception: exception (or tuple of exceptions) that triggers a
        retry, by default ``Exception``
    :param n_tries: number of tries before giving up, by default 5
    :param delay: initial delay between retries in seconds, by default 5
    :param backoff: multiplier applied to the delay after each failure;
        a value of 2 doubles the delay each time, by default 1
    :param logger: when truthy, failures are logged via the
        ``rhodecode.retry`` logger; otherwise they are printed
    :return: decorated callable that re-invokes itself when the given
        exception(s) occur; the final attempt propagates the exception

    Examples
    --------
    >>> import random
    >>> @retry(exception=Exception, n_tries=3)
    ... def test_random(text):
    ...     x = random.random()
    ...     if x < 0.5:
    ...         raise Exception("Fail")
    ...     else:
    ...         print("Success: ", text)
    >>> test_random("It works!")
    """

    # bare `@retry` vs parameterized `@retry(...)` support
    if func is None:
        return partial(
            retry,
            exception=exception,
            n_tries=n_tries,
            delay=delay,
            backoff=backoff,
            logger=logger,
        )

    @wraps(func)
    def wrapper(*args, **kwargs):
        log = logging.getLogger('rhodecode.retry')
        current_delay = delay

        # first n_tries - 1 attempts are guarded; failures wait and retry
        for _attempt in range(n_tries - 1):
            try:
                return func(*args, **kwargs)
            except exception as e:
                e_details = repr(e)
                msg = "Exception on calling func {func}: {e}, " \
                      "Retrying in {n_delay} seconds..."\
                      .format(func=func, e=e_details, n_delay=current_delay)
                if logger:
                    log.warning(msg)
                else:
                    print(msg)
                time.sleep(current_delay)
                current_delay *= backoff

        # last attempt runs unguarded so the exception propagates
        return func(*args, **kwargs)

    return wrapper
1166
1165
1167
1166
def user_agent_normalizer(user_agent_raw, safe=True):
    """
    Normalize raw scm client user-agent strings (git / mercurial).

    :param user_agent_raw: user agent string as sent by the client
    :param safe: when True, swallow parsing errors and return the
        best-effort value; when False, re-raise them
    """
    log = logging.getLogger('rhodecode.user_agent_normalizer')
    agent = (user_agent_raw or '').strip().lower().replace('"', '')

    try:
        if 'mercurial/proto-1.0' in agent:
            # strip protocol marker, drop parentheses, then turn
            # "mercurial X.Y" into "mercurial/X.Y"
            agent = agent.replace('mercurial/proto-1.0', '')
            agent = agent.replace('(', '').replace(')', '').strip()
            agent = agent.replace('mercurial ', 'mercurial/')
        elif agent.startswith('git'):
            fragments = agent.split(' ')
            if fragments:
                agent = fragments[0]
            # drop the ".windows.N" build suffix from git-for-windows
            agent = re.sub('\.windows\.\d', '', agent).strip()

        return agent
    except Exception:
        log.exception('Failed to parse scm user-agent')
        if not safe:
            raise

    return agent
1191
1190
1192
1191
def get_available_port(min_port=40000, max_port=55555):
    """
    Randomly probe ports in ``[min_port, max_port]`` on 127.0.0.1 and
    return one that could be bound (and was immediately released).

    Falls back to the last probed port if no bindable one was found
    within the probing budget.
    """
    probe_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    hostname = '127.0.0.1'
    pick_port = min_port

    # probe at most (max_port - min_port) random candidates
    for _ in range(min_port, max_port):
        pick_port = random.randint(min_port, max_port)
        try:
            probe_sock.bind((hostname, pick_port))
            probe_sock.close()
            break
        except OSError:
            # port busy or unbindable: try another candidate
            pass

    del probe_sock
    return pick_port
General Comments 0
You need to be logged in to leave comments. Login now