tests: fixup the config stubs
super-admin
r5145:3c1ad60a default
@@ -1,605 +1,609 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 The base Controller API
20 The base Controller API
21 Provides the BaseController class for subclassing and usage in different
21 Provides the BaseController class for subclassing and usage in different
22 controllers.
22 controllers.
23 """
23 """
24
24
25 import logging
25 import logging
26 import socket
26 import socket
27 import base64
27 import base64
28
28
29 import markupsafe
29 import markupsafe
30 import ipaddress
30 import ipaddress
31
31
32 import paste.httpheaders
32 import paste.httpheaders
33 from paste.auth.basic import AuthBasicAuthenticator
33 from paste.auth.basic import AuthBasicAuthenticator
34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
35
35
36 import rhodecode
36 import rhodecode
37 from rhodecode.authentication.base import VCS_TYPE
37 from rhodecode.authentication.base import VCS_TYPE
38 from rhodecode.lib import auth, utils2
38 from rhodecode.lib import auth, utils2
39 from rhodecode.lib import helpers as h
39 from rhodecode.lib import helpers as h
40 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
40 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
41 from rhodecode.lib.exceptions import UserCreationError
41 from rhodecode.lib.exceptions import UserCreationError
42 from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes)
42 from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes)
43 from rhodecode.lib.utils2 import AttributeDict
43 from rhodecode.lib.utils2 import AttributeDict
44 from rhodecode.lib.str_utils import ascii_bytes, safe_int, safe_str
44 from rhodecode.lib.str_utils import ascii_bytes, safe_int, safe_str
45 from rhodecode.lib.type_utils import aslist, str2bool
45 from rhodecode.lib.type_utils import aslist, str2bool
46 from rhodecode.lib.hash_utils import sha1
46 from rhodecode.lib.hash_utils import sha1
47 from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark
47 from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark
48 from rhodecode.model.notification import NotificationModel
48 from rhodecode.model.notification import NotificationModel
49 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
49 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 def _filter_proxy(ip):
54 def _filter_proxy(ip):
55 """
55 """
56 IP addresses passed in headers can be in a special format of multiple
56 IP addresses passed in headers can be in a special format of multiple
57 IPs. Those comma-separated IPs are passed from various proxies in the
57 IPs. Those comma-separated IPs are passed from various proxies in the
58 chain of request processing, the left-most being the original client.
58 chain of request processing, the left-most being the original client.
59 We only care about the first IP, which came from the original client.
59 We only care about the first IP, which came from the original client.
60
60
61 :param ip: ip string from headers
61 :param ip: ip string from headers
62 """
62 """
63 if ',' in ip:
63 if ',' in ip:
64 _ips = ip.split(',')
64 _ips = ip.split(',')
65 _first_ip = _ips[0].strip()
65 _first_ip = _ips[0].strip()
66 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
66 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
67 return _first_ip
67 return _first_ip
68 return ip
68 return ip
69
69
70
70
71 def _filter_port(ip):
71 def _filter_port(ip):
72 """
72 """
73 Removes a port from an IP; there are 4 main cases to handle here:
73 Removes a port from an IP; there are 4 main cases to handle here:
74 - ipv4 eg. 127.0.0.1
74 - ipv4 eg. 127.0.0.1
75 - ipv6 eg. ::1
75 - ipv6 eg. ::1
76 - ipv4+port eg. 127.0.0.1:8080
76 - ipv4+port eg. 127.0.0.1:8080
77 - ipv6+port eg. [::1]:8080
77 - ipv6+port eg. [::1]:8080
78
78
79 :param ip:
79 :param ip:
80 """
80 """
81 def is_ipv6(ip_addr):
81 def is_ipv6(ip_addr):
82 if hasattr(socket, 'inet_pton'):
82 if hasattr(socket, 'inet_pton'):
83 try:
83 try:
84 socket.inet_pton(socket.AF_INET6, ip_addr)
84 socket.inet_pton(socket.AF_INET6, ip_addr)
85 except socket.error:
85 except socket.error:
86 return False
86 return False
87 else:
87 else:
88 # fallback to ipaddress
88 # fallback to ipaddress
89 try:
89 try:
90 ipaddress.IPv6Address(safe_str(ip_addr))
90 ipaddress.IPv6Address(safe_str(ip_addr))
91 except Exception:
91 except Exception:
92 return False
92 return False
93 return True
93 return True
94
94
95 if ':' not in ip: # must be ipv4 pure ip
95 if ':' not in ip: # must be ipv4 pure ip
96 return ip
96 return ip
97
97
98 if '[' in ip and ']' in ip: # ipv6 with port
98 if '[' in ip and ']' in ip: # ipv6 with port
99 return ip.split(']')[0][1:].lower()
99 return ip.split(']')[0][1:].lower()
100
100
101 # must be ipv6 or ipv4 with port
101 # must be ipv6 or ipv4 with port
102 if is_ipv6(ip):
102 if is_ipv6(ip):
103 return ip
103 return ip
104 else:
104 else:
105 ip, _port = ip.split(':')[:2] # means ipv4+port
105 ip, _port = ip.split(':')[:2] # means ipv4+port
106 return ip
106 return ip
107
107
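
For reference, a minimal sketch of how the two filters above behave, assuming they are importable from rhodecode.lib.base (the module this diff appears to touch); the sample addresses are made up:

# Illustrative only -- not part of this commit.
from rhodecode.lib.base import _filter_proxy, _filter_port

# the left-most proxy entry wins
assert _filter_proxy('203.0.113.7, 10.0.0.1') == '203.0.113.7'

# the four port-stripping cases from the docstring
assert _filter_port('127.0.0.1') == '127.0.0.1'        # plain ipv4
assert _filter_port('::1') == '::1'                    # plain ipv6
assert _filter_port('127.0.0.1:8080') == '127.0.0.1'   # ipv4 + port
assert _filter_port('[::1]:8080') == '::1'             # ipv6 + port, brackets stripped
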
108
108
109 def get_ip_addr(environ):
109 def get_ip_addr(environ):
110 proxy_key = 'HTTP_X_REAL_IP'
110 proxy_key = 'HTTP_X_REAL_IP'
111 proxy_key2 = 'HTTP_X_FORWARDED_FOR'
111 proxy_key2 = 'HTTP_X_FORWARDED_FOR'
112 def_key = 'REMOTE_ADDR'
112 def_key = 'REMOTE_ADDR'
113
113
114 def ip_filters(ip_):
114 def ip_filters(ip_):
115 return _filter_port(_filter_proxy(ip_))
115 return _filter_port(_filter_proxy(ip_))
116
116
117 ip = environ.get(proxy_key)
117 ip = environ.get(proxy_key)
118 if ip:
118 if ip:
119 return ip_filters(ip)
119 return ip_filters(ip)
120
120
121 ip = environ.get(proxy_key2)
121 ip = environ.get(proxy_key2)
122 if ip:
122 if ip:
123 return ip_filters(ip)
123 return ip_filters(ip)
124
124
125 ip = environ.get(def_key, '0.0.0.0')
125 ip = environ.get(def_key, '0.0.0.0')
126 return ip_filters(ip)
126 return ip_filters(ip)
127
127
128
128
129 def get_server_ip_addr(environ, log_errors=True):
129 def get_server_ip_addr(environ, log_errors=True):
130 hostname = environ.get('SERVER_NAME')
130 hostname = environ.get('SERVER_NAME')
131 try:
131 try:
132 return socket.gethostbyname(hostname)
132 return socket.gethostbyname(hostname)
133 except Exception as e:
133 except Exception as e:
134 if log_errors:
134 if log_errors:
135 # in some cases this lookup is not possible, and we don't want to
135 # in some cases this lookup is not possible, and we don't want to
136 # make it an exception in logs
136 # make it an exception in logs
137 log.exception('Could not retrieve server ip address: %s', e)
137 log.exception('Could not retrieve server ip address: %s', e)
138 return hostname
138 return hostname
139
139
140
140
141 def get_server_port(environ):
141 def get_server_port(environ):
142 return environ.get('SERVER_PORT')
142 return environ.get('SERVER_PORT')
143
143
144
144
145
145
146 def get_user_agent(environ):
146 def get_user_agent(environ):
147 return environ.get('HTTP_USER_AGENT')
147 return environ.get('HTTP_USER_AGENT')
148
148
149
149
150 def vcs_operation_context(
150 def vcs_operation_context(
151 environ, repo_name, username, action, scm, check_locking=True,
151 environ, repo_name, username, action, scm, check_locking=True,
152 is_shadow_repo=False, check_branch_perms=False, detect_force_push=False):
152 is_shadow_repo=False, check_branch_perms=False, detect_force_push=False):
153 """
153 """
154 Generate the context for a vcs operation, e.g. push or pull.
154 Generate the context for a vcs operation, e.g. push or pull.
155
155
156 This context is passed over the layers so that hooks triggered by the
156 This context is passed over the layers so that hooks triggered by the
157 vcs operation know details like the user, the user's IP address etc.
157 vcs operation know details like the user, the user's IP address etc.
158
158
159 :param check_locking: Allows switching off the computation of the locking
159 :param check_locking: Allows switching off the computation of the locking
160 data. This serves mainly the need of the simplevcs middleware to be
160 data. This serves mainly the need of the simplevcs middleware to be
161 able to disable this for certain operations.
161 able to disable this for certain operations.
162
162
163 """
163 """
164 # Tri-state value: False: unlock, None: nothing, True: lock
164 # Tri-state value: False: unlock, None: nothing, True: lock
165 make_lock = None
165 make_lock = None
166 locked_by = [None, None, None]
166 locked_by = [None, None, None]
167 is_anonymous = username == User.DEFAULT_USER
167 is_anonymous = username == User.DEFAULT_USER
168 user = User.get_by_username(username)
168 user = User.get_by_username(username)
169 if not is_anonymous and check_locking:
169 if not is_anonymous and check_locking:
170 log.debug('Checking locking on repository "%s"', repo_name)
170 log.debug('Checking locking on repository "%s"', repo_name)
171 repo = Repository.get_by_repo_name(repo_name)
171 repo = Repository.get_by_repo_name(repo_name)
172 make_lock, __, locked_by = repo.get_locking_state(
172 make_lock, __, locked_by = repo.get_locking_state(
173 action, user.user_id)
173 action, user.user_id)
174 user_id = user.user_id
174 user_id = user.user_id
175 settings_model = VcsSettingsModel(repo=repo_name)
175 settings_model = VcsSettingsModel(repo=repo_name)
176 ui_settings = settings_model.get_ui_settings()
176 ui_settings = settings_model.get_ui_settings()
177
177
178 # NOTE(marcink): This should be also in sync with
178 # NOTE(marcink): This should be also in sync with
179 # rhodecode/apps/ssh_support/lib/backends/base.py:update_environment scm_data
179 # rhodecode/apps/ssh_support/lib/backends/base.py:update_environment scm_data
180 store = [x for x in ui_settings if x.key == '/']
180 store = [x for x in ui_settings if x.key == '/']
181 repo_store = ''
181 repo_store = ''
182 if store:
182 if store:
183 repo_store = store[0].value
183 repo_store = store[0].value
184
184
185 scm_data = {
185 scm_data = {
186 'ip': get_ip_addr(environ),
186 'ip': get_ip_addr(environ),
187 'username': username,
187 'username': username,
188 'user_id': user_id,
188 'user_id': user_id,
189 'action': action,
189 'action': action,
190 'repository': repo_name,
190 'repository': repo_name,
191 'scm': scm,
191 'scm': scm,
192 'config': rhodecode.CONFIG['__file__'],
192 'config': rhodecode.CONFIG['__file__'],
193 'repo_store': repo_store,
193 'repo_store': repo_store,
194 'make_lock': make_lock,
194 'make_lock': make_lock,
195 'locked_by': locked_by,
195 'locked_by': locked_by,
196 'server_url': utils2.get_server_url(environ),
196 'server_url': utils2.get_server_url(environ),
197 'user_agent': get_user_agent(environ),
197 'user_agent': get_user_agent(environ),
198 'hooks': get_enabled_hook_classes(ui_settings),
198 'hooks': get_enabled_hook_classes(ui_settings),
199 'is_shadow_repo': is_shadow_repo,
199 'is_shadow_repo': is_shadow_repo,
200 'detect_force_push': detect_force_push,
200 'detect_force_push': detect_force_push,
201 'check_branch_perms': check_branch_perms,
201 'check_branch_perms': check_branch_perms,
202 }
202 }
203 return scm_data
203 return scm_data
204
204
205
205
206 class BasicAuth(AuthBasicAuthenticator):
206 class BasicAuth(AuthBasicAuthenticator):
207
207
208 def __init__(self, realm, authfunc, registry, auth_http_code=None,
208 def __init__(self, realm, authfunc, registry, auth_http_code=None,
209 initial_call_detection=False, acl_repo_name=None, rc_realm=''):
209 initial_call_detection=False, acl_repo_name=None, rc_realm=''):
210 super().__init__(realm=realm, authfunc=authfunc)
210 super().__init__(realm=realm, authfunc=authfunc)
211 self.realm = realm
211 self.realm = realm
212 self.rc_realm = rc_realm
212 self.rc_realm = rc_realm
213 self.initial_call = initial_call_detection
213 self.initial_call = initial_call_detection
214 self.authfunc = authfunc
214 self.authfunc = authfunc
215 self.registry = registry
215 self.registry = registry
216 self.acl_repo_name = acl_repo_name
216 self.acl_repo_name = acl_repo_name
217 self._rc_auth_http_code = auth_http_code
217 self._rc_auth_http_code = auth_http_code
218
218
219 def _get_response_from_code(self, http_code, fallback):
219 def _get_response_from_code(self, http_code, fallback):
220 try:
220 try:
221 return get_exception(safe_int(http_code))
221 return get_exception(safe_int(http_code))
222 except Exception:
222 except Exception:
223 log.exception('Failed to fetch response class for code %s, using fallback: %s', http_code, fallback)
223 log.exception('Failed to fetch response class for code %s, using fallback: %s', http_code, fallback)
224 return fallback
224 return fallback
225
225
226 def get_rc_realm(self):
226 def get_rc_realm(self):
227 return safe_str(self.rc_realm)
227 return safe_str(self.rc_realm)
228
228
229 def build_authentication(self):
229 def build_authentication(self):
230 header = [('WWW-Authenticate', f'Basic realm="{self.realm}"')]
230 header = [('WWW-Authenticate', f'Basic realm="{self.realm}"')]
231
231
232 # NOTE: the initial_call detection seems to be not working/not needed with latest Mercurial
232 # NOTE: the initial_call detection seems to be not working/not needed with latest Mercurial
233 # investigate if we still need it.
233 # investigate if we still need it.
234 if self._rc_auth_http_code and not self.initial_call:
234 if self._rc_auth_http_code and not self.initial_call:
235 # return alternative HTTP code if alternative http return code
235 # return alternative HTTP code if alternative http return code
236 # is specified in RhodeCode config, but ONLY if it's not the
236 # is specified in RhodeCode config, but ONLY if it's not the
237 # FIRST call
237 # FIRST call
238 custom_response_klass = self._get_response_from_code(self._rc_auth_http_code, fallback=HTTPUnauthorized)
238 custom_response_klass = self._get_response_from_code(self._rc_auth_http_code, fallback=HTTPUnauthorized)
239 log.debug('Using custom response class: %s', custom_response_klass)
239 log.debug('Using custom response class: %s', custom_response_klass)
240 return custom_response_klass(headers=header)
240 return custom_response_klass(headers=header)
241 return HTTPUnauthorized(headers=header)
241 return HTTPUnauthorized(headers=header)
242
242
243 def authenticate(self, environ):
243 def authenticate(self, environ):
244 authorization = paste.httpheaders.AUTHORIZATION(environ)
244 authorization = paste.httpheaders.AUTHORIZATION(environ)
245 if not authorization:
245 if not authorization:
246 return self.build_authentication()
246 return self.build_authentication()
247 (auth_meth, auth_creds_b64) = authorization.split(' ', 1)
247 (auth_meth, auth_creds_b64) = authorization.split(' ', 1)
248 if 'basic' != auth_meth.lower():
248 if 'basic' != auth_meth.lower():
249 return self.build_authentication()
249 return self.build_authentication()
250
250
251 credentials = safe_str(base64.b64decode(auth_creds_b64.strip()))
251 credentials = safe_str(base64.b64decode(auth_creds_b64.strip()))
252 _parts = credentials.split(':', 1)
252 _parts = credentials.split(':', 1)
253 if len(_parts) == 2:
253 if len(_parts) == 2:
254 username, password = _parts
254 username, password = _parts
255 auth_data = self.authfunc(
255 auth_data = self.authfunc(
256 username, password, environ, VCS_TYPE,
256 username, password, environ, VCS_TYPE,
257 registry=self.registry, acl_repo_name=self.acl_repo_name)
257 registry=self.registry, acl_repo_name=self.acl_repo_name)
258 if auth_data:
258 if auth_data:
259 return {'username': username, 'auth_data': auth_data}
259 return {'username': username, 'auth_data': auth_data}
260 if username and password:
260 if username and password:
261 # we mark that we actually executed authentication once, at
261 # we mark that we actually executed authentication once, at
262 # that point we can use the alternative auth code
262 # that point we can use the alternative auth code
263 self.initial_call = False
263 self.initial_call = False
264
264
265 return self.build_authentication()
265 return self.build_authentication()
266
266
267 __call__ = authenticate
267 __call__ = authenticate
268
268
269
269
270 def calculate_version_hash(config):
270 def calculate_version_hash(config):
271 return sha1(
271 return sha1(
272 config.get(b'beaker.session.secret', b'') + ascii_bytes(rhodecode.__version__)
272 config.get(b'beaker.session.secret', b'') + ascii_bytes(rhodecode.__version__)
273 )[:8]
273 )[:8]
274
274
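
A quick sketch of what calculate_version_hash produces, assuming sha1 from rhodecode.lib.hash_utils returns a hex digest string (the secret below is made up):

# Illustrative only -- not part of this commit.
fake_config = {b'beaker.session.secret': b'not-a-real-secret'}
version_hash = calculate_version_hash(fake_config)
# mixes the session secret with rhodecode.__version__ and keeps 8 characters
assert len(version_hash) == 8
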
275
275
276 def get_current_lang(request):
276 def get_current_lang(request):
277 return getattr(request, '_LOCALE_', request.locale_name)
277 return getattr(request, '_LOCALE_', request.locale_name)
278
278
279
279
280 def attach_context_attributes(context, request, user_id=None, is_api=None):
280 def attach_context_attributes(context, request, user_id=None, is_api=None):
281 """
281 """
282 Attach variables into template context called `c`.
282 Attach variables into template context called `c`.
283 """
283 """
284 config = request.registry.settings
284 config = request.registry.settings
285
285
286 rc_config = SettingsModel().get_all_settings(cache=True, from_request=False)
286 rc_config = SettingsModel().get_all_settings(cache=True, from_request=False)
287 context.rc_config = rc_config
287 context.rc_config = rc_config
288 context.rhodecode_version = rhodecode.__version__
288 context.rhodecode_version = rhodecode.__version__
289 context.rhodecode_edition = config.get('rhodecode.edition')
289 context.rhodecode_edition = config.get('rhodecode.edition')
290 context.rhodecode_edition_id = config.get('rhodecode.edition_id')
290 context.rhodecode_edition_id = config.get('rhodecode.edition_id')
291 # unique secret + version does not leak the version but keeps consistency
291 # unique secret + version does not leak the version but keeps consistency
292 context.rhodecode_version_hash = calculate_version_hash(config)
292 context.rhodecode_version_hash = calculate_version_hash(config)
293
293
294 # Default language set for the incoming request
294 # Default language set for the incoming request
295 context.language = get_current_lang(request)
295 context.language = get_current_lang(request)
296
296
297 # Visual options
297 # Visual options
298 context.visual = AttributeDict({})
298 context.visual = AttributeDict({})
299
299
300 # DB stored Visual Items
300 # DB stored Visual Items
301 context.visual.show_public_icon = str2bool(
301 context.visual.show_public_icon = str2bool(
302 rc_config.get('rhodecode_show_public_icon'))
302 rc_config.get('rhodecode_show_public_icon'))
303 context.visual.show_private_icon = str2bool(
303 context.visual.show_private_icon = str2bool(
304 rc_config.get('rhodecode_show_private_icon'))
304 rc_config.get('rhodecode_show_private_icon'))
305 context.visual.stylify_metatags = str2bool(
305 context.visual.stylify_metatags = str2bool(
306 rc_config.get('rhodecode_stylify_metatags'))
306 rc_config.get('rhodecode_stylify_metatags'))
307 context.visual.dashboard_items = safe_int(
307 context.visual.dashboard_items = safe_int(
308 rc_config.get('rhodecode_dashboard_items', 100))
308 rc_config.get('rhodecode_dashboard_items', 100))
309 context.visual.admin_grid_items = safe_int(
309 context.visual.admin_grid_items = safe_int(
310 rc_config.get('rhodecode_admin_grid_items', 100))
310 rc_config.get('rhodecode_admin_grid_items', 100))
311 context.visual.show_revision_number = str2bool(
311 context.visual.show_revision_number = str2bool(
312 rc_config.get('rhodecode_show_revision_number', True))
312 rc_config.get('rhodecode_show_revision_number', True))
313 context.visual.show_sha_length = safe_int(
313 context.visual.show_sha_length = safe_int(
314 rc_config.get('rhodecode_show_sha_length', 100))
314 rc_config.get('rhodecode_show_sha_length', 100))
315 context.visual.repository_fields = str2bool(
315 context.visual.repository_fields = str2bool(
316 rc_config.get('rhodecode_repository_fields'))
316 rc_config.get('rhodecode_repository_fields'))
317 context.visual.show_version = str2bool(
317 context.visual.show_version = str2bool(
318 rc_config.get('rhodecode_show_version'))
318 rc_config.get('rhodecode_show_version'))
319 context.visual.use_gravatar = str2bool(
319 context.visual.use_gravatar = str2bool(
320 rc_config.get('rhodecode_use_gravatar'))
320 rc_config.get('rhodecode_use_gravatar'))
321 context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
321 context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
322 context.visual.default_renderer = rc_config.get(
322 context.visual.default_renderer = rc_config.get(
323 'rhodecode_markup_renderer', 'rst')
323 'rhodecode_markup_renderer', 'rst')
324 context.visual.comment_types = ChangesetComment.COMMENT_TYPES
324 context.visual.comment_types = ChangesetComment.COMMENT_TYPES
325 context.visual.rhodecode_support_url = \
325 context.visual.rhodecode_support_url = \
326 rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')
326 rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')
327
327
328 context.visual.affected_files_cut_off = 60
328 context.visual.affected_files_cut_off = 60
329
329
330 context.pre_code = rc_config.get('rhodecode_pre_code')
330 context.pre_code = rc_config.get('rhodecode_pre_code')
331 context.post_code = rc_config.get('rhodecode_post_code')
331 context.post_code = rc_config.get('rhodecode_post_code')
332 context.rhodecode_name = rc_config.get('rhodecode_title')
332 context.rhodecode_name = rc_config.get('rhodecode_title')
333 context.default_encodings = aslist(config.get('default_encoding'), sep=',')
333 context.default_encodings = aslist(config.get('default_encoding'), sep=',')
334 # if default_encoding is specified in the request, it takes
334 # if default_encoding is specified in the request, it takes
335 # priority
335 # priority
336 if request.GET.get('default_encoding'):
336 if request.GET.get('default_encoding'):
337 context.default_encodings.insert(0, request.GET.get('default_encoding'))
337 context.default_encodings.insert(0, request.GET.get('default_encoding'))
338 context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
338 context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
339 context.clone_uri_id_tmpl = rc_config.get('rhodecode_clone_uri_id_tmpl')
339 context.clone_uri_id_tmpl = rc_config.get('rhodecode_clone_uri_id_tmpl')
340 context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl')
340 context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl')
341
341
342 # INI stored
342 # INI stored
343 context.labs_active = str2bool(
343 context.labs_active = str2bool(
344 config.get('labs_settings_active', 'false'))
344 config.get('labs_settings_active', 'false'))
345 context.ssh_enabled = str2bool(
345 context.ssh_enabled = str2bool(
346 config.get('ssh.generate_authorized_keyfile', 'false'))
346 config.get('ssh.generate_authorized_keyfile', 'false'))
347 context.ssh_key_generator_enabled = str2bool(
347 context.ssh_key_generator_enabled = str2bool(
348 config.get('ssh.enable_ui_key_generator', 'true'))
348 config.get('ssh.enable_ui_key_generator', 'true'))
349
349
350 context.visual.allow_repo_location_change = str2bool(
350 context.visual.allow_repo_location_change = str2bool(
351 config.get('allow_repo_location_change', True))
351 config.get('allow_repo_location_change', True))
352 context.visual.allow_custom_hooks_settings = str2bool(
352 context.visual.allow_custom_hooks_settings = str2bool(
353 config.get('allow_custom_hooks_settings', True))
353 config.get('allow_custom_hooks_settings', True))
354 context.debug_style = str2bool(config.get('debug_style', False))
354 context.debug_style = str2bool(config.get('debug_style', False))
355
355
356 context.rhodecode_instanceid = config.get('instance_id')
356 context.rhodecode_instanceid = config.get('instance_id')
357
357
358 context.visual.cut_off_limit_diff = safe_int(
358 context.visual.cut_off_limit_diff = safe_int(
359 config.get('cut_off_limit_diff'), default=0)
359 config.get('cut_off_limit_diff'), default=0)
360 context.visual.cut_off_limit_file = safe_int(
360 context.visual.cut_off_limit_file = safe_int(
361 config.get('cut_off_limit_file'), default=0)
361 config.get('cut_off_limit_file'), default=0)
362
362
363 context.license = AttributeDict({})
363 context.license = AttributeDict({})
364 context.license.hide_license_info = str2bool(
364 context.license.hide_license_info = str2bool(
365 config.get('license.hide_license_info', False))
365 config.get('license.hide_license_info', False))
366
366
367 # AppEnlight
367 # AppEnlight
368 context.appenlight_enabled = config.get('appenlight', False)
368 context.appenlight_enabled = config.get('appenlight', False)
369 context.appenlight_api_public_key = config.get(
369 context.appenlight_api_public_key = config.get(
370 'appenlight.api_public_key', '')
370 'appenlight.api_public_key', '')
371 context.appenlight_server_url = config.get('appenlight.server_url', '')
371 context.appenlight_server_url = config.get('appenlight.server_url', '')
372
372
373 diffmode = {
373 diffmode = {
374 "unified": "unified",
374 "unified": "unified",
375 "sideside": "sideside"
375 "sideside": "sideside"
376 }.get(request.GET.get('diffmode'))
376 }.get(request.GET.get('diffmode'))
377
377
378 if is_api is not None:
378 if is_api is not None:
379 is_api = hasattr(request, 'rpc_user')
379 is_api = hasattr(request, 'rpc_user')
380 session_attrs = {
380 session_attrs = {
381 # defaults
381 # defaults
382 "clone_url_format": "http",
382 "clone_url_format": "http",
383 "diffmode": "sideside",
383 "diffmode": "sideside",
384 "license_fingerprint": request.session.get('license_fingerprint')
384 "license_fingerprint": request.session.get('license_fingerprint')
385 }
385 }
386
386
387 if not is_api:
387 if not is_api:
388 # don't access pyramid session for API calls
388 # don't access pyramid session for API calls
389 if diffmode and diffmode != request.session.get('rc_user_session_attr.diffmode'):
389 if diffmode and diffmode != request.session.get('rc_user_session_attr.diffmode'):
390 request.session['rc_user_session_attr.diffmode'] = diffmode
390 request.session['rc_user_session_attr.diffmode'] = diffmode
391
391
392 # session settings per user
392 # session settings per user
393
393
394 for k, v in list(request.session.items()):
394 for k, v in list(request.session.items()):
395 pref = 'rc_user_session_attr.'
395 pref = 'rc_user_session_attr.'
396 if k and k.startswith(pref):
396 if k and k.startswith(pref):
397 k = k[len(pref):]
397 k = k[len(pref):]
398 session_attrs[k] = v
398 session_attrs[k] = v
399
399
400 context.user_session_attrs = session_attrs
400 context.user_session_attrs = session_attrs
401
401
402 # JS template context
402 # JS template context
403 context.template_context = {
403 context.template_context = {
404 'repo_name': None,
404 'repo_name': None,
405 'repo_type': None,
405 'repo_type': None,
406 'repo_landing_commit': None,
406 'repo_landing_commit': None,
407 'rhodecode_user': {
407 'rhodecode_user': {
408 'username': None,
408 'username': None,
409 'email': None,
409 'email': None,
410 'notification_status': False
410 'notification_status': False
411 },
411 },
412 'session_attrs': session_attrs,
412 'session_attrs': session_attrs,
413 'visual': {
413 'visual': {
414 'default_renderer': None
414 'default_renderer': None
415 },
415 },
416 'commit_data': {
416 'commit_data': {
417 'commit_id': None
417 'commit_id': None
418 },
418 },
419 'pull_request_data': {'pull_request_id': None},
419 'pull_request_data': {'pull_request_id': None},
420 'timeago': {
420 'timeago': {
421 'refresh_time': 120 * 1000,
421 'refresh_time': 120 * 1000,
422 'cutoff_limit': 1000 * 60 * 60 * 24 * 7
422 'cutoff_limit': 1000 * 60 * 60 * 24 * 7
423 },
423 },
424 'pyramid_dispatch': {
424 'pyramid_dispatch': {
425
425
426 },
426 },
427 'extra': {'plugins': {}}
427 'extra': {'plugins': {}}
428 }
428 }
429 # END CONFIG VARS
429 # END CONFIG VARS
430 if is_api:
430 if is_api:
431 csrf_token = None
431 csrf_token = None
432 else:
432 else:
433 csrf_token = auth.get_csrf_token(session=request.session)
433 csrf_token = auth.get_csrf_token(session=request.session)
434
434
435 context.csrf_token = csrf_token
435 context.csrf_token = csrf_token
436 context.backends = list(rhodecode.BACKENDS.keys())
436 context.backends = list(rhodecode.BACKENDS.keys())
437
437
438 unread_count = 0
438 unread_count = 0
439 user_bookmark_list = []
439 user_bookmark_list = []
440 if user_id:
440 if user_id:
441 unread_count = NotificationModel().get_unread_cnt_for_user(user_id)
441 unread_count = NotificationModel().get_unread_cnt_for_user(user_id)
442 user_bookmark_list = UserBookmark.get_bookmarks_for_user(user_id)
442 user_bookmark_list = UserBookmark.get_bookmarks_for_user(user_id)
443 context.unread_notifications = unread_count
443 context.unread_notifications = unread_count
444 context.bookmark_items = user_bookmark_list
444 context.bookmark_items = user_bookmark_list
445
445
446 # web case
446 # web case
447 if hasattr(request, 'user'):
447 if hasattr(request, 'user'):
448 context.auth_user = request.user
448 context.auth_user = request.user
449 context.rhodecode_user = request.user
449 context.rhodecode_user = request.user
450
450
451 # api case
451 # api case
452 if hasattr(request, 'rpc_user'):
452 if hasattr(request, 'rpc_user'):
453 context.auth_user = request.rpc_user
453 context.auth_user = request.rpc_user
454 context.rhodecode_user = request.rpc_user
454 context.rhodecode_user = request.rpc_user
455
455
456 # attach the whole call context to the request
456 # attach the whole call context to the request
457 request.set_call_context(context)
457 request.set_call_context(context)
458
458
459
459
460 def get_auth_user(request):
460 def get_auth_user(request):
461 environ = request.environ
461 environ = request.environ
462 session = request.session
462 session = request.session
463
463
464 ip_addr = get_ip_addr(environ)
464 ip_addr = get_ip_addr(environ)
465
465
466 # make sure that we update permissions each time we call a controller
466 # make sure that we update permissions each time we call a controller
467 _auth_token = (
467 _auth_token = (
468 # ?auth_token=XXX
468 # ?auth_token=XXX
469 request.GET.get('auth_token', '')
469 request.GET.get('auth_token', '')
470 # ?api_key=XXX !LEGACY
470 # ?api_key=XXX !LEGACY
471 or request.GET.get('api_key', '')
471 or request.GET.get('api_key', '')
472 # or headers....
472 # or headers....
473 or request.headers.get('X-Rc-Auth-Token', '')
473 or request.headers.get('X-Rc-Auth-Token', '')
474 )
474 )
475 if not _auth_token and request.matchdict:
475 if not _auth_token and request.matchdict:
476 url_auth_token = request.matchdict.get('_auth_token')
476 url_auth_token = request.matchdict.get('_auth_token')
477 _auth_token = url_auth_token
477 _auth_token = url_auth_token
478 if _auth_token:
478 if _auth_token:
479 log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:])
479 log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:])
480
480
481 if _auth_token:
481 if _auth_token:
482 # when using API_KEY we assume user exists, and
482 # when using API_KEY we assume user exists, and
483 # doesn't need auth based on cookies.
483 # doesn't need auth based on cookies.
484 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
484 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
485 authenticated = False
485 authenticated = False
486 else:
486 else:
487 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
487 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
488 try:
488 try:
489 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
489 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
490 ip_addr=ip_addr)
490 ip_addr=ip_addr)
491 except UserCreationError as e:
491 except UserCreationError as e:
492 h.flash(e, 'error')
492 h.flash(e, 'error')
493 # container auth or other auth functions that create users
493 # container auth or other auth functions that create users
494 # on the fly can throw this exception signaling that there's an
494 # on the fly can throw this exception signaling that there's an
495 # issue with user creation; an explanation should be provided
495 # issue with user creation; an explanation should be provided
496 # in the Exception itself. We then create a simple blank
496 # in the Exception itself. We then create a simple blank
497 # AuthUser
497 # AuthUser
498 auth_user = AuthUser(ip_addr=ip_addr)
498 auth_user = AuthUser(ip_addr=ip_addr)
499
499
500 # in case someone changes a password for a user, it triggers a session
500 # in case someone changes a password for a user, it triggers a session
501 # flush and forces a re-login
501 # flush and forces a re-login
502 if password_changed(auth_user, session):
502 if password_changed(auth_user, session):
503 session.invalidate()
503 session.invalidate()
504 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
504 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
505 auth_user = AuthUser(ip_addr=ip_addr)
505 auth_user = AuthUser(ip_addr=ip_addr)
506
506
507 authenticated = cookie_store.get('is_authenticated')
507 authenticated = cookie_store.get('is_authenticated')
508
508
509 if not auth_user.is_authenticated and auth_user.is_user_object:
509 if not auth_user.is_authenticated and auth_user.is_user_object:
510 # user is not authenticated and not empty
510 # user is not authenticated and not empty
511 auth_user.set_authenticated(authenticated)
511 auth_user.set_authenticated(authenticated)
512
512
513 return auth_user, _auth_token
513 return auth_user, _auth_token
514
514
515
515
516 def h_filter(s):
516 def h_filter(s):
517 """
517 """
518 Custom filter for Mako templates. Mako by default uses `markupsafe.escape`;
518 Custom filter for Mako templates. Mako by default uses `markupsafe.escape`;
519 we wrap this with additional functionality that converts None to empty
519 we wrap this with additional functionality that converts None to empty
520 strings.
520 strings.
521 """
521 """
522 if s is None:
522 if s is None:
523 return markupsafe.Markup()
523 return markupsafe.Markup()
524 return markupsafe.escape(s)
524 return markupsafe.escape(s)
525
525
526
526
527 def add_events_routes(config):
527 def add_events_routes(config):
528 """
528 """
529 Adds routing that can be used in events. Because some events are triggered
529 Adds routing that can be used in events. Because some events are triggered
530 outside of a pyramid context, we need to bootstrap the request with some
530 outside of a pyramid context, we need to bootstrap the request with some
531 routing registered.
531 routing registered.
532 """
532 """
533
533
534 from rhodecode.apps._base import ADMIN_PREFIX
534 from rhodecode.apps._base import ADMIN_PREFIX
535
535
536 config.add_route(name='home', pattern='/')
536 config.add_route(name='home', pattern='/')
537 config.add_route(name='main_page_repos_data', pattern='/_home_repos')
537 config.add_route(name='main_page_repos_data', pattern='/_home_repos')
538 config.add_route(name='main_page_repo_groups_data', pattern='/_home_repo_groups')
538 config.add_route(name='main_page_repo_groups_data', pattern='/_home_repo_groups')
539
539
540 config.add_route(name='login', pattern=ADMIN_PREFIX + '/login')
540 config.add_route(name='login', pattern=ADMIN_PREFIX + '/login')
541 config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout')
541 config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout')
542 config.add_route(name='repo_summary', pattern='/{repo_name}')
542 config.add_route(name='repo_summary', pattern='/{repo_name}')
543 config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary')
543 config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary')
544 config.add_route(name='repo_group_home', pattern='/{repo_group_name}')
544 config.add_route(name='repo_group_home', pattern='/{repo_group_name}')
545
545
546 config.add_route(name='pullrequest_show',
546 config.add_route(name='pullrequest_show',
547 pattern='/{repo_name}/pull-request/{pull_request_id}')
547 pattern='/{repo_name}/pull-request/{pull_request_id}')
548 config.add_route(name='pull_requests_global',
548 config.add_route(name='pull_requests_global',
549 pattern='/pull-request/{pull_request_id}')
549 pattern='/pull-request/{pull_request_id}')
550
550
551 config.add_route(name='repo_commit',
551 config.add_route(name='repo_commit',
552 pattern='/{repo_name}/changeset/{commit_id}')
552 pattern='/{repo_name}/changeset/{commit_id}')
553 config.add_route(name='repo_files',
553 config.add_route(name='repo_files',
554 pattern='/{repo_name}/files/{commit_id}/{f_path}')
554 pattern='/{repo_name}/files/{commit_id}/{f_path}')
555
555
556 config.add_route(name='hovercard_user',
556 config.add_route(name='hovercard_user',
557 pattern='/_hovercard/user/{user_id}')
557 pattern='/_hovercard/user/{user_id}')
558
558
559 config.add_route(name='hovercard_user_group',
559 config.add_route(name='hovercard_user_group',
560 pattern='/_hovercard/user_group/{user_group_id}')
560 pattern='/_hovercard/user_group/{user_group_id}')
561
561
562 config.add_route(name='hovercard_pull_request',
562 config.add_route(name='hovercard_pull_request',
563 pattern='/_hovercard/pull_request/{pull_request_id}')
563 pattern='/_hovercard/pull_request/{pull_request_id}')
564
564
565 config.add_route(name='hovercard_repo_commit',
565 config.add_route(name='hovercard_repo_commit',
566 pattern='/_hovercard/commit/{repo_name}/{commit_id}')
566 pattern='/_hovercard/commit/{repo_name}/{commit_id}')
567
567
568
568
569 def bootstrap_config(request, registry_name='RcTestRegistry'):
569 def bootstrap_config(request, registry_name='RcTestRegistry'):
570 from rhodecode.config.middleware import sanitize_settings_and_apply_defaults
570 import pyramid.testing
571 import pyramid.testing
571 registry = pyramid.testing.Registry(registry_name)
572 registry = pyramid.testing.Registry(registry_name)
572
573
574 global_config = {'__file__': ''}
575
573 config = pyramid.testing.setUp(registry=registry, request=request)
576 config = pyramid.testing.setUp(registry=registry, request=request)
577 sanitize_settings_and_apply_defaults(global_config, config.registry.settings)
574
578
575 # allow pyramid lookup in testing
579 # allow pyramid lookup in testing
576 config.include('pyramid_mako')
580 config.include('pyramid_mako')
577 config.include('rhodecode.lib.rc_beaker')
581 config.include('rhodecode.lib.rc_beaker')
578 config.include('rhodecode.lib.rc_cache')
582 config.include('rhodecode.lib.rc_cache')
579 config.include('rhodecode.lib.rc_cache.archive_cache')
583 config.include('rhodecode.lib.rc_cache.archive_cache')
580 add_events_routes(config)
584 add_events_routes(config)
581
585
582 return config
586 return config
583
587
584
588
585 def bootstrap_request(**kwargs):
589 def bootstrap_request(**kwargs):
586 """
590 """
587 Returns a thin version of the Request object that is used in non-web contexts like testing/celery
591 Returns a thin version of the Request object that is used in non-web contexts like testing/celery
588 """
592 """
589
593
590 import pyramid.testing
594 import pyramid.testing
591 from rhodecode.lib.request import ThinRequest as _ThinRequest
595 from rhodecode.lib.request import ThinRequest as _ThinRequest
592
596
593 class ThinRequest(_ThinRequest):
597 class ThinRequest(_ThinRequest):
594 application_url = kwargs.pop('application_url', 'http://example.com')
598 application_url = kwargs.pop('application_url', 'http://example.com')
595 host = kwargs.pop('host', 'example.com:80')
599 host = kwargs.pop('host', 'example.com:80')
596 domain = kwargs.pop('domain', 'example.com')
600 domain = kwargs.pop('domain', 'example.com')
597
601
598 class ThinSession(pyramid.testing.DummySession):
602 class ThinSession(pyramid.testing.DummySession):
599 def save(*arg, **kw):
603 def save(*arg, **kw):
600 pass
604 pass
601
605
602 request = ThinRequest(**kwargs)
606 request = ThinRequest(**kwargs)
603 request.session = ThinSession()
607 request.session = ThinSession()
604
608
605 return request
609 return request
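
The commit message says this change fixes up the config stubs used in tests; a minimal sketch of how these stubs might be exercised (the settings access shown is an assumption, not taken from the actual test suite):

# Illustrative only -- not part of this commit.
from rhodecode.lib.base import bootstrap_request, bootstrap_config

request = bootstrap_request()
config = bootstrap_config(request)
# after this change, registry settings have the sanitized defaults applied
settings = config.registry.settings
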
@@ -1,437 +1,436 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import os
19 import os
20 import time
20 import time
21 import logging
21 import logging
22 import tempfile
22 import tempfile
23 import traceback
23 import traceback
24 import threading
24 import threading
25 import socket
25 import socket
26 import msgpack
26 import msgpack
27 import gevent
27 import gevent
28
28
29 from http.server import BaseHTTPRequestHandler
29 from http.server import BaseHTTPRequestHandler
30 from socketserver import TCPServer
30 from socketserver import TCPServer
31
31
32 import rhodecode
32 import rhodecode
33 from rhodecode.lib.exceptions import HTTPLockedRC, HTTPBranchProtected
33 from rhodecode.lib.exceptions import HTTPLockedRC, HTTPBranchProtected
34 from rhodecode.model import meta
34 from rhodecode.model import meta
35 from rhodecode.lib.base import bootstrap_request, bootstrap_config
36 from rhodecode.lib import hooks_base
35 from rhodecode.lib import hooks_base
37 from rhodecode.lib.utils2 import AttributeDict
36 from rhodecode.lib.utils2 import AttributeDict
38 from rhodecode.lib.ext_json import json
37 from rhodecode.lib.ext_json import json
39 from rhodecode.lib import rc_cache
38 from rhodecode.lib import rc_cache
40
39
41 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
42
41
43
42
44 class HooksHttpHandler(BaseHTTPRequestHandler):
43 class HooksHttpHandler(BaseHTTPRequestHandler):
45
44
46 JSON_HOOKS_PROTO = 'json.v1'
45 JSON_HOOKS_PROTO = 'json.v1'
47 MSGPACK_HOOKS_PROTO = 'msgpack.v1'
46 MSGPACK_HOOKS_PROTO = 'msgpack.v1'
48 # starting with RhodeCode 5.0.0 MsgPack is the default; prior versions used json
47 # starting with RhodeCode 5.0.0 MsgPack is the default; prior versions used json
49 DEFAULT_HOOKS_PROTO = MSGPACK_HOOKS_PROTO
48 DEFAULT_HOOKS_PROTO = MSGPACK_HOOKS_PROTO
50
49
51 @classmethod
50 @classmethod
52 def serialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO):
51 def serialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO):
53 if proto == cls.MSGPACK_HOOKS_PROTO:
52 if proto == cls.MSGPACK_HOOKS_PROTO:
54 return msgpack.packb(data)
53 return msgpack.packb(data)
55 return json.dumps(data)
54 return json.dumps(data)
56
55
57 @classmethod
56 @classmethod
58 def deserialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO):
57 def deserialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO):
59 if proto == cls.MSGPACK_HOOKS_PROTO:
58 if proto == cls.MSGPACK_HOOKS_PROTO:
60 return msgpack.unpackb(data)
59 return msgpack.unpackb(data)
61 return json.loads(data)
60 return json.loads(data)
62
61
63 def do_POST(self):
62 def do_POST(self):
64 hooks_proto, method, extras = self._read_request()
63 hooks_proto, method, extras = self._read_request()
65 log.debug('Handling HooksHttpHandler %s with %s proto', method, hooks_proto)
64 log.debug('Handling HooksHttpHandler %s with %s proto', method, hooks_proto)
66
65
67 txn_id = getattr(self.server, 'txn_id', None)
66 txn_id = getattr(self.server, 'txn_id', None)
68 if txn_id:
67 if txn_id:
69 log.debug('Computing TXN_ID based on `%s`:`%s`',
68 log.debug('Computing TXN_ID based on `%s`:`%s`',
70 extras['repository'], extras['txn_id'])
69 extras['repository'], extras['txn_id'])
71 computed_txn_id = rc_cache.utils.compute_key_from_params(
70 computed_txn_id = rc_cache.utils.compute_key_from_params(
72 extras['repository'], extras['txn_id'])
71 extras['repository'], extras['txn_id'])
73 if txn_id != computed_txn_id:
72 if txn_id != computed_txn_id:
74 raise Exception(
73 raise Exception(
75 'TXN ID fail: expected {} got {} instead'.format(
74 'TXN ID fail: expected {} got {} instead'.format(
76 txn_id, computed_txn_id))
75 txn_id, computed_txn_id))
77
76
78 request = getattr(self.server, 'request', None)
77 request = getattr(self.server, 'request', None)
79 try:
78 try:
80 hooks = Hooks(request=request, log_prefix='HOOKS: {} '.format(self.server.server_address))
79 hooks = Hooks(request=request, log_prefix='HOOKS: {} '.format(self.server.server_address))
81 result = self._call_hook_method(hooks, method, extras)
80 result = self._call_hook_method(hooks, method, extras)
82
81
83 except Exception as e:
82 except Exception as e:
84 exc_tb = traceback.format_exc()
83 exc_tb = traceback.format_exc()
85 result = {
84 result = {
86 'exception': e.__class__.__name__,
85 'exception': e.__class__.__name__,
87 'exception_traceback': exc_tb,
86 'exception_traceback': exc_tb,
88 'exception_args': e.args
87 'exception_args': e.args
89 }
88 }
90 self._write_response(hooks_proto, result)
89 self._write_response(hooks_proto, result)
91
90
92 def _read_request(self):
91 def _read_request(self):
93 length = int(self.headers['Content-Length'])
92 length = int(self.headers['Content-Length'])
94 # respect sent headers, fallback to OLD proto for compatibility
93 # respect sent headers, fallback to OLD proto for compatibility
95 hooks_proto = self.headers.get('rc-hooks-protocol') or self.JSON_HOOKS_PROTO
94 hooks_proto = self.headers.get('rc-hooks-protocol') or self.JSON_HOOKS_PROTO
96 if hooks_proto == self.MSGPACK_HOOKS_PROTO:
95 if hooks_proto == self.MSGPACK_HOOKS_PROTO:
97 # support for new vcsserver msgpack based protocol hooks
96 # support for new vcsserver msgpack based protocol hooks
98 body = self.rfile.read(length)
97 body = self.rfile.read(length)
99 data = self.deserialize_data(body)
98 data = self.deserialize_data(body)
100 else:
99 else:
101 body = self.rfile.read(length)
100 body = self.rfile.read(length)
102 data = self.deserialize_data(body)
101 data = self.deserialize_data(body)
103
102
104 return hooks_proto, data['method'], data['extras']
103 return hooks_proto, data['method'], data['extras']
105
104
106 def _write_response(self, hooks_proto, result):
105 def _write_response(self, hooks_proto, result):
107 self.send_response(200)
106 self.send_response(200)
108 if hooks_proto == self.MSGPACK_HOOKS_PROTO:
107 if hooks_proto == self.MSGPACK_HOOKS_PROTO:
109 self.send_header("Content-type", "application/msgpack")
108 self.send_header("Content-type", "application/msgpack")
110 self.end_headers()
109 self.end_headers()
111 data = self.serialize_data(result)
110 data = self.serialize_data(result)
112 self.wfile.write(data)
111 self.wfile.write(data)
113 else:
112 else:
114 self.send_header("Content-type", "text/json")
113 self.send_header("Content-type", "text/json")
115 self.end_headers()
114 self.end_headers()
116 data = self.serialize_data(result)
115 data = self.serialize_data(result)
117 self.wfile.write(data)
116 self.wfile.write(data)
118
117
119 def _call_hook_method(self, hooks, method, extras):
118 def _call_hook_method(self, hooks, method, extras):
120 try:
119 try:
121 result = getattr(hooks, method)(extras)
120 result = getattr(hooks, method)(extras)
122 finally:
121 finally:
123 meta.Session.remove()
122 meta.Session.remove()
124 return result
123 return result
125
124
126 def log_message(self, format, *args):
125 def log_message(self, format, *args):
127 """
126 """
128 This is an overridden method of BaseHTTPRequestHandler which logs using the
127 This is an overridden method of BaseHTTPRequestHandler which logs using the
129 logging library instead of writing directly to stderr.
128 logging library instead of writing directly to stderr.
130 """
129 """
131
130
132 message = format % args
131 message = format % args
133
132
134 log.debug(
133 log.debug(
135 "HOOKS: client=%s - - [%s] %s", self.client_address,
134 "HOOKS: client=%s - - [%s] %s", self.client_address,
136 self.log_date_time_string(), message)
135 self.log_date_time_string(), message)
137
136
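
As a quick illustration of the two hook protocols handled above, a round trip through the class-level helpers (the payload is made up; msgpack >= 1.0 semantics are assumed for the equality check):

# Illustrative only -- not part of this commit.
payload = {'method': 'repo_size', 'extras': {'repository': 'some-repo'}}

packed = HooksHttpHandler.serialize_data(payload)  # msgpack.v1 is the default
assert HooksHttpHandler.deserialize_data(packed) == payload

as_json = HooksHttpHandler.serialize_data(payload, proto=HooksHttpHandler.JSON_HOOKS_PROTO)
assert HooksHttpHandler.deserialize_data(as_json, proto=HooksHttpHandler.JSON_HOOKS_PROTO) == payload
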
138
137
139 class DummyHooksCallbackDaemon(object):
138 class DummyHooksCallbackDaemon(object):
140 hooks_uri = ''
139 hooks_uri = ''
141
140
142 def __init__(self):
141 def __init__(self):
143 self.hooks_module = Hooks.__module__
142 self.hooks_module = Hooks.__module__
144
143
145 def __enter__(self):
144 def __enter__(self):
146 log.debug('Running `%s` callback daemon', self.__class__.__name__)
145 log.debug('Running `%s` callback daemon', self.__class__.__name__)
147 return self
146 return self
148
147
149 def __exit__(self, exc_type, exc_val, exc_tb):
148 def __exit__(self, exc_type, exc_val, exc_tb):
150 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
149 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
151
150
152
151
153 class ThreadedHookCallbackDaemon(object):
152 class ThreadedHookCallbackDaemon(object):
154
153
155 _callback_thread = None
154 _callback_thread = None
156 _daemon = None
155 _daemon = None
157 _done = False
156 _done = False
158 use_gevent = False
157 use_gevent = False
159
158
160 def __init__(self, txn_id=None, host=None, port=None):
159 def __init__(self, txn_id=None, host=None, port=None):
161 self._prepare(txn_id=txn_id, host=host, port=port)
160 self._prepare(txn_id=txn_id, host=host, port=port)
162 if self.use_gevent:
161 if self.use_gevent:
163 self._run_func = self._run_gevent
162 self._run_func = self._run_gevent
164 self._stop_func = self._stop_gevent
163 self._stop_func = self._stop_gevent
165 else:
164 else:
166 self._run_func = self._run
165 self._run_func = self._run
167 self._stop_func = self._stop
166 self._stop_func = self._stop
168
167
169 def __enter__(self):
168 def __enter__(self):
170 log.debug('Running `%s` callback daemon', self.__class__.__name__)
169 log.debug('Running `%s` callback daemon', self.__class__.__name__)
171 self._run_func()
170 self._run_func()
172 return self
171 return self
173
172
174 def __exit__(self, exc_type, exc_val, exc_tb):
173 def __exit__(self, exc_type, exc_val, exc_tb):
175 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
174 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
176 self._stop_func()
175 self._stop_func()
177
176
178 def _prepare(self, txn_id=None, host=None, port=None):
177 def _prepare(self, txn_id=None, host=None, port=None):
179 raise NotImplementedError()
178 raise NotImplementedError()
180
179
181 def _run(self):
180 def _run(self):
182 raise NotImplementedError()
181 raise NotImplementedError()
183
182
184 def _stop(self):
183 def _stop(self):
185 raise NotImplementedError()
184 raise NotImplementedError()
186
185
187 def _run_gevent(self):
186 def _run_gevent(self):
188 raise NotImplementedError()
187 raise NotImplementedError()
189
188
190 def _stop_gevent(self):
189 def _stop_gevent(self):
191 raise NotImplementedError()
190 raise NotImplementedError()
192
191
193
192
194 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
193 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
195 """
194 """
196 Context manager which will run a callback daemon in a background thread.
195 Context manager which will run a callback daemon in a background thread.
197 """
196 """
198
197
199 hooks_uri = None
198 hooks_uri = None
200
199
201 # From Python docs: Polling reduces our responsiveness to a shutdown
200 # From Python docs: Polling reduces our responsiveness to a shutdown
202 # request and wastes cpu at all other times.
201 # request and wastes cpu at all other times.
203 POLL_INTERVAL = 0.01
202 POLL_INTERVAL = 0.01
204
203
205 use_gevent = False
204 use_gevent = False
206
205
207 @property
206 @property
208 def _hook_prefix(self):
207 def _hook_prefix(self):
209 return 'HOOKS: {} '.format(self.hooks_uri)
208 return 'HOOKS: {} '.format(self.hooks_uri)
210
209
211 def get_hostname(self):
210 def get_hostname(self):
212 return socket.gethostname() or '127.0.0.1'
211 return socket.gethostname() or '127.0.0.1'
213
212
214 def get_available_port(self, min_port=20000, max_port=65535):
213 def get_available_port(self, min_port=20000, max_port=65535):
215 from rhodecode.lib.utils2 import get_available_port as _get_port
214 from rhodecode.lib.utils2 import get_available_port as _get_port
216 return _get_port(min_port, max_port)
215 return _get_port(min_port, max_port)
217
216
218 def _prepare(self, txn_id=None, host=None, port=None):
217 def _prepare(self, txn_id=None, host=None, port=None):
219 from pyramid.threadlocal import get_current_request
218 from pyramid.threadlocal import get_current_request
220
219
221 if not host or host == "*":
220 if not host or host == "*":
222 host = self.get_hostname()
221 host = self.get_hostname()
223 if not port:
222 if not port:
224 port = self.get_available_port()
223 port = self.get_available_port()
225
224
226 server_address = (host, port)
225 server_address = (host, port)
227 self.hooks_uri = '{}:{}'.format(host, port)
226 self.hooks_uri = '{}:{}'.format(host, port)
228 self.txn_id = txn_id
227 self.txn_id = txn_id
229 self._done = False
228 self._done = False
230
229
231 log.debug(
230 log.debug(
232 "%s Preparing HTTP callback daemon registering hook object: %s",
231 "%s Preparing HTTP callback daemon registering hook object: %s",
233 self._hook_prefix, HooksHttpHandler)
232 self._hook_prefix, HooksHttpHandler)
234
233
235 self._daemon = TCPServer(server_address, HooksHttpHandler)
234 self._daemon = TCPServer(server_address, HooksHttpHandler)
236 # inject transaction_id for later verification
235 # inject transaction_id for later verification
237 self._daemon.txn_id = self.txn_id
236 self._daemon.txn_id = self.txn_id
238
237
239 # pass the WEB app request into daemon
238 # pass the WEB app request into daemon
240 self._daemon.request = get_current_request()
239 self._daemon.request = get_current_request()
241
240
242 def _run(self):
241 def _run(self):
243 log.debug("Running thread-based loop of callback daemon in background")
242 log.debug("Running thread-based loop of callback daemon in background")
244 callback_thread = threading.Thread(
243 callback_thread = threading.Thread(
245 target=self._daemon.serve_forever,
244 target=self._daemon.serve_forever,
246 kwargs={'poll_interval': self.POLL_INTERVAL})
245 kwargs={'poll_interval': self.POLL_INTERVAL})
247 callback_thread.daemon = True
246 callback_thread.daemon = True
248 callback_thread.start()
247 callback_thread.start()
249 self._callback_thread = callback_thread
248 self._callback_thread = callback_thread
250
249
251 def _run_gevent(self):
250 def _run_gevent(self):
252 log.debug("Running gevent-based loop of callback daemon in background")
251 log.debug("Running gevent-based loop of callback daemon in background")
253 # create a new greenlet for the daemon's serve_forever method
252 # create a new greenlet for the daemon's serve_forever method
254 callback_greenlet = gevent.spawn(
253 callback_greenlet = gevent.spawn(
255 self._daemon.serve_forever,
254 self._daemon.serve_forever,
256 poll_interval=self.POLL_INTERVAL)
255 poll_interval=self.POLL_INTERVAL)
257
256
258 # store reference to greenlet
257 # store reference to greenlet
259 self._callback_greenlet = callback_greenlet
258 self._callback_greenlet = callback_greenlet
260
259
261 # switch to this greenlet
260 # switch to this greenlet
262 gevent.sleep(0.01)
261 gevent.sleep(0.01)
263
262
264 def _stop(self):
263 def _stop(self):
265 log.debug("Waiting for background thread to finish.")
264 log.debug("Waiting for background thread to finish.")
266 self._daemon.shutdown()
265 self._daemon.shutdown()
267 self._callback_thread.join()
266 self._callback_thread.join()
268 self._daemon = None
267 self._daemon = None
269 self._callback_thread = None
268 self._callback_thread = None
270 if self.txn_id:
269 if self.txn_id:
271 txn_id_file = get_txn_id_data_path(self.txn_id)
270 txn_id_file = get_txn_id_data_path(self.txn_id)
272 log.debug('Cleaning up TXN ID %s', txn_id_file)
271 log.debug('Cleaning up TXN ID %s', txn_id_file)
273 if os.path.isfile(txn_id_file):
272 if os.path.isfile(txn_id_file):
274 os.remove(txn_id_file)
273 os.remove(txn_id_file)
275
274
276 log.debug("Background thread done.")
275 log.debug("Background thread done.")
277
276
278 def _stop_gevent(self):
277 def _stop_gevent(self):
279 log.debug("Waiting for background greenlet to finish.")
278 log.debug("Waiting for background greenlet to finish.")
280
279
281 # if greenlet exists and is running
280 # if greenlet exists and is running
282 if self._callback_greenlet and not self._callback_greenlet.dead:
281 if self._callback_greenlet and not self._callback_greenlet.dead:
283 # shutdown daemon if it exists
282 # shutdown daemon if it exists
284 if self._daemon:
283 if self._daemon:
285 self._daemon.shutdown()
284 self._daemon.shutdown()
286
285
287 # kill the greenlet
286 # kill the greenlet
288 self._callback_greenlet.kill()
287 self._callback_greenlet.kill()
289
288
290 self._daemon = None
289 self._daemon = None
291 self._callback_greenlet = None
290 self._callback_greenlet = None
292
291
293 if self.txn_id:
292 if self.txn_id:
294 txn_id_file = get_txn_id_data_path(self.txn_id)
293 txn_id_file = get_txn_id_data_path(self.txn_id)
295 log.debug('Cleaning up TXN ID %s', txn_id_file)
294 log.debug('Cleaning up TXN ID %s', txn_id_file)
296 if os.path.isfile(txn_id_file):
295 if os.path.isfile(txn_id_file):
297 os.remove(txn_id_file)
296 os.remove(txn_id_file)
298
297
299 log.debug("Background greenlet done.")
298 log.debug("Background greenlet done.")
300
299
301
300
302 def get_txn_id_data_path(txn_id):
301 def get_txn_id_data_path(txn_id):
303 import rhodecode
302 import rhodecode
304
303
305 root = rhodecode.CONFIG.get('cache_dir') or tempfile.gettempdir()
304 root = rhodecode.CONFIG.get('cache_dir') or tempfile.gettempdir()
306 final_dir = os.path.join(root, 'svn_txn_id')
305 final_dir = os.path.join(root, 'svn_txn_id')
307
306
308 if not os.path.isdir(final_dir):
307 if not os.path.isdir(final_dir):
309 os.makedirs(final_dir)
308 os.makedirs(final_dir)
310 return os.path.join(final_dir, 'rc_txn_id_{}'.format(txn_id))
309 return os.path.join(final_dir, 'rc_txn_id_{}'.format(txn_id))
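# Illustrative result, assuming cache_dir is unset so the system temp dir is
# used: get_txn_id_data_path('abc-123') -> '/tmp/svn_txn_id/rc_txn_id_abc-123'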
311
310
312
311
313 def store_txn_id_data(txn_id, data_dict):
312 def store_txn_id_data(txn_id, data_dict):
314 if not txn_id:
313 if not txn_id:
315 log.warning('Cannot store txn_id because it is empty')
314 log.warning('Cannot store txn_id because it is empty')
316 return
315 return
317
316
318 path = get_txn_id_data_path(txn_id)
317 path = get_txn_id_data_path(txn_id)
319 try:
318 try:
320 with open(path, 'wb') as f:
319 with open(path, 'wb') as f:
321 f.write(json.dumps(data_dict))
320 f.write(json.dumps(data_dict))
322 except Exception:
321 except Exception:
323 log.exception('Failed to write txn_id metadata')
322 log.exception('Failed to write txn_id metadata')
324
323
325
324
326 def get_txn_id_from_store(txn_id):
325 def get_txn_id_from_store(txn_id):
327 """
326 """
328 Reads txn_id from the store and, if present, returns the data for the callback manager
327 Reads txn_id from the store and, if present, returns the data for the callback manager
329 """
328 """
330 path = get_txn_id_data_path(txn_id)
329 path = get_txn_id_data_path(txn_id)
331 try:
330 try:
332 with open(path, 'rb') as f:
331 with open(path, 'rb') as f:
333 return json.loads(f.read())
332 return json.loads(f.read())
334 except Exception:
333 except Exception:
335 return {}
334 return {}
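# A minimal round-trip sketch, assuming a writable cache/temp dir; the txn id
# and port below are made-up example values.
def _txn_id_round_trip_sketch():
    txn_id = 'deadbeef-1'                        # hypothetical SVN transaction id
    store_txn_id_data(txn_id, {'port': 54321})   # persisted as JSON under <cache_dir>/svn_txn_id
    details = get_txn_id_from_store(txn_id)      # -> {'port': 54321}, or {} on any error
    return details.get('port', 0)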
336
335
337
336
338 def prepare_callback_daemon(extras, protocol, host, use_direct_calls, txn_id=None):
337 def prepare_callback_daemon(extras, protocol, host, use_direct_calls, txn_id=None):
339 txn_details = get_txn_id_from_store(txn_id)
338 txn_details = get_txn_id_from_store(txn_id)
340 port = txn_details.get('port', 0)
339 port = txn_details.get('port', 0)
341 if use_direct_calls:
340 if use_direct_calls:
342 callback_daemon = DummyHooksCallbackDaemon()
341 callback_daemon = DummyHooksCallbackDaemon()
343 extras['hooks_module'] = callback_daemon.hooks_module
342 extras['hooks_module'] = callback_daemon.hooks_module
344 else:
343 else:
345 if protocol == 'http':
344 if protocol == 'http':
346 callback_daemon = HttpHooksCallbackDaemon(
345 callback_daemon = HttpHooksCallbackDaemon(
347 txn_id=txn_id, host=host, port=port)
346 txn_id=txn_id, host=host, port=port)
348 else:
347 else:
349 log.error('Unsupported callback daemon protocol "%s"', protocol)
348 log.error('Unsupported callback daemon protocol "%s"', protocol)
350 raise Exception('Unsupported callback daemon protocol.')
349 raise Exception('Unsupported callback daemon protocol.')
351
350
352 extras['hooks_uri'] = callback_daemon.hooks_uri
351 extras['hooks_uri'] = callback_daemon.hooks_uri
353 extras['hooks_protocol'] = protocol
352 extras['hooks_protocol'] = protocol
354 extras['time'] = time.time()
353 extras['time'] = time.time()
355
354
356 # register txn_id
355 # register txn_id
357 extras['txn_id'] = txn_id
356 extras['txn_id'] = txn_id
358 log.debug('Prepared a callback daemon: %s at url `%s`',
357 log.debug('Prepared a callback daemon: %s at url `%s`',
359 callback_daemon.__class__.__name__, callback_daemon.hooks_uri)
358 callback_daemon.__class__.__name__, callback_daemon.hooks_uri)
360 return callback_daemon, extras
359 return callback_daemon, extras
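# A hedged usage sketch of the function above; `extras` must already carry the
# hook data (repo, user, action, server_url, ...) and `run_vcs_operation` is a
# stand-in name for whatever actually performs the pull/push.
def _prepare_callback_daemon_sketch(extras, run_vcs_operation):
    daemon, extras = prepare_callback_daemon(
        extras, protocol='http', host='127.0.0.1',
        use_direct_calls=False, txn_id=None)
    with daemon:
        # while inside the block the hook server listens on extras['hooks_uri']
        return run_vcs_operation(extras)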
361
360
362
361
363 class Hooks(object):
362 class Hooks(object):
364 """
363 """
365 Exposes the hooks for remote callbacks
364 Exposes the hooks for remote callbacks
366 """
365 """
367 def __init__(self, request=None, log_prefix=''):
366 def __init__(self, request=None, log_prefix=''):
368 self.log_prefix = log_prefix
367 self.log_prefix = log_prefix
369 self.request = request
368 self.request = request
370
369
371 def repo_size(self, extras):
370 def repo_size(self, extras):
372 log.debug("%sCalled repo_size of %s object", self.log_prefix, self)
371 log.debug("%sCalled repo_size of %s object", self.log_prefix, self)
373 return self._call_hook(hooks_base.repo_size, extras)
372 return self._call_hook(hooks_base.repo_size, extras)
374
373
375 def pre_pull(self, extras):
374 def pre_pull(self, extras):
376 log.debug("%sCalled pre_pull of %s object", self.log_prefix, self)
375 log.debug("%sCalled pre_pull of %s object", self.log_prefix, self)
377 return self._call_hook(hooks_base.pre_pull, extras)
376 return self._call_hook(hooks_base.pre_pull, extras)
378
377
379 def post_pull(self, extras):
378 def post_pull(self, extras):
380 log.debug("%sCalled post_pull of %s object", self.log_prefix, self)
379 log.debug("%sCalled post_pull of %s object", self.log_prefix, self)
381 return self._call_hook(hooks_base.post_pull, extras)
380 return self._call_hook(hooks_base.post_pull, extras)
382
381
383 def pre_push(self, extras):
382 def pre_push(self, extras):
384 log.debug("%sCalled pre_push of %s object", self.log_prefix, self)
383 log.debug("%sCalled pre_push of %s object", self.log_prefix, self)
385 return self._call_hook(hooks_base.pre_push, extras)
384 return self._call_hook(hooks_base.pre_push, extras)
386
385
387 def post_push(self, extras):
386 def post_push(self, extras):
388 log.debug("%sCalled post_push of %s object", self.log_prefix, self)
387 log.debug("%sCalled post_push of %s object", self.log_prefix, self)
389 return self._call_hook(hooks_base.post_push, extras)
388 return self._call_hook(hooks_base.post_push, extras)
390
389
391 def _call_hook(self, hook, extras):
390 def _call_hook(self, hook, extras):
392 extras = AttributeDict(extras)
391 extras = AttributeDict(extras)
393 server_url = extras['server_url']
392 server_url = extras['server_url']
394
393
395 extras.request = self.request
394 extras.request = self.request
396
395
397 try:
396 try:
398 result = hook(extras)
397 result = hook(extras)
399 if result is None:
398 if result is None:
400 raise Exception(
399 raise Exception(
401 'Failed to obtain hook result from func: {}'.format(hook))
400 'Failed to obtain hook result from func: {}'.format(hook))
402 except HTTPBranchProtected as handled_error:
401 except HTTPBranchProtected as handled_error:
403 # These special cases don't need error reporting; it's a case of a
402 # These special cases don't need error reporting; it's a case of a
404 # locked repo or a protected branch
403 # locked repo or a protected branch
405 result = AttributeDict({
404 result = AttributeDict({
406 'status': handled_error.code,
405 'status': handled_error.code,
407 'output': handled_error.explanation
406 'output': handled_error.explanation
408 })
407 })
409 except (HTTPLockedRC, Exception) as error:
408 except (HTTPLockedRC, Exception) as error:
410 # locked needs different handling since we need to also
409 # locked needs different handling since we need to also
411 # handle PULL operations
410 # handle PULL operations
412 exc_tb = ''
411 exc_tb = ''
413 if not isinstance(error, HTTPLockedRC):
412 if not isinstance(error, HTTPLockedRC):
414 exc_tb = traceback.format_exc()
413 exc_tb = traceback.format_exc()
415 log.exception('%sException when handling hook %s', self.log_prefix, hook)
414 log.exception('%sException when handling hook %s', self.log_prefix, hook)
416 error_args = error.args
415 error_args = error.args
417 return {
416 return {
418 'status': 128,
417 'status': 128,
419 'output': '',
418 'output': '',
420 'exception': type(error).__name__,
419 'exception': type(error).__name__,
421 'exception_traceback': exc_tb,
420 'exception_traceback': exc_tb,
422 'exception_args': error_args,
421 'exception_args': error_args,
423 }
422 }
424 finally:
423 finally:
425 meta.Session.remove()
424 meta.Session.remove()
426
425
427 log.debug('%sGot hook call response %s', self.log_prefix, result)
426 log.debug('%sGot hook call response %s', self.log_prefix, result)
428 return {
427 return {
429 'status': result.status,
428 'status': result.status,
430 'output': result.output,
429 'output': result.output,
431 }
430 }
432
431
433 def __enter__(self):
432 def __enter__(self):
434 return self
433 return self
435
434
436 def __exit__(self, exc_type, exc_val, exc_tb):
435 def __exit__(self, exc_type, exc_val, exc_tb):
437 pass
436 pass
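# A minimal sketch of driving the Hooks facade directly; the extras dict shown
# is abbreviated and in practice has to contain every key the hooks_base
# callables expect (server_url, repository, user_id, action, and so on).
def _hooks_usage_sketch(extras_dict):
    with Hooks(request=None, log_prefix='sketch ') as hooks:
        result = hooks.repo_size(extras_dict)   # -> {'status': ..., 'output': ...}
    return result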
@@ -1,846 +1,847 b''
1 # Copyright (C) 2017-2023 RhodeCode GmbH
1 # Copyright (C) 2017-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 import os
20 import os
21 import sys
21 import sys
22 import time
22 import time
23 import platform
23 import platform
24 import collections
24 import collections
25 import psutil
25 import psutil
26 from functools import wraps
26 from functools import wraps
27
27
28 import pkg_resources
28 import pkg_resources
29 import logging
29 import logging
30 import resource
30 import resource
31
31
32 import configparser
32 import configparser
33
33
34 from rc_license.models import LicenseModel
34 from rhodecode.lib.str_utils import safe_str
35 from rhodecode.lib.str_utils import safe_str
35
36
36 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
37
38
38
39
39 _NA = 'NOT AVAILABLE'
40 _NA = 'NOT AVAILABLE'
40 _NA_FLOAT = 0.0
41 _NA_FLOAT = 0.0
41
42
42 STATE_OK = 'ok'
43 STATE_OK = 'ok'
43 STATE_ERR = 'error'
44 STATE_ERR = 'error'
44 STATE_WARN = 'warning'
45 STATE_WARN = 'warning'
45
46
46 STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK}
47 STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK}
47
48
48
49
49 registered_helpers = {}
50 registered_helpers = {}
50
51
51
52
52 def register_sysinfo(func):
53 def register_sysinfo(func):
53 """
54 """
54 @register_sysinfo
55 @register_sysinfo
55 def db_check():
56 def db_check():
56 pass
57 pass
57
58
58 db_check == registered_helpers['db_check']
59 db_check == registered_helpers['db_check']
59 """
60 """
60 global registered_helpers
61 global registered_helpers
61 registered_helpers[func.__name__] = func
62 registered_helpers[func.__name__] = func
62
63
63 @wraps(func)
64 @wraps(func)
64 def _wrapper(*args, **kwargs):
65 def _wrapper(*args, **kwargs):
65 return func(*args, **kwargs)
66 return func(*args, **kwargs)
66 return _wrapper
67 return _wrapper
67
68
68
69
69 # HELPERS
70 # HELPERS
70 def percentage(part: (int, float), whole: (int, float)):
71 def percentage(part: (int, float), whole: (int, float)):
71 whole = float(whole)
72 whole = float(whole)
72 if whole > 0:
73 if whole > 0:
73 return round(100 * float(part) / whole, 1)
74 return round(100 * float(part) / whole, 1)
74 return 0.0
75 return 0.0
75
76
76
77
77 def get_storage_size(storage_path):
78 def get_storage_size(storage_path):
78 sizes = []
79 sizes = []
79 for file_ in os.listdir(storage_path):
80 for file_ in os.listdir(storage_path):
80 storage_file = os.path.join(storage_path, file_)
81 storage_file = os.path.join(storage_path, file_)
81 if os.path.isfile(storage_file):
82 if os.path.isfile(storage_file):
82 try:
83 try:
83 sizes.append(os.path.getsize(storage_file))
84 sizes.append(os.path.getsize(storage_file))
84 except OSError:
85 except OSError:
85 log.exception('Failed to get size of storage file %s', storage_file)
86 log.exception('Failed to get size of storage file %s', storage_file)
86 pass
87 pass
87
88
88 return sum(sizes)
89 return sum(sizes)
89
90
90
91
91 def get_resource(resource_type):
92 def get_resource(resource_type):
92 try:
93 try:
93 return resource.getrlimit(resource_type)
94 return resource.getrlimit(resource_type)
94 except Exception:
95 except Exception:
95 return 'NOT_SUPPORTED'
96 return 'NOT_SUPPORTED'
96
97
97
98
98 def get_cert_path(ini_path):
99 def get_cert_path(ini_path):
99 default = '/etc/ssl/certs/ca-certificates.crt'
100 default = '/etc/ssl/certs/ca-certificates.crt'
100 control_ca_bundle = os.path.join(
101 control_ca_bundle = os.path.join(
101 os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
102 os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
102 '.rccontrol-profile/etc/ca-bundle.crt')
103 '.rccontrol-profile/etc/ca-bundle.crt')
103 if os.path.isfile(control_ca_bundle):
104 if os.path.isfile(control_ca_bundle):
104 default = control_ca_bundle
105 default = control_ca_bundle
105
106
106 return default
107 return default
107
108
108
109
109 class SysInfoRes(object):
110 class SysInfoRes(object):
110 def __init__(self, value, state=None, human_value=None):
111 def __init__(self, value, state=None, human_value=None):
111 self.value = value
112 self.value = value
112 self.state = state or STATE_OK_DEFAULT
113 self.state = state or STATE_OK_DEFAULT
113 self.human_value = human_value or value
114 self.human_value = human_value or value
114
115
115 def __json__(self):
116 def __json__(self):
116 return {
117 return {
117 'value': self.value,
118 'value': self.value,
118 'state': self.state,
119 'state': self.state,
119 'human_value': self.human_value,
120 'human_value': self.human_value,
120 }
121 }
121
122
122 def get_value(self):
123 def get_value(self):
123 return self.__json__()
124 return self.__json__()
124
125
125 def __str__(self):
126 def __str__(self):
126 return f'<SysInfoRes({self.__json__()})>'
127 return f'<SysInfoRes({self.__json__()})>'
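# Shape sketch for the result wrapper above: human_value falls back to value
# and state to STATE_OK_DEFAULT when not supplied.
def _sysinfo_res_shape_sketch():
    res = SysInfoRes(value={'workers': 4})
    # res.get_value() == {'value': {'workers': 4},
    #                     'state': {'message': '', 'type': 'ok'},
    #                     'human_value': {'workers': 4}}
    return res.get_value()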
127
128
128
129
129 class SysInfo(object):
130 class SysInfo(object):
130
131
131 def __init__(self, func_name, **kwargs):
132 def __init__(self, func_name, **kwargs):
132 self.function_name = func_name
133 self.function_name = func_name
133 self.value = _NA
134 self.value = _NA
134 self.state = None
135 self.state = None
135 self.kwargs = kwargs or {}
136 self.kwargs = kwargs or {}
136
137
137 def __call__(self):
138 def __call__(self):
138 computed = self.compute(**self.kwargs)
139 computed = self.compute(**self.kwargs)
139 if not isinstance(computed, SysInfoRes):
140 if not isinstance(computed, SysInfoRes):
140 raise ValueError(
141 raise ValueError(
141 'computed value for {} is not instance of '
142 'computed value for {} is not instance of '
142 '{}, got {} instead'.format(
143 '{}, got {} instead'.format(
143 self.function_name, SysInfoRes, type(computed)))
144 self.function_name, SysInfoRes, type(computed)))
144 return computed.__json__()
145 return computed.__json__()
145
146
146 def __str__(self):
147 def __str__(self):
147 return f'<SysInfo({self.function_name})>'
148 return f'<SysInfo({self.function_name})>'
148
149
149 def compute(self, **kwargs):
150 def compute(self, **kwargs):
150 return self.function_name(**kwargs)
151 return self.function_name(**kwargs)
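# One plausible way the registry is consumed (an assumption, not taken from
# this file): wrap each registered helper in SysInfo and collect the dicts.
def _collect_sysinfo_report_sketch():
    report = {}
    for name, helper in registered_helpers.items():
        try:
            report[name] = SysInfo(helper)()
        except Exception as exc:
            report[name] = {'value': None, 'human_value': None,
                            'state': {'message': str(exc), 'type': STATE_ERR}}
    return report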
151
152
152
153
153 # SysInfo functions
154 # SysInfo functions
154 @register_sysinfo
155 @register_sysinfo
155 def python_info():
156 def python_info():
156 value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}',
157 value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}',
157 executable=sys.executable)
158 executable=sys.executable)
158 return SysInfoRes(value=value)
159 return SysInfoRes(value=value)
159
160
160
161
161 @register_sysinfo
162 @register_sysinfo
162 def py_modules():
163 def py_modules():
163 mods = dict([(p.project_name, {'version': p.version, 'location': p.location})
164 mods = dict([(p.project_name, {'version': p.version, 'location': p.location})
164 for p in pkg_resources.working_set])
165 for p in pkg_resources.working_set])
165
166
166 value = sorted(mods.items(), key=lambda k: k[0].lower())
167 value = sorted(mods.items(), key=lambda k: k[0].lower())
167 return SysInfoRes(value=value)
168 return SysInfoRes(value=value)
168
169
169
170
170 @register_sysinfo
171 @register_sysinfo
171 def platform_type():
172 def platform_type():
172 from rhodecode.lib.utils import generate_platform_uuid
173 from rhodecode.lib.utils import generate_platform_uuid
173
174
174 value = dict(
175 value = dict(
175 name=safe_str(platform.platform()),
176 name=safe_str(platform.platform()),
176 uuid=generate_platform_uuid()
177 uuid=generate_platform_uuid()
177 )
178 )
178 return SysInfoRes(value=value)
179 return SysInfoRes(value=value)
179
180
180
181
181 @register_sysinfo
182 @register_sysinfo
182 def locale_info():
183 def locale_info():
183 import locale
184 import locale
184
185
185 def safe_get_locale(locale_name):
186 def safe_get_locale(locale_name):
186 try:
187 try:
187 return locale.getlocale(locale_name)
188 return locale.getlocale(locale_name)
188 except TypeError:
189 except TypeError:
189 return f'FAILED_LOCALE_GET:{locale_name}'
190 return f'FAILED_LOCALE_GET:{locale_name}'
190
191
191 value = dict(
192 value = dict(
192 locale_default=locale.getlocale(),
193 locale_default=locale.getlocale(),
193 locale_lc_all=safe_get_locale(locale.LC_ALL),
194 locale_lc_all=safe_get_locale(locale.LC_ALL),
194 locale_lc_ctype=safe_get_locale(locale.LC_CTYPE),
195 locale_lc_ctype=safe_get_locale(locale.LC_CTYPE),
195 lang_env=os.environ.get('LANG'),
196 lang_env=os.environ.get('LANG'),
196 lc_all_env=os.environ.get('LC_ALL'),
197 lc_all_env=os.environ.get('LC_ALL'),
197 local_archive_env=os.environ.get('LOCALE_ARCHIVE'),
198 local_archive_env=os.environ.get('LOCALE_ARCHIVE'),
198 )
199 )
199 human_value = \
200 human_value = \
200 f"LANG: {value['lang_env']}, \
201 f"LANG: {value['lang_env']}, \
201 locale LC_ALL: {value['locale_lc_all']}, \
202 locale LC_ALL: {value['locale_lc_all']}, \
202 locale LC_CTYPE: {value['locale_lc_ctype']}, \
203 locale LC_CTYPE: {value['locale_lc_ctype']}, \
203 Default locales: {value['locale_default']}"
204 Default locales: {value['locale_default']}"
204
205
205 return SysInfoRes(value=value, human_value=human_value)
206 return SysInfoRes(value=value, human_value=human_value)
206
207
207
208
208 @register_sysinfo
209 @register_sysinfo
209 def ulimit_info():
210 def ulimit_info():
210 data = collections.OrderedDict([
211 data = collections.OrderedDict([
211 ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)),
212 ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)),
212 ('file size', get_resource(resource.RLIMIT_FSIZE)),
213 ('file size', get_resource(resource.RLIMIT_FSIZE)),
213 ('stack size', get_resource(resource.RLIMIT_STACK)),
214 ('stack size', get_resource(resource.RLIMIT_STACK)),
214 ('core file size', get_resource(resource.RLIMIT_CORE)),
215 ('core file size', get_resource(resource.RLIMIT_CORE)),
215 ('address space size', get_resource(resource.RLIMIT_AS)),
216 ('address space size', get_resource(resource.RLIMIT_AS)),
216 ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)),
217 ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)),
217 ('heap size', get_resource(resource.RLIMIT_DATA)),
218 ('heap size', get_resource(resource.RLIMIT_DATA)),
218 ('rss size', get_resource(resource.RLIMIT_RSS)),
219 ('rss size', get_resource(resource.RLIMIT_RSS)),
219 ('number of processes', get_resource(resource.RLIMIT_NPROC)),
220 ('number of processes', get_resource(resource.RLIMIT_NPROC)),
220 ('open files', get_resource(resource.RLIMIT_NOFILE)),
221 ('open files', get_resource(resource.RLIMIT_NOFILE)),
221 ])
222 ])
222
223
223 text = ', '.join(f'{k}:{v}' for k, v in data.items())
224 text = ', '.join(f'{k}:{v}' for k, v in data.items())
224
225
225 value = {
226 value = {
226 'limits': data,
227 'limits': data,
227 'text': text,
228 'text': text,
228 }
229 }
229 return SysInfoRes(value=value)
230 return SysInfoRes(value=value)
230
231
231
232
232 @register_sysinfo
233 @register_sysinfo
233 def uptime():
234 def uptime():
234 from rhodecode.lib.helpers import age, time_to_datetime
235 from rhodecode.lib.helpers import age, time_to_datetime
235 from rhodecode.translation import TranslationString
236 from rhodecode.translation import TranslationString
236
237
237 value = dict(boot_time=0, uptime=0, text='')
238 value = dict(boot_time=0, uptime=0, text='')
238 state = STATE_OK_DEFAULT
239 state = STATE_OK_DEFAULT
239
240
240 boot_time = psutil.boot_time()
241 boot_time = psutil.boot_time()
241 value['boot_time'] = boot_time
242 value['boot_time'] = boot_time
242 value['uptime'] = time.time() - boot_time
243 value['uptime'] = time.time() - boot_time
243
244
244 date_or_age = age(time_to_datetime(boot_time))
245 date_or_age = age(time_to_datetime(boot_time))
245 if isinstance(date_or_age, TranslationString):
246 if isinstance(date_or_age, TranslationString):
246 date_or_age = date_or_age.interpolate()
247 date_or_age = date_or_age.interpolate()
247
248
248 human_value = value.copy()
249 human_value = value.copy()
249 human_value['boot_time'] = time_to_datetime(boot_time)
250 human_value['boot_time'] = time_to_datetime(boot_time)
250 human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False)
251 human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False)
251
252
252 human_value['text'] = f'Server started {date_or_age}'
253 human_value['text'] = f'Server started {date_or_age}'
253 return SysInfoRes(value=value, human_value=human_value)
254 return SysInfoRes(value=value, human_value=human_value)
254
255
255
256
256 @register_sysinfo
257 @register_sysinfo
257 def memory():
258 def memory():
258 from rhodecode.lib.helpers import format_byte_size_binary
259 from rhodecode.lib.helpers import format_byte_size_binary
259 value = dict(available=0, used=0, used_real=0, cached=0, percent=0,
260 value = dict(available=0, used=0, used_real=0, cached=0, percent=0,
260 percent_used=0, free=0, inactive=0, active=0, shared=0,
261 percent_used=0, free=0, inactive=0, active=0, shared=0,
261 total=0, buffers=0, text='')
262 total=0, buffers=0, text='')
262
263
263 state = STATE_OK_DEFAULT
264 state = STATE_OK_DEFAULT
264
265
265 value.update(dict(psutil.virtual_memory()._asdict()))
266 value.update(dict(psutil.virtual_memory()._asdict()))
266 value['used_real'] = value['total'] - value['available']
267 value['used_real'] = value['total'] - value['available']
267 value['percent_used'] = psutil._common.usage_percent(
268 value['percent_used'] = psutil._common.usage_percent(
268 value['used_real'], value['total'], 1)
269 value['used_real'], value['total'], 1)
269
270
270 human_value = value.copy()
271 human_value = value.copy()
271 human_value['text'] = '{}/{}, {}% used'.format(
272 human_value['text'] = '{}/{}, {}% used'.format(
272 format_byte_size_binary(value['used_real']),
273 format_byte_size_binary(value['used_real']),
273 format_byte_size_binary(value['total']),
274 format_byte_size_binary(value['total']),
274 value['percent_used'])
275 value['percent_used'])
275
276
276 keys = list(value.keys())[::]
277 keys = list(value.keys())[::]
277 keys.pop(keys.index('percent'))
278 keys.pop(keys.index('percent'))
278 keys.pop(keys.index('percent_used'))
279 keys.pop(keys.index('percent_used'))
279 keys.pop(keys.index('text'))
280 keys.pop(keys.index('text'))
280 for k in keys:
281 for k in keys:
281 human_value[k] = format_byte_size_binary(value[k])
282 human_value[k] = format_byte_size_binary(value[k])
282
283
283 if state['type'] == STATE_OK and value['percent_used'] > 90:
284 if state['type'] == STATE_OK and value['percent_used'] > 90:
284 msg = 'Critical: your available RAM is very low.'
285 msg = 'Critical: your available RAM is very low.'
285 state = {'message': msg, 'type': STATE_ERR}
286 state = {'message': msg, 'type': STATE_ERR}
286
287
287 elif state['type'] == STATE_OK and value['percent_used'] > 70:
288 elif state['type'] == STATE_OK and value['percent_used'] > 70:
288 msg = 'Warning: your available RAM is running low.'
289 msg = 'Warning: your available RAM is running low.'
289 state = {'message': msg, 'type': STATE_WARN}
290 state = {'message': msg, 'type': STATE_WARN}
290
291
291 return SysInfoRes(value=value, state=state, human_value=human_value)
292 return SysInfoRes(value=value, state=state, human_value=human_value)
292
293
293
294
294 @register_sysinfo
295 @register_sysinfo
295 def machine_load():
296 def machine_load():
296 value = {'1_min': _NA_FLOAT, '5_min': _NA_FLOAT, '15_min': _NA_FLOAT, 'text': ''}
297 value = {'1_min': _NA_FLOAT, '5_min': _NA_FLOAT, '15_min': _NA_FLOAT, 'text': ''}
297 state = STATE_OK_DEFAULT
298 state = STATE_OK_DEFAULT
298
299
299 # load averages
300 # load averages
300 if hasattr(psutil.os, 'getloadavg'):
301 if hasattr(psutil.os, 'getloadavg'):
301 value.update(dict(
302 value.update(dict(
302 list(zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
303 list(zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
303 ))
304 ))
304
305
305 human_value = value.copy()
306 human_value = value.copy()
306 human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format(
307 human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format(
307 value['1_min'], value['5_min'], value['15_min'])
308 value['1_min'], value['5_min'], value['15_min'])
308
309
309 if state['type'] == STATE_OK and value['15_min'] > 5.0:
310 if state['type'] == STATE_OK and value['15_min'] > 5.0:
310 msg = 'Warning: your machine load is very high.'
311 msg = 'Warning: your machine load is very high.'
311 state = {'message': msg, 'type': STATE_WARN}
312 state = {'message': msg, 'type': STATE_WARN}
312
313
313 return SysInfoRes(value=value, state=state, human_value=human_value)
314 return SysInfoRes(value=value, state=state, human_value=human_value)
314
315
315
316
316 @register_sysinfo
317 @register_sysinfo
317 def cpu():
318 def cpu():
318 value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []}
319 value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []}
319 state = STATE_OK_DEFAULT
320 state = STATE_OK_DEFAULT
320
321
321 value['cpu'] = psutil.cpu_percent(0.5)
322 value['cpu'] = psutil.cpu_percent(0.5)
322 value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True)
323 value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True)
323 value['cpu_count'] = psutil.cpu_count()
324 value['cpu_count'] = psutil.cpu_count()
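# Note: psutil.cpu_percent() blocks for the given interval, so the two 0.5s
# sampling calls above add roughly one second to every sysinfo request.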
324
325
325 human_value = value.copy()
326 human_value = value.copy()
326 human_value['text'] = '{} cores at {} %'.format(
327 human_value['text'] = '{} cores at {} %'.format(
327 value['cpu_count'], value['cpu'])
328 value['cpu_count'], value['cpu'])
328
329
329 return SysInfoRes(value=value, state=state, human_value=human_value)
330 return SysInfoRes(value=value, state=state, human_value=human_value)
330
331
331
332
332 @register_sysinfo
333 @register_sysinfo
333 def storage():
334 def storage():
334 from rhodecode.lib.helpers import format_byte_size_binary
335 from rhodecode.lib.helpers import format_byte_size_binary
335 from rhodecode.model.settings import VcsSettingsModel
336 from rhodecode.model.settings import VcsSettingsModel
336 path = VcsSettingsModel().get_repos_location()
337 path = VcsSettingsModel().get_repos_location()
337
338
338 value = dict(percent=0, used=0, total=0, path=path, text='')
339 value = dict(percent=0, used=0, total=0, path=path, text='')
339 state = STATE_OK_DEFAULT
340 state = STATE_OK_DEFAULT
340
341
341 try:
342 try:
342 value.update(dict(psutil.disk_usage(path)._asdict()))
343 value.update(dict(psutil.disk_usage(path)._asdict()))
343 except Exception as e:
344 except Exception as e:
344 log.exception('Failed to fetch disk info')
345 log.exception('Failed to fetch disk info')
345 state = {'message': str(e), 'type': STATE_ERR}
346 state = {'message': str(e), 'type': STATE_ERR}
346
347
347 human_value = value.copy()
348 human_value = value.copy()
348 human_value['used'] = format_byte_size_binary(value['used'])
349 human_value['used'] = format_byte_size_binary(value['used'])
349 human_value['total'] = format_byte_size_binary(value['total'])
350 human_value['total'] = format_byte_size_binary(value['total'])
350 human_value['text'] = "{}/{}, {}% used".format(
351 human_value['text'] = "{}/{}, {}% used".format(
351 format_byte_size_binary(value['used']),
352 format_byte_size_binary(value['used']),
352 format_byte_size_binary(value['total']),
353 format_byte_size_binary(value['total']),
353 value['percent'])
354 value['percent'])
354
355
355 if state['type'] == STATE_OK and value['percent'] > 90:
356 if state['type'] == STATE_OK and value['percent'] > 90:
356 msg = 'Critical: your disk space is very low.'
357 msg = 'Critical: your disk space is very low.'
357 state = {'message': msg, 'type': STATE_ERR}
358 state = {'message': msg, 'type': STATE_ERR}
358
359
359 elif state['type'] == STATE_OK and value['percent'] > 70:
360 elif state['type'] == STATE_OK and value['percent'] > 70:
360 msg = 'Warning: your disk space is running low.'
361 msg = 'Warning: your disk space is running low.'
361 state = {'message': msg, 'type': STATE_WARN}
362 state = {'message': msg, 'type': STATE_WARN}
362
363
363 return SysInfoRes(value=value, state=state, human_value=human_value)
364 return SysInfoRes(value=value, state=state, human_value=human_value)
364
365
365
366
366 @register_sysinfo
367 @register_sysinfo
367 def storage_inodes():
368 def storage_inodes():
368 from rhodecode.model.settings import VcsSettingsModel
369 from rhodecode.model.settings import VcsSettingsModel
369 path = VcsSettingsModel().get_repos_location()
370 path = VcsSettingsModel().get_repos_location()
370
371
371 value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='')
372 value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='')
372 state = STATE_OK_DEFAULT
373 state = STATE_OK_DEFAULT
373
374
374 try:
375 try:
375 i_stat = os.statvfs(path)
376 i_stat = os.statvfs(path)
376 value['free'] = i_stat.f_ffree
377 value['free'] = i_stat.f_ffree
377 value['used'] = i_stat.f_files-i_stat.f_favail
378 value['used'] = i_stat.f_files-i_stat.f_favail
378 value['total'] = i_stat.f_files
379 value['total'] = i_stat.f_files
379 value['percent'] = percentage(value['used'], value['total'])
380 value['percent'] = percentage(value['used'], value['total'])
380 except Exception as e:
381 except Exception as e:
381 log.exception('Failed to fetch disk inodes info')
382 log.exception('Failed to fetch disk inodes info')
382 state = {'message': str(e), 'type': STATE_ERR}
383 state = {'message': str(e), 'type': STATE_ERR}
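# statvfs semantics: f_files is the total inode count, f_ffree the free count
# (including the root-reserved share), f_favail the share available to
# unprivileged users; 'used' above is therefore total minus f_favail rather
# than f_files - f_ffree.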
383
384
384 human_value = value.copy()
385 human_value = value.copy()
385 human_value['text'] = "{}/{}, {}% used".format(
386 human_value['text'] = "{}/{}, {}% used".format(
386 value['used'], value['total'], value['percent'])
387 value['used'], value['total'], value['percent'])
387
388
388 if state['type'] == STATE_OK and value['percent'] > 90:
389 if state['type'] == STATE_OK and value['percent'] > 90:
389 msg = 'Critical: your disk free inodes are very low.'
390 msg = 'Critical: your disk free inodes are very low.'
390 state = {'message': msg, 'type': STATE_ERR}
391 state = {'message': msg, 'type': STATE_ERR}
391
392
392 elif state['type'] == STATE_OK and value['percent'] > 70:
393 elif state['type'] == STATE_OK and value['percent'] > 70:
393 msg = 'Warning: your disk free inodes are running low.'
394 msg = 'Warning: your disk free inodes are running low.'
394 state = {'message': msg, 'type': STATE_WARN}
395 state = {'message': msg, 'type': STATE_WARN}
395
396
396 return SysInfoRes(value=value, state=state, human_value=human_value)
397 return SysInfoRes(value=value, state=state, human_value=human_value)
397
398
398
399
399 @register_sysinfo
400 @register_sysinfo
400 def storage_archives():
401 def storage_archives():
401 import rhodecode
402 import rhodecode
402 from rhodecode.lib.utils import safe_str
403 from rhodecode.lib.utils import safe_str
403 from rhodecode.lib.helpers import format_byte_size_binary
404 from rhodecode.lib.helpers import format_byte_size_binary
404
405
405 msg = 'Archive cache storage is controlled by ' \
406 msg = 'Archive cache storage is controlled by ' \
406 'archive_cache.store_dir=/path/to/cache option in the .ini file'
407 'archive_cache.store_dir=/path/to/cache option in the .ini file'
407 path = safe_str(rhodecode.CONFIG.get('archive_cache.store_dir', msg))
408 path = safe_str(rhodecode.CONFIG.get('archive_cache.store_dir', msg))
408
409
409 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
410 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
410 state = STATE_OK_DEFAULT
411 state = STATE_OK_DEFAULT
411 try:
412 try:
412 items_count = 0
413 items_count = 0
413 used = 0
414 used = 0
414 for root, dirs, files in os.walk(path):
415 for root, dirs, files in os.walk(path):
415 if root == path:
416 if root == path:
416 items_count = len(dirs)
417 items_count = len(dirs)
417
418
418 for f in files:
419 for f in files:
419 try:
420 try:
420 used += os.path.getsize(os.path.join(root, f))
421 used += os.path.getsize(os.path.join(root, f))
421 except OSError:
422 except OSError:
422 pass
423 pass
423 value.update({
424 value.update({
424 'percent': 100,
425 'percent': 100,
425 'used': used,
426 'used': used,
426 'total': used,
427 'total': used,
427 'items': items_count
428 'items': items_count
428 })
429 })
429
430
430 except Exception as e:
431 except Exception as e:
431 log.exception('failed to fetch archive cache storage')
432 log.exception('failed to fetch archive cache storage')
432 state = {'message': str(e), 'type': STATE_ERR}
433 state = {'message': str(e), 'type': STATE_ERR}
433
434
434 human_value = value.copy()
435 human_value = value.copy()
435 human_value['used'] = format_byte_size_binary(value['used'])
436 human_value['used'] = format_byte_size_binary(value['used'])
436 human_value['total'] = format_byte_size_binary(value['total'])
437 human_value['total'] = format_byte_size_binary(value['total'])
437 human_value['text'] = "{} ({} items)".format(
438 human_value['text'] = "{} ({} items)".format(
438 human_value['used'], value['items'])
439 human_value['used'], value['items'])
439
440
440 return SysInfoRes(value=value, state=state, human_value=human_value)
441 return SysInfoRes(value=value, state=state, human_value=human_value)
441
442
442
443
443 @register_sysinfo
444 @register_sysinfo
444 def storage_gist():
445 def storage_gist():
445 from rhodecode.model.gist import GIST_STORE_LOC
446 from rhodecode.model.gist import GIST_STORE_LOC
446 from rhodecode.model.settings import VcsSettingsModel
447 from rhodecode.model.settings import VcsSettingsModel
447 from rhodecode.lib.utils import safe_str
448 from rhodecode.lib.utils import safe_str
448 from rhodecode.lib.helpers import format_byte_size_binary
449 from rhodecode.lib.helpers import format_byte_size_binary
449 path = safe_str(os.path.join(
450 path = safe_str(os.path.join(
450 VcsSettingsModel().get_repos_location(), GIST_STORE_LOC))
451 VcsSettingsModel().get_repos_location(), GIST_STORE_LOC))
451
452
452 # gist storage
453 # gist storage
453 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
454 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
454 state = STATE_OK_DEFAULT
455 state = STATE_OK_DEFAULT
455
456
456 try:
457 try:
457 items_count = 0
458 items_count = 0
458 used = 0
459 used = 0
459 for root, dirs, files in os.walk(path):
460 for root, dirs, files in os.walk(path):
460 if root == path:
461 if root == path:
461 items_count = len(dirs)
462 items_count = len(dirs)
462
463
463 for f in files:
464 for f in files:
464 try:
465 try:
465 used += os.path.getsize(os.path.join(root, f))
466 used += os.path.getsize(os.path.join(root, f))
466 except OSError:
467 except OSError:
467 pass
468 pass
468 value.update({
469 value.update({
469 'percent': 100,
470 'percent': 100,
470 'used': used,
471 'used': used,
471 'total': used,
472 'total': used,
472 'items': items_count
473 'items': items_count
473 })
474 })
474 except Exception as e:
475 except Exception as e:
475 log.exception('failed to fetch gist storage items')
476 log.exception('failed to fetch gist storage items')
476 state = {'message': str(e), 'type': STATE_ERR}
477 state = {'message': str(e), 'type': STATE_ERR}
477
478
478 human_value = value.copy()
479 human_value = value.copy()
479 human_value['used'] = format_byte_size_binary(value['used'])
480 human_value['used'] = format_byte_size_binary(value['used'])
480 human_value['total'] = format_byte_size_binary(value['total'])
481 human_value['total'] = format_byte_size_binary(value['total'])
481 human_value['text'] = "{} ({} items)".format(
482 human_value['text'] = "{} ({} items)".format(
482 human_value['used'], value['items'])
483 human_value['used'], value['items'])
483
484
484 return SysInfoRes(value=value, state=state, human_value=human_value)
485 return SysInfoRes(value=value, state=state, human_value=human_value)
485
486
486
487
487 @register_sysinfo
488 @register_sysinfo
488 def storage_temp():
489 def storage_temp():
489 import tempfile
490 import tempfile
490 from rhodecode.lib.helpers import format_byte_size_binary
491 from rhodecode.lib.helpers import format_byte_size_binary
491
492
492 path = tempfile.gettempdir()
493 path = tempfile.gettempdir()
493 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
494 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
494 state = STATE_OK_DEFAULT
495 state = STATE_OK_DEFAULT
495
496
496 if not psutil:
497 if not psutil:
497 return SysInfoRes(value=value, state=state)
498 return SysInfoRes(value=value, state=state)
498
499
499 try:
500 try:
500 value.update(dict(psutil.disk_usage(path)._asdict()))
501 value.update(dict(psutil.disk_usage(path)._asdict()))
501 except Exception as e:
502 except Exception as e:
502 log.exception('Failed to fetch temp dir info')
503 log.exception('Failed to fetch temp dir info')
503 state = {'message': str(e), 'type': STATE_ERR}
504 state = {'message': str(e), 'type': STATE_ERR}
504
505
505 human_value = value.copy()
506 human_value = value.copy()
506 human_value['used'] = format_byte_size_binary(value['used'])
507 human_value['used'] = format_byte_size_binary(value['used'])
507 human_value['total'] = format_byte_size_binary(value['total'])
508 human_value['total'] = format_byte_size_binary(value['total'])
508 human_value['text'] = "{}/{}, {}% used".format(
509 human_value['text'] = "{}/{}, {}% used".format(
509 format_byte_size_binary(value['used']),
510 format_byte_size_binary(value['used']),
510 format_byte_size_binary(value['total']),
511 format_byte_size_binary(value['total']),
511 value['percent'])
512 value['percent'])
512
513
513 return SysInfoRes(value=value, state=state, human_value=human_value)
514 return SysInfoRes(value=value, state=state, human_value=human_value)
514
515
515
516
516 @register_sysinfo
517 @register_sysinfo
517 def search_info():
518 def search_info():
518 import rhodecode
519 import rhodecode
519 from rhodecode.lib.index import searcher_from_config
520 from rhodecode.lib.index import searcher_from_config
520
521
521 backend = rhodecode.CONFIG.get('search.module', '')
522 backend = rhodecode.CONFIG.get('search.module', '')
522 location = rhodecode.CONFIG.get('search.location', '')
523 location = rhodecode.CONFIG.get('search.location', '')
523
524
524 try:
525 try:
525 searcher = searcher_from_config(rhodecode.CONFIG)
526 searcher = searcher_from_config(rhodecode.CONFIG)
526 searcher = searcher.__class__.__name__
527 searcher = searcher.__class__.__name__
527 except Exception:
528 except Exception:
528 searcher = None
529 searcher = None
529
530
530 value = dict(
531 value = dict(
531 backend=backend, searcher=searcher, location=location, text='')
532 backend=backend, searcher=searcher, location=location, text='')
532 state = STATE_OK_DEFAULT
533 state = STATE_OK_DEFAULT
533
534
534 human_value = value.copy()
535 human_value = value.copy()
535 human_value['text'] = "backend:`{}`".format(human_value['backend'])
536 human_value['text'] = "backend:`{}`".format(human_value['backend'])
536
537
537 return SysInfoRes(value=value, state=state, human_value=human_value)
538 return SysInfoRes(value=value, state=state, human_value=human_value)
538
539
539
540
540 @register_sysinfo
541 @register_sysinfo
541 def git_info():
542 def git_info():
542 from rhodecode.lib.vcs.backends import git
543 from rhodecode.lib.vcs.backends import git
543 state = STATE_OK_DEFAULT
544 state = STATE_OK_DEFAULT
544 value = human_value = ''
545 value = human_value = ''
545 try:
546 try:
546 value = git.discover_git_version(raise_on_exc=True)
547 value = git.discover_git_version(raise_on_exc=True)
547 human_value = f'version reported from VCSServer: {value}'
548 human_value = f'version reported from VCSServer: {value}'
548 except Exception as e:
549 except Exception as e:
549 state = {'message': str(e), 'type': STATE_ERR}
550 state = {'message': str(e), 'type': STATE_ERR}
550
551
551 return SysInfoRes(value=value, state=state, human_value=human_value)
552 return SysInfoRes(value=value, state=state, human_value=human_value)
552
553
553
554
554 @register_sysinfo
555 @register_sysinfo
555 def hg_info():
556 def hg_info():
556 from rhodecode.lib.vcs.backends import hg
557 from rhodecode.lib.vcs.backends import hg
557 state = STATE_OK_DEFAULT
558 state = STATE_OK_DEFAULT
558 value = human_value = ''
559 value = human_value = ''
559 try:
560 try:
560 value = hg.discover_hg_version(raise_on_exc=True)
561 value = hg.discover_hg_version(raise_on_exc=True)
561 human_value = f'version reported from VCSServer: {value}'
562 human_value = f'version reported from VCSServer: {value}'
562 except Exception as e:
563 except Exception as e:
563 state = {'message': str(e), 'type': STATE_ERR}
564 state = {'message': str(e), 'type': STATE_ERR}
564 return SysInfoRes(value=value, state=state, human_value=human_value)
565 return SysInfoRes(value=value, state=state, human_value=human_value)
565
566
566
567
567 @register_sysinfo
568 @register_sysinfo
568 def svn_info():
569 def svn_info():
569 from rhodecode.lib.vcs.backends import svn
570 from rhodecode.lib.vcs.backends import svn
570 state = STATE_OK_DEFAULT
571 state = STATE_OK_DEFAULT
571 value = human_value = ''
572 value = human_value = ''
572 try:
573 try:
573 value = svn.discover_svn_version(raise_on_exc=True)
574 value = svn.discover_svn_version(raise_on_exc=True)
574 human_value = f'version reported from VCSServer: {value}'
575 human_value = f'version reported from VCSServer: {value}'
575 except Exception as e:
576 except Exception as e:
576 state = {'message': str(e), 'type': STATE_ERR}
577 state = {'message': str(e), 'type': STATE_ERR}
577 return SysInfoRes(value=value, state=state, human_value=human_value)
578 return SysInfoRes(value=value, state=state, human_value=human_value)
578
579
579
580
580 @register_sysinfo
581 @register_sysinfo
581 def vcs_backends():
582 def vcs_backends():
582 import rhodecode
583 import rhodecode
583 value = rhodecode.CONFIG.get('vcs.backends')
584 value = rhodecode.CONFIG.get('vcs.backends')
584 human_value = 'Enabled backends in order: {}'.format(','.join(value))
585 human_value = 'Enabled backends in order: {}'.format(','.join(value))
585 return SysInfoRes(value=value, human_value=human_value)
586 return SysInfoRes(value=value, human_value=human_value)
586
587
587
588
588 @register_sysinfo
589 @register_sysinfo
589 def vcs_server():
590 def vcs_server():
590 import rhodecode
591 import rhodecode
591 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
592 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
592
593
593 server_url = rhodecode.CONFIG.get('vcs.server')
594 server_url = rhodecode.CONFIG.get('vcs.server')
594 enabled = rhodecode.CONFIG.get('vcs.server.enable')
595 enabled = rhodecode.CONFIG.get('vcs.server.enable')
595 protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http'
596 protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http'
596 state = STATE_OK_DEFAULT
597 state = STATE_OK_DEFAULT
597 version = None
598 version = None
598 workers = 0
599 workers = 0
599
600
600 try:
601 try:
601 data = get_vcsserver_service_data()
602 data = get_vcsserver_service_data()
602 if data and 'version' in data:
603 if data and 'version' in data:
603 version = data['version']
604 version = data['version']
604
605
605 if data and 'config' in data:
606 if data and 'config' in data:
606 conf = data['config']
607 conf = data['config']
607 workers = conf.get('workers', 'NOT AVAILABLE')
608 workers = conf.get('workers', 'NOT AVAILABLE')
608
609
609 connection = 'connected'
610 connection = 'connected'
610 except Exception as e:
611 except Exception as e:
611 connection = 'failed'
612 connection = 'failed'
612 state = {'message': str(e), 'type': STATE_ERR}
613 state = {'message': str(e), 'type': STATE_ERR}
613
614
614 value = dict(
615 value = dict(
615 url=server_url,
616 url=server_url,
616 enabled=enabled,
617 enabled=enabled,
617 protocol=protocol,
618 protocol=protocol,
618 connection=connection,
619 connection=connection,
619 version=version,
620 version=version,
620 text='',
621 text='',
621 )
622 )
622
623
623 human_value = value.copy()
624 human_value = value.copy()
624 human_value['text'] = \
625 human_value['text'] = \
625 '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format(
626 '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format(
626 url=server_url, ver=version, workers=workers, mode=protocol,
627 url=server_url, ver=version, workers=workers, mode=protocol,
627 conn=connection)
628 conn=connection)
628
629
629 return SysInfoRes(value=value, state=state, human_value=human_value)
630 return SysInfoRes(value=value, state=state, human_value=human_value)
630
631
631
632
632 @register_sysinfo
633 @register_sysinfo
633 def vcs_server_config():
634 def vcs_server_config():
634 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
635 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
635 state = STATE_OK_DEFAULT
636 state = STATE_OK_DEFAULT
636
637
637 value = {}
638 value = {}
638 try:
639 try:
639 data = get_vcsserver_service_data()
640 data = get_vcsserver_service_data()
640 value = data['app_config']
641 value = data['app_config']
641 except Exception as e:
642 except Exception as e:
642 state = {'message': str(e), 'type': STATE_ERR}
643 state = {'message': str(e), 'type': STATE_ERR}
643
644
644 human_value = value.copy()
645 human_value = value.copy()
645 human_value['text'] = 'VCS Server config'
646 human_value['text'] = 'VCS Server config'
646
647
647 return SysInfoRes(value=value, state=state, human_value=human_value)
648 return SysInfoRes(value=value, state=state, human_value=human_value)
648
649
649
650
650 @register_sysinfo
651 @register_sysinfo
651 def rhodecode_app_info():
652 def rhodecode_app_info():
652 import rhodecode
653 import rhodecode
653 edition = rhodecode.CONFIG.get('rhodecode.edition')
654 edition = rhodecode.CONFIG.get('rhodecode.edition')
654
655
655 value = dict(
656 value = dict(
656 rhodecode_version=rhodecode.__version__,
657 rhodecode_version=rhodecode.__version__,
657 rhodecode_lib_path=os.path.abspath(rhodecode.__file__),
658 rhodecode_lib_path=os.path.abspath(rhodecode.__file__),
658 text=''
659 text=''
659 )
660 )
660 human_value = value.copy()
661 human_value = value.copy()
661 human_value['text'] = 'RhodeCode {edition}, version {ver}'.format(
662 human_value['text'] = 'RhodeCode {edition}, version {ver}'.format(
662 edition=edition, ver=value['rhodecode_version']
663 edition=edition, ver=value['rhodecode_version']
663 )
664 )
664 return SysInfoRes(value=value, human_value=human_value)
665 return SysInfoRes(value=value, human_value=human_value)
665
666
666
667
667 @register_sysinfo
668 @register_sysinfo
668 def rhodecode_config():
669 def rhodecode_config():
669 import rhodecode
670 import rhodecode
670 path = rhodecode.CONFIG.get('__file__')
671 path = rhodecode.CONFIG.get('__file__')
671 rhodecode_ini_safe = rhodecode.CONFIG.copy()
672 rhodecode_ini_safe = rhodecode.CONFIG.copy()
672 cert_path = get_cert_path(path)
673 cert_path = get_cert_path(path)
673
674
674 try:
675 try:
675 config = configparser.ConfigParser()
676 config = configparser.ConfigParser()
676 config.read(path)
677 config.read(path)
677 parsed_ini = config
678 parsed_ini = config
678 if parsed_ini.has_section('server:main'):
679 if parsed_ini.has_section('server:main'):
679 parsed_ini = dict(parsed_ini.items('server:main'))
680 parsed_ini = dict(parsed_ini.items('server:main'))
680 except Exception:
681 except Exception:
681 log.exception('Failed to read .ini file for display')
682 log.exception('Failed to read .ini file for display')
682 parsed_ini = {}
683 parsed_ini = {}
683
684
684 rhodecode_ini_safe['server:main'] = parsed_ini
685 rhodecode_ini_safe['server:main'] = parsed_ini
685
686
686 blacklist = [
687 blacklist = [
687 'rhodecode_license_key',
688 f'rhodecode_{LicenseModel.LICENSE_DB_KEY}',
688 'routes.map',
689 'routes.map',
689 'sqlalchemy.db1.url',
690 'sqlalchemy.db1.url',
690 'channelstream.secret',
691 'channelstream.secret',
691 'beaker.session.secret',
692 'beaker.session.secret',
692 'rhodecode.encrypted_values.secret',
693 'rhodecode.encrypted_values.secret',
693 'rhodecode_auth_github_consumer_key',
694 'rhodecode_auth_github_consumer_key',
694 'rhodecode_auth_github_consumer_secret',
695 'rhodecode_auth_github_consumer_secret',
695 'rhodecode_auth_google_consumer_key',
696 'rhodecode_auth_google_consumer_key',
696 'rhodecode_auth_google_consumer_secret',
697 'rhodecode_auth_google_consumer_secret',
697 'rhodecode_auth_bitbucket_consumer_secret',
698 'rhodecode_auth_bitbucket_consumer_secret',
698 'rhodecode_auth_bitbucket_consumer_key',
699 'rhodecode_auth_bitbucket_consumer_key',
699 'rhodecode_auth_twitter_consumer_secret',
700 'rhodecode_auth_twitter_consumer_secret',
700 'rhodecode_auth_twitter_consumer_key',
701 'rhodecode_auth_twitter_consumer_key',
701
702
702 'rhodecode_auth_twitter_secret',
703 'rhodecode_auth_twitter_secret',
703 'rhodecode_auth_github_secret',
704 'rhodecode_auth_github_secret',
704 'rhodecode_auth_google_secret',
705 'rhodecode_auth_google_secret',
705 'rhodecode_auth_bitbucket_secret',
706 'rhodecode_auth_bitbucket_secret',
706
707
707 'appenlight.api_key',
708 'appenlight.api_key',
708 ('app_conf', 'sqlalchemy.db1.url')
709 ('app_conf', 'sqlalchemy.db1.url')
709 ]
710 ]
710 for k in blacklist:
711 for k in blacklist:
711 if isinstance(k, tuple):
712 if isinstance(k, tuple):
712 section, key = k
713 section, key = k
713 if section in rhodecode_ini_safe:
714 if section in rhodecode_ini_safe:
714 rhodecode_ini_safe[section] = '**OBFUSCATED**'
715 rhodecode_ini_safe[section] = '**OBFUSCATED**'
715 else:
716 else:
716 rhodecode_ini_safe.pop(k, None)
717 rhodecode_ini_safe.pop(k, None)
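# Note the asymmetry above: plain string keys are dropped from the output
# entirely, while a (section, key) tuple replaces the whole section with
# '**OBFUSCATED**'; for example ('app_conf', 'sqlalchemy.db1.url') masks all
# of 'app_conf', not just the database URL.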
717
718
718 # TODO: maybe put some CONFIG checks here ?
719 # TODO: maybe put some CONFIG checks here ?
719 return SysInfoRes(value={'config': rhodecode_ini_safe,
720 return SysInfoRes(value={'config': rhodecode_ini_safe,
720 'path': path, 'cert_path': cert_path})
721 'path': path, 'cert_path': cert_path})
721
722
722
723
723 @register_sysinfo
724 @register_sysinfo
724 def database_info():
725 def database_info():
725 import rhodecode
726 import rhodecode
726 from sqlalchemy.engine import url as engine_url
727 from sqlalchemy.engine import url as engine_url
727 from rhodecode.model import meta
728 from rhodecode.model import meta
728 from rhodecode.model.meta import Session
729 from rhodecode.model.meta import Session
729 from rhodecode.model.db import DbMigrateVersion
730 from rhodecode.model.db import DbMigrateVersion
730
731
731 state = STATE_OK_DEFAULT
732 state = STATE_OK_DEFAULT
732
733
733 db_migrate = DbMigrateVersion.query().filter(
734 db_migrate = DbMigrateVersion.query().filter(
734 DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
735 DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
735
736
736 db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
737 db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
737
738
738 try:
739 try:
739 engine = meta.get_engine()
740 engine = meta.get_engine()
740 db_server_info = engine.dialect._get_server_version_info(
741 db_server_info = engine.dialect._get_server_version_info(
741 Session.connection(bind=engine))
742 Session.connection(bind=engine))
742 db_version = '.'.join(map(str, db_server_info))
743 db_version = '.'.join(map(str, db_server_info))
743 except Exception:
744 except Exception:
744 log.exception('failed to fetch db version')
745 log.exception('failed to fetch db version')
745 db_version = 'UNKNOWN'
746 db_version = 'UNKNOWN'
746
747
747 db_info = dict(
748 db_info = dict(
748 migrate_version=db_migrate.version,
749 migrate_version=db_migrate.version,
749 type=db_url_obj.get_backend_name(),
750 type=db_url_obj.get_backend_name(),
750 version=db_version,
751 version=db_version,
751 url=repr(db_url_obj)
752 url=repr(db_url_obj)
752 )
753 )
753 current_version = db_migrate.version
754 current_version = db_migrate.version
754 expected_version = rhodecode.__dbversion__
755 expected_version = rhodecode.__dbversion__
755 if state['type'] == STATE_OK and current_version != expected_version:
756 if state['type'] == STATE_OK and current_version != expected_version:
756 msg = 'Critical: database schema mismatch, ' \
757 msg = 'Critical: database schema mismatch, ' \
757 'expected version {}, got {}. ' \
758 'expected version {}, got {}. ' \
758 'Please run migrations on your database.'.format(
759 'Please run migrations on your database.'.format(
759 expected_version, current_version)
760 expected_version, current_version)
760 state = {'message': msg, 'type': STATE_ERR}
761 state = {'message': msg, 'type': STATE_ERR}
761
762
762 human_value = db_info.copy()
763 human_value = db_info.copy()
763 human_value['url'] = "{} @ migration version: {}".format(
764 human_value['url'] = "{} @ migration version: {}".format(
764 db_info['url'], db_info['migrate_version'])
765 db_info['url'], db_info['migrate_version'])
765 human_value['version'] = "{} {}".format(db_info['type'], db_info['version'])
766 human_value['version'] = "{} {}".format(db_info['type'], db_info['version'])
766 return SysInfoRes(value=db_info, state=state, human_value=human_value)
767 return SysInfoRes(value=db_info, state=state, human_value=human_value)
767
768
768
769
769 @register_sysinfo
770 @register_sysinfo
770 def server_info(environ):
771 def server_info(environ):
771 import rhodecode
772 import rhodecode
772 from rhodecode.lib.base import get_server_ip_addr, get_server_port
773 from rhodecode.lib.base import get_server_ip_addr, get_server_port
773
774
774 value = {
775 value = {
775 'server_ip': '{}:{}'.format(
776 'server_ip': '{}:{}'.format(
776 get_server_ip_addr(environ, log_errors=False),
777 get_server_ip_addr(environ, log_errors=False),
777 get_server_port(environ)
778 get_server_port(environ)
778 ),
779 ),
779 'server_id': rhodecode.CONFIG.get('instance_id'),
780 'server_id': rhodecode.CONFIG.get('instance_id'),
780 }
781 }
781 return SysInfoRes(value=value)
782 return SysInfoRes(value=value)
782
783
783
784
784 @register_sysinfo
785 @register_sysinfo
785 def usage_info():
786 def usage_info():
786 from rhodecode.model.db import User, Repository
787 from rhodecode.model.db import User, Repository
787 value = {
788 value = {
788 'users': User.query().count(),
789 'users': User.query().count(),
789 'users_active': User.query().filter(User.active == True).count(),
790 'users_active': User.query().filter(User.active == True).count(),
790 'repositories': Repository.query().count(),
791 'repositories': Repository.query().count(),
791 'repository_types': {
792 'repository_types': {
792 'hg': Repository.query().filter(
793 'hg': Repository.query().filter(
793 Repository.repo_type == 'hg').count(),
794 Repository.repo_type == 'hg').count(),
794 'git': Repository.query().filter(
795 'git': Repository.query().filter(
795 Repository.repo_type == 'git').count(),
796 Repository.repo_type == 'git').count(),
796 'svn': Repository.query().filter(
797 'svn': Repository.query().filter(
797 Repository.repo_type == 'svn').count(),
798 Repository.repo_type == 'svn').count(),
798 },
799 },
799 }
800 }
800 return SysInfoRes(value=value)
801 return SysInfoRes(value=value)
801
802
802
803
803 def get_system_info(environ):
804 def get_system_info(environ):
804 environ = environ or {}
805 environ = environ or {}
805 return {
806 return {
806 'rhodecode_app': SysInfo(rhodecode_app_info)(),
807 'rhodecode_app': SysInfo(rhodecode_app_info)(),
807 'rhodecode_config': SysInfo(rhodecode_config)(),
808 'rhodecode_config': SysInfo(rhodecode_config)(),
808 'rhodecode_usage': SysInfo(usage_info)(),
809 'rhodecode_usage': SysInfo(usage_info)(),
809 'python': SysInfo(python_info)(),
810 'python': SysInfo(python_info)(),
810 'py_modules': SysInfo(py_modules)(),
811 'py_modules': SysInfo(py_modules)(),
811
812
812 'platform': SysInfo(platform_type)(),
813 'platform': SysInfo(platform_type)(),
813 'locale': SysInfo(locale_info)(),
814 'locale': SysInfo(locale_info)(),
814 'server': SysInfo(server_info, environ=environ)(),
815 'server': SysInfo(server_info, environ=environ)(),
815 'database': SysInfo(database_info)(),
816 'database': SysInfo(database_info)(),
816 'ulimit': SysInfo(ulimit_info)(),
817 'ulimit': SysInfo(ulimit_info)(),
817 'storage': SysInfo(storage)(),
818 'storage': SysInfo(storage)(),
818 'storage_inodes': SysInfo(storage_inodes)(),
819 'storage_inodes': SysInfo(storage_inodes)(),
819 'storage_archive': SysInfo(storage_archives)(),
820 'storage_archive': SysInfo(storage_archives)(),
820 'storage_gist': SysInfo(storage_gist)(),
821 'storage_gist': SysInfo(storage_gist)(),
821 'storage_temp': SysInfo(storage_temp)(),
822 'storage_temp': SysInfo(storage_temp)(),
822
823
823 'search': SysInfo(search_info)(),
824 'search': SysInfo(search_info)(),
824
825
825 'uptime': SysInfo(uptime)(),
826 'uptime': SysInfo(uptime)(),
826 'load': SysInfo(machine_load)(),
827 'load': SysInfo(machine_load)(),
827 'cpu': SysInfo(cpu)(),
828 'cpu': SysInfo(cpu)(),
828 'memory': SysInfo(memory)(),
829 'memory': SysInfo(memory)(),
829
830
830 'vcs_backends': SysInfo(vcs_backends)(),
831 'vcs_backends': SysInfo(vcs_backends)(),
831 'vcs_server': SysInfo(vcs_server)(),
832 'vcs_server': SysInfo(vcs_server)(),
832
833
833 'vcs_server_config': SysInfo(vcs_server_config)(),
834 'vcs_server_config': SysInfo(vcs_server_config)(),
834
835
835 'git': SysInfo(git_info)(),
836 'git': SysInfo(git_info)(),
836 'hg': SysInfo(hg_info)(),
837 'hg': SysInfo(hg_info)(),
837 'svn': SysInfo(svn_info)(),
838 'svn': SysInfo(svn_info)(),
838 }
839 }
839
840
840
841
841 def load_system_info(key):
842 def load_system_info(key):
842 """
843 """
843 get_sys_info('vcs_server')
844 get_sys_info('vcs_server')
844 get_sys_info('database')
845 get_sys_info('database')
845 """
846 """
846 return SysInfo(registered_helpers[key])()
847 return SysInfo(registered_helpers[key])()
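
For reference, a small usage sketch for the helpers registered above; it is illustrative only, since the exact shape of each SysInfo result is defined outside this excerpt:

    # Illustrative only: querying the helpers registered above.
    def dump_system_state():
        db_state = load_system_info('database')      # single section, via registered_helpers
        vcs_state = load_system_info('vcs_server')
        everything = get_system_info(environ={})     # full mapping of all sections

        print(db_state)
        print(vcs_state)
        print(sorted(everything))                    # e.g. 'cpu', 'database', 'vcs_server', ...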
@@ -1,842 +1,771 @@

; #########################################
; RHODECODE COMMUNITY EDITION CONFIGURATION
; #########################################

[DEFAULT]
; Debug flag sets all loggers to debug, and enables request tracking
debug = true

; ########################################################################
; EMAIL CONFIGURATION
; These settings will be used by the RhodeCode mailing system
; ########################################################################

; prefix all emails subjects with given prefix, helps filtering out emails
#email_prefix = [RhodeCode]

; email FROM address all mails will be sent
#app_email_from = rhodecode-noreply@localhost

#smtp_server = mail.server.com
#smtp_username =
#smtp_password =
#smtp_port =
#smtp_use_tls = false
#smtp_use_ssl = true

[server:main]
-; COMMON HOST/IP CONFIG
-host = 0.0.0.0
-port = 5000
+; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
+; Host port for gunicorn are controlled by gunicorn_conf.py
+host = 127.0.0.1
+port = 10020


; ###########################
; GUNICORN APPLICATION SERVER
; ###########################

-; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
+; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py

; Module to use, this setting shouldn't be changed
use = egg:gunicorn#main

-; Sets the number of process workers. More workers means more concurrent connections
-; RhodeCode can handle at the same time. Each additional worker also it increases
-; memory usage as each has it's own set of caches.
-; Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more
-; than 8-10 unless for really big deployments .e.g 700-1000 users.
-; `instance_id = *` must be set in the [app:main] section below (which is the default)
-; when using more than 1 worker.
-#workers = 2
-
-; Gunicorn access log level
-#loglevel = info
-
-; Process name visible in process list
-#proc_name = rhodecode
-
-; Type of worker class, one of `sync`, `gevent`
-; Recommended type is `gevent`
-#worker_class = gevent
-
-; The maximum number of simultaneous clients per worker. Valid only for gevent
-#worker_connections = 10
-
-; The maximum number of pending connections worker will queue to handle
-#backlog = 64
-
-; Max number of requests that worker will handle before being gracefully restarted.
-; Prevents memory leaks, jitter adds variability so not all workers are restarted at once.
-#max_requests = 1000
-#max_requests_jitter = 30
-
-; Amount of time a worker can spend with handling a request before it
-; gets killed and restarted. By default set to 21600 (6hrs)
-; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
-#timeout = 21600
-
-; The maximum size of HTTP request line in bytes.
-; 0 for unlimited
-#limit_request_line = 0
-
-; Limit the number of HTTP headers fields in a request.
-; By default this value is 100 and can't be larger than 32768.
-#limit_request_fields = 32768
-
-; Limit the allowed size of an HTTP request header field.
-; Value is a positive number or 0.
-; Setting it to 0 will allow unlimited header field sizes.
-#limit_request_field_size = 0
-
-; Timeout for graceful workers restart.
-; After receiving a restart signal, workers have this much time to finish
-; serving requests. Workers still alive after the timeout (starting from the
-; receipt of the restart signal) are force killed.
-; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
-#graceful_timeout = 3600
-
-# The number of seconds to wait for requests on a Keep-Alive connection.
-# Generally set in the 1-5 seconds range.
-#keepalive = 2
-
-; Maximum memory usage that each worker can use before it will receive a
-; graceful restart signal 0 = memory monitoring is disabled
-; Examples: 268435456 (256MB), 536870912 (512MB)
-; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB)
-#memory_max_usage = 0
-
-; How often in seconds to check for memory usage for each gunicorn worker
-#memory_usage_check_interval = 60
-
-; Threshold value for which we don't recycle worker if GarbageCollection
-; frees up enough resources. Before each restart we try to run GC on worker
-; in case we get enough free memory after that, restart will not happen.
-#memory_usage_recovery_threshold = 0.8
-
-
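
The gunicorn worker guidance removed above now lives outside the ini (the new run command points at gunicorn_conf.py). As a rough illustration only, a minimal gunicorn_conf.py-style sketch built from the sizing rule and defaults quoted in the removed comments; it is not the file RhodeCode actually ships:

    # gunicorn_conf.py -- illustrative sketch only, not the config RhodeCode ships.
    # Values and the sizing rule come from the removed comments above.
    import multiprocessing

    _cpus = multiprocessing.cpu_count()
    workers = min(2 * _cpus + 1, 8)   # "(2 * NUMBER_OF_CPUS + 1) ... but no more than 8-10"
    worker_class = 'gevent'           # recommended worker type
    max_requests = 1000               # recycle workers to contain memory leaks
    max_requests_jitter = 30          # stagger restarts so not all workers recycle at once
    timeout = 21600                   # 6h request timeout (old default)
    graceful_timeout = 3600           # time to finish in-flight requests on restart
    keepalive = 2                     # seconds to hold Keep-Alive connections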
; Prefix middleware for RhodeCode.
; recommended when using proxy setup.
; allows to set RhodeCode under a prefix in server.
; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
; And set your prefix like: `prefix = /custom_prefix`
; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
; to make your cookies only work on prefix url
[filter:proxy-prefix]
use = egg:PasteDeploy#prefix
prefix = /

[app:main]
; The %(here)s variable will be replaced with the absolute path of parent directory
; of this file
; Each option in the app:main section can be overridden by an environment variable
;
;To override an option:
;
;RC_<KeyName>
;Everything should be uppercase, . and - should be replaced by _.
;For example, if you have these configuration settings:
;rc_cache.repo_object.backend = foo
;can be overridden by
;export RC_CACHE_REPO_OBJECT_BACKEND=foo

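As an illustration of the naming rule above (uppercase, with . and - replaced by _), a small hypothetical helper; RhodeCode's real resolver is not shown here, and how the RC_ prefix interacts with keys that already start with rc_ is inferred from the single example given:

    import os

    def rc_env_name(option: str) -> str:
        # uppercase, '.' and '-' become '_'; avoid doubling the RC_ prefix
        name = option.upper().replace('.', '_').replace('-', '_')
        return name if name.startswith('RC_') else 'RC_' + name

    def resolve(option: str, ini_value: str) -> str:
        # prefer the environment override when it is set
        return os.environ.get(rc_env_name(option), ini_value)

    assert rc_env_name('rc_cache.repo_object.backend') == 'RC_CACHE_REPO_OBJECT_BACKEND'
    assert rc_env_name('use_celery') == 'RC_USE_CELERY'
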
is_test = True
use = egg:rhodecode-enterprise-ce

; enable proxy prefix middleware, defined above
#filter-with = proxy-prefix

; encryption key used to encrypt social plugin tokens,
; remote_urls with credentials etc, if not set it defaults to
; `beaker.session.secret`
#rhodecode.encrypted_values.secret =

; decryption strict mode (enabled by default). It controls if decryption raises
; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
#rhodecode.encrypted_values.strict = false

; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
; fernet is safer, and we strongly recommend switching to it.
; Due to backward compatibility aes is used as default.
#rhodecode.encrypted_values.algorithm = fernet

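For the fernet option recommended above, a minimal sketch of Fernet encryption using the cryptography package; it only illustrates the algorithm choice and is not how rhodecode.encrypted_values is wired internally:

    from cryptography.fernet import Fernet, InvalidToken

    key = Fernet.generate_key()       # the configured secret plays this role in RhodeCode
    f = Fernet(key)

    token = f.encrypt(b'remote_url_password')
    assert f.decrypt(token) == b'remote_url_password'

    try:
        Fernet(Fernet.generate_key()).decrypt(token)   # wrong key
    except InvalidToken:
        pass  # comparable to the strict-mode SignatureVerificationError described above
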
; Return gzipped responses from RhodeCode (static files/application)
gzip_responses = false

; Auto-generate javascript routes file on startup
generate_js_files = false

; System global default language.
; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
lang = en

; Perform a full repository scan and import on each server start.
; Setting this to true could lead to very long startup time.
startup.import_repos = true

; URL at which the application is running. This is used for Bootstrapping
; requests in context when no web request is available. Used in ishell, or
; SSH calls. Set this for events to receive proper url for SSH calls.
app.base_url = http://rhodecode.local

; Unique application ID. Should be a random unique string for security.
app_instance_uuid = rc-production

; Cut off limit for large diffs (size in bytes). If overall diff size on
; commit, or pull request exceeds this limit this diff will be displayed
; partially. E.g 512000 == 512Kb
cut_off_limit_diff = 1024000

; Cut off limit for large files inside diffs (size in bytes). Each individual
; file inside diff which exceeds this limit will be displayed partially.
; E.g 128000 == 128Kb
cut_off_limit_file = 256000

; Use cached version of vcs repositories everywhere. Recommended to be `true`
vcs_full_cache = false

; Force https in RhodeCode, fixes https redirects, assumes it's always https.
; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
force_https = false

; use Strict-Transport-Security headers
use_htsts = false

; Set to true if your repos are exposed using the dumb protocol
git_update_server_info = false

; RSS/ATOM feed options
rss_cut_off_limit = 256000
rss_items_per_page = 10
rss_include_diff = false

; gist URL alias, used to create nicer urls for gist. This should be an
; url that does rewrites to _admin/gists/{gistid}.
; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
gist_alias_url =

; List of views (using glob pattern syntax) that AUTH TOKENS could be
; used for access.
; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
; came from the logged-in user who owns this authentication token.
; Additionally @TOKEN syntax can be used to bind the view to a specific
; authentication token. Such a view would only be accessible when used together
; with this authentication token
; list of all views can be found under `/_admin/permissions/auth_token_access`
; The list should be "," separated and on a single line.
; Most common views to enable:

# RepoCommitsView:repo_commit_download
# RepoCommitsView:repo_commit_patch
# RepoCommitsView:repo_commit_raw
# RepoCommitsView:repo_commit_raw@TOKEN
# RepoFilesView:repo_files_diff
# RepoFilesView:repo_archivefile
# RepoFilesView:repo_file_raw
# GistView:*
api_access_controllers_whitelist =

; Default encoding used to convert from and to unicode
; can be also a comma separated list of encoding in case of mixed encodings
default_encoding = UTF-8

; instance-id prefix
; a prefix key for this instance used for cache invalidation when running
; multiple instances of RhodeCode, make sure it's globally unique for
; all running RhodeCode instances. Leave empty if you don't use it
instance_id =

; Fallback authentication plugin. Set this to a plugin ID to force the usage
; of an authentication plugin also if it is disabled by its settings.
; This could be useful if you are unable to log in to the system due to broken
; authentication settings. Then you can enable e.g. the internal RhodeCode auth
; module to log in again and fix the settings.
; Available builtin plugin IDs (hash is part of the ID):
; egg:rhodecode-enterprise-ce#rhodecode
; egg:rhodecode-enterprise-ce#pam
; egg:rhodecode-enterprise-ce#ldap
; egg:rhodecode-enterprise-ce#jasig_cas
; egg:rhodecode-enterprise-ce#headers
; egg:rhodecode-enterprise-ce#crowd

#rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode

; Flag to control loading of legacy plugins in py:/path format
auth_plugin.import_legacy_plugins = true

; alternative return HTTP header for failed authentication. Default HTTP
; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
; handling that causing a series of failed authentication calls.
; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
; This will be served instead of default 401 on bad authentication
auth_ret_code =

; use special detection method when serving auth_ret_code, instead of serving
; ret_code directly, use 401 initially (Which triggers credentials prompt)
; and then serve auth_ret_code to clients
auth_ret_code_detection = false

; locking return code. When repository is locked return this HTTP code. 2XX
; codes don't break the transactions while 4XX codes do
lock_ret_code = 423

; allows to change the repository location in settings page
allow_repo_location_change = true

; allows to setup custom hooks in settings page
allow_custom_hooks_settings = true

; Generated license token required for EE edition license.
; New generated token value can be found in Admin > settings > license page.
license_token = abra-cada-bra1-rce3

; This flag hides sensitive information on the license page such as token, and license data
license.hide_license_info = false

; supervisor connection uri, for managing supervisor and logs.
supervisor.uri =

; supervisord group name/id we only want this RC instance to handle
supervisor.group_id = dev

; Display extended labs settings
labs_settings_active = true

; Custom exception store path, defaults to TMPDIR
; This is used to store exception from RhodeCode in shared directory
#exception_tracker.store_path =

; Send email with exception details when it happens
#exception_tracker.send_email = false

; Comma separated list of recipients for exception emails,
; e.g admin@rhodecode.com,devops@rhodecode.com
; Can be left empty, then emails will be sent to ALL super-admins
#exception_tracker.send_email_recipients =

; optional prefix to Add to email Subject
#exception_tracker.email_prefix = [RHODECODE ERROR]

; File store configuration. This is used to store and serve uploaded files
file_store.enabled = true

; Storage backend, available options are: local
file_store.backend = local

; path to store the uploaded binaries
file_store.storage_path = %(here)s/data/file_store

; Uncomment and set this path to control settings for archive download cache.
; Generated repo archives will be cached at this location
; and served from the cache during subsequent requests for the same archive of
; the repository. This path is important to be shared across filesystems and with
; RhodeCode and vcsserver

; Default is $cache_dir/archive_cache if not set
archive_cache.store_dir = /tmp/rc-test-data/archive_cache

; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
archive_cache.cache_size_gb = 10

; By default cache uses sharding technique, this specifies how many shards are there
archive_cache.cache_shards = 10

; #############
; CELERY CONFIG
; #############

; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini

use_celery = false

; path to store schedule database
#celerybeat-schedule.path =

; connection url to the message broker (default redis)
celery.broker_url = redis://localhost:6379/8

; rabbitmq example
#celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost

; maximum tasks to execute before worker restart
celery.max_tasks_per_child = 20

; tasks will never be sent to the queue, but executed locally instead.
celery.task_always_eager = false

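A minimal sketch of how the broker settings above map onto a plain Celery application (standard Celery options; the real app is built by rhodecode.lib.celerylib.loader, which is not shown here):

    from celery import Celery

    app = Celery('rc_sketch', broker='redis://localhost:6379/8')  # celery.broker_url
    app.conf.task_always_eager = False          # celery.task_always_eager
    app.conf.worker_max_tasks_per_child = 20    # celery.max_tasks_per_child

    @app.task
    def ping(value):
        # placeholder task body; real tasks live under rhodecode.lib.celerylib
        return value
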
; #############
; DOGPILE CACHE
; #############

; Default cache dir for caches. Putting this into a ramdisk can boost performance.
; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
cache_dir = %(here)s/rc-test-data

; *********************************************
; `sql_cache_short` cache for heavy SQL queries
; Only supported backend is `memory_lru`
; *********************************************
rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
rc_cache.sql_cache_short.expiration_time = 0


; *****************************************************
; `cache_repo_longterm` cache for repo object instances
; Only supported backend is `memory_lru`
; *****************************************************
rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
; by default we use 30 Days, cache is still invalidated on push
rc_cache.cache_repo_longterm.expiration_time = 2592000
; max items in LRU cache, set to smaller number to save memory, and expire last used caches
rc_cache.cache_repo_longterm.max_size = 10000


; *********************************************
; `cache_general` cache for general purpose use
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; *********************************************
rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_general.expiration_time = 43200
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
rc_cache.cache_general.arguments.filename = %(here)s/cache-backend/cache_general_db

; alternative `cache_general` redis backend with distributed lock
#rc_cache.cache_general.backend = dogpile.cache.rc.redis
#rc_cache.cache_general.expiration_time = 300

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_general.arguments.redis_expiration_time = 7200

#rc_cache.cache_general.arguments.host = localhost
#rc_cache.cache_general.arguments.port = 6379
#rc_cache.cache_general.arguments.db = 0
#rc_cache.cache_general.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_general.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_general.arguments.lock_auto_renewal = true

; *************************************************
; `cache_perms` cache for permission tree, auth TTL
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; *************************************************
rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_perms.expiration_time = 0
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
rc_cache.cache_perms.arguments.filename = %(here)s/cache-backend/cache_perms_db

; alternative `cache_perms` redis backend with distributed lock
#rc_cache.cache_perms.backend = dogpile.cache.rc.redis
#rc_cache.cache_perms.expiration_time = 300

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_perms.arguments.redis_expiration_time = 7200

#rc_cache.cache_perms.arguments.host = localhost
#rc_cache.cache_perms.arguments.port = 6379
#rc_cache.cache_perms.arguments.db = 0
#rc_cache.cache_perms.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_perms.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_perms.arguments.lock_auto_renewal = true

; ***************************************************
; `cache_repo` cache for file tree, Readme, RSS FEEDS
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; ***************************************************
rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_repo.expiration_time = 2592000
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
rc_cache.cache_repo.arguments.filename = %(here)s/cache-backend/cache_repo_db

; alternative `cache_repo` redis backend with distributed lock
#rc_cache.cache_repo.backend = dogpile.cache.rc.redis
#rc_cache.cache_repo.expiration_time = 2592000

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_repo.arguments.redis_expiration_time = 2678400

#rc_cache.cache_repo.arguments.host = localhost
#rc_cache.cache_repo.arguments.port = 6379
#rc_cache.cache_repo.arguments.db = 1
#rc_cache.cache_repo.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_repo.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_repo.arguments.lock_auto_renewal = true

; ##############
; BEAKER SESSION
; ##############

; beaker.session.type is the type of storage used for the logged users sessions. Current allowed
; types are file, ext:redis, ext:database, ext:memcached, and memory (default if not specified).
; Fastest ones are Redis and ext:database
beaker.session.type = file
beaker.session.data_dir = %(here)s/rc-tests/data/sessions

; Redis based sessions
#beaker.session.type = ext:redis
#beaker.session.url = redis://127.0.0.1:6379/2

; DB based session, fast, and allows easy management over logged in users
#beaker.session.type = ext:database
#beaker.session.table_name = db_session
#beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
#beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
#beaker.session.sa.pool_recycle = 3600
#beaker.session.sa.echo = false

beaker.session.key = rhodecode
beaker.session.secret = test-rc-uytcxaz
beaker.session.lock_dir = %(here)s/data/sessions/lock

; Secure encrypted cookie. Requires AES and AES python libraries
; you must disable beaker.session.secret to use this
#beaker.session.encrypt_key = key_for_encryption
#beaker.session.validate_key = validation_key

; Sets session as invalid (also logging out user) if it has not been
; accessed for given amount of time in seconds
beaker.session.timeout = 2592000
beaker.session.httponly = true

; Path to use for the cookie. Set to prefix if you use prefix middleware
#beaker.session.cookie_path = /custom_prefix

; Set https secure cookie
beaker.session.secure = false

## auto save the session so there is no need to call .save()
beaker.session.auto = false

; default cookie expiration time in seconds, set to `true` to set expire
; at browser close
#beaker.session.cookie_expires = 3600

; #############################
; SEARCH INDEXING CONFIGURATION
; #############################

; Full text search indexer is available in rhodecode-tools under
; `rhodecode-tools index` command

; WHOOSH Backend, doesn't require additional services to run
; it works well with a few dozen repos
search.module = rhodecode.lib.index.whoosh
search.location = %(here)s/data/index

; ####################
; CHANNELSTREAM CONFIG
; ####################

; channelstream enables persistent connections and live notification
; in the system. It's also used by the chat system

channelstream.enabled = false

; server address for channelstream server on the backend
channelstream.server = 127.0.0.1:9800

; location of the channelstream server from outside world
; use ws:// for http or wss:// for https. This address needs to be handled
; by external HTTP server such as Nginx or Apache
; see Nginx/Apache configuration examples in our docs
channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
channelstream.secret = secret
channelstream.history.location = %(here)s/channelstream_history

; Internal application path that Javascript uses to connect into.
; If you use proxy-prefix the prefix should be added before /_channelstream
channelstream.proxy_path = /_channelstream


; ##############################
; MAIN RHODECODE DATABASE CONFIG
; ##############################

#sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
#sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
#sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
; pymysql is an alternative driver for MySQL, use in case of problems with default one
#sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode

sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30

; see sqlalchemy docs for other advanced settings
; print the sql statements to output
sqlalchemy.db1.echo = false

; recycle the connections after this amount of seconds
sqlalchemy.db1.pool_recycle = 3600

; the number of connections to keep open inside the connection pool.
; 0 indicates no limit
+; the general calculus with gevent is:
+; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
+; then increase pool size + max overflow so that they add up to 500.
#sqlalchemy.db1.pool_size = 5

; The number of connections to allow in connection pool "overflow", that is
; connections that can be opened above and beyond the pool_size setting,
; which defaults to five.
#sqlalchemy.db1.max_overflow = 10

; Connection check ping, used to detect broken database connections
; could be enabled to better handle cases of MySQL "has gone away" errors
#sqlalchemy.db1.ping_connection = true

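The sqlalchemy.db1.* pool options above correspond to SQLAlchemy create_engine() parameters; a sketch with the values from this file (how RhodeCode actually constructs its engine is not shown here):

    from sqlalchemy import create_engine
    from sqlalchemy.pool import QueuePool

    engine = create_engine(
        'sqlite:////tmp/rhodecode_test.db',  # stand-in for sqlalchemy.db1.url
        echo=False,                          # sqlalchemy.db1.echo
        poolclass=QueuePool,                 # make the pool options below apply to SQLite too
        pool_size=5,                         # sqlalchemy.db1.pool_size
        max_overflow=10,                     # sqlalchemy.db1.max_overflow
        pool_recycle=3600,                   # sqlalchemy.db1.pool_recycle
        pool_pre_ping=True,                  # sqlalchemy.db1.ping_connection
    )

    with engine.connect():
        pass  # connections beyond pool_size + max_overflow wait, per the comments above
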
593 ; ##########
523 ; ##########
594 ; VCS CONFIG
524 ; VCS CONFIG
595 ; ##########
525 ; ##########
596 vcs.server.enable = true
526 vcs.server.enable = true
597 #vcs.server = localhost:9901
598 vcs.server = vcsserver:10010
527 vcs.server = vcsserver:10010
599
528
600 ; Web server connectivity protocol, responsible for web based VCS operations
529 ; Web server connectivity protocol, responsible for web based VCS operations
601 ; Available protocols are:
530 ; Available protocols are:
602 ; `http` - use http-rpc backend (default)
531 ; `http` - use http-rpc backend (default)
603 vcs.server.protocol = http
532 vcs.server.protocol = http
604
533
605 ; Push/Pull operations protocol, available options are:
534 ; Push/Pull operations protocol, available options are:
606 ; `http` - use http-rpc backend (default)
535 ; `http` - use http-rpc backend (default)
607 vcs.scm_app_implementation = http
536 vcs.scm_app_implementation = http
608
537
609 ; Push/Pull operations hooks protocol, available options are:
538 ; Push/Pull operations hooks protocol, available options are:
610 ; `http` - use http-rpc backend (default)
539 ; `http` - use http-rpc backend (default)
611 vcs.hooks.protocol = http
540 vcs.hooks.protocol = http
612
541
613 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
542 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
614 ; accessible via network.
543 ; accessible via network.
615 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
544 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
616 vcs.hooks.host = *
545 vcs.hooks.host = *
617
546
618 ; Start VCSServer with this instance as a subprocess, useful for development
547 ; Start VCSServer with this instance as a subprocess, useful for development
619 vcs.start_server = false
548 vcs.start_server = false
620
549
621 ; List of enabled VCS backends, available options are:
550 ; List of enabled VCS backends, available options are:
622 ; `hg` - mercurial
551 ; `hg` - mercurial
623 ; `git` - git
552 ; `git` - git
624 ; `svn` - subversion
553 ; `svn` - subversion
625 vcs.backends = hg, git, svn
554 vcs.backends = hg, git, svn
626
555
627 ; Wait this number of seconds before killing connection to the vcsserver
556 ; Wait this number of seconds before killing connection to the vcsserver
628 vcs.connection_timeout = 3600
557 vcs.connection_timeout = 3600
629
558
630 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
559 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
631 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
560 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
632 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
561 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
633 #vcs.svn.compatible_version = 1.8
562 #vcs.svn.compatible_version = 1.8
634
563
635 ; Cache flag to cache vcsserver remote calls locally
564 ; Cache flag to cache vcsserver remote calls locally
636 ; It uses cache_region `cache_repo`
565 ; It uses cache_region `cache_repo`
637 vcs.methods.cache = false
566 vcs.methods.cache = false

; ####################################################
; Subversion proxy support (mod_dav_svn)
; Maps RhodeCode repo groups into SVN paths for Apache
; ####################################################

; Enable or disable the config file generation.
svn.proxy.generate_config = false

; Generate config file with `SVNListParentPath` set to `On`.
svn.proxy.list_parent_path = true

; Set location and file name of the generated config file.
svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf

; Alternative mod_dav config template. This needs to be a valid mako template
; Example template can be found in the source code:
; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
#svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako

; Used as a prefix to the `Location` block in the generated config file.
; In most cases it should be set to `/`.
svn.proxy.location_root = /
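; For orientation only (an assumed sketch; the authoritative output comes from the
; mako template referenced above), the generated file wraps repo groups in
; mod_dav_svn blocks roughly like the following, with the path being illustrative:
;   <Location /my-repo-group>
;     DAV svn
;     SVNParentPath /path/to/repo_store/my-repo-group
;     SVNListParentPath On
;   </Location>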

; Command to reload the mod dav svn configuration on change.
; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
; Make sure the user who runs the RhodeCode process is allowed to reload Apache
#svn.proxy.reload_cmd = /etc/init.d/apache2 reload

; If the timeout expires before the reload command finishes, the command will
; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
#svn.proxy.reload_timeout = 10

; ####################
; SSH Support Settings
; ####################

; Defines if a custom authorized_keys file should be created and written on
; any change of user SSH keys. Setting this to false also disables the possibility
; for users to add SSH keys from the web interface. Super admins can still
; manage SSH Keys.
ssh.generate_authorized_keyfile = true

; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
# ssh.authorized_keys_ssh_opts =

; Path to the authorized_keys file where the generated entries are placed.
; It is possible to have multiple key files specified in `sshd_config`, e.g.
; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
ssh.authorized_keys_file_path = %(here)s/rc/authorized_keys_rhodecode
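; For illustration only (an assumed example; the exact entry is generated by
; RhodeCode), each managed key ends up as a single authorized_keys line that
; combines the options above with a forced command pointing at the SSH wrapper:
;   no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,command="/path/to/rc-ssh-wrapper ..." ssh-rsa AAAA... user@host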

; Command to execute the SSH wrapper. The binary is available in the
; RhodeCode installation directory.
; e.g. ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper

; Allow shell when executing the ssh-wrapper command
ssh.wrapper_cmd_allow_shell = false

; Enables logging and detailed output sent back to the client during SSH
; operations. Useful for debugging, shouldn't be used in production.
ssh.enable_debug_logging = false

; Paths to binary executables; by default these are just the binary names, but
; they can be overridden to use custom ones
ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve

; Enables SSH key generator web interface. Disabling this still allows users
; to add their own keys.
ssh.enable_ui_key_generator = true

; Statsd client config; this is used to send metrics to statsd.
; We recommend setting up statsd_exporter and scraping the metrics with Prometheus
#statsd.enabled = false
#statsd.statsd_host = 0.0.0.0
#statsd.statsd_port = 8125
#statsd.statsd_prefix =
#statsd.statsd_ipv6 = false
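; For example (illustrative values), emitting metrics to a local statsd agent
; would mean enabling the block above roughly as:
;   statsd.enabled = true
;   statsd.statsd_host = 127.0.0.1
;   statsd.statsd_port = 8125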

; Configure logging automatically at server startup; set to false
; to use the custom logging config below.
; The RC_LOGGING_FORMATTER and RC_LOGGING_LEVEL env variables can control
; the logging settings when autoconfigure is used.

logging.autoconfigure = false
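; As an illustrative example (the accepted values are assumptions), with
; autoconfigure enabled the server could be started with
;   RC_LOGGING_LEVEL=DEBUG RC_LOGGING_FORMATTER=json
; to get DEBUG-level, JSON-formatted output without editing the section below.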

; specify your own custom logging config file to configure logging
#logging.logging_conf_file = /path/to/custom_logging.ini

; Dummy marker to add new entries after.
; Add any custom entries below. Please don't remove this marker.
custom.conf = 1


; #####################
; LOGGING CONFIGURATION
; #####################

[loggers]
keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper, dogpile

[handlers]
keys = console, console_sql

[formatters]
keys = generic, json, color_formatter, color_formatter_sql

; #######
; LOGGERS
; #######
[logger_root]
level = NOTSET
handlers = console

[logger_routes]
level = DEBUG
handlers =
qualname = routes.middleware
## "level = DEBUG" logs the route matched and routing variables.
propagate = 1

[logger_sqlalchemy]
level = INFO
handlers = console_sql
qualname = sqlalchemy.engine
propagate = 0

[logger_beaker]
level = DEBUG
handlers =
qualname = beaker.container
propagate = 1

[logger_dogpile]
level = INFO
handlers = console
qualname = dogpile
propagate = 1

[logger_rhodecode]
level = DEBUG
handlers =
qualname = rhodecode
propagate = 1

[logger_ssh_wrapper]
level = DEBUG
handlers =
qualname = ssh_wrapper
propagate = 1

[logger_celery]
level = DEBUG
handlers =
qualname = celery


; ########
; HANDLERS
; ########

[handler_console]
class = StreamHandler
args = (sys.stderr, )
level = DEBUG
; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
; This allows sending properly formatted logs to Grafana Loki or Elasticsearch
formatter = generic
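; For example, to switch this handler to JSON output as described above:
#formatter = json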

[handler_console_sql]
; "level = DEBUG" logs SQL queries and results.
; "level = INFO" logs SQL queries.
; "level = WARN" logs neither. (Recommended for production systems.)
class = StreamHandler
args = (sys.stderr, )
level = WARN
; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
; This allows sending properly formatted logs to Grafana Loki or Elasticsearch
formatter = generic

; ##########
; FORMATTERS
; ##########

[formatter_generic]
class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter]
class = rhodecode.lib.logging_formatter.ColorFormatter
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter_sql]
class = rhodecode.lib.logging_formatter.ColorFormatterSql
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_json]
format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
class = rhodecode.lib._vendor.jsonlogger.JsonFormatter