##// END OF EJS Templates
feat(disk-cache): rewrite diskcache backend to be k8s and NFS safe....
super-admin -
r5420:9cce0276 default
parent child Browse files
Show More
@@ -0,0 +1,29 b''
1 # Copyright (C) 2015-2024 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 from .fanout_cache import get_archival_cache_store
20 from .fanout_cache import get_archival_config
21
22 from .utils import archive_iterator
23 from .utils import ArchiveCacheLock
24
25
def includeme(config):
    """Pyramid setup hook: warm up the archival cache store at app start.

    Reads the deployment settings from the configurator and passes them to
    ``get_archival_cache_store`` so the cache backend is initialized once,
    during application bootstrap, rather than lazily on first request.
    """
    app_settings = config.get_settings()
    get_archival_cache_store(app_settings)
@@ -0,0 +1,60 b''
1 # Copyright (C) 2015-2024 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import redis
20 from rhodecode.lib._vendor import redis_lock
21
22 from .utils import ArchiveCacheLock
23
24
class GenerationLock:
    """Distributed lock guarding archive generation, backed by Redis.

    Intended usage::

        with GenerationLock(lock_key, url):
            compute_archive()

    Entering the context raises ``ArchiveCacheLock`` immediately when the
    lock is already held elsewhere (non-blocking acquire).
    """
    # Seconds after which a held lock auto-expires (2 hours).
    lock_timeout = 7200

    def __init__(self, lock_key, url):
        self.lock_key = lock_key
        self._create_client(url)
        self.lock = self.get_lock()

    def _create_client(self, url):
        # One shared client serves both roles; reader is an alias of writer.
        pool = redis.ConnectionPool.from_url(url)
        client = redis.StrictRedis(connection_pool=pool)
        self.writer_client = client
        self.reader_client = client

    def get_lock(self):
        # Build (but do not yet acquire) the distributed lock object.
        # strict=True enables redis_lock's strict client validation.
        return redis_lock.Lock(
            redis_client=self.writer_client,
            name=self.lock_key,
            expire=self.lock_timeout,
            strict=True,
        )

    def __enter__(self):
        # Fail fast rather than queue behind another worker generating
        # the same archive.
        if not self.lock.acquire(blocking=False):
            raise ArchiveCacheLock('Failed to create a lock')

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.lock.release()
@@ -0,0 +1,30 b''
1 # Copyright (C) 2015-2024 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19
class ArchiveCacheLock(Exception):
    """Raised when an archive-generation lock cannot be acquired."""
22
23
def archive_iterator(_reader, block_size: int = 4096 * 512):
    """Yield successive chunks read from a file-like object.

    :param _reader: object exposing ``read(size)`` returning bytes
        (empty bytes signals end of stream).
    :param block_size: bytes requested per read; default 4096 * 512 = 2MB.
        (The original inline comment claimed 64KB, which did not match
        the actual default.)
    """
    while True:
        data = _reader.read(block_size)
        if not data:
            break
        yield data
@@ -1,803 +1,813 b''
1
1
2 ; #########################################
2 ; #########################################
3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 ; #########################################
4 ; #########################################
5
5
6 [DEFAULT]
6 [DEFAULT]
7 ; Debug flag sets all loggers to debug, and enables request tracking
7 ; Debug flag sets all loggers to debug, and enables request tracking
8 debug = true
8 debug = true
9
9
10 ; ########################################################################
10 ; ########################################################################
11 ; EMAIL CONFIGURATION
11 ; EMAIL CONFIGURATION
12 ; These settings will be used by the RhodeCode mailing system
12 ; These settings will be used by the RhodeCode mailing system
13 ; ########################################################################
13 ; ########################################################################
14
14
15 ; prefix all emails subjects with given prefix, helps filtering out emails
15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 #email_prefix = [RhodeCode]
16 #email_prefix = [RhodeCode]
17
17
18 ; email FROM address all mails will be sent
18 ; email FROM address all mails will be sent
19 #app_email_from = rhodecode-noreply@localhost
19 #app_email_from = rhodecode-noreply@localhost
20
20
21 #smtp_server = mail.server.com
21 #smtp_server = mail.server.com
22 #smtp_username =
22 #smtp_username =
23 #smtp_password =
23 #smtp_password =
24 #smtp_port =
24 #smtp_port =
25 #smtp_use_tls = false
25 #smtp_use_tls = false
26 #smtp_use_ssl = true
26 #smtp_use_ssl = true
27
27
28 [server:main]
28 [server:main]
29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
30 ; Host port for gunicorn are controlled by gunicorn_conf.py
30 ; Host port for gunicorn are controlled by gunicorn_conf.py
31 host = 127.0.0.1
31 host = 127.0.0.1
32 port = 10020
32 port = 10020
33
33
34
34
35 ; ###########################
35 ; ###########################
36 ; GUNICORN APPLICATION SERVER
36 ; GUNICORN APPLICATION SERVER
37 ; ###########################
37 ; ###########################
38
38
39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
40
40
41 ; Module to use, this setting shouldn't be changed
41 ; Module to use, this setting shouldn't be changed
42 use = egg:gunicorn#main
42 use = egg:gunicorn#main
43
43
44 ; Prefix middleware for RhodeCode.
44 ; Prefix middleware for RhodeCode.
45 ; recommended when using proxy setup.
45 ; recommended when using proxy setup.
46 ; allows to set RhodeCode under a prefix in server.
46 ; allows to set RhodeCode under a prefix in server.
47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
48 ; And set your prefix like: `prefix = /custom_prefix`
48 ; And set your prefix like: `prefix = /custom_prefix`
49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
50 ; to make your cookies only work on prefix url
50 ; to make your cookies only work on prefix url
51 [filter:proxy-prefix]
51 [filter:proxy-prefix]
52 use = egg:PasteDeploy#prefix
52 use = egg:PasteDeploy#prefix
53 prefix = /
53 prefix = /
54
54
55 [app:main]
55 [app:main]
56 ; The %(here)s variable will be replaced with the absolute path of parent directory
56 ; The %(here)s variable will be replaced with the absolute path of parent directory
57 ; of this file
57 ; of this file
58 ; Each option in the app:main can be overridden by an environment variable
58 ; Each option in the app:main can be overridden by an environment variable
59 ;
59 ;
60 ;To override an option:
60 ;To override an option:
61 ;
61 ;
62 ;RC_<KeyName>
62 ;RC_<KeyName>
63 ;Everything should be uppercase, . and - should be replaced by _.
63 ;Everything should be uppercase, . and - should be replaced by _.
64 ;For example, if you have these configuration settings:
64 ;For example, if you have these configuration settings:
65 ;rc_cache.repo_object.backend = foo
65 ;rc_cache.repo_object.backend = foo
66 ;can be overridden by
66 ;can be overridden by
67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
68
68
69 use = egg:rhodecode-enterprise-ce
69 use = egg:rhodecode-enterprise-ce
70
70
71 ; enable proxy prefix middleware, defined above
71 ; enable proxy prefix middleware, defined above
72 #filter-with = proxy-prefix
72 #filter-with = proxy-prefix
73
73
74 ; #############
74 ; #############
75 ; DEBUG OPTIONS
75 ; DEBUG OPTIONS
76 ; #############
76 ; #############
77
77
78 pyramid.reload_templates = true
78 pyramid.reload_templates = true
79
79
80 # During development we want to have the debug toolbar enabled
80 # During development we want to have the debug toolbar enabled
81 pyramid.includes =
81 pyramid.includes =
82 pyramid_debugtoolbar
82 pyramid_debugtoolbar
83
83
84 debugtoolbar.hosts = 0.0.0.0/0
84 debugtoolbar.hosts = 0.0.0.0/0
85 debugtoolbar.exclude_prefixes =
85 debugtoolbar.exclude_prefixes =
86 /css
86 /css
87 /fonts
87 /fonts
88 /images
88 /images
89 /js
89 /js
90
90
91 ## RHODECODE PLUGINS ##
91 ## RHODECODE PLUGINS ##
92 rhodecode.includes =
92 rhodecode.includes =
93 rhodecode.api
93 rhodecode.api
94
94
95
95
96 # api prefix url
96 # api prefix url
97 rhodecode.api.url = /_admin/api
97 rhodecode.api.url = /_admin/api
98
98
99 ; enable debug style page
99 ; enable debug style page
100 debug_style = true
100 debug_style = true
101
101
102 ; #################
102 ; #################
103 ; END DEBUG OPTIONS
103 ; END DEBUG OPTIONS
104 ; #################
104 ; #################
105
105
106 ; encryption key used to encrypt social plugin tokens,
106 ; encryption key used to encrypt social plugin tokens,
107 ; remote_urls with credentials etc, if not set it defaults to
107 ; remote_urls with credentials etc, if not set it defaults to
108 ; `beaker.session.secret`
108 ; `beaker.session.secret`
109 #rhodecode.encrypted_values.secret =
109 #rhodecode.encrypted_values.secret =
110
110
111 ; decryption strict mode (enabled by default). It controls if decryption raises
111 ; decryption strict mode (enabled by default). It controls if decryption raises
112 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
112 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
113 #rhodecode.encrypted_values.strict = false
113 #rhodecode.encrypted_values.strict = false
114
114
115 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
115 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
116 ; fernet is safer, and we strongly recommend switching to it.
116 ; fernet is safer, and we strongly recommend switching to it.
117 ; Due to backward compatibility aes is used as default.
117 ; Due to backward compatibility aes is used as default.
118 #rhodecode.encrypted_values.algorithm = fernet
118 #rhodecode.encrypted_values.algorithm = fernet
119
119
120 ; Return gzipped responses from RhodeCode (static files/application)
120 ; Return gzipped responses from RhodeCode (static files/application)
121 gzip_responses = false
121 gzip_responses = false
122
122
123 ; Auto-generate javascript routes file on startup
123 ; Auto-generate javascript routes file on startup
124 generate_js_files = false
124 generate_js_files = false
125
125
126 ; System global default language.
126 ; System global default language.
127 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
127 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
128 lang = en
128 lang = en
129
129
130 ; Perform a full repository scan and import on each server start.
130 ; Perform a full repository scan and import on each server start.
131 ; Setting this to true could lead to very long startup time.
131 ; Setting this to true could lead to very long startup time.
132 startup.import_repos = false
132 startup.import_repos = false
133
133
134 ; URL at which the application is running. This is used for Bootstrapping
134 ; URL at which the application is running. This is used for Bootstrapping
135 ; requests in context when no web request is available. Used in ishell, or
135 ; requests in context when no web request is available. Used in ishell, or
136 ; SSH calls. Set this for events to receive proper url for SSH calls.
136 ; SSH calls. Set this for events to receive proper url for SSH calls.
137 app.base_url = http://rhodecode.local
137 app.base_url = http://rhodecode.local
138
138
139 ; Host at which the Service API is running.
139 ; Host at which the Service API is running.
140 app.service_api.host = http://rhodecode.local:10020
140 app.service_api.host = http://rhodecode.local:10020
141
141
142 ; Secret for Service API authentication.
142 ; Secret for Service API authentication.
143 app.service_api.token =
143 app.service_api.token =
144
144
145 ; Unique application ID. Should be a random unique string for security.
145 ; Unique application ID. Should be a random unique string for security.
146 app_instance_uuid = rc-production
146 app_instance_uuid = rc-production
147
147
148 ; Cut off limit for large diffs (size in bytes). If overall diff size on
148 ; Cut off limit for large diffs (size in bytes). If overall diff size on
149 ; commit, or pull request exceeds this limit this diff will be displayed
149 ; commit, or pull request exceeds this limit this diff will be displayed
150 ; partially. E.g 512000 == 512Kb
150 ; partially. E.g 512000 == 512Kb
151 cut_off_limit_diff = 512000
151 cut_off_limit_diff = 512000
152
152
153 ; Cut off limit for large files inside diffs (size in bytes). Each individual
153 ; Cut off limit for large files inside diffs (size in bytes). Each individual
154 ; file inside diff which exceeds this limit will be displayed partially.
154 ; file inside diff which exceeds this limit will be displayed partially.
155 ; E.g 128000 == 128Kb
155 ; E.g 128000 == 128Kb
156 cut_off_limit_file = 128000
156 cut_off_limit_file = 128000
157
157
158 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
158 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
159 vcs_full_cache = true
159 vcs_full_cache = true
160
160
161 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
161 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
162 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
162 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
163 force_https = false
163 force_https = false
164
164
165 ; use Strict-Transport-Security headers
165 ; use Strict-Transport-Security headers
166 use_htsts = false
166 use_htsts = false
167
167
168 ; Set to true if your repos are exposed using the dumb protocol
168 ; Set to true if your repos are exposed using the dumb protocol
169 git_update_server_info = false
169 git_update_server_info = false
170
170
171 ; RSS/ATOM feed options
171 ; RSS/ATOM feed options
172 rss_cut_off_limit = 256000
172 rss_cut_off_limit = 256000
173 rss_items_per_page = 10
173 rss_items_per_page = 10
174 rss_include_diff = false
174 rss_include_diff = false
175
175
176 ; gist URL alias, used to create nicer urls for gist. This should be an
176 ; gist URL alias, used to create nicer urls for gist. This should be an
177 ; url that does rewrites to _admin/gists/{gistid}.
177 ; url that does rewrites to _admin/gists/{gistid}.
178 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
178 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
179 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
179 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
180 gist_alias_url =
180 gist_alias_url =
181
181
182 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
182 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
183 ; used for access.
183 ; used for access.
184 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
184 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
185 ; came from the logged-in user who owns this authentication token.
185 ; came from the logged-in user who owns this authentication token.
186 ; Additionally @TOKEN syntax can be used to bound the view to specific
186 ; Additionally @TOKEN syntax can be used to bound the view to specific
187 ; authentication token. Such view would be only accessible when used together
187 ; authentication token. Such view would be only accessible when used together
188 ; with this authentication token
188 ; with this authentication token
189 ; list of all views can be found under `/_admin/permissions/auth_token_access`
189 ; list of all views can be found under `/_admin/permissions/auth_token_access`
190 ; The list should be "," separated and on a single line.
190 ; The list should be "," separated and on a single line.
191 ; Most common views to enable:
191 ; Most common views to enable:
192
192
193 # RepoCommitsView:repo_commit_download
193 # RepoCommitsView:repo_commit_download
194 # RepoCommitsView:repo_commit_patch
194 # RepoCommitsView:repo_commit_patch
195 # RepoCommitsView:repo_commit_raw
195 # RepoCommitsView:repo_commit_raw
196 # RepoCommitsView:repo_commit_raw@TOKEN
196 # RepoCommitsView:repo_commit_raw@TOKEN
197 # RepoFilesView:repo_files_diff
197 # RepoFilesView:repo_files_diff
198 # RepoFilesView:repo_archivefile
198 # RepoFilesView:repo_archivefile
199 # RepoFilesView:repo_file_raw
199 # RepoFilesView:repo_file_raw
200 # GistView:*
200 # GistView:*
201 api_access_controllers_whitelist =
201 api_access_controllers_whitelist =
202
202
203 ; Default encoding used to convert from and to unicode
203 ; Default encoding used to convert from and to unicode
204 ; can be also a comma separated list of encoding in case of mixed encodings
204 ; can be also a comma separated list of encoding in case of mixed encodings
205 default_encoding = UTF-8
205 default_encoding = UTF-8
206
206
207 ; instance-id prefix
207 ; instance-id prefix
208 ; a prefix key for this instance used for cache invalidation when running
208 ; a prefix key for this instance used for cache invalidation when running
209 ; multiple instances of RhodeCode, make sure it's globally unique for
209 ; multiple instances of RhodeCode, make sure it's globally unique for
210 ; all running RhodeCode instances. Leave empty if you don't use it
210 ; all running RhodeCode instances. Leave empty if you don't use it
211 instance_id =
211 instance_id =
212
212
213 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
213 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
214 ; of an authentication plugin also if it is disabled by it's settings.
214 ; of an authentication plugin also if it is disabled by it's settings.
215 ; This could be useful if you are unable to log in to the system due to broken
215 ; This could be useful if you are unable to log in to the system due to broken
216 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
216 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
217 ; module to log in again and fix the settings.
217 ; module to log in again and fix the settings.
218 ; Available builtin plugin IDs (hash is part of the ID):
218 ; Available builtin plugin IDs (hash is part of the ID):
219 ; egg:rhodecode-enterprise-ce#rhodecode
219 ; egg:rhodecode-enterprise-ce#rhodecode
220 ; egg:rhodecode-enterprise-ce#pam
220 ; egg:rhodecode-enterprise-ce#pam
221 ; egg:rhodecode-enterprise-ce#ldap
221 ; egg:rhodecode-enterprise-ce#ldap
222 ; egg:rhodecode-enterprise-ce#jasig_cas
222 ; egg:rhodecode-enterprise-ce#jasig_cas
223 ; egg:rhodecode-enterprise-ce#headers
223 ; egg:rhodecode-enterprise-ce#headers
224 ; egg:rhodecode-enterprise-ce#crowd
224 ; egg:rhodecode-enterprise-ce#crowd
225
225
226 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
226 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
227
227
228 ; Flag to control loading of legacy plugins in py:/path format
228 ; Flag to control loading of legacy plugins in py:/path format
229 auth_plugin.import_legacy_plugins = true
229 auth_plugin.import_legacy_plugins = true
230
230
231 ; alternative return HTTP header for failed authentication. Default HTTP
231 ; alternative return HTTP header for failed authentication. Default HTTP
232 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
232 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
233 ; handling that causing a series of failed authentication calls.
233 ; handling that causing a series of failed authentication calls.
234 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
234 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
235 ; This will be served instead of default 401 on bad authentication
235 ; This will be served instead of default 401 on bad authentication
236 auth_ret_code =
236 auth_ret_code =
237
237
238 ; use special detection method when serving auth_ret_code, instead of serving
238 ; use special detection method when serving auth_ret_code, instead of serving
239 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
239 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
240 ; and then serve auth_ret_code to clients
240 ; and then serve auth_ret_code to clients
241 auth_ret_code_detection = false
241 auth_ret_code_detection = false
242
242
243 ; locking return code. When repository is locked return this HTTP code. 2XX
243 ; locking return code. When repository is locked return this HTTP code. 2XX
244 ; codes don't break the transactions while 4XX codes do
244 ; codes don't break the transactions while 4XX codes do
245 lock_ret_code = 423
245 lock_ret_code = 423
246
246
247 ; Filesystem location where repositories should be stored
247 ; Filesystem location where repositories should be stored
248 repo_store.path = /var/opt/rhodecode_repo_store
248 repo_store.path = /var/opt/rhodecode_repo_store
249
249
250 ; allows to setup custom hooks in settings page
250 ; allows to setup custom hooks in settings page
251 allow_custom_hooks_settings = true
251 allow_custom_hooks_settings = true
252
252
253 ; Generated license token required for EE edition license.
253 ; Generated license token required for EE edition license.
254 ; New generated token value can be found in Admin > settings > license page.
254 ; New generated token value can be found in Admin > settings > license page.
255 license_token =
255 license_token =
256
256
257 ; This flag hides sensitive information on the license page such as token, and license data
257 ; This flag hides sensitive information on the license page such as token, and license data
258 license.hide_license_info = false
258 license.hide_license_info = false
259
259
260 ; supervisor connection uri, for managing supervisor and logs.
260 ; supervisor connection uri, for managing supervisor and logs.
261 supervisor.uri =
261 supervisor.uri =
262
262
263 ; supervisord group name/id we only want this RC instance to handle
263 ; supervisord group name/id we only want this RC instance to handle
264 supervisor.group_id = dev
264 supervisor.group_id = dev
265
265
266 ; Display extended labs settings
266 ; Display extended labs settings
267 labs_settings_active = true
267 labs_settings_active = true
268
268
269 ; Custom exception store path, defaults to TMPDIR
269 ; Custom exception store path, defaults to TMPDIR
270 ; This is used to store exception from RhodeCode in shared directory
270 ; This is used to store exception from RhodeCode in shared directory
271 #exception_tracker.store_path =
271 #exception_tracker.store_path =
272
272
273 ; Send email with exception details when it happens
273 ; Send email with exception details when it happens
274 #exception_tracker.send_email = false
274 #exception_tracker.send_email = false
275
275
276 ; Comma separated list of recipients for exception emails,
276 ; Comma separated list of recipients for exception emails,
277 ; e.g admin@rhodecode.com,devops@rhodecode.com
277 ; e.g admin@rhodecode.com,devops@rhodecode.com
278 ; Can be left empty, then emails will be sent to ALL super-admins
278 ; Can be left empty, then emails will be sent to ALL super-admins
279 #exception_tracker.send_email_recipients =
279 #exception_tracker.send_email_recipients =
280
280
281 ; optional prefix to Add to email Subject
281 ; optional prefix to Add to email Subject
282 #exception_tracker.email_prefix = [RHODECODE ERROR]
282 #exception_tracker.email_prefix = [RHODECODE ERROR]
283
283
284 ; File store configuration. This is used to store and serve uploaded files
284 ; File store configuration. This is used to store and serve uploaded files
285 file_store.enabled = true
285 file_store.enabled = true
286
286
287 ; Storage backend, available options are: local
287 ; Storage backend, available options are: local
288 file_store.backend = local
288 file_store.backend = local
289
289
290 ; path to store the uploaded binaries and artifacts
290 ; path to store the uploaded binaries and artifacts
291 file_store.storage_path = /var/opt/rhodecode_data/file_store
291 file_store.storage_path = /var/opt/rhodecode_data/file_store
292
292
293 ; Uncomment and set this path to control settings for archive download cache.
293 ; Uncomment and set this path to control settings for archive download cache.
294 ; Generated repo archives will be cached at this location
294 ; Generated repo archives will be cached at this location
295 ; and served from the cache during subsequent requests for the same archive of
295 ; and served from the cache during subsequent requests for the same archive of
296 ; the repository. This path is important to be shared across filesystems and with
296 ; the repository. This path is important to be shared across filesystems and with
297 ; RhodeCode and vcsserver
297 ; RhodeCode and vcsserver
298
298
299 ; Redis url to acquire/check generation of archives locks
300 archive_cache.locking.url = redis://redis:6379/1
301
302 ; Storage backend, only 'filesystem' is available now
303 archive_cache.backend.type = filesystem
304
299 ; Default is $cache_dir/archive_cache if not set
305 ; Default is $cache_dir/archive_cache if not set
300 archive_cache.store_dir = /var/opt/rhodecode_data/tarballcache
306 archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache
301
307
302 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
308 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
303 archive_cache.cache_size_gb = 10
309 archive_cache.filesystem.cache_size_gb = 1
310
311 ; Eviction policy used to clear out after cache_size_gb limit is reached
312 archive_cache.filesystem.eviction_policy = least-recently-stored
304
313
305 ; By default cache uses sharding technique, this specifies how many shards are there
314 ; By default cache uses sharding technique, this specifies how many shards are there
306 archive_cache.cache_shards = 4
315 archive_cache.filesystem.cache_shards = 8
316
307
317
308 ; #############
318 ; #############
309 ; CELERY CONFIG
319 ; CELERY CONFIG
310 ; #############
320 ; #############
311
321
312 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
322 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
313
323
314 use_celery = true
324 use_celery = true
315
325
316 ; path to store schedule database
326 ; path to store schedule database
317 #celerybeat-schedule.path =
327 #celerybeat-schedule.path =
318
328
319 ; connection url to the message broker (default redis)
329 ; connection url to the message broker (default redis)
320 celery.broker_url = redis://redis:6379/8
330 celery.broker_url = redis://redis:6379/8
321
331
322 ; results backend to get results for (default redis)
332 ; results backend to get results for (default redis)
323 celery.result_backend = redis://redis:6379/8
333 celery.result_backend = redis://redis:6379/8
324
334
325 ; rabbitmq example
335 ; rabbitmq example
326 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
336 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
327
337
328 ; maximum tasks to execute before worker restart
338 ; maximum tasks to execute before worker restart
329 celery.max_tasks_per_child = 20
339 celery.max_tasks_per_child = 20
330
340
331 ; tasks will never be sent to the queue, but executed locally instead.
341 ; tasks will never be sent to the queue, but executed locally instead.
332 celery.task_always_eager = false
342 celery.task_always_eager = false
333
343
334 ; #############
344 ; #############
335 ; DOGPILE CACHE
345 ; DOGPILE CACHE
336 ; #############
346 ; #############
337
347
338 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
348 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
339 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
349 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
340 cache_dir = /var/opt/rhodecode_data
350 cache_dir = /var/opt/rhodecode_data
341
351
342 ; *********************************************
352 ; *********************************************
343 ; `sql_cache_short` cache for heavy SQL queries
353 ; `sql_cache_short` cache for heavy SQL queries
344 ; Only supported backend is `memory_lru`
354 ; Only supported backend is `memory_lru`
345 ; *********************************************
355 ; *********************************************
346 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
356 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
347 rc_cache.sql_cache_short.expiration_time = 30
357 rc_cache.sql_cache_short.expiration_time = 30
348
358
349
359
350 ; *****************************************************
360 ; *****************************************************
351 ; `cache_repo_longterm` cache for repo object instances
361 ; `cache_repo_longterm` cache for repo object instances
352 ; Only supported backend is `memory_lru`
362 ; Only supported backend is `memory_lru`
353 ; *****************************************************
363 ; *****************************************************
354 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
364 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
355 ; by default we use 30 Days, cache is still invalidated on push
365 ; by default we use 30 Days, cache is still invalidated on push
356 rc_cache.cache_repo_longterm.expiration_time = 2592000
366 rc_cache.cache_repo_longterm.expiration_time = 2592000
357 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
367 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
358 rc_cache.cache_repo_longterm.max_size = 10000
368 rc_cache.cache_repo_longterm.max_size = 10000
359
369
360
370
361 ; *********************************************
371 ; *********************************************
362 ; `cache_general` cache for general purpose use
372 ; `cache_general` cache for general purpose use
363 ; for simplicity use rc.file_namespace backend,
373 ; for simplicity use rc.file_namespace backend,
364 ; for performance and scale use rc.redis
374 ; for performance and scale use rc.redis
365 ; *********************************************
375 ; *********************************************
366 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
376 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
367 rc_cache.cache_general.expiration_time = 43200
377 rc_cache.cache_general.expiration_time = 43200
368 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
378 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
369 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
379 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
370
380
371 ; alternative `cache_general` redis backend with distributed lock
381 ; alternative `cache_general` redis backend with distributed lock
372 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
382 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
373 #rc_cache.cache_general.expiration_time = 300
383 #rc_cache.cache_general.expiration_time = 300
374
384
375 ; redis_expiration_time needs to be greater than expiration_time
385 ; redis_expiration_time needs to be greater than expiration_time
376 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
386 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
377
387
378 #rc_cache.cache_general.arguments.host = localhost
388 #rc_cache.cache_general.arguments.host = localhost
379 #rc_cache.cache_general.arguments.port = 6379
389 #rc_cache.cache_general.arguments.port = 6379
380 #rc_cache.cache_general.arguments.db = 0
390 #rc_cache.cache_general.arguments.db = 0
381 #rc_cache.cache_general.arguments.socket_timeout = 30
391 #rc_cache.cache_general.arguments.socket_timeout = 30
382 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
392 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
383 #rc_cache.cache_general.arguments.distributed_lock = true
393 #rc_cache.cache_general.arguments.distributed_lock = true
384
394
385 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
395 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
386 #rc_cache.cache_general.arguments.lock_auto_renewal = true
396 #rc_cache.cache_general.arguments.lock_auto_renewal = true
387
397
388 ; *************************************************
398 ; *************************************************
389 ; `cache_perms` cache for permission tree, auth TTL
399 ; `cache_perms` cache for permission tree, auth TTL
390 ; for simplicity use rc.file_namespace backend,
400 ; for simplicity use rc.file_namespace backend,
391 ; for performance and scale use rc.redis
401 ; for performance and scale use rc.redis
392 ; *************************************************
402 ; *************************************************
393 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
403 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
394 rc_cache.cache_perms.expiration_time = 3600
404 rc_cache.cache_perms.expiration_time = 3600
395 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
405 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
396 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
406 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
397
407
398 ; alternative `cache_perms` redis backend with distributed lock
408 ; alternative `cache_perms` redis backend with distributed lock
399 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
409 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
400 #rc_cache.cache_perms.expiration_time = 300
410 #rc_cache.cache_perms.expiration_time = 300
401
411
402 ; redis_expiration_time needs to be greater than expiration_time
412 ; redis_expiration_time needs to be greater than expiration_time
403 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
413 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
404
414
405 #rc_cache.cache_perms.arguments.host = localhost
415 #rc_cache.cache_perms.arguments.host = localhost
406 #rc_cache.cache_perms.arguments.port = 6379
416 #rc_cache.cache_perms.arguments.port = 6379
407 #rc_cache.cache_perms.arguments.db = 0
417 #rc_cache.cache_perms.arguments.db = 0
408 #rc_cache.cache_perms.arguments.socket_timeout = 30
418 #rc_cache.cache_perms.arguments.socket_timeout = 30
409 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
419 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
410 #rc_cache.cache_perms.arguments.distributed_lock = true
420 #rc_cache.cache_perms.arguments.distributed_lock = true
411
421
412 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
422 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
413 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
423 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
414
424
415 ; ***************************************************
425 ; ***************************************************
416 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
426 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
417 ; for simplicity use rc.file_namespace backend,
427 ; for simplicity use rc.file_namespace backend,
418 ; for performance and scale use rc.redis
428 ; for performance and scale use rc.redis
419 ; ***************************************************
429 ; ***************************************************
420 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
430 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
421 rc_cache.cache_repo.expiration_time = 2592000
431 rc_cache.cache_repo.expiration_time = 2592000
422 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
432 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
423 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
433 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
424
434
425 ; alternative `cache_repo` redis backend with distributed lock
435 ; alternative `cache_repo` redis backend with distributed lock
426 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
436 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
427 #rc_cache.cache_repo.expiration_time = 2592000
437 #rc_cache.cache_repo.expiration_time = 2592000
428
438
429 ; redis_expiration_time needs to be greater than expiration_time
439 ; redis_expiration_time needs to be greater than expiration_time
430 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
440 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
431
441
432 #rc_cache.cache_repo.arguments.host = localhost
442 #rc_cache.cache_repo.arguments.host = localhost
433 #rc_cache.cache_repo.arguments.port = 6379
443 #rc_cache.cache_repo.arguments.port = 6379
434 #rc_cache.cache_repo.arguments.db = 1
444 #rc_cache.cache_repo.arguments.db = 1
435 #rc_cache.cache_repo.arguments.socket_timeout = 30
445 #rc_cache.cache_repo.arguments.socket_timeout = 30
436 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
446 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
437 #rc_cache.cache_repo.arguments.distributed_lock = true
447 #rc_cache.cache_repo.arguments.distributed_lock = true
438
448
439 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
449 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
440 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
450 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
441
451
442 ; ##############
452 ; ##############
443 ; BEAKER SESSION
453 ; BEAKER SESSION
444 ; ##############
454 ; ##############
445
455
446 ; beaker.session.type is type of storage options for the logged users sessions. Current allowed
456 ; beaker.session.type is type of storage options for the logged users sessions. Current allowed
447 ; types are file, ext:redis, ext:database, ext:memcached
457 ; types are file, ext:redis, ext:database, ext:memcached
448 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
458 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
449 #beaker.session.type = file
459 #beaker.session.type = file
450 #beaker.session.data_dir = %(here)s/data/sessions
460 #beaker.session.data_dir = %(here)s/data/sessions
451
461
452 ; Redis based sessions
462 ; Redis based sessions
453 beaker.session.type = ext:redis
463 beaker.session.type = ext:redis
454 beaker.session.url = redis://redis:6379/2
464 beaker.session.url = redis://redis:6379/2
455
465
456 ; DB based session, fast, and allows easy management over logged in users
466 ; DB based session, fast, and allows easy management over logged in users
457 #beaker.session.type = ext:database
467 #beaker.session.type = ext:database
458 #beaker.session.table_name = db_session
468 #beaker.session.table_name = db_session
459 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
469 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
460 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
470 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
461 #beaker.session.sa.pool_recycle = 3600
471 #beaker.session.sa.pool_recycle = 3600
462 #beaker.session.sa.echo = false
472 #beaker.session.sa.echo = false
463
473
464 beaker.session.key = rhodecode
474 beaker.session.key = rhodecode
465 beaker.session.secret = develop-rc-uytcxaz
475 beaker.session.secret = develop-rc-uytcxaz
466 beaker.session.lock_dir = /data_ramdisk/lock
476 beaker.session.lock_dir = /data_ramdisk/lock
467
477
468 ; Secure encrypted cookie. Requires AES and AES python libraries
478 ; Secure encrypted cookie. Requires AES and AES python libraries
469 ; you must disable beaker.session.secret to use this
479 ; you must disable beaker.session.secret to use this
470 #beaker.session.encrypt_key = key_for_encryption
480 #beaker.session.encrypt_key = key_for_encryption
471 #beaker.session.validate_key = validation_key
481 #beaker.session.validate_key = validation_key
472
482
473 ; Sets session as invalid (also logging out user) if it has not been
483 ; Sets session as invalid (also logging out user) if it has not been
474 ; accessed for given amount of time in seconds
484 ; accessed for given amount of time in seconds
475 beaker.session.timeout = 2592000
485 beaker.session.timeout = 2592000
476 beaker.session.httponly = true
486 beaker.session.httponly = true
477
487
478 ; Path to use for the cookie. Set to prefix if you use prefix middleware
488 ; Path to use for the cookie. Set to prefix if you use prefix middleware
479 #beaker.session.cookie_path = /custom_prefix
489 #beaker.session.cookie_path = /custom_prefix
480
490
481 ; Set https secure cookie
491 ; Set https secure cookie
482 beaker.session.secure = false
492 beaker.session.secure = false
483
493
484 ; default cookie expiration time in seconds, set to `true` to set expire
494 ; default cookie expiration time in seconds, set to `true` to set expire
485 ; at browser close
495 ; at browser close
486 #beaker.session.cookie_expires = 3600
496 #beaker.session.cookie_expires = 3600
487
497
488 ; #############################
498 ; #############################
489 ; SEARCH INDEXING CONFIGURATION
499 ; SEARCH INDEXING CONFIGURATION
490 ; #############################
500 ; #############################
491
501
492 ; Full text search indexer is available in rhodecode-tools under
502 ; Full text search indexer is available in rhodecode-tools under
493 ; `rhodecode-tools index` command
503 ; `rhodecode-tools index` command
494
504
495 ; WHOOSH Backend, doesn't require additional services to run
505 ; WHOOSH Backend, doesn't require additional services to run
496 ; it works well with a few dozen repos
506 ; it works well with a few dozen repos
497 search.module = rhodecode.lib.index.whoosh
507 search.module = rhodecode.lib.index.whoosh
498 search.location = %(here)s/data/index
508 search.location = %(here)s/data/index
499
509
500 ; ####################
510 ; ####################
501 ; CHANNELSTREAM CONFIG
511 ; CHANNELSTREAM CONFIG
502 ; ####################
512 ; ####################
503
513
504 ; channelstream enables persistent connections and live notification
514 ; channelstream enables persistent connections and live notification
505 ; in the system. It's also used by the chat system
515 ; in the system. It's also used by the chat system
506
516
507 channelstream.enabled = true
517 channelstream.enabled = true
508
518
509 ; server address for channelstream server on the backend
519 ; server address for channelstream server on the backend
510 channelstream.server = channelstream:9800
520 channelstream.server = channelstream:9800
511
521
512 ; location of the channelstream server from outside world
522 ; location of the channelstream server from outside world
513 ; use ws:// for http or wss:// for https. This address needs to be handled
523 ; use ws:// for http or wss:// for https. This address needs to be handled
514 ; by external HTTP server such as Nginx or Apache
524 ; by external HTTP server such as Nginx or Apache
515 ; see Nginx/Apache configuration examples in our docs
525 ; see Nginx/Apache configuration examples in our docs
516 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
526 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
517 channelstream.secret = ENV_GENERATED
527 channelstream.secret = ENV_GENERATED
518 channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
528 channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
519
529
520 ; Internal application path that Javascript uses to connect into.
530 ; Internal application path that Javascript uses to connect into.
521 ; If you use proxy-prefix the prefix should be added before /_channelstream
531 ; If you use proxy-prefix the prefix should be added before /_channelstream
522 channelstream.proxy_path = /_channelstream
532 channelstream.proxy_path = /_channelstream
523
533
524
534
525 ; ##############################
535 ; ##############################
526 ; MAIN RHODECODE DATABASE CONFIG
536 ; MAIN RHODECODE DATABASE CONFIG
527 ; ##############################
537 ; ##############################
528
538
529 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
539 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
530 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
540 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
531 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
541 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
532 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
542 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
533 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
543 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
534
544
535 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
545 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
536
546
537 ; see sqlalchemy docs for other advanced settings
547 ; see sqlalchemy docs for other advanced settings
538 ; print the sql statements to output
548 ; print the sql statements to output
539 sqlalchemy.db1.echo = false
549 sqlalchemy.db1.echo = false
540
550
541 ; recycle the connections after this amount of seconds
551 ; recycle the connections after this amount of seconds
542 sqlalchemy.db1.pool_recycle = 3600
552 sqlalchemy.db1.pool_recycle = 3600
543
553
544 ; the number of connections to keep open inside the connection pool.
554 ; the number of connections to keep open inside the connection pool.
545 ; 0 indicates no limit
555 ; 0 indicates no limit
546 ; the general calculus with gevent is:
556 ; the general calculus with gevent is:
547 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
557 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
548 ; then increase pool size + max overflow so that they add up to 500.
558 ; then increase pool size + max overflow so that they add up to 500.
549 #sqlalchemy.db1.pool_size = 5
559 #sqlalchemy.db1.pool_size = 5
550
560
551 ; The number of connections to allow in connection pool "overflow", that is
561 ; The number of connections to allow in connection pool "overflow", that is
552 ; connections that can be opened above and beyond the pool_size setting,
562 ; connections that can be opened above and beyond the pool_size setting,
553 ; which defaults to five.
563 ; which defaults to five.
554 #sqlalchemy.db1.max_overflow = 10
564 #sqlalchemy.db1.max_overflow = 10
555
565
556 ; Connection check ping, used to detect broken database connections
566 ; Connection check ping, used to detect broken database connections
557 ; could be enabled to better handle cases if MySQL has gone away errors
567 ; could be enabled to better handle cases if MySQL has gone away errors
558 #sqlalchemy.db1.ping_connection = true
568 #sqlalchemy.db1.ping_connection = true
559
569
560 ; ##########
570 ; ##########
561 ; VCS CONFIG
571 ; VCS CONFIG
562 ; ##########
572 ; ##########
563 vcs.server.enable = true
573 vcs.server.enable = true
564 vcs.server = vcsserver:10010
574 vcs.server = vcsserver:10010
565
575
566 ; Web server connectivity protocol, responsible for web based VCS operations
576 ; Web server connectivity protocol, responsible for web based VCS operations
567 ; Available protocols are:
577 ; Available protocols are:
568 ; `http` - use http-rpc backend (default)
578 ; `http` - use http-rpc backend (default)
569 vcs.server.protocol = http
579 vcs.server.protocol = http
570
580
571 ; Push/Pull operations protocol, available options are:
581 ; Push/Pull operations protocol, available options are:
572 ; `http` - use http-rpc backend (default)
582 ; `http` - use http-rpc backend (default)
573 vcs.scm_app_implementation = http
583 vcs.scm_app_implementation = http
574
584
575 ; Push/Pull operations hooks protocol, available options are:
585 ; Push/Pull operations hooks protocol, available options are:
576 ; `http` - use http-rpc backend (default)
586 ; `http` - use http-rpc backend (default)
577 ; `celery` - use celery based hooks
587 ; `celery` - use celery based hooks
578 vcs.hooks.protocol = http
588 vcs.hooks.protocol = http
579
589
580 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
590 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
581 ; accessible via network.
591 ; accessible via network.
582 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
592 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
583 vcs.hooks.host = *
593 vcs.hooks.host = *
584
594
585 ; Start VCSServer with this instance as a subprocess, useful for development
595 ; Start VCSServer with this instance as a subprocess, useful for development
586 vcs.start_server = false
596 vcs.start_server = false
587
597
588 ; List of enabled VCS backends, available options are:
598 ; List of enabled VCS backends, available options are:
589 ; `hg` - mercurial
599 ; `hg` - mercurial
590 ; `git` - git
600 ; `git` - git
591 ; `svn` - subversion
601 ; `svn` - subversion
592 vcs.backends = hg, git, svn
602 vcs.backends = hg, git, svn
593
603
594 ; Wait this number of seconds before killing connection to the vcsserver
604 ; Wait this number of seconds before killing connection to the vcsserver
595 vcs.connection_timeout = 3600
605 vcs.connection_timeout = 3600
596
606
597 ; Cache flag to cache vcsserver remote calls locally
607 ; Cache flag to cache vcsserver remote calls locally
598 ; It uses cache_region `cache_repo`
608 ; It uses cache_region `cache_repo`
599 vcs.methods.cache = true
609 vcs.methods.cache = true
600
610
601 ; ####################################################
611 ; ####################################################
602 ; Subversion proxy support (mod_dav_svn)
612 ; Subversion proxy support (mod_dav_svn)
603 ; Maps RhodeCode repo groups into SVN paths for Apache
613 ; Maps RhodeCode repo groups into SVN paths for Apache
604 ; ####################################################
614 ; ####################################################
605
615
606 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
616 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
607 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
617 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
608 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
618 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
609 #vcs.svn.compatible_version = 1.8
619 #vcs.svn.compatible_version = 1.8
610
620
611 ; Enable SVN proxy of requests over HTTP
621 ; Enable SVN proxy of requests over HTTP
612 vcs.svn.proxy.enabled = true
622 vcs.svn.proxy.enabled = true
613
623
614 ; host to connect to running SVN subsystem
624 ; host to connect to running SVN subsystem
615 vcs.svn.proxy.host = http://svn:8090
625 vcs.svn.proxy.host = http://svn:8090
616
626
617 ; Enable or disable the config file generation.
627 ; Enable or disable the config file generation.
618 svn.proxy.generate_config = true
628 svn.proxy.generate_config = true
619
629
620 ; Generate config file with `SVNListParentPath` set to `On`.
630 ; Generate config file with `SVNListParentPath` set to `On`.
621 svn.proxy.list_parent_path = true
631 svn.proxy.list_parent_path = true
622
632
623 ; Set location and file name of generated config file.
633 ; Set location and file name of generated config file.
624 svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
634 svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
625
635
626 ; alternative mod_dav config template. This needs to be a valid mako template
636 ; alternative mod_dav config template. This needs to be a valid mako template
627 ; Example template can be found in the source code:
637 ; Example template can be found in the source code:
628 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
638 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
629 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
639 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
630
640
631 ; Used as a prefix to the `Location` block in the generated config file.
641 ; Used as a prefix to the `Location` block in the generated config file.
632 ; In most cases it should be set to `/`.
642 ; In most cases it should be set to `/`.
633 svn.proxy.location_root = /
643 svn.proxy.location_root = /
634
644
635 ; Command to reload the mod dav svn configuration on change.
645 ; Command to reload the mod dav svn configuration on change.
636 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
646 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
637 ; Make sure user who runs RhodeCode process is allowed to reload Apache
647 ; Make sure user who runs RhodeCode process is allowed to reload Apache
638 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
648 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
639
649
640 ; If the timeout expires before the reload command finishes, the command will
650 ; If the timeout expires before the reload command finishes, the command will
641 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
651 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
642 #svn.proxy.reload_timeout = 10
652 #svn.proxy.reload_timeout = 10
643
653
644 ; ####################
654 ; ####################
645 ; SSH Support Settings
655 ; SSH Support Settings
646 ; ####################
656 ; ####################
647
657
648 ; Defines if a custom authorized_keys file should be created and written on
658 ; Defines if a custom authorized_keys file should be created and written on
649 ; any change of user SSH keys. Setting this to false also disables possibility
659 ; any change of user SSH keys. Setting this to false also disables possibility
650 ; of adding SSH keys by users from web interface. Super admins can still
660 ; of adding SSH keys by users from web interface. Super admins can still
651 ; manage SSH Keys.
661 ; manage SSH Keys.
652 ssh.generate_authorized_keyfile = true
662 ssh.generate_authorized_keyfile = true
653
663
654 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
664 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
655 # ssh.authorized_keys_ssh_opts =
665 # ssh.authorized_keys_ssh_opts =
656
666
657 ; Path to the authorized_keys file where the generated entries are placed.
667 ; Path to the authorized_keys file where the generated entries are placed.
658 ; It is possible to have multiple key files specified in `sshd_config` e.g.
668 ; It is possible to have multiple key files specified in `sshd_config` e.g.
659 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
669 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
660 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
670 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
661
671
662 ; Command to execute the SSH wrapper. The binary is available in the
672 ; Command to execute the SSH wrapper. The binary is available in the
663 ; RhodeCode installation directory.
673 ; RhodeCode installation directory.
664 ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
674 ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
665 ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
675 ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
666 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
676 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
667
677
668 ; Allow shell when executing the ssh-wrapper command
678 ; Allow shell when executing the ssh-wrapper command
669 ssh.wrapper_cmd_allow_shell = false
679 ssh.wrapper_cmd_allow_shell = false
670
680
671 ; Enables logging, and detailed output send back to the client during SSH
681 ; Enables logging, and detailed output send back to the client during SSH
672 ; operations. Useful for debugging, shouldn't be used in production.
682 ; operations. Useful for debugging, shouldn't be used in production.
673 ssh.enable_debug_logging = true
683 ssh.enable_debug_logging = true
674
684
675 ; Paths to binary executable, by default they are the names, but we can
685 ; Paths to binary executable, by default they are the names, but we can
676 ; override them if we want to use a custom one
686 ; override them if we want to use a custom one
677 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
687 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
678 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
688 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
679 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
689 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
680
690
681 ; Enables SSH key generator web interface. Disabling this still allows users
691 ; Enables SSH key generator web interface. Disabling this still allows users
682 ; to add their own keys.
692 ; to add their own keys.
683 ssh.enable_ui_key_generator = true
693 ssh.enable_ui_key_generator = true
684
694
685 ; Statsd client config, this is used to send metrics to statsd
695 ; Statsd client config, this is used to send metrics to statsd
686 ; We recommend setting statsd_exported and scrape them using Prometheus
696 ; We recommend setting statsd_exported and scrape them using Prometheus
687 #statsd.enabled = false
697 #statsd.enabled = false
688 #statsd.statsd_host = 0.0.0.0
698 #statsd.statsd_host = 0.0.0.0
689 #statsd.statsd_port = 8125
699 #statsd.statsd_port = 8125
690 #statsd.statsd_prefix =
700 #statsd.statsd_prefix =
691 #statsd.statsd_ipv6 = false
701 #statsd.statsd_ipv6 = false
692
702
693 ; configure logging automatically at server startup set to false
703 ; configure logging automatically at server startup set to false
694 ; to use the below custom logging config.
704 ; to use the below custom logging config.
695 ; RC_LOGGING_FORMATTER
705 ; RC_LOGGING_FORMATTER
696 ; RC_LOGGING_LEVEL
706 ; RC_LOGGING_LEVEL
697 ; env variables can control the settings for logging in case of autoconfigure
707 ; env variables can control the settings for logging in case of autoconfigure
698
708
699 #logging.autoconfigure = true
709 #logging.autoconfigure = true
700
710
701 ; specify your own custom logging config file to configure logging
711 ; specify your own custom logging config file to configure logging
702 #logging.logging_conf_file = /path/to/custom_logging.ini
712 #logging.logging_conf_file = /path/to/custom_logging.ini
703
713
704 ; Dummy marker to add new entries after.
714 ; Dummy marker to add new entries after.
705 ; Add any custom entries below. Please don't remove this marker.
715 ; Add any custom entries below. Please don't remove this marker.
706 custom.conf = 1
716 custom.conf = 1
707
717
708
718
709 ; #####################
719 ; #####################
710 ; LOGGING CONFIGURATION
720 ; LOGGING CONFIGURATION
711 ; #####################
721 ; #####################
712
722
713 [loggers]
723 [loggers]
714 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
724 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
715
725
716 [handlers]
726 [handlers]
717 keys = console, console_sql
727 keys = console, console_sql
718
728
719 [formatters]
729 [formatters]
720 keys = generic, json, color_formatter, color_formatter_sql
730 keys = generic, json, color_formatter, color_formatter_sql
721
731
722 ; #######
732 ; #######
723 ; LOGGERS
733 ; LOGGERS
724 ; #######
734 ; #######
725 [logger_root]
735 [logger_root]
726 level = NOTSET
736 level = NOTSET
727 handlers = console
737 handlers = console
728
738
729 [logger_sqlalchemy]
739 [logger_sqlalchemy]
730 level = INFO
740 level = INFO
731 handlers = console_sql
741 handlers = console_sql
732 qualname = sqlalchemy.engine
742 qualname = sqlalchemy.engine
733 propagate = 0
743 propagate = 0
734
744
735 [logger_beaker]
745 [logger_beaker]
736 level = DEBUG
746 level = DEBUG
737 handlers =
747 handlers =
738 qualname = beaker.container
748 qualname = beaker.container
739 propagate = 1
749 propagate = 1
740
750
741 [logger_rhodecode]
751 [logger_rhodecode]
742 level = DEBUG
752 level = DEBUG
743 handlers =
753 handlers =
744 qualname = rhodecode
754 qualname = rhodecode
745 propagate = 1
755 propagate = 1
746
756
747 [logger_ssh_wrapper]
757 [logger_ssh_wrapper]
748 level = DEBUG
758 level = DEBUG
749 handlers =
759 handlers =
750 qualname = ssh_wrapper
760 qualname = ssh_wrapper
751 propagate = 1
761 propagate = 1
752
762
753 [logger_celery]
763 [logger_celery]
754 level = DEBUG
764 level = DEBUG
755 handlers =
765 handlers =
756 qualname = celery
766 qualname = celery
757
767
758
768
759 ; ########
769 ; ########
760 ; HANDLERS
770 ; HANDLERS
761 ; ########
771 ; ########
762
772
763 [handler_console]
773 [handler_console]
764 class = StreamHandler
774 class = StreamHandler
765 args = (sys.stderr, )
775 args = (sys.stderr, )
766 level = DEBUG
776 level = DEBUG
767 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
777 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
768 ; This allows sending properly formatted logs to grafana loki or elasticsearch
778 ; This allows sending properly formatted logs to grafana loki or elasticsearch
769 formatter = color_formatter
779 formatter = color_formatter
770
780
771 [handler_console_sql]
781 [handler_console_sql]
772 ; "level = DEBUG" logs SQL queries and results.
782 ; "level = DEBUG" logs SQL queries and results.
773 ; "level = INFO" logs SQL queries.
783 ; "level = INFO" logs SQL queries.
774 ; "level = WARN" logs neither. (Recommended for production systems.)
784 ; "level = WARN" logs neither. (Recommended for production systems.)
775 class = StreamHandler
785 class = StreamHandler
776 args = (sys.stderr, )
786 args = (sys.stderr, )
777 level = WARN
787 level = WARN
778 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
788 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
779 ; This allows sending properly formatted logs to grafana loki or elasticsearch
789 ; This allows sending properly formatted logs to grafana loki or elasticsearch
780 formatter = color_formatter_sql
790 formatter = color_formatter_sql
781
791
782 ; ##########
792 ; ##########
783 ; FORMATTERS
793 ; FORMATTERS
784 ; ##########
794 ; ##########
785
795
786 [formatter_generic]
796 [formatter_generic]
787 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
797 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
788 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
798 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
789 datefmt = %Y-%m-%d %H:%M:%S
799 datefmt = %Y-%m-%d %H:%M:%S
790
800
791 [formatter_color_formatter]
801 [formatter_color_formatter]
792 class = rhodecode.lib.logging_formatter.ColorFormatter
802 class = rhodecode.lib.logging_formatter.ColorFormatter
793 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
803 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
794 datefmt = %Y-%m-%d %H:%M:%S
804 datefmt = %Y-%m-%d %H:%M:%S
795
805
796 [formatter_color_formatter_sql]
806 [formatter_color_formatter_sql]
797 class = rhodecode.lib.logging_formatter.ColorFormatterSql
807 class = rhodecode.lib.logging_formatter.ColorFormatterSql
798 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
808 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
799 datefmt = %Y-%m-%d %H:%M:%S
809 datefmt = %Y-%m-%d %H:%M:%S
800
810
801 [formatter_json]
811 [formatter_json]
802 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
812 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
803 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
813 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
@@ -1,771 +1,781 b''
1
1
2 ; #########################################
2 ; #########################################
3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 ; #########################################
4 ; #########################################
5
5
6 [DEFAULT]
6 [DEFAULT]
7 ; Debug flag sets all loggers to debug, and enables request tracking
7 ; Debug flag sets all loggers to debug, and enables request tracking
8 debug = false
8 debug = false
9
9
10 ; ########################################################################
10 ; ########################################################################
11 ; EMAIL CONFIGURATION
11 ; EMAIL CONFIGURATION
12 ; These settings will be used by the RhodeCode mailing system
12 ; These settings will be used by the RhodeCode mailing system
13 ; ########################################################################
13 ; ########################################################################
14
14
15 ; prefix all emails subjects with given prefix, helps filtering out emails
15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 #email_prefix = [RhodeCode]
16 #email_prefix = [RhodeCode]
17
17
18 ; email FROM address all mails will be sent
18 ; email FROM address all mails will be sent
19 #app_email_from = rhodecode-noreply@localhost
19 #app_email_from = rhodecode-noreply@localhost
20
20
21 #smtp_server = mail.server.com
21 #smtp_server = mail.server.com
22 #smtp_username =
22 #smtp_username =
23 #smtp_password =
23 #smtp_password =
24 #smtp_port =
24 #smtp_port =
25 #smtp_use_tls = false
25 #smtp_use_tls = false
26 #smtp_use_ssl = true
26 #smtp_use_ssl = true
27
27
28 [server:main]
28 [server:main]
29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
30 ; Host port for gunicorn are controlled by gunicorn_conf.py
30 ; Host port for gunicorn are controlled by gunicorn_conf.py
31 host = 127.0.0.1
31 host = 127.0.0.1
32 port = 10020
32 port = 10020
33
33
34
34
35 ; ###########################
35 ; ###########################
36 ; GUNICORN APPLICATION SERVER
36 ; GUNICORN APPLICATION SERVER
37 ; ###########################
37 ; ###########################
38
38
39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
40
40
41 ; Module to use, this setting shouldn't be changed
41 ; Module to use, this setting shouldn't be changed
42 use = egg:gunicorn#main
42 use = egg:gunicorn#main
43
43
44 ; Prefix middleware for RhodeCode.
44 ; Prefix middleware for RhodeCode.
45 ; recommended when using proxy setup.
45 ; recommended when using proxy setup.
46 ; allows to set RhodeCode under a prefix in server.
46 ; allows to set RhodeCode under a prefix in server.
47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
48 ; And set your prefix like: `prefix = /custom_prefix`
48 ; And set your prefix like: `prefix = /custom_prefix`
49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
50 ; to make your cookies only work on prefix url
50 ; to make your cookies only work on prefix url
51 [filter:proxy-prefix]
51 [filter:proxy-prefix]
52 use = egg:PasteDeploy#prefix
52 use = egg:PasteDeploy#prefix
53 prefix = /
53 prefix = /
54
54
55 [app:main]
55 [app:main]
56 ; The %(here)s variable will be replaced with the absolute path of parent directory
56 ; The %(here)s variable will be replaced with the absolute path of parent directory
57 ; of this file
57 ; of this file
58 ; Each option in the app:main can be overridden by an environment variable
58 ; Each option in the app:main can be overridden by an environment variable
59 ;
59 ;
60 ;To override an option:
60 ;To override an option:
61 ;
61 ;
62 ;RC_<KeyName>
62 ;RC_<KeyName>
63 ;Everything should be uppercase, . and - should be replaced by _.
63 ;Everything should be uppercase, . and - should be replaced by _.
64 ;For example, if you have these configuration settings:
64 ;For example, if you have these configuration settings:
65 ;rc_cache.repo_object.backend = foo
65 ;rc_cache.repo_object.backend = foo
66 ;can be overridden by
66 ;can be overridden by
67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
68
68
69 use = egg:rhodecode-enterprise-ce
69 use = egg:rhodecode-enterprise-ce
70
70
71 ; enable proxy prefix middleware, defined above
71 ; enable proxy prefix middleware, defined above
72 #filter-with = proxy-prefix
72 #filter-with = proxy-prefix
73
73
74 ; encryption key used to encrypt social plugin tokens,
74 ; encryption key used to encrypt social plugin tokens,
75 ; remote_urls with credentials etc, if not set it defaults to
75 ; remote_urls with credentials etc, if not set it defaults to
76 ; `beaker.session.secret`
76 ; `beaker.session.secret`
77 #rhodecode.encrypted_values.secret =
77 #rhodecode.encrypted_values.secret =
78
78
79 ; decryption strict mode (enabled by default). It controls if decryption raises
79 ; decryption strict mode (enabled by default). It controls if decryption raises
80 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
80 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
81 #rhodecode.encrypted_values.strict = false
81 #rhodecode.encrypted_values.strict = false
82
82
83 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
83 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
84 ; fernet is safer, and we strongly recommend switching to it.
84 ; fernet is safer, and we strongly recommend switching to it.
85 ; Due to backward compatibility aes is used as default.
85 ; Due to backward compatibility aes is used as default.
86 #rhodecode.encrypted_values.algorithm = fernet
86 #rhodecode.encrypted_values.algorithm = fernet
87
87
88 ; Return gzipped responses from RhodeCode (static files/application)
88 ; Return gzipped responses from RhodeCode (static files/application)
89 gzip_responses = false
89 gzip_responses = false
90
90
91 ; Auto-generate javascript routes file on startup
91 ; Auto-generate javascript routes file on startup
92 generate_js_files = false
92 generate_js_files = false
93
93
94 ; System global default language.
94 ; System global default language.
95 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
95 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
96 lang = en
96 lang = en
97
97
98 ; Perform a full repository scan and import on each server start.
98 ; Perform a full repository scan and import on each server start.
99 ; Setting this to true could lead to very long startup time.
99 ; Setting this to true could lead to very long startup time.
100 startup.import_repos = false
100 startup.import_repos = false
101
101
102 ; URL at which the application is running. This is used for Bootstrapping
102 ; URL at which the application is running. This is used for Bootstrapping
103 ; requests in context when no web request is available. Used in ishell, or
103 ; requests in context when no web request is available. Used in ishell, or
104 ; SSH calls. Set this for events to receive proper url for SSH calls.
104 ; SSH calls. Set this for events to receive proper url for SSH calls.
105 app.base_url = http://rhodecode.local
105 app.base_url = http://rhodecode.local
106
106
107 ; Host at which the Service API is running.
107 ; Host at which the Service API is running.
108 app.service_api.host = http://rhodecode.local:10020
108 app.service_api.host = http://rhodecode.local:10020
109
109
110 ; Secret for Service API authentication.
110 ; Secret for Service API authentication.
111 app.service_api.token =
111 app.service_api.token =
112
112
113 ; Unique application ID. Should be a random unique string for security.
113 ; Unique application ID. Should be a random unique string for security.
114 app_instance_uuid = rc-production
114 app_instance_uuid = rc-production
115
115
116 ; Cut off limit for large diffs (size in bytes). If overall diff size on
116 ; Cut off limit for large diffs (size in bytes). If overall diff size on
117 ; commit, or pull request exceeds this limit this diff will be displayed
117 ; commit, or pull request exceeds this limit this diff will be displayed
118 ; partially. E.g 512000 == 512Kb
118 ; partially. E.g 512000 == 512Kb
119 cut_off_limit_diff = 512000
119 cut_off_limit_diff = 512000
120
120
121 ; Cut off limit for large files inside diffs (size in bytes). Each individual
121 ; Cut off limit for large files inside diffs (size in bytes). Each individual
122 ; file inside diff which exceeds this limit will be displayed partially.
122 ; file inside diff which exceeds this limit will be displayed partially.
123 ; E.g 128000 == 128Kb
123 ; E.g 128000 == 128Kb
124 cut_off_limit_file = 128000
124 cut_off_limit_file = 128000
125
125
126 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
126 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
127 vcs_full_cache = true
127 vcs_full_cache = true
128
128
129 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
129 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
130 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
130 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
131 force_https = false
131 force_https = false
132
132
133 ; use Strict-Transport-Security headers
133 ; use Strict-Transport-Security headers
134 use_htsts = false
134 use_htsts = false
135
135
136 ; Set to true if your repos are exposed using the dumb protocol
136 ; Set to true if your repos are exposed using the dumb protocol
137 git_update_server_info = false
137 git_update_server_info = false
138
138
139 ; RSS/ATOM feed options
139 ; RSS/ATOM feed options
140 rss_cut_off_limit = 256000
140 rss_cut_off_limit = 256000
141 rss_items_per_page = 10
141 rss_items_per_page = 10
142 rss_include_diff = false
142 rss_include_diff = false
143
143
144 ; gist URL alias, used to create nicer urls for gist. This should be an
144 ; gist URL alias, used to create nicer urls for gist. This should be an
145 ; url that does rewrites to _admin/gists/{gistid}.
145 ; url that does rewrites to _admin/gists/{gistid}.
146 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
146 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
147 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
147 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
148 gist_alias_url =
148 gist_alias_url =
149
149
150 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
150 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
151 ; used for access.
151 ; used for access.
152 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
152 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
153 ; came from the logged in user who owns this authentication token.
153 ; came from the logged in user who owns this authentication token.
154 ; Additionally @TOKEN syntax can be used to bind the view to a specific
154 ; Additionally @TOKEN syntax can be used to bind the view to a specific
155 ; authentication token. Such view would be only accessible when used together
155 ; authentication token. Such view would be only accessible when used together
156 ; with this authentication token
156 ; with this authentication token
157 ; list of all views can be found under `/_admin/permissions/auth_token_access`
157 ; list of all views can be found under `/_admin/permissions/auth_token_access`
158 ; The list should be "," separated and on a single line.
158 ; The list should be "," separated and on a single line.
159 ; Most common views to enable:
159 ; Most common views to enable:
160
160
161 # RepoCommitsView:repo_commit_download
161 # RepoCommitsView:repo_commit_download
162 # RepoCommitsView:repo_commit_patch
162 # RepoCommitsView:repo_commit_patch
163 # RepoCommitsView:repo_commit_raw
163 # RepoCommitsView:repo_commit_raw
164 # RepoCommitsView:repo_commit_raw@TOKEN
164 # RepoCommitsView:repo_commit_raw@TOKEN
165 # RepoFilesView:repo_files_diff
165 # RepoFilesView:repo_files_diff
166 # RepoFilesView:repo_archivefile
166 # RepoFilesView:repo_archivefile
167 # RepoFilesView:repo_file_raw
167 # RepoFilesView:repo_file_raw
168 # GistView:*
168 # GistView:*
169 api_access_controllers_whitelist =
169 api_access_controllers_whitelist =
170
170
171 ; Default encoding used to convert from and to unicode
171 ; Default encoding used to convert from and to unicode
172 ; can be also a comma separated list of encoding in case of mixed encodings
172 ; can be also a comma separated list of encoding in case of mixed encodings
173 default_encoding = UTF-8
173 default_encoding = UTF-8
174
174
175 ; instance-id prefix
175 ; instance-id prefix
176 ; a prefix key for this instance used for cache invalidation when running
176 ; a prefix key for this instance used for cache invalidation when running
177 ; multiple instances of RhodeCode, make sure it's globally unique for
177 ; multiple instances of RhodeCode, make sure it's globally unique for
178 ; all running RhodeCode instances. Leave empty if you don't use it
178 ; all running RhodeCode instances. Leave empty if you don't use it
179 instance_id =
179 instance_id =
180
180
181 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
181 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
182 ; of an authentication plugin also if it is disabled by its settings.
182 ; of an authentication plugin also if it is disabled by its settings.
183 ; This could be useful if you are unable to log in to the system due to broken
183 ; This could be useful if you are unable to log in to the system due to broken
184 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
184 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
185 ; module to log in again and fix the settings.
185 ; module to log in again and fix the settings.
186 ; Available builtin plugin IDs (hash is part of the ID):
186 ; Available builtin plugin IDs (hash is part of the ID):
187 ; egg:rhodecode-enterprise-ce#rhodecode
187 ; egg:rhodecode-enterprise-ce#rhodecode
188 ; egg:rhodecode-enterprise-ce#pam
188 ; egg:rhodecode-enterprise-ce#pam
189 ; egg:rhodecode-enterprise-ce#ldap
189 ; egg:rhodecode-enterprise-ce#ldap
190 ; egg:rhodecode-enterprise-ce#jasig_cas
190 ; egg:rhodecode-enterprise-ce#jasig_cas
191 ; egg:rhodecode-enterprise-ce#headers
191 ; egg:rhodecode-enterprise-ce#headers
192 ; egg:rhodecode-enterprise-ce#crowd
192 ; egg:rhodecode-enterprise-ce#crowd
193
193
194 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
194 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
195
195
196 ; Flag to control loading of legacy plugins in py:/path format
196 ; Flag to control loading of legacy plugins in py:/path format
197 auth_plugin.import_legacy_plugins = true
197 auth_plugin.import_legacy_plugins = true
198
198
199 ; alternative return HTTP header for failed authentication. Default HTTP
199 ; alternative return HTTP header for failed authentication. Default HTTP
200 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
200 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
201 ; handling that causing a series of failed authentication calls.
201 ; handling that causing a series of failed authentication calls.
202 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
202 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
203 ; This will be served instead of default 401 on bad authentication
203 ; This will be served instead of default 401 on bad authentication
204 auth_ret_code =
204 auth_ret_code =
205
205
206 ; use special detection method when serving auth_ret_code, instead of serving
206 ; use special detection method when serving auth_ret_code, instead of serving
207 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
207 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
208 ; and then serve auth_ret_code to clients
208 ; and then serve auth_ret_code to clients
209 auth_ret_code_detection = false
209 auth_ret_code_detection = false
210
210
211 ; locking return code. When repository is locked return this HTTP code. 2XX
211 ; locking return code. When repository is locked return this HTTP code. 2XX
212 ; codes don't break the transactions while 4XX codes do
212 ; codes don't break the transactions while 4XX codes do
213 lock_ret_code = 423
213 lock_ret_code = 423
214
214
215 ; Filesystem location where repositories should be stored
215 ; Filesystem location where repositories should be stored
216 repo_store.path = /var/opt/rhodecode_repo_store
216 repo_store.path = /var/opt/rhodecode_repo_store
217
217
218 ; allows to setup custom hooks in settings page
218 ; allows to setup custom hooks in settings page
219 allow_custom_hooks_settings = true
219 allow_custom_hooks_settings = true
220
220
221 ; Generated license token required for EE edition license.
221 ; Generated license token required for EE edition license.
222 ; New generated token value can be found in Admin > settings > license page.
222 ; New generated token value can be found in Admin > settings > license page.
223 license_token =
223 license_token =
224
224
225 ; This flag hides sensitive information on the license page such as token, and license data
225 ; This flag hides sensitive information on the license page such as token, and license data
226 license.hide_license_info = false
226 license.hide_license_info = false
227
227
228 ; supervisor connection uri, for managing supervisor and logs.
228 ; supervisor connection uri, for managing supervisor and logs.
229 supervisor.uri =
229 supervisor.uri =
230
230
231 ; supervisord group name/id we only want this RC instance to handle
231 ; supervisord group name/id we only want this RC instance to handle
232 supervisor.group_id = prod
232 supervisor.group_id = prod
233
233
234 ; Display extended labs settings
234 ; Display extended labs settings
235 labs_settings_active = true
235 labs_settings_active = true
236
236
237 ; Custom exception store path, defaults to TMPDIR
237 ; Custom exception store path, defaults to TMPDIR
238 ; This is used to store exception from RhodeCode in shared directory
238 ; This is used to store exception from RhodeCode in shared directory
239 #exception_tracker.store_path =
239 #exception_tracker.store_path =
240
240
241 ; Send email with exception details when it happens
241 ; Send email with exception details when it happens
242 #exception_tracker.send_email = false
242 #exception_tracker.send_email = false
243
243
244 ; Comma separated list of recipients for exception emails,
244 ; Comma separated list of recipients for exception emails,
245 ; e.g admin@rhodecode.com,devops@rhodecode.com
245 ; e.g admin@rhodecode.com,devops@rhodecode.com
246 ; Can be left empty, then emails will be sent to ALL super-admins
246 ; Can be left empty, then emails will be sent to ALL super-admins
247 #exception_tracker.send_email_recipients =
247 #exception_tracker.send_email_recipients =
248
248
249 ; optional prefix to Add to email Subject
249 ; optional prefix to Add to email Subject
250 #exception_tracker.email_prefix = [RHODECODE ERROR]
250 #exception_tracker.email_prefix = [RHODECODE ERROR]
251
251
252 ; File store configuration. This is used to store and serve uploaded files
252 ; File store configuration. This is used to store and serve uploaded files
253 file_store.enabled = true
253 file_store.enabled = true
254
254
255 ; Storage backend, available options are: local
255 ; Storage backend, available options are: local
256 file_store.backend = local
256 file_store.backend = local
257
257
258 ; path to store the uploaded binaries and artifacts
258 ; path to store the uploaded binaries and artifacts
259 file_store.storage_path = /var/opt/rhodecode_data/file_store
259 file_store.storage_path = /var/opt/rhodecode_data/file_store
260
260
261 ; Uncomment and set this path to control settings for archive download cache.
261 ; Uncomment and set this path to control settings for archive download cache.
262 ; Generated repo archives will be cached at this location
262 ; Generated repo archives will be cached at this location
263 ; and served from the cache during subsequent requests for the same archive of
263 ; and served from the cache during subsequent requests for the same archive of
264 ; the repository. This path is important to be shared across filesystems and with
264 ; the repository. This path is important to be shared across filesystems and with
265 ; RhodeCode and vcsserver
265 ; RhodeCode and vcsserver
266
266
267 ; Redis url to acquire/check generation of archives locks
268 archive_cache.locking.url = redis://redis:6379/1
269
270 ; Storage backend, only 'filesystem' is available now
271 archive_cache.backend.type = filesystem
272
267 ; Default is $cache_dir/archive_cache if not set
273 ; Default is $cache_dir/archive_cache if not set
268 archive_cache.store_dir = /var/opt/rhodecode_data/tarballcache
274 archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache
269
275
270 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
276 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
271 archive_cache.cache_size_gb = 40
277 archive_cache.filesystem.cache_size_gb = 40
278
279 ; Eviction policy used to clear out after cache_size_gb limit is reached
280 archive_cache.filesystem.eviction_policy = least-recently-stored
272
281
273 ; By default cache uses sharding technique, this specifies how many shards are there
282 ; By default cache uses sharding technique, this specifies how many shards are there
274 archive_cache.cache_shards = 4
283 archive_cache.filesystem.cache_shards = 8
284
275
285
276 ; #############
286 ; #############
277 ; CELERY CONFIG
287 ; CELERY CONFIG
278 ; #############
288 ; #############
279
289
280 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
290 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
281
291
282 use_celery = true
292 use_celery = true
283
293
284 ; path to store schedule database
294 ; path to store schedule database
285 #celerybeat-schedule.path =
295 #celerybeat-schedule.path =
286
296
287 ; connection url to the message broker (default redis)
297 ; connection url to the message broker (default redis)
288 celery.broker_url = redis://redis:6379/8
298 celery.broker_url = redis://redis:6379/8
289
299
290 ; results backend to get results for (default redis)
300 ; results backend to get results for (default redis)
291 celery.result_backend = redis://redis:6379/8
301 celery.result_backend = redis://redis:6379/8
292
302
293 ; rabbitmq example
303 ; rabbitmq example
294 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
304 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
295
305
296 ; maximum tasks to execute before worker restart
306 ; maximum tasks to execute before worker restart
297 celery.max_tasks_per_child = 20
307 celery.max_tasks_per_child = 20
298
308
299 ; tasks will never be sent to the queue, but executed locally instead.
309 ; tasks will never be sent to the queue, but executed locally instead.
300 celery.task_always_eager = false
310 celery.task_always_eager = false
301
311
302 ; #############
312 ; #############
303 ; DOGPILE CACHE
313 ; DOGPILE CACHE
304 ; #############
314 ; #############
305
315
306 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
316 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
307 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
317 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
308 cache_dir = /var/opt/rhodecode_data
318 cache_dir = /var/opt/rhodecode_data
309
319
310 ; *********************************************
320 ; *********************************************
311 ; `sql_cache_short` cache for heavy SQL queries
321 ; `sql_cache_short` cache for heavy SQL queries
312 ; Only supported backend is `memory_lru`
322 ; Only supported backend is `memory_lru`
313 ; *********************************************
323 ; *********************************************
314 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
324 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
315 rc_cache.sql_cache_short.expiration_time = 30
325 rc_cache.sql_cache_short.expiration_time = 30
316
326
317
327
318 ; *****************************************************
328 ; *****************************************************
319 ; `cache_repo_longterm` cache for repo object instances
329 ; `cache_repo_longterm` cache for repo object instances
320 ; Only supported backend is `memory_lru`
330 ; Only supported backend is `memory_lru`
321 ; *****************************************************
331 ; *****************************************************
322 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
332 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
323 ; by default we use 30 Days, cache is still invalidated on push
333 ; by default we use 30 Days, cache is still invalidated on push
324 rc_cache.cache_repo_longterm.expiration_time = 2592000
334 rc_cache.cache_repo_longterm.expiration_time = 2592000
325 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
335 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
326 rc_cache.cache_repo_longterm.max_size = 10000
336 rc_cache.cache_repo_longterm.max_size = 10000
327
337
328
338
329 ; *********************************************
339 ; *********************************************
330 ; `cache_general` cache for general purpose use
340 ; `cache_general` cache for general purpose use
331 ; for simplicity use rc.file_namespace backend,
341 ; for simplicity use rc.file_namespace backend,
332 ; for performance and scale use rc.redis
342 ; for performance and scale use rc.redis
333 ; *********************************************
343 ; *********************************************
334 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
344 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
335 rc_cache.cache_general.expiration_time = 43200
345 rc_cache.cache_general.expiration_time = 43200
336 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
346 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
337 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
347 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
338
348
339 ; alternative `cache_general` redis backend with distributed lock
349 ; alternative `cache_general` redis backend with distributed lock
340 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
350 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
341 #rc_cache.cache_general.expiration_time = 300
351 #rc_cache.cache_general.expiration_time = 300
342
352
343 ; redis_expiration_time needs to be greater than expiration_time
353 ; redis_expiration_time needs to be greater than expiration_time
344 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
354 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
345
355
346 #rc_cache.cache_general.arguments.host = localhost
356 #rc_cache.cache_general.arguments.host = localhost
347 #rc_cache.cache_general.arguments.port = 6379
357 #rc_cache.cache_general.arguments.port = 6379
348 #rc_cache.cache_general.arguments.db = 0
358 #rc_cache.cache_general.arguments.db = 0
349 #rc_cache.cache_general.arguments.socket_timeout = 30
359 #rc_cache.cache_general.arguments.socket_timeout = 30
350 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
360 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
351 #rc_cache.cache_general.arguments.distributed_lock = true
361 #rc_cache.cache_general.arguments.distributed_lock = true
352
362
353 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
363 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
354 #rc_cache.cache_general.arguments.lock_auto_renewal = true
364 #rc_cache.cache_general.arguments.lock_auto_renewal = true
355
365
356 ; *************************************************
366 ; *************************************************
357 ; `cache_perms` cache for permission tree, auth TTL
367 ; `cache_perms` cache for permission tree, auth TTL
358 ; for simplicity use rc.file_namespace backend,
368 ; for simplicity use rc.file_namespace backend,
359 ; for performance and scale use rc.redis
369 ; for performance and scale use rc.redis
360 ; *************************************************
370 ; *************************************************
361 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
371 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
362 rc_cache.cache_perms.expiration_time = 3600
372 rc_cache.cache_perms.expiration_time = 3600
363 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
373 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
364 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
374 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
365
375
366 ; alternative `cache_perms` redis backend with distributed lock
376 ; alternative `cache_perms` redis backend with distributed lock
367 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
377 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
368 #rc_cache.cache_perms.expiration_time = 300
378 #rc_cache.cache_perms.expiration_time = 300
369
379
370 ; redis_expiration_time needs to be greater than expiration_time
380 ; redis_expiration_time needs to be greater than expiration_time
371 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
381 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
372
382
373 #rc_cache.cache_perms.arguments.host = localhost
383 #rc_cache.cache_perms.arguments.host = localhost
374 #rc_cache.cache_perms.arguments.port = 6379
384 #rc_cache.cache_perms.arguments.port = 6379
375 #rc_cache.cache_perms.arguments.db = 0
385 #rc_cache.cache_perms.arguments.db = 0
376 #rc_cache.cache_perms.arguments.socket_timeout = 30
386 #rc_cache.cache_perms.arguments.socket_timeout = 30
377 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
387 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
378 #rc_cache.cache_perms.arguments.distributed_lock = true
388 #rc_cache.cache_perms.arguments.distributed_lock = true
379
389
380 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
390 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
381 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
391 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
382
392
383 ; ***************************************************
393 ; ***************************************************
384 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
394 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
385 ; for simplicity use rc.file_namespace backend,
395 ; for simplicity use rc.file_namespace backend,
386 ; for performance and scale use rc.redis
396 ; for performance and scale use rc.redis
387 ; ***************************************************
397 ; ***************************************************
388 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
398 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
389 rc_cache.cache_repo.expiration_time = 2592000
399 rc_cache.cache_repo.expiration_time = 2592000
390 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
400 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
391 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
401 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
392
402
393 ; alternative `cache_repo` redis backend with distributed lock
403 ; alternative `cache_repo` redis backend with distributed lock
394 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
404 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
395 #rc_cache.cache_repo.expiration_time = 2592000
405 #rc_cache.cache_repo.expiration_time = 2592000
396
406
397 ; redis_expiration_time needs to be greater than expiration_time
407 ; redis_expiration_time needs to be greater than expiration_time
398 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
408 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
399
409
400 #rc_cache.cache_repo.arguments.host = localhost
410 #rc_cache.cache_repo.arguments.host = localhost
401 #rc_cache.cache_repo.arguments.port = 6379
411 #rc_cache.cache_repo.arguments.port = 6379
402 #rc_cache.cache_repo.arguments.db = 1
412 #rc_cache.cache_repo.arguments.db = 1
403 #rc_cache.cache_repo.arguments.socket_timeout = 30
413 #rc_cache.cache_repo.arguments.socket_timeout = 30
404 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
414 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
405 #rc_cache.cache_repo.arguments.distributed_lock = true
415 #rc_cache.cache_repo.arguments.distributed_lock = true
406
416
407 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
417 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
408 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
418 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
409
419
410 ; ##############
420 ; ##############
411 ; BEAKER SESSION
421 ; BEAKER SESSION
412 ; ##############
422 ; ##############
413
423
414 ; beaker.session.type is type of storage options for the logged users sessions. Current allowed
424 ; beaker.session.type is type of storage options for the logged users sessions. Current allowed
415 ; types are file, ext:redis, ext:database, ext:memcached
425 ; types are file, ext:redis, ext:database, ext:memcached
416 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
426 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
417 #beaker.session.type = file
427 #beaker.session.type = file
418 #beaker.session.data_dir = %(here)s/data/sessions
428 #beaker.session.data_dir = %(here)s/data/sessions
419
429
420 ; Redis based sessions
430 ; Redis based sessions
421 beaker.session.type = ext:redis
431 beaker.session.type = ext:redis
422 beaker.session.url = redis://redis:6379/2
432 beaker.session.url = redis://redis:6379/2
423
433
424 ; DB based session, fast, and allows easy management over logged in users
434 ; DB based session, fast, and allows easy management over logged in users
425 #beaker.session.type = ext:database
435 #beaker.session.type = ext:database
426 #beaker.session.table_name = db_session
436 #beaker.session.table_name = db_session
427 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
437 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
428 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
438 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
429 #beaker.session.sa.pool_recycle = 3600
439 #beaker.session.sa.pool_recycle = 3600
430 #beaker.session.sa.echo = false
440 #beaker.session.sa.echo = false
431
441
432 beaker.session.key = rhodecode
442 beaker.session.key = rhodecode
433 beaker.session.secret = production-rc-uytcxaz
443 beaker.session.secret = production-rc-uytcxaz
434 beaker.session.lock_dir = /data_ramdisk/lock
444 beaker.session.lock_dir = /data_ramdisk/lock
435
445
436 ; Secure encrypted cookie. Requires AES and AES python libraries
446 ; Secure encrypted cookie. Requires AES and AES python libraries
437 ; you must disable beaker.session.secret to use this
447 ; you must disable beaker.session.secret to use this
438 #beaker.session.encrypt_key = key_for_encryption
448 #beaker.session.encrypt_key = key_for_encryption
439 #beaker.session.validate_key = validation_key
449 #beaker.session.validate_key = validation_key
440
450
441 ; Sets session as invalid (also logging out user) if it has not been
451 ; Sets session as invalid (also logging out user) if it has not been
442 ; accessed for given amount of time in seconds
452 ; accessed for given amount of time in seconds
443 beaker.session.timeout = 2592000
453 beaker.session.timeout = 2592000
444 beaker.session.httponly = true
454 beaker.session.httponly = true
445
455
446 ; Path to use for the cookie. Set to prefix if you use prefix middleware
456 ; Path to use for the cookie. Set to prefix if you use prefix middleware
447 #beaker.session.cookie_path = /custom_prefix
457 #beaker.session.cookie_path = /custom_prefix
448
458
449 ; Set https secure cookie
459 ; Set https secure cookie
450 beaker.session.secure = false
460 beaker.session.secure = false
451
461
452 ; default cookie expiration time in seconds, set to `true` to set expire
462 ; default cookie expiration time in seconds, set to `true` to set expire
453 ; at browser close
463 ; at browser close
454 #beaker.session.cookie_expires = 3600
464 #beaker.session.cookie_expires = 3600
455
465
456 ; #############################
466 ; #############################
457 ; SEARCH INDEXING CONFIGURATION
467 ; SEARCH INDEXING CONFIGURATION
458 ; #############################
468 ; #############################
459
469
460 ; Full text search indexer is available in rhodecode-tools under
470 ; Full text search indexer is available in rhodecode-tools under
461 ; `rhodecode-tools index` command
471 ; `rhodecode-tools index` command
462
472
463 ; WHOOSH Backend, doesn't require additional services to run
473 ; WHOOSH Backend, doesn't require additional services to run
464 ; it works well with a few dozen repos
474 ; it works well with a few dozen repos
465 search.module = rhodecode.lib.index.whoosh
475 search.module = rhodecode.lib.index.whoosh
466 search.location = %(here)s/data/index
476 search.location = %(here)s/data/index
467
477
468 ; ####################
478 ; ####################
469 ; CHANNELSTREAM CONFIG
479 ; CHANNELSTREAM CONFIG
470 ; ####################
480 ; ####################
471
481
472 ; channelstream enables persistent connections and live notification
482 ; channelstream enables persistent connections and live notification
473 ; in the system. It's also used by the chat system
483 ; in the system. It's also used by the chat system
474
484
475 channelstream.enabled = true
485 channelstream.enabled = true
476
486
477 ; server address for channelstream server on the backend
487 ; server address for channelstream server on the backend
478 channelstream.server = channelstream:9800
488 channelstream.server = channelstream:9800
479
489
480 ; location of the channelstream server from outside world
490 ; location of the channelstream server from outside world
481 ; use ws:// for http or wss:// for https. This address needs to be handled
491 ; use ws:// for http or wss:// for https. This address needs to be handled
482 ; by external HTTP server such as Nginx or Apache
492 ; by external HTTP server such as Nginx or Apache
483 ; see Nginx/Apache configuration examples in our docs
493 ; see Nginx/Apache configuration examples in our docs
484 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
494 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
485 channelstream.secret = ENV_GENERATED
495 channelstream.secret = ENV_GENERATED
486 channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
496 channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
487
497
488 ; Internal application path that Javascript uses to connect into.
498 ; Internal application path that Javascript uses to connect into.
489 ; If you use proxy-prefix the prefix should be added before /_channelstream
499 ; If you use proxy-prefix the prefix should be added before /_channelstream
490 channelstream.proxy_path = /_channelstream
500 channelstream.proxy_path = /_channelstream
491
501
492
502
493 ; ##############################
503 ; ##############################
494 ; MAIN RHODECODE DATABASE CONFIG
504 ; MAIN RHODECODE DATABASE CONFIG
495 ; ##############################
505 ; ##############################
496
506
497 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
507 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
498 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
508 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
499 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
509 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
500 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
510 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
501 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
511 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
502
512
503 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
513 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
504
514
505 ; see sqlalchemy docs for other advanced settings
515 ; see sqlalchemy docs for other advanced settings
506 ; print the sql statements to output
516 ; print the sql statements to output
507 sqlalchemy.db1.echo = false
517 sqlalchemy.db1.echo = false
508
518
509 ; recycle the connections after this amount of seconds
519 ; recycle the connections after this amount of seconds
510 sqlalchemy.db1.pool_recycle = 3600
520 sqlalchemy.db1.pool_recycle = 3600
511
521
512 ; the number of connections to keep open inside the connection pool.
522 ; the number of connections to keep open inside the connection pool.
513 ; 0 indicates no limit
523 ; 0 indicates no limit
514 ; the general calculus with gevent is:
524 ; the general calculus with gevent is:
515 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
525 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
516 ; then increase pool size + max overflow so that they add up to 500.
526 ; then increase pool size + max overflow so that they add up to 500.
517 #sqlalchemy.db1.pool_size = 5
527 #sqlalchemy.db1.pool_size = 5
518
528
519 ; The number of connections to allow in connection pool "overflow", that is
529 ; The number of connections to allow in connection pool "overflow", that is
520 ; connections that can be opened above and beyond the pool_size setting,
530 ; connections that can be opened above and beyond the pool_size setting,
521 ; which defaults to five.
531 ; which defaults to five.
522 #sqlalchemy.db1.max_overflow = 10
532 #sqlalchemy.db1.max_overflow = 10
523
533
524 ; Connection check ping, used to detect broken database connections
534 ; Connection check ping, used to detect broken database connections
525 ; could be enabled to better handle cases if MySQL has gone away errors
535 ; could be enabled to better handle cases if MySQL has gone away errors
526 #sqlalchemy.db1.ping_connection = true
536 #sqlalchemy.db1.ping_connection = true
527
537
528 ; ##########
538 ; ##########
529 ; VCS CONFIG
539 ; VCS CONFIG
530 ; ##########
540 ; ##########
531 vcs.server.enable = true
541 vcs.server.enable = true
532 vcs.server = vcsserver:10010
542 vcs.server = vcsserver:10010
533
543
534 ; Web server connectivity protocol, responsible for web based VCS operations
544 ; Web server connectivity protocol, responsible for web based VCS operations
535 ; Available protocols are:
545 ; Available protocols are:
536 ; `http` - use http-rpc backend (default)
546 ; `http` - use http-rpc backend (default)
537 vcs.server.protocol = http
547 vcs.server.protocol = http
538
548
539 ; Push/Pull operations protocol, available options are:
549 ; Push/Pull operations protocol, available options are:
540 ; `http` - use http-rpc backend (default)
550 ; `http` - use http-rpc backend (default)
541 vcs.scm_app_implementation = http
551 vcs.scm_app_implementation = http
542
552
543 ; Push/Pull operations hooks protocol, available options are:
553 ; Push/Pull operations hooks protocol, available options are:
544 ; `http` - use http-rpc backend (default)
554 ; `http` - use http-rpc backend (default)
545 ; `celery` - use celery based hooks
555 ; `celery` - use celery based hooks
546 vcs.hooks.protocol = http
556 vcs.hooks.protocol = http
547
557
548 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
558 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
549 ; accessible via network.
559 ; accessible via network.
550 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
560 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
551 vcs.hooks.host = *
561 vcs.hooks.host = *
552
562
553 ; Start VCSServer with this instance as a subprocess, useful for development
563 ; Start VCSServer with this instance as a subprocess, useful for development
554 vcs.start_server = false
564 vcs.start_server = false
555
565
556 ; List of enabled VCS backends, available options are:
566 ; List of enabled VCS backends, available options are:
557 ; `hg` - mercurial
567 ; `hg` - mercurial
558 ; `git` - git
568 ; `git` - git
559 ; `svn` - subversion
569 ; `svn` - subversion
560 vcs.backends = hg, git, svn
570 vcs.backends = hg, git, svn
561
571
562 ; Wait this number of seconds before killing connection to the vcsserver
572 ; Wait this number of seconds before killing connection to the vcsserver
563 vcs.connection_timeout = 3600
573 vcs.connection_timeout = 3600
564
574
565 ; Cache flag to cache vcsserver remote calls locally
575 ; Cache flag to cache vcsserver remote calls locally
566 ; It uses cache_region `cache_repo`
576 ; It uses cache_region `cache_repo`
567 vcs.methods.cache = true
577 vcs.methods.cache = true
568
578
569 ; ####################################################
579 ; ####################################################
570 ; Subversion proxy support (mod_dav_svn)
580 ; Subversion proxy support (mod_dav_svn)
571 ; Maps RhodeCode repo groups into SVN paths for Apache
581 ; Maps RhodeCode repo groups into SVN paths for Apache
572 ; ####################################################
582 ; ####################################################
573
583
574 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
584 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
575 ; Set a numeric version for your current SVN e.g. 1.8, or 1.12
585 ; Set a numeric version for your current SVN e.g. 1.8, or 1.12
576 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
586 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
577 #vcs.svn.compatible_version = 1.8
587 #vcs.svn.compatible_version = 1.8
578
588
579 ; Enable SVN proxy of requests over HTTP
589 ; Enable SVN proxy of requests over HTTP
580 vcs.svn.proxy.enabled = true
590 vcs.svn.proxy.enabled = true
581
591
582 ; host to connect to running SVN subsystem
592 ; host to connect to running SVN subsystem
583 vcs.svn.proxy.host = http://svn:8090
593 vcs.svn.proxy.host = http://svn:8090
584
594
585 ; Enable or disable the config file generation.
595 ; Enable or disable the config file generation.
586 svn.proxy.generate_config = true
596 svn.proxy.generate_config = true
587
597
588 ; Generate config file with `SVNListParentPath` set to `On`.
598 ; Generate config file with `SVNListParentPath` set to `On`.
589 svn.proxy.list_parent_path = true
599 svn.proxy.list_parent_path = true
590
600
591 ; Set location and file name of generated config file.
601 ; Set location and file name of generated config file.
592 svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
602 svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
593
603
594 ; alternative mod_dav config template. This needs to be a valid mako template
604 ; alternative mod_dav config template. This needs to be a valid mako template
595 ; Example template can be found in the source code:
605 ; Example template can be found in the source code:
596 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
606 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
597 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
607 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
598
608
599 ; Used as a prefix to the `Location` block in the generated config file.
609 ; Used as a prefix to the `Location` block in the generated config file.
600 ; In most cases it should be set to `/`.
610 ; In most cases it should be set to `/`.
601 svn.proxy.location_root = /
611 svn.proxy.location_root = /
602
612
603 ; Command to reload the mod dav svn configuration on change.
613 ; Command to reload the mod dav svn configuration on change.
604 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
614 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
605 ; Make sure user who runs RhodeCode process is allowed to reload Apache
615 ; Make sure user who runs RhodeCode process is allowed to reload Apache
606 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
616 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
607
617
608 ; If the timeout expires before the reload command finishes, the command will
618 ; If the timeout expires before the reload command finishes, the command will
609 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
619 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
610 #svn.proxy.reload_timeout = 10
620 #svn.proxy.reload_timeout = 10
611
621
612 ; ####################
622 ; ####################
613 ; SSH Support Settings
623 ; SSH Support Settings
614 ; ####################
624 ; ####################
615
625
616 ; Defines if a custom authorized_keys file should be created and written on
626 ; Defines if a custom authorized_keys file should be created and written on
617 ; any change of user SSH keys. Setting this to false also disables the possibility
627 ; any change of user SSH keys. Setting this to false also disables the possibility
618 ; of adding SSH keys by users from web interface. Super admins can still
628 ; of adding SSH keys by users from web interface. Super admins can still
619 ; manage SSH Keys.
629 ; manage SSH Keys.
620 ssh.generate_authorized_keyfile = true
630 ssh.generate_authorized_keyfile = true
621
631
622 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
632 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
623 # ssh.authorized_keys_ssh_opts =
633 # ssh.authorized_keys_ssh_opts =
624
634
625 ; Path to the authorized_keys file where the generated entries are placed.
635 ; Path to the authorized_keys file where the generated entries are placed.
626 ; It is possible to have multiple key files specified in `sshd_config` e.g.
636 ; It is possible to have multiple key files specified in `sshd_config` e.g.
627 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
637 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
628 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
638 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
629
639
630 ; Command to execute the SSH wrapper. The binary is available in the
640 ; Command to execute the SSH wrapper. The binary is available in the
631 ; RhodeCode installation directory.
641 ; RhodeCode installation directory.
632 ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
642 ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
633 ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
643 ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
634 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
644 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
635
645
636 ; Allow shell when executing the ssh-wrapper command
646 ; Allow shell when executing the ssh-wrapper command
637 ssh.wrapper_cmd_allow_shell = false
647 ssh.wrapper_cmd_allow_shell = false
638
648
639 ; Enables logging, and detailed output send back to the client during SSH
649 ; Enables logging, and detailed output send back to the client during SSH
640 ; operations. Useful for debugging, shouldn't be used in production.
650 ; operations. Useful for debugging, shouldn't be used in production.
641 ssh.enable_debug_logging = false
651 ssh.enable_debug_logging = false
642
652
643 ; Paths to binary executable, by default they are the names, but we can
653 ; Paths to binary executable, by default they are the names, but we can
644 ; override them if we want to use a custom one
654 ; override them if we want to use a custom one
645 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
655 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
646 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
656 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
647 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
657 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
648
658
649 ; Enables SSH key generator web interface. Disabling this still allows users
659 ; Enables SSH key generator web interface. Disabling this still allows users
650 ; to add their own keys.
660 ; to add their own keys.
651 ssh.enable_ui_key_generator = true
661 ssh.enable_ui_key_generator = true
652
662
653 ; Statsd client config, this is used to send metrics to statsd
663 ; Statsd client config, this is used to send metrics to statsd
654 ; We recommend setting statsd_exported and scrape them using Prometheus
664 ; We recommend setting statsd_exported and scrape them using Prometheus
655 #statsd.enabled = false
665 #statsd.enabled = false
656 #statsd.statsd_host = 0.0.0.0
666 #statsd.statsd_host = 0.0.0.0
657 #statsd.statsd_port = 8125
667 #statsd.statsd_port = 8125
658 #statsd.statsd_prefix =
668 #statsd.statsd_prefix =
659 #statsd.statsd_ipv6 = false
669 #statsd.statsd_ipv6 = false
660
670
661 ; configure logging automatically at server startup set to false
671 ; configure logging automatically at server startup set to false
662 ; to use the below custom logging config.
672 ; to use the below custom logging config.
663 ; RC_LOGGING_FORMATTER
673 ; RC_LOGGING_FORMATTER
664 ; RC_LOGGING_LEVEL
674 ; RC_LOGGING_LEVEL
665 ; env variables can control the settings for logging in case of autoconfigure
675 ; env variables can control the settings for logging in case of autoconfigure
666
676
667 #logging.autoconfigure = true
677 #logging.autoconfigure = true
668
678
669 ; specify your own custom logging config file to configure logging
679 ; specify your own custom logging config file to configure logging
670 #logging.logging_conf_file = /path/to/custom_logging.ini
680 #logging.logging_conf_file = /path/to/custom_logging.ini
671
681
672 ; Dummy marker to add new entries after.
682 ; Dummy marker to add new entries after.
673 ; Add any custom entries below. Please don't remove this marker.
683 ; Add any custom entries below. Please don't remove this marker.
674 custom.conf = 1
684 custom.conf = 1
675
685
676
686
677 ; #####################
687 ; #####################
678 ; LOGGING CONFIGURATION
688 ; LOGGING CONFIGURATION
679 ; #####################
689 ; #####################
680
690
681 [loggers]
691 [loggers]
682 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
692 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
683
693
684 [handlers]
694 [handlers]
685 keys = console, console_sql
695 keys = console, console_sql
686
696
687 [formatters]
697 [formatters]
688 keys = generic, json, color_formatter, color_formatter_sql
698 keys = generic, json, color_formatter, color_formatter_sql
689
699
690 ; #######
700 ; #######
691 ; LOGGERS
701 ; LOGGERS
692 ; #######
702 ; #######
693 [logger_root]
703 [logger_root]
694 level = NOTSET
704 level = NOTSET
695 handlers = console
705 handlers = console
696
706
697 [logger_sqlalchemy]
707 [logger_sqlalchemy]
698 level = INFO
708 level = INFO
699 handlers = console_sql
709 handlers = console_sql
700 qualname = sqlalchemy.engine
710 qualname = sqlalchemy.engine
701 propagate = 0
711 propagate = 0
702
712
703 [logger_beaker]
713 [logger_beaker]
704 level = DEBUG
714 level = DEBUG
705 handlers =
715 handlers =
706 qualname = beaker.container
716 qualname = beaker.container
707 propagate = 1
717 propagate = 1
708
718
709 [logger_rhodecode]
719 [logger_rhodecode]
710 level = DEBUG
720 level = DEBUG
711 handlers =
721 handlers =
712 qualname = rhodecode
722 qualname = rhodecode
713 propagate = 1
723 propagate = 1
714
724
715 [logger_ssh_wrapper]
725 [logger_ssh_wrapper]
716 level = DEBUG
726 level = DEBUG
717 handlers =
727 handlers =
718 qualname = ssh_wrapper
728 qualname = ssh_wrapper
719 propagate = 1
729 propagate = 1
720
730
721 [logger_celery]
731 [logger_celery]
722 level = DEBUG
732 level = DEBUG
723 handlers =
733 handlers =
724 qualname = celery
734 qualname = celery
725
735
726
736
727 ; ########
737 ; ########
728 ; HANDLERS
738 ; HANDLERS
729 ; ########
739 ; ########
730
740
731 [handler_console]
741 [handler_console]
732 class = StreamHandler
742 class = StreamHandler
733 args = (sys.stderr, )
743 args = (sys.stderr, )
734 level = INFO
744 level = INFO
735 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
745 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
736 ; This allows sending properly formatted logs to grafana loki or elasticsearch
746 ; This allows sending properly formatted logs to grafana loki or elasticsearch
737 formatter = generic
747 formatter = generic
738
748
739 [handler_console_sql]
749 [handler_console_sql]
740 ; "level = DEBUG" logs SQL queries and results.
750 ; "level = DEBUG" logs SQL queries and results.
741 ; "level = INFO" logs SQL queries.
751 ; "level = INFO" logs SQL queries.
742 ; "level = WARN" logs neither. (Recommended for production systems.)
752 ; "level = WARN" logs neither. (Recommended for production systems.)
743 class = StreamHandler
753 class = StreamHandler
744 args = (sys.stderr, )
754 args = (sys.stderr, )
745 level = WARN
755 level = WARN
746 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
756 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
747 ; This allows sending properly formatted logs to grafana loki or elasticsearch
757 ; This allows sending properly formatted logs to grafana loki or elasticsearch
748 formatter = generic
758 formatter = generic
749
759
750 ; ##########
760 ; ##########
751 ; FORMATTERS
761 ; FORMATTERS
752 ; ##########
762 ; ##########
753
763
754 [formatter_generic]
764 [formatter_generic]
755 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
765 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
756 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
766 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
757 datefmt = %Y-%m-%d %H:%M:%S
767 datefmt = %Y-%m-%d %H:%M:%S
758
768
759 [formatter_color_formatter]
769 [formatter_color_formatter]
760 class = rhodecode.lib.logging_formatter.ColorFormatter
770 class = rhodecode.lib.logging_formatter.ColorFormatter
761 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
771 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
762 datefmt = %Y-%m-%d %H:%M:%S
772 datefmt = %Y-%m-%d %H:%M:%S
763
773
764 [formatter_color_formatter_sql]
774 [formatter_color_formatter_sql]
765 class = rhodecode.lib.logging_formatter.ColorFormatterSql
775 class = rhodecode.lib.logging_formatter.ColorFormatterSql
766 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
776 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
767 datefmt = %Y-%m-%d %H:%M:%S
777 datefmt = %Y-%m-%d %H:%M:%S
768
778
769 [formatter_json]
779 [formatter_json]
770 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
780 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
771 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
781 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
@@ -1,287 +1,286 b''
1 # deps, generated via pipdeptree --exclude setuptools,wheel,pipdeptree,pip -f | tr '[:upper:]' '[:lower:]'
1 # deps, generated via pipdeptree --exclude setuptools,wheel,pipdeptree,pip -f | tr '[:upper:]' '[:lower:]'
2
2
3 alembic==1.13.1
3 alembic==1.13.1
4 mako==1.2.4
4 mako==1.2.4
5 markupsafe==2.1.2
5 markupsafe==2.1.2
6 sqlalchemy==1.4.52
6 sqlalchemy==1.4.52
7 greenlet==3.0.3
7 greenlet==3.0.3
8 typing_extensions==4.9.0
8 typing_extensions==4.9.0
9 async-timeout==4.0.3
9 async-timeout==4.0.3
10 babel==2.12.1
10 babel==2.12.1
11 beaker==1.12.1
11 beaker==1.12.1
12 celery==5.3.6
12 celery==5.3.6
13 billiard==4.2.0
13 billiard==4.2.0
14 click==8.1.3
14 click==8.1.3
15 click-didyoumean==0.3.0
15 click-didyoumean==0.3.0
16 click==8.1.3
16 click==8.1.3
17 click-plugins==1.1.1
17 click-plugins==1.1.1
18 click==8.1.3
18 click==8.1.3
19 click-repl==0.2.0
19 click-repl==0.2.0
20 click==8.1.3
20 click==8.1.3
21 prompt-toolkit==3.0.38
21 prompt-toolkit==3.0.38
22 wcwidth==0.2.6
22 wcwidth==0.2.6
23 six==1.16.0
23 six==1.16.0
24 kombu==5.3.5
24 kombu==5.3.5
25 amqp==5.2.0
25 amqp==5.2.0
26 vine==5.1.0
26 vine==5.1.0
27 vine==5.1.0
27 vine==5.1.0
28 python-dateutil==2.8.2
28 python-dateutil==2.8.2
29 six==1.16.0
29 six==1.16.0
30 tzdata==2024.1
30 tzdata==2024.1
31 vine==5.1.0
31 vine==5.1.0
32 channelstream==0.7.1
32 channelstream==0.7.1
33 gevent==24.2.1
33 gevent==24.2.1
34 greenlet==3.0.3
34 greenlet==3.0.3
35 zope.event==5.0.0
35 zope.event==5.0.0
36 zope.interface==6.3.0
36 zope.interface==6.3.0
37 itsdangerous==1.1.0
37 itsdangerous==1.1.0
38 marshmallow==2.18.0
38 marshmallow==2.18.0
39 pyramid==2.0.2
39 pyramid==2.0.2
40 hupper==1.12
40 hupper==1.12
41 plaster==1.1.2
41 plaster==1.1.2
42 plaster-pastedeploy==1.0.1
42 plaster-pastedeploy==1.0.1
43 pastedeploy==3.1.0
43 pastedeploy==3.1.0
44 plaster==1.1.2
44 plaster==1.1.2
45 translationstring==1.4
45 translationstring==1.4
46 venusian==3.0.0
46 venusian==3.0.0
47 webob==1.8.7
47 webob==1.8.7
48 zope.deprecation==5.0.0
48 zope.deprecation==5.0.0
49 zope.interface==6.3.0
49 zope.interface==6.3.0
50 pyramid-apispec==0.3.3
50 pyramid-apispec==0.3.3
51 apispec==1.3.3
51 apispec==1.3.3
52 pyramid-jinja2==2.10
52 pyramid-jinja2==2.10
53 jinja2==3.1.2
53 jinja2==3.1.2
54 markupsafe==2.1.2
54 markupsafe==2.1.2
55 markupsafe==2.1.2
55 markupsafe==2.1.2
56 pyramid==2.0.2
56 pyramid==2.0.2
57 hupper==1.12
57 hupper==1.12
58 plaster==1.1.2
58 plaster==1.1.2
59 plaster-pastedeploy==1.0.1
59 plaster-pastedeploy==1.0.1
60 pastedeploy==3.1.0
60 pastedeploy==3.1.0
61 plaster==1.1.2
61 plaster==1.1.2
62 translationstring==1.4
62 translationstring==1.4
63 venusian==3.0.0
63 venusian==3.0.0
64 webob==1.8.7
64 webob==1.8.7
65 zope.deprecation==5.0.0
65 zope.deprecation==5.0.0
66 zope.interface==6.3.0
66 zope.interface==6.3.0
67 zope.deprecation==5.0.0
67 zope.deprecation==5.0.0
68 python-dateutil==2.8.2
68 python-dateutil==2.8.2
69 six==1.16.0
69 six==1.16.0
70 requests==2.28.2
70 requests==2.28.2
71 certifi==2022.12.7
71 certifi==2022.12.7
72 charset-normalizer==3.1.0
72 charset-normalizer==3.1.0
73 idna==3.4
73 idna==3.4
74 urllib3==1.26.14
74 urllib3==1.26.14
75 ws4py==0.5.1
75 ws4py==0.5.1
76 deform==2.0.15
76 deform==2.0.15
77 chameleon==3.10.2
77 chameleon==3.10.2
78 colander==2.0
78 colander==2.0
79 iso8601==1.1.0
79 iso8601==1.1.0
80 translationstring==1.4
80 translationstring==1.4
81 iso8601==1.1.0
81 iso8601==1.1.0
82 peppercorn==0.6
82 peppercorn==0.6
83 translationstring==1.4
83 translationstring==1.4
84 zope.deprecation==5.0.0
84 zope.deprecation==5.0.0
85 diskcache==5.6.3
86 docutils==0.19
85 docutils==0.19
87 dogpile.cache==1.3.3
86 dogpile.cache==1.3.3
88 decorator==5.1.1
87 decorator==5.1.1
89 stevedore==5.1.0
88 stevedore==5.1.0
90 pbr==5.11.1
89 pbr==5.11.1
91 formencode==2.1.0
90 formencode==2.1.0
92 six==1.16.0
91 six==1.16.0
93 gunicorn==21.2.0
92 gunicorn==21.2.0
94 packaging==24.0
93 packaging==24.0
95 gevent==24.2.1
94 gevent==24.2.1
96 greenlet==3.0.3
95 greenlet==3.0.3
97 zope.event==5.0.0
96 zope.event==5.0.0
98 zope.interface==6.3.0
97 zope.interface==6.3.0
99 ipython==8.14.0
98 ipython==8.14.0
100 backcall==0.2.0
99 backcall==0.2.0
101 decorator==5.1.1
100 decorator==5.1.1
102 jedi==0.19.0
101 jedi==0.19.0
103 parso==0.8.3
102 parso==0.8.3
104 matplotlib-inline==0.1.6
103 matplotlib-inline==0.1.6
105 traitlets==5.9.0
104 traitlets==5.9.0
106 pexpect==4.8.0
105 pexpect==4.8.0
107 ptyprocess==0.7.0
106 ptyprocess==0.7.0
108 pickleshare==0.7.5
107 pickleshare==0.7.5
109 prompt-toolkit==3.0.38
108 prompt-toolkit==3.0.38
110 wcwidth==0.2.6
109 wcwidth==0.2.6
111 pygments==2.15.1
110 pygments==2.15.1
112 stack-data==0.6.2
111 stack-data==0.6.2
113 asttokens==2.2.1
112 asttokens==2.2.1
114 six==1.16.0
113 six==1.16.0
115 executing==1.2.0
114 executing==1.2.0
116 pure-eval==0.2.2
115 pure-eval==0.2.2
117 traitlets==5.9.0
116 traitlets==5.9.0
118 markdown==3.4.3
117 markdown==3.4.3
119 msgpack==1.0.8
118 msgpack==1.0.8
120 mysqlclient==2.1.1
119 mysqlclient==2.1.1
121 nbconvert==7.7.3
120 nbconvert==7.7.3
122 beautifulsoup4==4.12.3
121 beautifulsoup4==4.12.3
123 soupsieve==2.5
122 soupsieve==2.5
124 bleach==6.1.0
123 bleach==6.1.0
125 six==1.16.0
124 six==1.16.0
126 webencodings==0.5.1
125 webencodings==0.5.1
127 defusedxml==0.7.1
126 defusedxml==0.7.1
128 jinja2==3.1.2
127 jinja2==3.1.2
129 markupsafe==2.1.2
128 markupsafe==2.1.2
130 jupyter_core==5.3.1
129 jupyter_core==5.3.1
131 platformdirs==3.10.0
130 platformdirs==3.10.0
132 traitlets==5.9.0
131 traitlets==5.9.0
133 jupyterlab-pygments==0.2.2
132 jupyterlab-pygments==0.2.2
134 markupsafe==2.1.2
133 markupsafe==2.1.2
135 mistune==2.0.5
134 mistune==2.0.5
136 nbclient==0.8.0
135 nbclient==0.8.0
137 jupyter_client==8.3.0
136 jupyter_client==8.3.0
138 jupyter_core==5.3.1
137 jupyter_core==5.3.1
139 platformdirs==3.10.0
138 platformdirs==3.10.0
140 traitlets==5.9.0
139 traitlets==5.9.0
141 python-dateutil==2.8.2
140 python-dateutil==2.8.2
142 six==1.16.0
141 six==1.16.0
143 pyzmq==25.0.0
142 pyzmq==25.0.0
144 tornado==6.2
143 tornado==6.2
145 traitlets==5.9.0
144 traitlets==5.9.0
146 jupyter_core==5.3.1
145 jupyter_core==5.3.1
147 platformdirs==3.10.0
146 platformdirs==3.10.0
148 traitlets==5.9.0
147 traitlets==5.9.0
149 nbformat==5.9.2
148 nbformat==5.9.2
150 fastjsonschema==2.18.0
149 fastjsonschema==2.18.0
151 jsonschema==4.18.6
150 jsonschema==4.18.6
152 attrs==22.2.0
151 attrs==22.2.0
153 pyrsistent==0.19.3
152 pyrsistent==0.19.3
154 jupyter_core==5.3.1
153 jupyter_core==5.3.1
155 platformdirs==3.10.0
154 platformdirs==3.10.0
156 traitlets==5.9.0
155 traitlets==5.9.0
157 traitlets==5.9.0
156 traitlets==5.9.0
158 traitlets==5.9.0
157 traitlets==5.9.0
159 nbformat==5.9.2
158 nbformat==5.9.2
160 fastjsonschema==2.18.0
159 fastjsonschema==2.18.0
161 jsonschema==4.18.6
160 jsonschema==4.18.6
162 attrs==22.2.0
161 attrs==22.2.0
163 pyrsistent==0.19.3
162 pyrsistent==0.19.3
164 jupyter_core==5.3.1
163 jupyter_core==5.3.1
165 platformdirs==3.10.0
164 platformdirs==3.10.0
166 traitlets==5.9.0
165 traitlets==5.9.0
167 traitlets==5.9.0
166 traitlets==5.9.0
168 pandocfilters==1.5.0
167 pandocfilters==1.5.0
169 pygments==2.15.1
168 pygments==2.15.1
170 tinycss2==1.2.1
169 tinycss2==1.2.1
171 webencodings==0.5.1
170 webencodings==0.5.1
172 traitlets==5.9.0
171 traitlets==5.9.0
173 orjson==3.10.3
172 orjson==3.10.3
174 paste==3.10.1
173 paste==3.10.1
175 premailer==3.10.0
174 premailer==3.10.0
176 cachetools==5.3.3
175 cachetools==5.3.3
177 cssselect==1.2.0
176 cssselect==1.2.0
178 cssutils==2.6.0
177 cssutils==2.6.0
179 lxml==4.9.3
178 lxml==4.9.3
180 requests==2.28.2
179 requests==2.28.2
181 certifi==2022.12.7
180 certifi==2022.12.7
182 charset-normalizer==3.1.0
181 charset-normalizer==3.1.0
183 idna==3.4
182 idna==3.4
184 urllib3==1.26.14
183 urllib3==1.26.14
185 psutil==5.9.8
184 psutil==5.9.8
186 psycopg2==2.9.9
185 psycopg2==2.9.9
187 py-bcrypt==0.4
186 py-bcrypt==0.4
188 pycmarkgfm==1.2.0
187 pycmarkgfm==1.2.0
189 cffi==1.16.0
188 cffi==1.16.0
190 pycparser==2.21
189 pycparser==2.21
191 pycryptodome==3.17
190 pycryptodome==3.17
192 pycurl==7.45.3
191 pycurl==7.45.3
193 pymysql==1.0.3
192 pymysql==1.0.3
194 pyotp==2.8.0
193 pyotp==2.8.0
195 pyparsing==3.1.1
194 pyparsing==3.1.1
196 pyramid-debugtoolbar==4.12.1
195 pyramid-debugtoolbar==4.12.1
197 pygments==2.15.1
196 pygments==2.15.1
198 pyramid==2.0.2
197 pyramid==2.0.2
199 hupper==1.12
198 hupper==1.12
200 plaster==1.1.2
199 plaster==1.1.2
201 plaster-pastedeploy==1.0.1
200 plaster-pastedeploy==1.0.1
202 pastedeploy==3.1.0
201 pastedeploy==3.1.0
203 plaster==1.1.2
202 plaster==1.1.2
204 translationstring==1.4
203 translationstring==1.4
205 venusian==3.0.0
204 venusian==3.0.0
206 webob==1.8.7
205 webob==1.8.7
207 zope.deprecation==5.0.0
206 zope.deprecation==5.0.0
208 zope.interface==6.3.0
207 zope.interface==6.3.0
209 pyramid-mako==1.1.0
208 pyramid-mako==1.1.0
210 mako==1.2.4
209 mako==1.2.4
211 markupsafe==2.1.2
210 markupsafe==2.1.2
212 pyramid==2.0.2
211 pyramid==2.0.2
213 hupper==1.12
212 hupper==1.12
214 plaster==1.1.2
213 plaster==1.1.2
215 plaster-pastedeploy==1.0.1
214 plaster-pastedeploy==1.0.1
216 pastedeploy==3.1.0
215 pastedeploy==3.1.0
217 plaster==1.1.2
216 plaster==1.1.2
218 translationstring==1.4
217 translationstring==1.4
219 venusian==3.0.0
218 venusian==3.0.0
220 webob==1.8.7
219 webob==1.8.7
221 zope.deprecation==5.0.0
220 zope.deprecation==5.0.0
222 zope.interface==6.3.0
221 zope.interface==6.3.0
223 pyramid-mailer==0.15.1
222 pyramid-mailer==0.15.1
224 pyramid==2.0.2
223 pyramid==2.0.2
225 hupper==1.12
224 hupper==1.12
226 plaster==1.1.2
225 plaster==1.1.2
227 plaster-pastedeploy==1.0.1
226 plaster-pastedeploy==1.0.1
228 pastedeploy==3.1.0
227 pastedeploy==3.1.0
229 plaster==1.1.2
228 plaster==1.1.2
230 translationstring==1.4
229 translationstring==1.4
231 venusian==3.0.0
230 venusian==3.0.0
232 webob==1.8.7
231 webob==1.8.7
233 zope.deprecation==5.0.0
232 zope.deprecation==5.0.0
234 zope.interface==6.3.0
233 zope.interface==6.3.0
235 repoze.sendmail==4.4.1
234 repoze.sendmail==4.4.1
236 transaction==3.1.0
235 transaction==3.1.0
237 zope.interface==6.3.0
236 zope.interface==6.3.0
238 zope.interface==6.3.0
237 zope.interface==6.3.0
239 transaction==3.1.0
238 transaction==3.1.0
240 zope.interface==6.3.0
239 zope.interface==6.3.0
241 python-ldap==3.4.3
240 python-ldap==3.4.3
242 pyasn1==0.4.8
241 pyasn1==0.4.8
243 pyasn1-modules==0.2.8
242 pyasn1-modules==0.2.8
244 pyasn1==0.4.8
243 pyasn1==0.4.8
245 python-memcached==1.59
244 python-memcached==1.59
246 six==1.16.0
245 six==1.16.0
247 python-pam==2.0.2
246 python-pam==2.0.2
248 python3-saml==1.15.0
247 python3-saml==1.15.0
249 isodate==0.6.1
248 isodate==0.6.1
250 six==1.16.0
249 six==1.16.0
251 lxml==4.9.3
250 lxml==4.9.3
252 xmlsec==1.3.13
251 xmlsec==1.3.13
253 lxml==4.9.3
252 lxml==4.9.3
254 pyyaml==6.0.1
253 pyyaml==6.0.1
255 redis==5.0.4
254 redis==5.0.4
256 async-timeout==4.0.3
255 async-timeout==4.0.3
257 regex==2022.10.31
256 regex==2022.10.31
258 routes==2.5.1
257 routes==2.5.1
259 repoze.lru==0.7
258 repoze.lru==0.7
260 six==1.16.0
259 six==1.16.0
261 simplejson==3.19.2
260 simplejson==3.19.2
262 sshpubkeys==3.3.1
261 sshpubkeys==3.3.1
263 cryptography==40.0.2
262 cryptography==40.0.2
264 cffi==1.16.0
263 cffi==1.16.0
265 pycparser==2.21
264 pycparser==2.21
266 ecdsa==0.18.0
265 ecdsa==0.18.0
267 six==1.16.0
266 six==1.16.0
268 sqlalchemy==1.4.52
267 sqlalchemy==1.4.52
269 greenlet==3.0.3
268 greenlet==3.0.3
270 typing_extensions==4.9.0
269 typing_extensions==4.9.0
271 supervisor==4.2.5
270 supervisor==4.2.5
272 tzlocal==4.3
271 tzlocal==4.3
273 pytz-deprecation-shim==0.1.0.post0
272 pytz-deprecation-shim==0.1.0.post0
274 tzdata==2024.1
273 tzdata==2024.1
275 tempita==0.5.2
274 tempita==0.5.2
276 unidecode==1.3.6
275 unidecode==1.3.6
277 urlobject==2.4.3
276 urlobject==2.4.3
278 waitress==3.0.0
277 waitress==3.0.0
279 webhelpers2==2.1
278 webhelpers2==2.1
280 markupsafe==2.1.2
279 markupsafe==2.1.2
281 six==1.16.0
280 six==1.16.0
282 whoosh==2.7.4
281 whoosh==2.7.4
283 zope.cachedescriptors==5.0.0
282 zope.cachedescriptors==5.0.0
284 qrcode==7.4.2
283 qrcode==7.4.2
285
284
286 ## uncomment to add the debug libraries
285 ## uncomment to add the debug libraries
287 #-r requirements_debug.txt
286 #-r requirements_debug.txt
@@ -1,1708 +1,1715 b''
1 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import itertools
19 import itertools
20 import logging
20 import logging
21 import os
21 import os
22 import collections
22 import collections
23 import urllib.request
23 import urllib.request
24 import urllib.parse
24 import urllib.parse
25 import urllib.error
25 import urllib.error
26 import pathlib
26 import pathlib
27 import time
28 import random
27
29
28 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
30 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
29
31
30 from pyramid.renderers import render
32 from pyramid.renderers import render
31 from pyramid.response import Response
33 from pyramid.response import Response
32
34
33 import rhodecode
35 import rhodecode
34 from rhodecode.apps._base import RepoAppView
36 from rhodecode.apps._base import RepoAppView
35
37
36
38
37 from rhodecode.lib import diffs, helpers as h, rc_cache
39 from rhodecode.lib import diffs, helpers as h, rc_cache
38 from rhodecode.lib import audit_logger
40 from rhodecode.lib import audit_logger
39 from rhodecode.lib.hash_utils import sha1_safe
41 from rhodecode.lib.hash_utils import sha1_safe
40 from rhodecode.lib.rc_cache.archive_cache import get_archival_cache_store, get_archival_config, ReentrantLock
42 from rhodecode.lib.rc_cache.archive_cache import (
43 get_archival_cache_store, get_archival_config, ArchiveCacheLock, archive_iterator)
41 from rhodecode.lib.str_utils import safe_bytes, convert_special_chars
44 from rhodecode.lib.str_utils import safe_bytes, convert_special_chars
42 from rhodecode.lib.view_utils import parse_path_ref
45 from rhodecode.lib.view_utils import parse_path_ref
43 from rhodecode.lib.exceptions import NonRelativePathError
46 from rhodecode.lib.exceptions import NonRelativePathError
44 from rhodecode.lib.codeblocks import (
47 from rhodecode.lib.codeblocks import (
45 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
48 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
46 from rhodecode.lib.utils2 import convert_line_endings, detect_mode
49 from rhodecode.lib.utils2 import convert_line_endings, detect_mode
47 from rhodecode.lib.type_utils import str2bool
50 from rhodecode.lib.type_utils import str2bool
48 from rhodecode.lib.str_utils import safe_str, safe_int
51 from rhodecode.lib.str_utils import safe_str, safe_int
49 from rhodecode.lib.auth import (
52 from rhodecode.lib.auth import (
50 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
53 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
51 from rhodecode.lib.vcs import path as vcspath
54 from rhodecode.lib.vcs import path as vcspath
52 from rhodecode.lib.vcs.backends.base import EmptyCommit
55 from rhodecode.lib.vcs.backends.base import EmptyCommit
53 from rhodecode.lib.vcs.conf import settings
56 from rhodecode.lib.vcs.conf import settings
54 from rhodecode.lib.vcs.nodes import FileNode
57 from rhodecode.lib.vcs.nodes import FileNode
55 from rhodecode.lib.vcs.exceptions import (
58 from rhodecode.lib.vcs.exceptions import (
56 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
59 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
57 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
60 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
58 NodeDoesNotExistError, CommitError, NodeError)
61 NodeDoesNotExistError, CommitError, NodeError)
59
62
60 from rhodecode.model.scm import ScmModel
63 from rhodecode.model.scm import ScmModel
61 from rhodecode.model.db import Repository
64 from rhodecode.model.db import Repository
62
65
63 log = logging.getLogger(__name__)
66 log = logging.getLogger(__name__)
64
67
65
68
66 def get_archive_name(db_repo_id, db_repo_name, commit_sha, ext, subrepos=False, path_sha='', with_hash=True):
69 def get_archive_name(db_repo_id, db_repo_name, commit_sha, ext, subrepos=False, path_sha='', with_hash=True):
67 # original backward compat name of archive
70 # original backward compat name of archive
68 clean_name = safe_str(convert_special_chars(db_repo_name).replace('/', '_'))
71 clean_name = safe_str(convert_special_chars(db_repo_name).replace('/', '_'))
69
72
70 # e.g vcsserver-id-abcd-sub-1-abcfdef-archive-all.zip
73 # e.g vcsserver-id-abcd-sub-1-abcfdef-archive-all.zip
71 # vcsserver-id-abcd-sub-0-abcfdef-COMMIT_SHA-PATH_SHA.zip
74 # vcsserver-id-abcd-sub-0-abcfdef-COMMIT_SHA-PATH_SHA.zip
72 id_sha = sha1_safe(str(db_repo_id))[:4]
75 id_sha = sha1_safe(str(db_repo_id))[:4]
73 sub_repo = 'sub-1' if subrepos else 'sub-0'
76 sub_repo = 'sub-1' if subrepos else 'sub-0'
74 commit = commit_sha if with_hash else 'archive'
77 commit = commit_sha if with_hash else 'archive'
75 path_marker = (path_sha if with_hash else '') or 'all'
78 path_marker = (path_sha if with_hash else '') or 'all'
76 archive_name = f'{clean_name}-id-{id_sha}-{sub_repo}-{commit}-{path_marker}{ext}'
79 archive_name = f'{clean_name}-id-{id_sha}-{sub_repo}-{commit}-{path_marker}{ext}'
77
80
78 return archive_name
81 return archive_name
79
82
80
83
81 def get_path_sha(at_path):
84 def get_path_sha(at_path):
82 return safe_str(sha1_safe(at_path)[:8])
85 return safe_str(sha1_safe(at_path)[:8])
83
86
84
87
85 def _get_archive_spec(fname):
88 def _get_archive_spec(fname):
86 log.debug('Detecting archive spec for: `%s`', fname)
89 log.debug('Detecting archive spec for: `%s`', fname)
87
90
88 fileformat = None
91 fileformat = None
89 ext = None
92 ext = None
90 content_type = None
93 content_type = None
91 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
94 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
92
95
93 if fname.endswith(extension):
96 if fname.endswith(extension):
94 fileformat = a_type
97 fileformat = a_type
95 log.debug('archive is of type: %s', fileformat)
98 log.debug('archive is of type: %s', fileformat)
96 ext = extension
99 ext = extension
97 break
100 break
98
101
99 if not fileformat:
102 if not fileformat:
100 raise ValueError()
103 raise ValueError()
101
104
102 # left over part of whole fname is the commit
105 # left over part of whole fname is the commit
103 commit_id = fname[:-len(ext)]
106 commit_id = fname[:-len(ext)]
104
107
105 return commit_id, ext, fileformat, content_type
108 return commit_id, ext, fileformat, content_type
106
109
107
110
108 class RepoFilesView(RepoAppView):
111 class RepoFilesView(RepoAppView):
109
112
110 @staticmethod
113 @staticmethod
111 def adjust_file_path_for_svn(f_path, repo):
114 def adjust_file_path_for_svn(f_path, repo):
112 """
115 """
113 Computes the relative path of `f_path`.
116 Computes the relative path of `f_path`.
114
117
115 This is mainly based on prefix matching of the recognized tags and
118 This is mainly based on prefix matching of the recognized tags and
116 branches in the underlying repository.
119 branches in the underlying repository.
117 """
120 """
118 tags_and_branches = itertools.chain(
121 tags_and_branches = itertools.chain(
119 repo.branches.keys(),
122 repo.branches.keys(),
120 repo.tags.keys())
123 repo.tags.keys())
121 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
124 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
122
125
123 for name in tags_and_branches:
126 for name in tags_and_branches:
124 if f_path.startswith(f'{name}/'):
127 if f_path.startswith(f'{name}/'):
125 f_path = vcspath.relpath(f_path, name)
128 f_path = vcspath.relpath(f_path, name)
126 break
129 break
127 return f_path
130 return f_path
128
131
129 def load_default_context(self):
132 def load_default_context(self):
130 c = self._get_local_tmpl_context(include_app_defaults=True)
133 c = self._get_local_tmpl_context(include_app_defaults=True)
131 c.rhodecode_repo = self.rhodecode_vcs_repo
134 c.rhodecode_repo = self.rhodecode_vcs_repo
132 c.enable_downloads = self.db_repo.enable_downloads
135 c.enable_downloads = self.db_repo.enable_downloads
133 return c
136 return c
134
137
135 def _ensure_not_locked(self, commit_id='tip'):
138 def _ensure_not_locked(self, commit_id='tip'):
136 _ = self.request.translate
139 _ = self.request.translate
137
140
138 repo = self.db_repo
141 repo = self.db_repo
139 if repo.enable_locking and repo.locked[0]:
142 if repo.enable_locking and repo.locked[0]:
140 h.flash(_('This repository has been locked by %s on %s')
143 h.flash(_('This repository has been locked by %s on %s')
141 % (h.person_by_id(repo.locked[0]),
144 % (h.person_by_id(repo.locked[0]),
142 h.format_date(h.time_to_datetime(repo.locked[1]))),
145 h.format_date(h.time_to_datetime(repo.locked[1]))),
143 'warning')
146 'warning')
144 files_url = h.route_path(
147 files_url = h.route_path(
145 'repo_files:default_path',
148 'repo_files:default_path',
146 repo_name=self.db_repo_name, commit_id=commit_id)
149 repo_name=self.db_repo_name, commit_id=commit_id)
147 raise HTTPFound(files_url)
150 raise HTTPFound(files_url)
148
151
149 def forbid_non_head(self, is_head, f_path, commit_id='tip', json_mode=False):
152 def forbid_non_head(self, is_head, f_path, commit_id='tip', json_mode=False):
150 _ = self.request.translate
153 _ = self.request.translate
151
154
152 if not is_head:
155 if not is_head:
153 message = _('Cannot modify file. '
156 message = _('Cannot modify file. '
154 'Given commit `{}` is not head of a branch.').format(commit_id)
157 'Given commit `{}` is not head of a branch.').format(commit_id)
155 h.flash(message, category='warning')
158 h.flash(message, category='warning')
156
159
157 if json_mode:
160 if json_mode:
158 return message
161 return message
159
162
160 files_url = h.route_path(
163 files_url = h.route_path(
161 'repo_files', repo_name=self.db_repo_name, commit_id=commit_id,
164 'repo_files', repo_name=self.db_repo_name, commit_id=commit_id,
162 f_path=f_path)
165 f_path=f_path)
163 raise HTTPFound(files_url)
166 raise HTTPFound(files_url)
164
167
165 def check_branch_permission(self, branch_name, commit_id='tip', json_mode=False):
168 def check_branch_permission(self, branch_name, commit_id='tip', json_mode=False):
166 _ = self.request.translate
169 _ = self.request.translate
167
170
168 rule, branch_perm = self._rhodecode_user.get_rule_and_branch_permission(
171 rule, branch_perm = self._rhodecode_user.get_rule_and_branch_permission(
169 self.db_repo_name, branch_name)
172 self.db_repo_name, branch_name)
170 if branch_perm and branch_perm not in ['branch.push', 'branch.push_force']:
173 if branch_perm and branch_perm not in ['branch.push', 'branch.push_force']:
171 message = _('Branch `{}` changes forbidden by rule {}.').format(
174 message = _('Branch `{}` changes forbidden by rule {}.').format(
172 h.escape(branch_name), h.escape(rule))
175 h.escape(branch_name), h.escape(rule))
173 h.flash(message, 'warning')
176 h.flash(message, 'warning')
174
177
175 if json_mode:
178 if json_mode:
176 return message
179 return message
177
180
178 files_url = h.route_path(
181 files_url = h.route_path(
179 'repo_files:default_path', repo_name=self.db_repo_name, commit_id=commit_id)
182 'repo_files:default_path', repo_name=self.db_repo_name, commit_id=commit_id)
180
183
181 raise HTTPFound(files_url)
184 raise HTTPFound(files_url)
182
185
183 def _get_commit_and_path(self):
186 def _get_commit_and_path(self):
184 default_commit_id = self.db_repo.landing_ref_name
187 default_commit_id = self.db_repo.landing_ref_name
185 default_f_path = '/'
188 default_f_path = '/'
186
189
187 commit_id = self.request.matchdict.get(
190 commit_id = self.request.matchdict.get(
188 'commit_id', default_commit_id)
191 'commit_id', default_commit_id)
189 f_path = self._get_f_path(self.request.matchdict, default_f_path)
192 f_path = self._get_f_path(self.request.matchdict, default_f_path)
190 return commit_id, f_path
193 return commit_id, f_path
191
194
192 def _get_default_encoding(self, c):
195 def _get_default_encoding(self, c):
193 enc_list = getattr(c, 'default_encodings', [])
196 enc_list = getattr(c, 'default_encodings', [])
194 return enc_list[0] if enc_list else 'UTF-8'
197 return enc_list[0] if enc_list else 'UTF-8'
195
198
196 def _get_commit_or_redirect(self, commit_id, redirect_after=True):
199 def _get_commit_or_redirect(self, commit_id, redirect_after=True):
197 """
200 """
198 This is a safe way to get commit. If an error occurs it redirects to
201 This is a safe way to get commit. If an error occurs it redirects to
199 tip with proper message
202 tip with proper message
200
203
201 :param commit_id: id of commit to fetch
204 :param commit_id: id of commit to fetch
202 :param redirect_after: toggle redirection
205 :param redirect_after: toggle redirection
203 """
206 """
204 _ = self.request.translate
207 _ = self.request.translate
205
208
206 try:
209 try:
207 return self.rhodecode_vcs_repo.get_commit(commit_id)
210 return self.rhodecode_vcs_repo.get_commit(commit_id)
208 except EmptyRepositoryError:
211 except EmptyRepositoryError:
209 if not redirect_after:
212 if not redirect_after:
210 return None
213 return None
211
214
212 add_new = upload_new = ""
215 add_new = upload_new = ""
213 if h.HasRepoPermissionAny(
216 if h.HasRepoPermissionAny(
214 'repository.write', 'repository.admin')(self.db_repo_name):
217 'repository.write', 'repository.admin')(self.db_repo_name):
215 _url = h.route_path(
218 _url = h.route_path(
216 'repo_files_add_file',
219 'repo_files_add_file',
217 repo_name=self.db_repo_name, commit_id=0, f_path='')
220 repo_name=self.db_repo_name, commit_id=0, f_path='')
218 add_new = h.link_to(
221 add_new = h.link_to(
219 _('add a new file'), _url, class_="alert-link")
222 _('add a new file'), _url, class_="alert-link")
220
223
221 _url_upld = h.route_path(
224 _url_upld = h.route_path(
222 'repo_files_upload_file',
225 'repo_files_upload_file',
223 repo_name=self.db_repo_name, commit_id=0, f_path='')
226 repo_name=self.db_repo_name, commit_id=0, f_path='')
224 upload_new = h.link_to(
227 upload_new = h.link_to(
225 _('upload a new file'), _url_upld, class_="alert-link")
228 _('upload a new file'), _url_upld, class_="alert-link")
226
229
227 h.flash(h.literal(
230 h.flash(h.literal(
228 _('There are no files yet. Click here to %s or %s.') % (add_new, upload_new)), category='warning')
231 _('There are no files yet. Click here to %s or %s.') % (add_new, upload_new)), category='warning')
229 raise HTTPFound(
232 raise HTTPFound(
230 h.route_path('repo_summary', repo_name=self.db_repo_name))
233 h.route_path('repo_summary', repo_name=self.db_repo_name))
231
234
232 except (CommitDoesNotExistError, LookupError) as e:
235 except (CommitDoesNotExistError, LookupError) as e:
233 msg = _('No such commit exists for this repository. Commit: {}').format(commit_id)
236 msg = _('No such commit exists for this repository. Commit: {}').format(commit_id)
234 h.flash(msg, category='error')
237 h.flash(msg, category='error')
235 raise HTTPNotFound()
238 raise HTTPNotFound()
236 except RepositoryError as e:
239 except RepositoryError as e:
237 h.flash(h.escape(safe_str(e)), category='error')
240 h.flash(h.escape(safe_str(e)), category='error')
238 raise HTTPNotFound()
241 raise HTTPNotFound()
239
242
240 def _get_filenode_or_redirect(self, commit_obj, path, pre_load=None):
243 def _get_filenode_or_redirect(self, commit_obj, path, pre_load=None):
241 """
244 """
242 Returns file_node, if error occurs or given path is directory,
245 Returns file_node, if error occurs or given path is directory,
243 it'll redirect to top level path
246 it'll redirect to top level path
244 """
247 """
245 _ = self.request.translate
248 _ = self.request.translate
246
249
247 try:
250 try:
248 file_node = commit_obj.get_node(path, pre_load=pre_load)
251 file_node = commit_obj.get_node(path, pre_load=pre_load)
249 if file_node.is_dir():
252 if file_node.is_dir():
250 raise RepositoryError('The given path is a directory')
253 raise RepositoryError('The given path is a directory')
251 except CommitDoesNotExistError:
254 except CommitDoesNotExistError:
252 log.exception('No such commit exists for this repository')
255 log.exception('No such commit exists for this repository')
253 h.flash(_('No such commit exists for this repository'), category='error')
256 h.flash(_('No such commit exists for this repository'), category='error')
254 raise HTTPNotFound()
257 raise HTTPNotFound()
255 except RepositoryError as e:
258 except RepositoryError as e:
256 log.warning('Repository error while fetching filenode `%s`. Err:%s', path, e)
259 log.warning('Repository error while fetching filenode `%s`. Err:%s', path, e)
257 h.flash(h.escape(safe_str(e)), category='error')
260 h.flash(h.escape(safe_str(e)), category='error')
258 raise HTTPNotFound()
261 raise HTTPNotFound()
259
262
260 return file_node
263 return file_node
261
264
262 def _is_valid_head(self, commit_id, repo, landing_ref):
265 def _is_valid_head(self, commit_id, repo, landing_ref):
263 branch_name = sha_commit_id = ''
266 branch_name = sha_commit_id = ''
264 is_head = False
267 is_head = False
265 log.debug('Checking if commit_id `%s` is a head for %s.', commit_id, repo)
268 log.debug('Checking if commit_id `%s` is a head for %s.', commit_id, repo)
266
269
267 for _branch_name, branch_commit_id in repo.branches.items():
270 for _branch_name, branch_commit_id in repo.branches.items():
268 # simple case we pass in branch name, it's a HEAD
271 # simple case we pass in branch name, it's a HEAD
269 if commit_id == _branch_name:
272 if commit_id == _branch_name:
270 is_head = True
273 is_head = True
271 branch_name = _branch_name
274 branch_name = _branch_name
272 sha_commit_id = branch_commit_id
275 sha_commit_id = branch_commit_id
273 break
276 break
274 # case when we pass in full sha commit_id, which is a head
277 # case when we pass in full sha commit_id, which is a head
275 elif commit_id == branch_commit_id:
278 elif commit_id == branch_commit_id:
276 is_head = True
279 is_head = True
277 branch_name = _branch_name
280 branch_name = _branch_name
278 sha_commit_id = branch_commit_id
281 sha_commit_id = branch_commit_id
279 break
282 break
280
283
281 if h.is_svn(repo) and not repo.is_empty():
284 if h.is_svn(repo) and not repo.is_empty():
282 # Note: Subversion only has one head.
285 # Note: Subversion only has one head.
283 if commit_id == repo.get_commit(commit_idx=-1).raw_id:
286 if commit_id == repo.get_commit(commit_idx=-1).raw_id:
284 is_head = True
287 is_head = True
285 return branch_name, sha_commit_id, is_head
288 return branch_name, sha_commit_id, is_head
286
289
287 # checked branches, means we only need to try to get the branch/commit_sha
290 # checked branches, means we only need to try to get the branch/commit_sha
288 if repo.is_empty():
291 if repo.is_empty():
289 is_head = True
292 is_head = True
290 branch_name = landing_ref
293 branch_name = landing_ref
291 sha_commit_id = EmptyCommit().raw_id
294 sha_commit_id = EmptyCommit().raw_id
292 else:
295 else:
293 commit = repo.get_commit(commit_id=commit_id)
296 commit = repo.get_commit(commit_id=commit_id)
294 if commit:
297 if commit:
295 branch_name = commit.branch
298 branch_name = commit.branch
296 sha_commit_id = commit.raw_id
299 sha_commit_id = commit.raw_id
297
300
298 return branch_name, sha_commit_id, is_head
301 return branch_name, sha_commit_id, is_head
299
302
300 def _get_tree_at_commit(self, c, commit_id, f_path, full_load=False, at_rev=None):
303 def _get_tree_at_commit(self, c, commit_id, f_path, full_load=False, at_rev=None):
301
304
302 repo_id = self.db_repo.repo_id
305 repo_id = self.db_repo.repo_id
303 force_recache = self.get_recache_flag()
306 force_recache = self.get_recache_flag()
304
307
305 cache_seconds = safe_int(
308 cache_seconds = safe_int(
306 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
309 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
307 cache_on = not force_recache and cache_seconds > 0
310 cache_on = not force_recache and cache_seconds > 0
308 log.debug(
311 log.debug(
309 'Computing FILE TREE for repo_id %s commit_id `%s` and path `%s`'
312 'Computing FILE TREE for repo_id %s commit_id `%s` and path `%s`'
310 'with caching: %s[TTL: %ss]' % (
313 'with caching: %s[TTL: %ss]' % (
311 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
314 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
312
315
313 cache_namespace_uid = f'repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}'
316 cache_namespace_uid = f'repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}'
314 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
317 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
315
318
316 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
319 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
317 def compute_file_tree(_name_hash, _repo_id, _commit_id, _f_path, _full_load, _at_rev):
320 def compute_file_tree(_name_hash, _repo_id, _commit_id, _f_path, _full_load, _at_rev):
318 log.debug('Generating cached file tree at for repo_id: %s, %s, %s',
321 log.debug('Generating cached file tree at for repo_id: %s, %s, %s',
319 _repo_id, _commit_id, _f_path)
322 _repo_id, _commit_id, _f_path)
320
323
321 c.full_load = _full_load
324 c.full_load = _full_load
322 return render(
325 return render(
323 'rhodecode:templates/files/files_browser_tree.mako',
326 'rhodecode:templates/files/files_browser_tree.mako',
324 self._get_template_context(c), self.request, _at_rev)
327 self._get_template_context(c), self.request, _at_rev)
325
328
326 return compute_file_tree(
329 return compute_file_tree(
327 self.db_repo.repo_name_hash, self.db_repo.repo_id, commit_id, f_path, full_load, at_rev)
330 self.db_repo.repo_name_hash, self.db_repo.repo_id, commit_id, f_path, full_load, at_rev)
328
331
329 def create_pure_path(self, *parts):
332 def create_pure_path(self, *parts):
330 # Split paths and sanitize them, removing any ../ etc
333 # Split paths and sanitize them, removing any ../ etc
331 sanitized_path = [
334 sanitized_path = [
332 x for x in pathlib.PurePath(*parts).parts
335 x for x in pathlib.PurePath(*parts).parts
333 if x not in ['.', '..']]
336 if x not in ['.', '..']]
334
337
335 pure_path = pathlib.PurePath(*sanitized_path)
338 pure_path = pathlib.PurePath(*sanitized_path)
336 return pure_path
339 return pure_path
337
340
338 def _is_lf_enabled(self, target_repo):
341 def _is_lf_enabled(self, target_repo):
339 lf_enabled = False
342 lf_enabled = False
340
343
341 lf_key_for_vcs_map = {
344 lf_key_for_vcs_map = {
342 'hg': 'extensions_largefiles',
345 'hg': 'extensions_largefiles',
343 'git': 'vcs_git_lfs_enabled'
346 'git': 'vcs_git_lfs_enabled'
344 }
347 }
345
348
346 lf_key_for_vcs = lf_key_for_vcs_map.get(target_repo.repo_type)
349 lf_key_for_vcs = lf_key_for_vcs_map.get(target_repo.repo_type)
347
350
348 if lf_key_for_vcs:
351 if lf_key_for_vcs:
349 lf_enabled = self._get_repo_setting(target_repo, lf_key_for_vcs)
352 lf_enabled = self._get_repo_setting(target_repo, lf_key_for_vcs)
350
353
351 return lf_enabled
354 return lf_enabled
352
355
353 @LoginRequired()
356 @LoginRequired()
354 @HasRepoPermissionAnyDecorator(
357 @HasRepoPermissionAnyDecorator(
355 'repository.read', 'repository.write', 'repository.admin')
358 'repository.read', 'repository.write', 'repository.admin')
356 def repo_archivefile(self):
359 def repo_archivefile(self):
357 # archive cache config
360 # archive cache config
358 from rhodecode import CONFIG
361 from rhodecode import CONFIG
359 _ = self.request.translate
362 _ = self.request.translate
360 self.load_default_context()
363 self.load_default_context()
361 default_at_path = '/'
364 default_at_path = '/'
362 fname = self.request.matchdict['fname']
365 fname = self.request.matchdict['fname']
363 subrepos = self.request.GET.get('subrepos') == 'true'
366 subrepos = self.request.GET.get('subrepos') == 'true'
364 with_hash = str2bool(self.request.GET.get('with_hash', '1'))
367 with_hash = str2bool(self.request.GET.get('with_hash', '1'))
365 at_path = self.request.GET.get('at_path') or default_at_path
368 at_path = self.request.GET.get('at_path') or default_at_path
366
369
367 if not self.db_repo.enable_downloads:
370 if not self.db_repo.enable_downloads:
368 return Response(_('Downloads disabled'))
371 return Response(_('Downloads disabled'))
369
372
370 try:
373 try:
371 commit_id, ext, fileformat, content_type = \
374 commit_id, ext, fileformat, content_type = \
372 _get_archive_spec(fname)
375 _get_archive_spec(fname)
373 except ValueError:
376 except ValueError:
374 return Response(_('Unknown archive type for: `{}`').format(
377 return Response(_('Unknown archive type for: `{}`').format(
375 h.escape(fname)))
378 h.escape(fname)))
376
379
377 try:
380 try:
378 commit = self.rhodecode_vcs_repo.get_commit(commit_id)
381 commit = self.rhodecode_vcs_repo.get_commit(commit_id)
379 except CommitDoesNotExistError:
382 except CommitDoesNotExistError:
380 return Response(_('Unknown commit_id {}').format(
383 return Response(_('Unknown commit_id {}').format(
381 h.escape(commit_id)))
384 h.escape(commit_id)))
382 except EmptyRepositoryError:
385 except EmptyRepositoryError:
383 return Response(_('Empty repository'))
386 return Response(_('Empty repository'))
384
387
385 # we used a ref, or a shorter version, lets redirect client ot use explicit hash
388 # we used a ref, or a shorter version, lets redirect client ot use explicit hash
386 if commit_id != commit.raw_id:
389 if commit_id != commit.raw_id:
387 fname=f'{commit.raw_id}{ext}'
390 fname=f'{commit.raw_id}{ext}'
388 raise HTTPFound(self.request.current_route_path(fname=fname))
391 raise HTTPFound(self.request.current_route_path(fname=fname))
389
392
390 try:
393 try:
391 at_path = commit.get_node(at_path).path or default_at_path
394 at_path = commit.get_node(at_path).path or default_at_path
392 except Exception:
395 except Exception:
393 return Response(_('No node at path {} for this repository').format(h.escape(at_path)))
396 return Response(_('No node at path {} for this repository').format(h.escape(at_path)))
394
397
395 path_sha = get_path_sha(at_path)
398 path_sha = get_path_sha(at_path)
396
399
397 # used for cache etc, consistent unique archive name
400 # used for cache etc, consistent unique archive name
398 archive_name_key = get_archive_name(
401 archive_name_key = get_archive_name(
399 self.db_repo.repo_id, self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
402 self.db_repo.repo_id, self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
400 path_sha=path_sha, with_hash=True)
403 path_sha=path_sha, with_hash=True)
401
404
402 if not with_hash:
405 if not with_hash:
403 path_sha = ''
406 path_sha = ''
404
407
405 # what end client gets served
408 # what end client gets served
406 response_archive_name = get_archive_name(
409 response_archive_name = get_archive_name(
407 self.db_repo.repo_id, self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
410 self.db_repo.repo_id, self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
408 path_sha=path_sha, with_hash=with_hash)
411 path_sha=path_sha, with_hash=with_hash)
409
412
410 # remove extension from our archive directory name
413 # remove extension from our archive directory name
411 archive_dir_name = response_archive_name[:-len(ext)]
414 archive_dir_name = response_archive_name[:-len(ext)]
412
415
413 archive_cache_disable = self.request.GET.get('no_cache')
416 archive_cache_disable = self.request.GET.get('no_cache')
414
417
415 d_cache = get_archival_cache_store(config=CONFIG)
418 d_cache = get_archival_cache_store(config=CONFIG)
416
419
417 # NOTE: we get the config to pass to a call to lazy-init the SAME type of cache on vcsserver
420 # NOTE: we get the config to pass to a call to lazy-init the SAME type of cache on vcsserver
418 d_cache_conf = get_archival_config(config=CONFIG)
421 d_cache_conf = get_archival_config(config=CONFIG)
419
422
423 # This is also a cache key, and lock key
420 reentrant_lock_key = archive_name_key + '.lock'
424 reentrant_lock_key = archive_name_key + '.lock'
421 with ReentrantLock(d_cache, reentrant_lock_key):
425
422 # This is also a cache key
426 use_cached_archive = False
423 use_cached_archive = False
427 if not archive_cache_disable and archive_name_key in d_cache:
424 if not archive_cache_disable and archive_name_key in d_cache:
428 reader, metadata = d_cache.fetch(archive_name_key)
425 reader, tag = d_cache.get(archive_name_key, read=True, tag=True, retry=True)
426 use_cached_archive = True
427 log.debug('Found cached archive as key=%s tag=%s, serving archive from cache reader=%s',
428 archive_name_key, tag, reader.name)
429 else:
430 reader = None
431 log.debug('Archive with key=%s is not yet cached, creating one now...', archive_name_key)
432
429
433 # generate new archive, as previous was not found in the cache
430 use_cached_archive = True
434 if not reader:
431 log.debug('Found cached archive as key=%s tag=%s, serving archive from cache reader=%s',
435
432 archive_name_key, metadata, reader.name)
436 try:
433 else:
437 commit.archive_repo(archive_name_key, archive_dir_name=archive_dir_name,
434 reader = None
438 kind=fileformat, subrepos=subrepos,
435 log.debug('Archive with key=%s is not yet cached, creating one now...', archive_name_key)
439 archive_at_path=at_path, cache_config=d_cache_conf)
440 except ImproperArchiveTypeError:
441 return _('Unknown archive type')
442
443 reader, tag = d_cache.get(archive_name_key, read=True, tag=True, retry=True)
444
436
445 if not reader:
437 if not reader:
446 raise ValueError('archive cache reader is empty, failed to fetch file from distributed archive cache')
438 # generate new archive, as previous was not found in the cache
439 try:
440 with d_cache.get_lock(reentrant_lock_key):
441 try:
442 commit.archive_repo(archive_name_key, archive_dir_name=archive_dir_name,
443 kind=fileformat, subrepos=subrepos,
444 archive_at_path=at_path, cache_config=d_cache_conf)
445 except ImproperArchiveTypeError:
446 return _('Unknown archive type')
447 except ArchiveCacheLock:
448 retry_after = round(random.uniform(0.3, 3.0), 1)
449 time.sleep(retry_after)
447
450
448 def archive_iterator(_reader, block_size: int = 4096*512):
451 location = self.request.url
449 # 4096 * 64 = 64KB
452 response = Response(
450 while 1:
453 f"archive {archive_name_key} generation in progress, Retry-After={retry_after}, Location={location}"
451 data = _reader.read(block_size)
454 )
452 if not data:
455 response.headers["Retry-After"] = str(retry_after)
453 break
456 response.status_code = 307 # temporary redirect
454 yield data
457
458 response.location = location
459 return response
460
461 reader, metadata = d_cache.fetch(archive_name_key)
455
462
456 response = Response(app_iter=archive_iterator(reader))
463 response = Response(app_iter=archive_iterator(reader))
457 response.content_disposition = f'attachment; filename={response_archive_name}'
464 response.content_disposition = f'attachment; filename={response_archive_name}'
458 response.content_type = str(content_type)
465 response.content_type = str(content_type)
459
466
460 try:
467 try:
461 return response
468 return response
462 finally:
469 finally:
463 # store download action
470 # store download action
464 audit_logger.store_web(
471 audit_logger.store_web(
465 'repo.archive.download', action_data={
472 'repo.archive.download', action_data={
466 'user_agent': self.request.user_agent,
473 'user_agent': self.request.user_agent,
467 'archive_name': archive_name_key,
474 'archive_name': archive_name_key,
468 'archive_spec': fname,
475 'archive_spec': fname,
469 'archive_cached': use_cached_archive},
476 'archive_cached': use_cached_archive},
470 user=self._rhodecode_user,
477 user=self._rhodecode_user,
471 repo=self.db_repo,
478 repo=self.db_repo,
472 commit=True
479 commit=True
473 )
480 )
474
481
475 def _get_file_node(self, commit_id, f_path):
482 def _get_file_node(self, commit_id, f_path):
476 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
483 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
477 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
484 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
478 try:
485 try:
479 node = commit.get_node(f_path)
486 node = commit.get_node(f_path)
480 if node.is_dir():
487 if node.is_dir():
481 raise NodeError(f'{node} path is a {type(node)} not a file')
488 raise NodeError(f'{node} path is a {type(node)} not a file')
482 except NodeDoesNotExistError:
489 except NodeDoesNotExistError:
483 commit = EmptyCommit(
490 commit = EmptyCommit(
484 commit_id=commit_id,
491 commit_id=commit_id,
485 idx=commit.idx,
492 idx=commit.idx,
486 repo=commit.repository,
493 repo=commit.repository,
487 alias=commit.repository.alias,
494 alias=commit.repository.alias,
488 message=commit.message,
495 message=commit.message,
489 author=commit.author,
496 author=commit.author,
490 date=commit.date)
497 date=commit.date)
491 node = FileNode(safe_bytes(f_path), b'', commit=commit)
498 node = FileNode(safe_bytes(f_path), b'', commit=commit)
492 else:
499 else:
493 commit = EmptyCommit(
500 commit = EmptyCommit(
494 repo=self.rhodecode_vcs_repo,
501 repo=self.rhodecode_vcs_repo,
495 alias=self.rhodecode_vcs_repo.alias)
502 alias=self.rhodecode_vcs_repo.alias)
496 node = FileNode(safe_bytes(f_path), b'', commit=commit)
503 node = FileNode(safe_bytes(f_path), b'', commit=commit)
497 return node
504 return node
498
505
499 @LoginRequired()
506 @LoginRequired()
500 @HasRepoPermissionAnyDecorator(
507 @HasRepoPermissionAnyDecorator(
501 'repository.read', 'repository.write', 'repository.admin')
508 'repository.read', 'repository.write', 'repository.admin')
502 def repo_files_diff(self):
509 def repo_files_diff(self):
503 c = self.load_default_context()
510 c = self.load_default_context()
504 f_path = self._get_f_path(self.request.matchdict)
511 f_path = self._get_f_path(self.request.matchdict)
505 diff1 = self.request.GET.get('diff1', '')
512 diff1 = self.request.GET.get('diff1', '')
506 diff2 = self.request.GET.get('diff2', '')
513 diff2 = self.request.GET.get('diff2', '')
507
514
508 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
515 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
509
516
510 ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
517 ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
511 line_context = self.request.GET.get('context', 3)
518 line_context = self.request.GET.get('context', 3)
512
519
513 if not any((diff1, diff2)):
520 if not any((diff1, diff2)):
514 h.flash(
521 h.flash(
515 'Need query parameter "diff1" or "diff2" to generate a diff.',
522 'Need query parameter "diff1" or "diff2" to generate a diff.',
516 category='error')
523 category='error')
517 raise HTTPBadRequest()
524 raise HTTPBadRequest()
518
525
519 c.action = self.request.GET.get('diff')
526 c.action = self.request.GET.get('diff')
520 if c.action not in ['download', 'raw']:
527 if c.action not in ['download', 'raw']:
521 compare_url = h.route_path(
528 compare_url = h.route_path(
522 'repo_compare',
529 'repo_compare',
523 repo_name=self.db_repo_name,
530 repo_name=self.db_repo_name,
524 source_ref_type='rev',
531 source_ref_type='rev',
525 source_ref=diff1,
532 source_ref=diff1,
526 target_repo=self.db_repo_name,
533 target_repo=self.db_repo_name,
527 target_ref_type='rev',
534 target_ref_type='rev',
528 target_ref=diff2,
535 target_ref=diff2,
529 _query=dict(f_path=f_path))
536 _query=dict(f_path=f_path))
530 # redirect to new view if we render diff
537 # redirect to new view if we render diff
531 raise HTTPFound(compare_url)
538 raise HTTPFound(compare_url)
532
539
533 try:
540 try:
534 node1 = self._get_file_node(diff1, path1)
541 node1 = self._get_file_node(diff1, path1)
535 node2 = self._get_file_node(diff2, f_path)
542 node2 = self._get_file_node(diff2, f_path)
536 except (RepositoryError, NodeError):
543 except (RepositoryError, NodeError):
537 log.exception("Exception while trying to get node from repository")
544 log.exception("Exception while trying to get node from repository")
538 raise HTTPFound(
545 raise HTTPFound(
539 h.route_path('repo_files', repo_name=self.db_repo_name,
546 h.route_path('repo_files', repo_name=self.db_repo_name,
540 commit_id='tip', f_path=f_path))
547 commit_id='tip', f_path=f_path))
541
548
542 if all(isinstance(node.commit, EmptyCommit)
549 if all(isinstance(node.commit, EmptyCommit)
543 for node in (node1, node2)):
550 for node in (node1, node2)):
544 raise HTTPNotFound()
551 raise HTTPNotFound()
545
552
546 c.commit_1 = node1.commit
553 c.commit_1 = node1.commit
547 c.commit_2 = node2.commit
554 c.commit_2 = node2.commit
548
555
549 if c.action == 'download':
556 if c.action == 'download':
550 _diff = diffs.get_gitdiff(node1, node2,
557 _diff = diffs.get_gitdiff(node1, node2,
551 ignore_whitespace=ignore_whitespace,
558 ignore_whitespace=ignore_whitespace,
552 context=line_context)
559 context=line_context)
553 # NOTE: this was using diff_format='gitdiff'
560 # NOTE: this was using diff_format='gitdiff'
554 diff = diffs.DiffProcessor(_diff, diff_format='newdiff')
561 diff = diffs.DiffProcessor(_diff, diff_format='newdiff')
555
562
556 response = Response(self.path_filter.get_raw_patch(diff))
563 response = Response(self.path_filter.get_raw_patch(diff))
557 response.content_type = 'text/plain'
564 response.content_type = 'text/plain'
558 response.content_disposition = (
565 response.content_disposition = (
559 f'attachment; filename={f_path}_{diff1}_vs_{diff2}.diff'
566 f'attachment; filename={f_path}_{diff1}_vs_{diff2}.diff'
560 )
567 )
561 charset = self._get_default_encoding(c)
568 charset = self._get_default_encoding(c)
562 if charset:
569 if charset:
563 response.charset = charset
570 response.charset = charset
564 return response
571 return response
565
572
566 elif c.action == 'raw':
573 elif c.action == 'raw':
567 _diff = diffs.get_gitdiff(node1, node2,
574 _diff = diffs.get_gitdiff(node1, node2,
568 ignore_whitespace=ignore_whitespace,
575 ignore_whitespace=ignore_whitespace,
569 context=line_context)
576 context=line_context)
570 # NOTE: this was using diff_format='gitdiff'
577 # NOTE: this was using diff_format='gitdiff'
571 diff = diffs.DiffProcessor(_diff, diff_format='newdiff')
578 diff = diffs.DiffProcessor(_diff, diff_format='newdiff')
572
579
573 response = Response(self.path_filter.get_raw_patch(diff))
580 response = Response(self.path_filter.get_raw_patch(diff))
574 response.content_type = 'text/plain'
581 response.content_type = 'text/plain'
575 charset = self._get_default_encoding(c)
582 charset = self._get_default_encoding(c)
576 if charset:
583 if charset:
577 response.charset = charset
584 response.charset = charset
578 return response
585 return response
579
586
580 # in case we ever end up here
587 # in case we ever end up here
581 raise HTTPNotFound()
588 raise HTTPNotFound()
582
589
583 @LoginRequired()
590 @LoginRequired()
584 @HasRepoPermissionAnyDecorator(
591 @HasRepoPermissionAnyDecorator(
585 'repository.read', 'repository.write', 'repository.admin')
592 'repository.read', 'repository.write', 'repository.admin')
586 def repo_files_diff_2way_redirect(self):
593 def repo_files_diff_2way_redirect(self):
587 """
594 """
588 Kept only to make OLD links work
595 Kept only to make OLD links work
589 """
596 """
590 f_path = self._get_f_path_unchecked(self.request.matchdict)
597 f_path = self._get_f_path_unchecked(self.request.matchdict)
591 diff1 = self.request.GET.get('diff1', '')
598 diff1 = self.request.GET.get('diff1', '')
592 diff2 = self.request.GET.get('diff2', '')
599 diff2 = self.request.GET.get('diff2', '')
593
600
594 if not any((diff1, diff2)):
601 if not any((diff1, diff2)):
595 h.flash(
602 h.flash(
596 'Need query parameter "diff1" or "diff2" to generate a diff.',
603 'Need query parameter "diff1" or "diff2" to generate a diff.',
597 category='error')
604 category='error')
598 raise HTTPBadRequest()
605 raise HTTPBadRequest()
599
606
600 compare_url = h.route_path(
607 compare_url = h.route_path(
601 'repo_compare',
608 'repo_compare',
602 repo_name=self.db_repo_name,
609 repo_name=self.db_repo_name,
603 source_ref_type='rev',
610 source_ref_type='rev',
604 source_ref=diff1,
611 source_ref=diff1,
605 target_ref_type='rev',
612 target_ref_type='rev',
606 target_ref=diff2,
613 target_ref=diff2,
607 _query=dict(f_path=f_path, diffmode='sideside',
614 _query=dict(f_path=f_path, diffmode='sideside',
608 target_repo=self.db_repo_name,))
615 target_repo=self.db_repo_name,))
609 raise HTTPFound(compare_url)
616 raise HTTPFound(compare_url)
610
617
611 @LoginRequired()
618 @LoginRequired()
612 def repo_files_default_commit_redirect(self):
619 def repo_files_default_commit_redirect(self):
613 """
620 """
614 Special page that redirects to the landing page of files based on the default
621 Special page that redirects to the landing page of files based on the default
615 commit for repository
622 commit for repository
616 """
623 """
617 c = self.load_default_context()
624 c = self.load_default_context()
618 ref_name = c.rhodecode_db_repo.landing_ref_name
625 ref_name = c.rhodecode_db_repo.landing_ref_name
619 landing_url = h.repo_files_by_ref_url(
626 landing_url = h.repo_files_by_ref_url(
620 c.rhodecode_db_repo.repo_name,
627 c.rhodecode_db_repo.repo_name,
621 c.rhodecode_db_repo.repo_type,
628 c.rhodecode_db_repo.repo_type,
622 f_path='',
629 f_path='',
623 ref_name=ref_name,
630 ref_name=ref_name,
624 commit_id='tip',
631 commit_id='tip',
625 query=dict(at=ref_name)
632 query=dict(at=ref_name)
626 )
633 )
627
634
628 raise HTTPFound(landing_url)
635 raise HTTPFound(landing_url)
629
636
630 @LoginRequired()
637 @LoginRequired()
631 @HasRepoPermissionAnyDecorator(
638 @HasRepoPermissionAnyDecorator(
632 'repository.read', 'repository.write', 'repository.admin')
639 'repository.read', 'repository.write', 'repository.admin')
633 def repo_files(self):
640 def repo_files(self):
634 c = self.load_default_context()
641 c = self.load_default_context()
635
642
636 view_name = getattr(self.request.matched_route, 'name', None)
643 view_name = getattr(self.request.matched_route, 'name', None)
637
644
638 c.annotate = view_name == 'repo_files:annotated'
645 c.annotate = view_name == 'repo_files:annotated'
639 # default is false, but .rst/.md files later are auto rendered, we can
646 # default is false, but .rst/.md files later are auto rendered, we can
640 # overwrite auto rendering by setting this GET flag
647 # overwrite auto rendering by setting this GET flag
641 c.renderer = view_name == 'repo_files:rendered' or not self.request.GET.get('no-render', False)
648 c.renderer = view_name == 'repo_files:rendered' or not self.request.GET.get('no-render', False)
642
649
643 commit_id, f_path = self._get_commit_and_path()
650 commit_id, f_path = self._get_commit_and_path()
644
651
645 c.commit = self._get_commit_or_redirect(commit_id)
652 c.commit = self._get_commit_or_redirect(commit_id)
646 c.branch = self.request.GET.get('branch', None)
653 c.branch = self.request.GET.get('branch', None)
647 c.f_path = f_path
654 c.f_path = f_path
648 at_rev = self.request.GET.get('at')
655 at_rev = self.request.GET.get('at')
649
656
650 # files or dirs
657 # files or dirs
651 try:
658 try:
652 c.file = c.commit.get_node(f_path, pre_load=['is_binary', 'size', 'data'])
659 c.file = c.commit.get_node(f_path, pre_load=['is_binary', 'size', 'data'])
653
660
654 c.file_author = True
661 c.file_author = True
655 c.file_tree = ''
662 c.file_tree = ''
656
663
657 # prev link
664 # prev link
658 try:
665 try:
659 prev_commit = c.commit.prev(c.branch)
666 prev_commit = c.commit.prev(c.branch)
660 c.prev_commit = prev_commit
667 c.prev_commit = prev_commit
661 c.url_prev = h.route_path(
668 c.url_prev = h.route_path(
662 'repo_files', repo_name=self.db_repo_name,
669 'repo_files', repo_name=self.db_repo_name,
663 commit_id=prev_commit.raw_id, f_path=f_path)
670 commit_id=prev_commit.raw_id, f_path=f_path)
664 if c.branch:
671 if c.branch:
665 c.url_prev += '?branch=%s' % c.branch
672 c.url_prev += '?branch=%s' % c.branch
666 except (CommitDoesNotExistError, VCSError):
673 except (CommitDoesNotExistError, VCSError):
667 c.url_prev = '#'
674 c.url_prev = '#'
668 c.prev_commit = EmptyCommit()
675 c.prev_commit = EmptyCommit()
669
676
670 # next link
677 # next link
671 try:
678 try:
672 next_commit = c.commit.next(c.branch)
679 next_commit = c.commit.next(c.branch)
673 c.next_commit = next_commit
680 c.next_commit = next_commit
674 c.url_next = h.route_path(
681 c.url_next = h.route_path(
675 'repo_files', repo_name=self.db_repo_name,
682 'repo_files', repo_name=self.db_repo_name,
676 commit_id=next_commit.raw_id, f_path=f_path)
683 commit_id=next_commit.raw_id, f_path=f_path)
677 if c.branch:
684 if c.branch:
678 c.url_next += '?branch=%s' % c.branch
685 c.url_next += '?branch=%s' % c.branch
679 except (CommitDoesNotExistError, VCSError):
686 except (CommitDoesNotExistError, VCSError):
680 c.url_next = '#'
687 c.url_next = '#'
681 c.next_commit = EmptyCommit()
688 c.next_commit = EmptyCommit()
682
689
683 # load file content
690 # load file content
684 if c.file.is_file():
691 if c.file.is_file():
685
692
686 c.lf_node = {}
693 c.lf_node = {}
687
694
688 has_lf_enabled = self._is_lf_enabled(self.db_repo)
695 has_lf_enabled = self._is_lf_enabled(self.db_repo)
689 if has_lf_enabled:
696 if has_lf_enabled:
690 c.lf_node = c.file.get_largefile_node()
697 c.lf_node = c.file.get_largefile_node()
691
698
692 c.file_source_page = 'true'
699 c.file_source_page = 'true'
693 c.file_last_commit = c.file.last_commit
700 c.file_last_commit = c.file.last_commit
694
701
695 c.file_size_too_big = c.file.size > c.visual.cut_off_limit_file
702 c.file_size_too_big = c.file.size > c.visual.cut_off_limit_file
696
703
697 if not (c.file_size_too_big or c.file.is_binary):
704 if not (c.file_size_too_big or c.file.is_binary):
698 if c.annotate: # annotation has precedence over renderer
705 if c.annotate: # annotation has precedence over renderer
699 c.annotated_lines = filenode_as_annotated_lines_tokens(
706 c.annotated_lines = filenode_as_annotated_lines_tokens(
700 c.file
707 c.file
701 )
708 )
702 else:
709 else:
703 c.renderer = (
710 c.renderer = (
704 c.renderer and h.renderer_from_filename(c.file.path)
711 c.renderer and h.renderer_from_filename(c.file.path)
705 )
712 )
706 if not c.renderer:
713 if not c.renderer:
707 c.lines = filenode_as_lines_tokens(c.file)
714 c.lines = filenode_as_lines_tokens(c.file)
708
715
709 _branch_name, _sha_commit_id, is_head = \
716 _branch_name, _sha_commit_id, is_head = \
710 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
717 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
711 landing_ref=self.db_repo.landing_ref_name)
718 landing_ref=self.db_repo.landing_ref_name)
712 c.on_branch_head = is_head
719 c.on_branch_head = is_head
713
720
714 branch = c.commit.branch if (
721 branch = c.commit.branch if (
715 c.commit.branch and '/' not in c.commit.branch) else None
722 c.commit.branch and '/' not in c.commit.branch) else None
716 c.branch_or_raw_id = branch or c.commit.raw_id
723 c.branch_or_raw_id = branch or c.commit.raw_id
717 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
724 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
718
725
719 author = c.file_last_commit.author
726 author = c.file_last_commit.author
720 c.authors = [[
727 c.authors = [[
721 h.email(author),
728 h.email(author),
722 h.person(author, 'username_or_name_or_email'),
729 h.person(author, 'username_or_name_or_email'),
723 1
730 1
724 ]]
731 ]]
725
732
726 else: # load tree content at path
733 else: # load tree content at path
727 c.file_source_page = 'false'
734 c.file_source_page = 'false'
728 c.authors = []
735 c.authors = []
729 # this loads a simple tree without metadata to speed things up
736 # this loads a simple tree without metadata to speed things up
730 # later via ajax we call repo_nodetree_full and fetch whole
737 # later via ajax we call repo_nodetree_full and fetch whole
731 c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path, at_rev=at_rev)
738 c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path, at_rev=at_rev)
732
739
733 c.readme_data, c.readme_file = \
740 c.readme_data, c.readme_file = \
734 self._get_readme_data(self.db_repo, c.visual.default_renderer,
741 self._get_readme_data(self.db_repo, c.visual.default_renderer,
735 c.commit.raw_id, f_path)
742 c.commit.raw_id, f_path)
736
743
737 except RepositoryError as e:
744 except RepositoryError as e:
738 h.flash(h.escape(safe_str(e)), category='error')
745 h.flash(h.escape(safe_str(e)), category='error')
739 raise HTTPNotFound()
746 raise HTTPNotFound()
740
747
741 if self.request.environ.get('HTTP_X_PJAX'):
748 if self.request.environ.get('HTTP_X_PJAX'):
742 html = render('rhodecode:templates/files/files_pjax.mako',
749 html = render('rhodecode:templates/files/files_pjax.mako',
743 self._get_template_context(c), self.request)
750 self._get_template_context(c), self.request)
744 else:
751 else:
745 html = render('rhodecode:templates/files/files.mako',
752 html = render('rhodecode:templates/files/files.mako',
746 self._get_template_context(c), self.request)
753 self._get_template_context(c), self.request)
747 return Response(html)
754 return Response(html)
748
755
749 @HasRepoPermissionAnyDecorator(
756 @HasRepoPermissionAnyDecorator(
750 'repository.read', 'repository.write', 'repository.admin')
757 'repository.read', 'repository.write', 'repository.admin')
751 def repo_files_annotated_previous(self):
758 def repo_files_annotated_previous(self):
752 self.load_default_context()
759 self.load_default_context()
753
760
754 commit_id, f_path = self._get_commit_and_path()
761 commit_id, f_path = self._get_commit_and_path()
755 commit = self._get_commit_or_redirect(commit_id)
762 commit = self._get_commit_or_redirect(commit_id)
756 prev_commit_id = commit.raw_id
763 prev_commit_id = commit.raw_id
757 line_anchor = self.request.GET.get('line_anchor')
764 line_anchor = self.request.GET.get('line_anchor')
758 is_file = False
765 is_file = False
759 try:
766 try:
760 _file = commit.get_node(f_path)
767 _file = commit.get_node(f_path)
761 is_file = _file.is_file()
768 is_file = _file.is_file()
762 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
769 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
763 pass
770 pass
764
771
765 if is_file:
772 if is_file:
766 history = commit.get_path_history(f_path)
773 history = commit.get_path_history(f_path)
767 prev_commit_id = history[1].raw_id \
774 prev_commit_id = history[1].raw_id \
768 if len(history) > 1 else prev_commit_id
775 if len(history) > 1 else prev_commit_id
769 prev_url = h.route_path(
776 prev_url = h.route_path(
770 'repo_files:annotated', repo_name=self.db_repo_name,
777 'repo_files:annotated', repo_name=self.db_repo_name,
771 commit_id=prev_commit_id, f_path=f_path,
778 commit_id=prev_commit_id, f_path=f_path,
772 _anchor=f'L{line_anchor}')
779 _anchor=f'L{line_anchor}')
773
780
774 raise HTTPFound(prev_url)
781 raise HTTPFound(prev_url)
775
782
776 @LoginRequired()
783 @LoginRequired()
777 @HasRepoPermissionAnyDecorator(
784 @HasRepoPermissionAnyDecorator(
778 'repository.read', 'repository.write', 'repository.admin')
785 'repository.read', 'repository.write', 'repository.admin')
779 def repo_nodetree_full(self):
786 def repo_nodetree_full(self):
780 """
787 """
781 Returns rendered html of file tree that contains commit date,
788 Returns rendered html of file tree that contains commit date,
782 author, commit_id for the specified combination of
789 author, commit_id for the specified combination of
783 repo, commit_id and file path
790 repo, commit_id and file path
784 """
791 """
785 c = self.load_default_context()
792 c = self.load_default_context()
786
793
787 commit_id, f_path = self._get_commit_and_path()
794 commit_id, f_path = self._get_commit_and_path()
788 commit = self._get_commit_or_redirect(commit_id)
795 commit = self._get_commit_or_redirect(commit_id)
789 try:
796 try:
790 dir_node = commit.get_node(f_path)
797 dir_node = commit.get_node(f_path)
791 except RepositoryError as e:
798 except RepositoryError as e:
792 return Response(f'error: {h.escape(safe_str(e))}')
799 return Response(f'error: {h.escape(safe_str(e))}')
793
800
794 if dir_node.is_file():
801 if dir_node.is_file():
795 return Response('')
802 return Response('')
796
803
797 c.file = dir_node
804 c.file = dir_node
798 c.commit = commit
805 c.commit = commit
799 at_rev = self.request.GET.get('at')
806 at_rev = self.request.GET.get('at')
800
807
801 html = self._get_tree_at_commit(
808 html = self._get_tree_at_commit(
802 c, commit.raw_id, dir_node.path, full_load=True, at_rev=at_rev)
809 c, commit.raw_id, dir_node.path, full_load=True, at_rev=at_rev)
803
810
804 return Response(html)
811 return Response(html)
805
812
806 def _get_attachement_headers(self, f_path):
813 def _get_attachement_headers(self, f_path):
807 f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1])
814 f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1])
808 safe_path = f_name.replace('"', '\\"')
815 safe_path = f_name.replace('"', '\\"')
809 encoded_path = urllib.parse.quote(f_name)
816 encoded_path = urllib.parse.quote(f_name)
810
817
811 headers = "attachment; " \
818 headers = "attachment; " \
812 "filename=\"{}\"; " \
819 "filename=\"{}\"; " \
813 "filename*=UTF-8\'\'{}".format(safe_path, encoded_path)
820 "filename*=UTF-8\'\'{}".format(safe_path, encoded_path)
814
821
815 return safe_bytes(headers).decode('latin-1', errors='replace')
822 return safe_bytes(headers).decode('latin-1', errors='replace')
816
823
817 @LoginRequired()
824 @LoginRequired()
818 @HasRepoPermissionAnyDecorator(
825 @HasRepoPermissionAnyDecorator(
819 'repository.read', 'repository.write', 'repository.admin')
826 'repository.read', 'repository.write', 'repository.admin')
820 def repo_file_raw(self):
827 def repo_file_raw(self):
821 """
828 """
822 Action for show as raw, some mimetypes are "rendered",
829 Action for show as raw, some mimetypes are "rendered",
823 those include images, icons.
830 those include images, icons.
824 """
831 """
825 c = self.load_default_context()
832 c = self.load_default_context()
826
833
827 commit_id, f_path = self._get_commit_and_path()
834 commit_id, f_path = self._get_commit_and_path()
828 commit = self._get_commit_or_redirect(commit_id)
835 commit = self._get_commit_or_redirect(commit_id)
829 file_node = self._get_filenode_or_redirect(commit, f_path)
836 file_node = self._get_filenode_or_redirect(commit, f_path)
830
837
831 raw_mimetype_mapping = {
838 raw_mimetype_mapping = {
832 # map original mimetype to a mimetype used for "show as raw"
839 # map original mimetype to a mimetype used for "show as raw"
833 # you can also provide a content-disposition to override the
840 # you can also provide a content-disposition to override the
834 # default "attachment" disposition.
841 # default "attachment" disposition.
835 # orig_type: (new_type, new_dispo)
842 # orig_type: (new_type, new_dispo)
836
843
837 # show images inline:
844 # show images inline:
838 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
845 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
839 # for example render an SVG with javascript inside or even render
846 # for example render an SVG with javascript inside or even render
840 # HTML.
847 # HTML.
841 'image/x-icon': ('image/x-icon', 'inline'),
848 'image/x-icon': ('image/x-icon', 'inline'),
842 'image/png': ('image/png', 'inline'),
849 'image/png': ('image/png', 'inline'),
843 'image/gif': ('image/gif', 'inline'),
850 'image/gif': ('image/gif', 'inline'),
844 'image/jpeg': ('image/jpeg', 'inline'),
851 'image/jpeg': ('image/jpeg', 'inline'),
845 'application/pdf': ('application/pdf', 'inline'),
852 'application/pdf': ('application/pdf', 'inline'),
846 }
853 }
847
854
848 mimetype = file_node.mimetype
855 mimetype = file_node.mimetype
849 try:
856 try:
850 mimetype, disposition = raw_mimetype_mapping[mimetype]
857 mimetype, disposition = raw_mimetype_mapping[mimetype]
851 except KeyError:
858 except KeyError:
852 # we don't know anything special about this, handle it safely
859 # we don't know anything special about this, handle it safely
853 if file_node.is_binary:
860 if file_node.is_binary:
854 # do same as download raw for binary files
861 # do same as download raw for binary files
855 mimetype, disposition = 'application/octet-stream', 'attachment'
862 mimetype, disposition = 'application/octet-stream', 'attachment'
856 else:
863 else:
857 # do not just use the original mimetype, but force text/plain,
864 # do not just use the original mimetype, but force text/plain,
858 # otherwise it would serve text/html and that might be unsafe.
865 # otherwise it would serve text/html and that might be unsafe.
859 # Note: underlying vcs library fakes text/plain mimetype if the
866 # Note: underlying vcs library fakes text/plain mimetype if the
860 # mimetype can not be determined and it thinks it is not
867 # mimetype can not be determined and it thinks it is not
861 # binary.This might lead to erroneous text display in some
868 # binary.This might lead to erroneous text display in some
862 # cases, but helps in other cases, like with text files
869 # cases, but helps in other cases, like with text files
863 # without extension.
870 # without extension.
864 mimetype, disposition = 'text/plain', 'inline'
871 mimetype, disposition = 'text/plain', 'inline'
865
872
866 if disposition == 'attachment':
873 if disposition == 'attachment':
867 disposition = self._get_attachement_headers(f_path)
874 disposition = self._get_attachement_headers(f_path)
868
875
869 stream_content = file_node.stream_bytes()
876 stream_content = file_node.stream_bytes()
870
877
871 response = Response(app_iter=stream_content)
878 response = Response(app_iter=stream_content)
872 response.content_disposition = disposition
879 response.content_disposition = disposition
873 response.content_type = mimetype
880 response.content_type = mimetype
874
881
875 charset = self._get_default_encoding(c)
882 charset = self._get_default_encoding(c)
876 if charset:
883 if charset:
877 response.charset = charset
884 response.charset = charset
878
885
879 return response
886 return response
880
887
881 @LoginRequired()
888 @LoginRequired()
882 @HasRepoPermissionAnyDecorator(
889 @HasRepoPermissionAnyDecorator(
883 'repository.read', 'repository.write', 'repository.admin')
890 'repository.read', 'repository.write', 'repository.admin')
884 def repo_file_download(self):
891 def repo_file_download(self):
885 c = self.load_default_context()
892 c = self.load_default_context()
886
893
887 commit_id, f_path = self._get_commit_and_path()
894 commit_id, f_path = self._get_commit_and_path()
888 commit = self._get_commit_or_redirect(commit_id)
895 commit = self._get_commit_or_redirect(commit_id)
889 file_node = self._get_filenode_or_redirect(commit, f_path)
896 file_node = self._get_filenode_or_redirect(commit, f_path)
890
897
891 if self.request.GET.get('lf'):
898 if self.request.GET.get('lf'):
892 # only if lf get flag is passed, we download this file
899 # only if lf get flag is passed, we download this file
893 # as LFS/Largefile
900 # as LFS/Largefile
894 lf_node = file_node.get_largefile_node()
901 lf_node = file_node.get_largefile_node()
895 if lf_node:
902 if lf_node:
896 # overwrite our pointer with the REAL large-file
903 # overwrite our pointer with the REAL large-file
897 file_node = lf_node
904 file_node = lf_node
898
905
899 disposition = self._get_attachement_headers(f_path)
906 disposition = self._get_attachement_headers(f_path)
900
907
901 stream_content = file_node.stream_bytes()
908 stream_content = file_node.stream_bytes()
902
909
903 response = Response(app_iter=stream_content)
910 response = Response(app_iter=stream_content)
904 response.content_disposition = disposition
911 response.content_disposition = disposition
905 response.content_type = file_node.mimetype
912 response.content_type = file_node.mimetype
906
913
907 charset = self._get_default_encoding(c)
914 charset = self._get_default_encoding(c)
908 if charset:
915 if charset:
909 response.charset = charset
916 response.charset = charset
910
917
911 return response
918 return response
912
919
913 def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):
920 def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):
914
921
915 cache_seconds = safe_int(
922 cache_seconds = safe_int(
916 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
923 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
917 cache_on = cache_seconds > 0
924 cache_on = cache_seconds > 0
918 log.debug(
925 log.debug(
919 'Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s`'
926 'Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s`'
920 'with caching: %s[TTL: %ss]' % (
927 'with caching: %s[TTL: %ss]' % (
921 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
928 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
922
929
923 cache_namespace_uid = f'repo.{repo_id}'
930 cache_namespace_uid = f'repo.{repo_id}'
924 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
931 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
925
932
926 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
933 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
927 def compute_file_search(_name_hash, _repo_id, _commit_id, _f_path):
934 def compute_file_search(_name_hash, _repo_id, _commit_id, _f_path):
928 log.debug('Generating cached nodelist for repo_id:%s, %s, %s',
935 log.debug('Generating cached nodelist for repo_id:%s, %s, %s',
929 _repo_id, commit_id, f_path)
936 _repo_id, commit_id, f_path)
930 try:
937 try:
931 _d, _f = ScmModel().get_quick_filter_nodes(repo_name, _commit_id, _f_path)
938 _d, _f = ScmModel().get_quick_filter_nodes(repo_name, _commit_id, _f_path)
932 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
939 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
933 log.exception(safe_str(e))
940 log.exception(safe_str(e))
934 h.flash(h.escape(safe_str(e)), category='error')
941 h.flash(h.escape(safe_str(e)), category='error')
935 raise HTTPFound(h.route_path(
942 raise HTTPFound(h.route_path(
936 'repo_files', repo_name=self.db_repo_name,
943 'repo_files', repo_name=self.db_repo_name,
937 commit_id='tip', f_path='/'))
944 commit_id='tip', f_path='/'))
938
945
939 return _d + _f
946 return _d + _f
940
947
941 result = compute_file_search(self.db_repo.repo_name_hash, self.db_repo.repo_id,
948 result = compute_file_search(self.db_repo.repo_name_hash, self.db_repo.repo_id,
942 commit_id, f_path)
949 commit_id, f_path)
943 return filter(lambda n: self.path_filter.path_access_allowed(n['name']), result)
950 return filter(lambda n: self.path_filter.path_access_allowed(n['name']), result)
944
951
945 @LoginRequired()
952 @LoginRequired()
946 @HasRepoPermissionAnyDecorator(
953 @HasRepoPermissionAnyDecorator(
947 'repository.read', 'repository.write', 'repository.admin')
954 'repository.read', 'repository.write', 'repository.admin')
948 def repo_nodelist(self):
955 def repo_nodelist(self):
949 self.load_default_context()
956 self.load_default_context()
950
957
951 commit_id, f_path = self._get_commit_and_path()
958 commit_id, f_path = self._get_commit_and_path()
952 commit = self._get_commit_or_redirect(commit_id)
959 commit = self._get_commit_or_redirect(commit_id)
953
960
954 metadata = self._get_nodelist_at_commit(
961 metadata = self._get_nodelist_at_commit(
955 self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
962 self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
956 return {'nodes': [x for x in metadata]}
963 return {'nodes': [x for x in metadata]}
957
964
958 def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type):
965 def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type):
959 items = []
966 items = []
960 for name, commit_id in branches_or_tags.items():
967 for name, commit_id in branches_or_tags.items():
961 sym_ref = symbolic_reference(commit_id, name, f_path, ref_type)
968 sym_ref = symbolic_reference(commit_id, name, f_path, ref_type)
962 items.append((sym_ref, name, ref_type))
969 items.append((sym_ref, name, ref_type))
963 return items
970 return items
964
971
965 def _symbolic_reference(self, commit_id, name, f_path, ref_type):
972 def _symbolic_reference(self, commit_id, name, f_path, ref_type):
966 return commit_id
973 return commit_id
967
974
968 def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type):
975 def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type):
969 return commit_id
976 return commit_id
970
977
971 # NOTE(dan): old code we used in "diff" mode compare
978 # NOTE(dan): old code we used in "diff" mode compare
972 new_f_path = vcspath.join(name, f_path)
979 new_f_path = vcspath.join(name, f_path)
973 return f'{new_f_path}@{commit_id}'
980 return f'{new_f_path}@{commit_id}'
974
981
    def _get_node_history(self, commit_obj, f_path, commits=None):
        """
        get commit history for given node

        :param commit_obj: commit to calculate history
        :param f_path: path for node to calculate history for
        :param commits: if passed don't calculate history and take
            commits defined in this list
        :return: tuple of (history, commits) where history is a list of
            grouped (id, label, type) entries — changesets first, then
            branches, then tags — and commits is the commit list used
        """
        _ = self.request.translate

        # calculate history based on tip
        tip = self.rhodecode_vcs_repo.get_commit()
        if commits is None:
            pre_load = ["author", "branch"]
            try:
                commits = tip.get_path_history(f_path, pre_load=pre_load)
            except (NodeDoesNotExistError, CommitError):
                # this node is not present at tip!
                commits = commit_obj.get_path_history(f_path, pre_load=pre_load)

        # first group: one (raw_id, "r<idx>:<short_id> (<branch>)", 'sha')
        # entry per commit that touched the path
        history = []
        commits_group = ([], _("Changesets"))
        for commit in commits:
            branch = ' (%s)' % commit.branch if commit.branch else ''
            n_desc = f'r{commit.idx}:{commit.short_id}{branch}'
            commits_group[0].append((commit.raw_id, n_desc, 'sha'))
        history.append(commits_group)

        symbolic_reference = self._symbolic_reference

        if self.rhodecode_vcs_repo.alias == 'svn':
            # svn encodes branches/tags in the path itself; when the path is
            # recognized as such, rewrite f_path and switch the reference
            # builder before generating the branch/tag groups below
            adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
                f_path, self.rhodecode_vcs_repo)
            if adjusted_f_path != f_path:
                log.debug(
                    'Recognized svn tag or branch in file "%s", using svn '
                    'specific symbolic references', f_path)
                f_path = adjusted_f_path
                symbolic_reference = self._symbolic_reference_svn

        branches = self._create_references(
            self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, 'branch')
        branches_group = (branches, _("Branches"))

        tags = self._create_references(
            self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, 'tag')
        tags_group = (tags, _("Tags"))

        history.append(branches_group)
        history.append(tags_group)

        return history, commits
1028
1035
1029 @LoginRequired()
1036 @LoginRequired()
1030 @HasRepoPermissionAnyDecorator(
1037 @HasRepoPermissionAnyDecorator(
1031 'repository.read', 'repository.write', 'repository.admin')
1038 'repository.read', 'repository.write', 'repository.admin')
1032 def repo_file_history(self):
1039 def repo_file_history(self):
1033 self.load_default_context()
1040 self.load_default_context()
1034
1041
1035 commit_id, f_path = self._get_commit_and_path()
1042 commit_id, f_path = self._get_commit_and_path()
1036 commit = self._get_commit_or_redirect(commit_id)
1043 commit = self._get_commit_or_redirect(commit_id)
1037 file_node = self._get_filenode_or_redirect(commit, f_path)
1044 file_node = self._get_filenode_or_redirect(commit, f_path)
1038
1045
1039 if file_node.is_file():
1046 if file_node.is_file():
1040 file_history, _hist = self._get_node_history(commit, f_path)
1047 file_history, _hist = self._get_node_history(commit, f_path)
1041
1048
1042 res = []
1049 res = []
1043 for section_items, section in file_history:
1050 for section_items, section in file_history:
1044 items = []
1051 items = []
1045 for obj_id, obj_text, obj_type in section_items:
1052 for obj_id, obj_text, obj_type in section_items:
1046 at_rev = ''
1053 at_rev = ''
1047 if obj_type in ['branch', 'bookmark', 'tag']:
1054 if obj_type in ['branch', 'bookmark', 'tag']:
1048 at_rev = obj_text
1055 at_rev = obj_text
1049 entry = {
1056 entry = {
1050 'id': obj_id,
1057 'id': obj_id,
1051 'text': obj_text,
1058 'text': obj_text,
1052 'type': obj_type,
1059 'type': obj_type,
1053 'at_rev': at_rev
1060 'at_rev': at_rev
1054 }
1061 }
1055
1062
1056 items.append(entry)
1063 items.append(entry)
1057
1064
1058 res.append({
1065 res.append({
1059 'text': section,
1066 'text': section,
1060 'children': items
1067 'children': items
1061 })
1068 })
1062
1069
1063 data = {
1070 data = {
1064 'more': False,
1071 'more': False,
1065 'results': res
1072 'results': res
1066 }
1073 }
1067 return data
1074 return data
1068
1075
1069 log.warning('Cannot fetch history for directory')
1076 log.warning('Cannot fetch history for directory')
1070 raise HTTPBadRequest()
1077 raise HTTPBadRequest()
1071
1078
1072 @LoginRequired()
1079 @LoginRequired()
1073 @HasRepoPermissionAnyDecorator(
1080 @HasRepoPermissionAnyDecorator(
1074 'repository.read', 'repository.write', 'repository.admin')
1081 'repository.read', 'repository.write', 'repository.admin')
1075 def repo_file_authors(self):
1082 def repo_file_authors(self):
1076 c = self.load_default_context()
1083 c = self.load_default_context()
1077
1084
1078 commit_id, f_path = self._get_commit_and_path()
1085 commit_id, f_path = self._get_commit_and_path()
1079 commit = self._get_commit_or_redirect(commit_id)
1086 commit = self._get_commit_or_redirect(commit_id)
1080 file_node = self._get_filenode_or_redirect(commit, f_path)
1087 file_node = self._get_filenode_or_redirect(commit, f_path)
1081
1088
1082 if not file_node.is_file():
1089 if not file_node.is_file():
1083 raise HTTPBadRequest()
1090 raise HTTPBadRequest()
1084
1091
1085 c.file_last_commit = file_node.last_commit
1092 c.file_last_commit = file_node.last_commit
1086 if self.request.GET.get('annotate') == '1':
1093 if self.request.GET.get('annotate') == '1':
1087 # use _hist from annotation if annotation mode is on
1094 # use _hist from annotation if annotation mode is on
1088 commit_ids = {x[1] for x in file_node.annotate}
1095 commit_ids = {x[1] for x in file_node.annotate}
1089 _hist = (
1096 _hist = (
1090 self.rhodecode_vcs_repo.get_commit(commit_id)
1097 self.rhodecode_vcs_repo.get_commit(commit_id)
1091 for commit_id in commit_ids)
1098 for commit_id in commit_ids)
1092 else:
1099 else:
1093 _f_history, _hist = self._get_node_history(commit, f_path)
1100 _f_history, _hist = self._get_node_history(commit, f_path)
1094 c.file_author = False
1101 c.file_author = False
1095
1102
1096 unique = collections.OrderedDict()
1103 unique = collections.OrderedDict()
1097 for commit in _hist:
1104 for commit in _hist:
1098 author = commit.author
1105 author = commit.author
1099 if author not in unique:
1106 if author not in unique:
1100 unique[commit.author] = [
1107 unique[commit.author] = [
1101 h.email(author),
1108 h.email(author),
1102 h.person(author, 'username_or_name_or_email'),
1109 h.person(author, 'username_or_name_or_email'),
1103 1 # counter
1110 1 # counter
1104 ]
1111 ]
1105
1112
1106 else:
1113 else:
1107 # increase counter
1114 # increase counter
1108 unique[commit.author][2] += 1
1115 unique[commit.author][2] += 1
1109
1116
1110 c.authors = [val for val in unique.values()]
1117 c.authors = [val for val in unique.values()]
1111
1118
1112 return self._get_template_context(c)
1119 return self._get_template_context(c)
1113
1120
1114 @LoginRequired()
1121 @LoginRequired()
1115 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1122 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1116 def repo_files_check_head(self):
1123 def repo_files_check_head(self):
1117 self.load_default_context()
1124 self.load_default_context()
1118
1125
1119 commit_id, f_path = self._get_commit_and_path()
1126 commit_id, f_path = self._get_commit_and_path()
1120 _branch_name, _sha_commit_id, is_head = \
1127 _branch_name, _sha_commit_id, is_head = \
1121 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1128 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1122 landing_ref=self.db_repo.landing_ref_name)
1129 landing_ref=self.db_repo.landing_ref_name)
1123
1130
1124 new_path = self.request.POST.get('path')
1131 new_path = self.request.POST.get('path')
1125 operation = self.request.POST.get('operation')
1132 operation = self.request.POST.get('operation')
1126 path_exist = ''
1133 path_exist = ''
1127
1134
1128 if new_path and operation in ['create', 'upload']:
1135 if new_path and operation in ['create', 'upload']:
1129 new_f_path = os.path.join(f_path.lstrip('/'), new_path)
1136 new_f_path = os.path.join(f_path.lstrip('/'), new_path)
1130 try:
1137 try:
1131 commit_obj = self.rhodecode_vcs_repo.get_commit(commit_id)
1138 commit_obj = self.rhodecode_vcs_repo.get_commit(commit_id)
1132 # NOTE(dan): construct whole path without leading /
1139 # NOTE(dan): construct whole path without leading /
1133 file_node = commit_obj.get_node(new_f_path)
1140 file_node = commit_obj.get_node(new_f_path)
1134 if file_node is not None:
1141 if file_node is not None:
1135 path_exist = new_f_path
1142 path_exist = new_f_path
1136 except EmptyRepositoryError:
1143 except EmptyRepositoryError:
1137 pass
1144 pass
1138 except Exception:
1145 except Exception:
1139 pass
1146 pass
1140
1147
1141 return {
1148 return {
1142 'branch': _branch_name,
1149 'branch': _branch_name,
1143 'sha': _sha_commit_id,
1150 'sha': _sha_commit_id,
1144 'is_head': is_head,
1151 'is_head': is_head,
1145 'path_exists': path_exist
1152 'path_exists': path_exist
1146 }
1153 }
1147
1154
1148 @LoginRequired()
1155 @LoginRequired()
1149 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1156 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1150 def repo_files_remove_file(self):
1157 def repo_files_remove_file(self):
1151 _ = self.request.translate
1158 _ = self.request.translate
1152 c = self.load_default_context()
1159 c = self.load_default_context()
1153 commit_id, f_path = self._get_commit_and_path()
1160 commit_id, f_path = self._get_commit_and_path()
1154
1161
1155 self._ensure_not_locked()
1162 self._ensure_not_locked()
1156 _branch_name, _sha_commit_id, is_head = \
1163 _branch_name, _sha_commit_id, is_head = \
1157 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1164 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1158 landing_ref=self.db_repo.landing_ref_name)
1165 landing_ref=self.db_repo.landing_ref_name)
1159
1166
1160 self.forbid_non_head(is_head, f_path)
1167 self.forbid_non_head(is_head, f_path)
1161 self.check_branch_permission(_branch_name)
1168 self.check_branch_permission(_branch_name)
1162
1169
1163 c.commit = self._get_commit_or_redirect(commit_id)
1170 c.commit = self._get_commit_or_redirect(commit_id)
1164 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1171 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1165
1172
1166 c.default_message = _(
1173 c.default_message = _(
1167 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1174 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1168 c.f_path = f_path
1175 c.f_path = f_path
1169
1176
1170 return self._get_template_context(c)
1177 return self._get_template_context(c)
1171
1178
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    @CSRFRequired()
    def repo_files_delete_file(self):
        """Delete a file via a new commit on a writable branch head.

        Validates head/branch permissions, commits the deletion through
        ScmModel, flashes the outcome and always redirects to the repo's
        tip commit page.
        """
        _ = self.request.translate

        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()
        # deletion is only allowed on a branch head the user may write to
        _branch_name, _sha_commit_id, is_head = \
            self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                landing_ref=self.db_repo.landing_ref_name)

        self.forbid_non_head(is_head, f_path)
        self.check_branch_permission(_branch_name)

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, f_path)

        c.default_message = _(
            'Deleted file {} via RhodeCode Enterprise').format(f_path)
        c.f_path = f_path
        node_path = f_path
        author = self._rhodecode_db_user.full_contact
        # user-provided commit message, falling back to the default one
        message = self.request.POST.get('message') or c.default_message
        try:
            # empty content marks the node for deletion in delete_nodes
            nodes = {
                safe_bytes(node_path): {
                    'content': b''
                }
            }
            ScmModel().delete_nodes(
                user=self._rhodecode_db_user.user_id, repo=self.db_repo,
                message=message,
                nodes=nodes,
                parent_commit=c.commit,
                author=author,
            )

            h.flash(
                _('Successfully deleted file `{}`').format(
                    h.escape(f_path)), category='success')
        except Exception:
            # report failure to the user but still redirect below
            log.exception('Error during commit operation')
            h.flash(_('Error occurred during commit'), category='error')
        # always leave via redirect to the tip commit page
        raise HTTPFound(
            h.route_path('repo_commit', repo_name=self.db_repo_name,
                         commit_id='tip'))
1221
1228
1222 @LoginRequired()
1229 @LoginRequired()
1223 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1230 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1224 def repo_files_edit_file(self):
1231 def repo_files_edit_file(self):
1225 _ = self.request.translate
1232 _ = self.request.translate
1226 c = self.load_default_context()
1233 c = self.load_default_context()
1227 commit_id, f_path = self._get_commit_and_path()
1234 commit_id, f_path = self._get_commit_and_path()
1228
1235
1229 self._ensure_not_locked()
1236 self._ensure_not_locked()
1230 _branch_name, _sha_commit_id, is_head = \
1237 _branch_name, _sha_commit_id, is_head = \
1231 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1238 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1232 landing_ref=self.db_repo.landing_ref_name)
1239 landing_ref=self.db_repo.landing_ref_name)
1233
1240
1234 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1241 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1235 self.check_branch_permission(_branch_name, commit_id=commit_id)
1242 self.check_branch_permission(_branch_name, commit_id=commit_id)
1236
1243
1237 c.commit = self._get_commit_or_redirect(commit_id)
1244 c.commit = self._get_commit_or_redirect(commit_id)
1238 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1245 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1239
1246
1240 if c.file.is_binary:
1247 if c.file.is_binary:
1241 files_url = h.route_path(
1248 files_url = h.route_path(
1242 'repo_files',
1249 'repo_files',
1243 repo_name=self.db_repo_name,
1250 repo_name=self.db_repo_name,
1244 commit_id=c.commit.raw_id, f_path=f_path)
1251 commit_id=c.commit.raw_id, f_path=f_path)
1245 raise HTTPFound(files_url)
1252 raise HTTPFound(files_url)
1246
1253
1247 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1254 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1248 c.f_path = f_path
1255 c.f_path = f_path
1249
1256
1250 return self._get_template_context(c)
1257 return self._get_template_context(c)
1251
1258
1252 @LoginRequired()
1259 @LoginRequired()
1253 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1260 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1254 @CSRFRequired()
1261 @CSRFRequired()
1255 def repo_files_update_file(self):
1262 def repo_files_update_file(self):
1256 _ = self.request.translate
1263 _ = self.request.translate
1257 c = self.load_default_context()
1264 c = self.load_default_context()
1258 commit_id, f_path = self._get_commit_and_path()
1265 commit_id, f_path = self._get_commit_and_path()
1259
1266
1260 self._ensure_not_locked()
1267 self._ensure_not_locked()
1261
1268
1262 c.commit = self._get_commit_or_redirect(commit_id)
1269 c.commit = self._get_commit_or_redirect(commit_id)
1263 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1270 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1264
1271
1265 if c.file.is_binary:
1272 if c.file.is_binary:
1266 raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name,
1273 raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name,
1267 commit_id=c.commit.raw_id, f_path=f_path))
1274 commit_id=c.commit.raw_id, f_path=f_path))
1268
1275
1269 _branch_name, _sha_commit_id, is_head = \
1276 _branch_name, _sha_commit_id, is_head = \
1270 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1277 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1271 landing_ref=self.db_repo.landing_ref_name)
1278 landing_ref=self.db_repo.landing_ref_name)
1272
1279
1273 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1280 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1274 self.check_branch_permission(_branch_name, commit_id=commit_id)
1281 self.check_branch_permission(_branch_name, commit_id=commit_id)
1275
1282
1276 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1283 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1277 c.f_path = f_path
1284 c.f_path = f_path
1278
1285
1279 old_content = c.file.str_content
1286 old_content = c.file.str_content
1280 sl = old_content.splitlines(1)
1287 sl = old_content.splitlines(1)
1281 first_line = sl[0] if sl else ''
1288 first_line = sl[0] if sl else ''
1282
1289
1283 r_post = self.request.POST
1290 r_post = self.request.POST
1284 # line endings: 0 - Unix, 1 - Mac, 2 - DOS
1291 # line endings: 0 - Unix, 1 - Mac, 2 - DOS
1285 line_ending_mode = detect_mode(first_line, 0)
1292 line_ending_mode = detect_mode(first_line, 0)
1286 content = convert_line_endings(r_post.get('content', ''), line_ending_mode)
1293 content = convert_line_endings(r_post.get('content', ''), line_ending_mode)
1287
1294
1288 message = r_post.get('message') or c.default_message
1295 message = r_post.get('message') or c.default_message
1289
1296
1290 org_node_path = c.file.str_path
1297 org_node_path = c.file.str_path
1291 filename = r_post['filename']
1298 filename = r_post['filename']
1292
1299
1293 root_path = c.file.dir_path
1300 root_path = c.file.dir_path
1294 pure_path = self.create_pure_path(root_path, filename)
1301 pure_path = self.create_pure_path(root_path, filename)
1295 node_path = pure_path.as_posix()
1302 node_path = pure_path.as_posix()
1296
1303
1297 default_redirect_url = h.route_path('repo_commit', repo_name=self.db_repo_name,
1304 default_redirect_url = h.route_path('repo_commit', repo_name=self.db_repo_name,
1298 commit_id=commit_id)
1305 commit_id=commit_id)
1299 if content == old_content and node_path == org_node_path:
1306 if content == old_content and node_path == org_node_path:
1300 h.flash(_('No changes detected on {}').format(h.escape(org_node_path)),
1307 h.flash(_('No changes detected on {}').format(h.escape(org_node_path)),
1301 category='warning')
1308 category='warning')
1302 raise HTTPFound(default_redirect_url)
1309 raise HTTPFound(default_redirect_url)
1303
1310
1304 try:
1311 try:
1305 mapping = {
1312 mapping = {
1306 c.file.bytes_path: {
1313 c.file.bytes_path: {
1307 'org_filename': org_node_path,
1314 'org_filename': org_node_path,
1308 'filename': safe_bytes(node_path),
1315 'filename': safe_bytes(node_path),
1309 'content': safe_bytes(content),
1316 'content': safe_bytes(content),
1310 'lexer': '',
1317 'lexer': '',
1311 'op': 'mod',
1318 'op': 'mod',
1312 'mode': c.file.mode
1319 'mode': c.file.mode
1313 }
1320 }
1314 }
1321 }
1315
1322
1316 commit = ScmModel().update_nodes(
1323 commit = ScmModel().update_nodes(
1317 user=self._rhodecode_db_user.user_id,
1324 user=self._rhodecode_db_user.user_id,
1318 repo=self.db_repo,
1325 repo=self.db_repo,
1319 message=message,
1326 message=message,
1320 nodes=mapping,
1327 nodes=mapping,
1321 parent_commit=c.commit,
1328 parent_commit=c.commit,
1322 )
1329 )
1323
1330
1324 h.flash(_('Successfully committed changes to file `{}`').format(
1331 h.flash(_('Successfully committed changes to file `{}`').format(
1325 h.escape(f_path)), category='success')
1332 h.escape(f_path)), category='success')
1326 default_redirect_url = h.route_path(
1333 default_redirect_url = h.route_path(
1327 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1334 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1328
1335
1329 except Exception:
1336 except Exception:
1330 log.exception('Error occurred during commit')
1337 log.exception('Error occurred during commit')
1331 h.flash(_('Error occurred during commit'), category='error')
1338 h.flash(_('Error occurred during commit'), category='error')
1332
1339
1333 raise HTTPFound(default_redirect_url)
1340 raise HTTPFound(default_redirect_url)
1334
1341
1335 @LoginRequired()
1342 @LoginRequired()
1336 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1343 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1337 def repo_files_add_file(self):
1344 def repo_files_add_file(self):
1338 _ = self.request.translate
1345 _ = self.request.translate
1339 c = self.load_default_context()
1346 c = self.load_default_context()
1340 commit_id, f_path = self._get_commit_and_path()
1347 commit_id, f_path = self._get_commit_and_path()
1341
1348
1342 self._ensure_not_locked()
1349 self._ensure_not_locked()
1343
1350
1344 # Check if we need to use this page to upload binary
1351 # Check if we need to use this page to upload binary
1345 upload_binary = str2bool(self.request.params.get('upload_binary', False))
1352 upload_binary = str2bool(self.request.params.get('upload_binary', False))
1346
1353
1347 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1354 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1348 if c.commit is None:
1355 if c.commit is None:
1349 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1356 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1350
1357
1351 if self.rhodecode_vcs_repo.is_empty():
1358 if self.rhodecode_vcs_repo.is_empty():
1352 # for empty repository we cannot check for current branch, we rely on
1359 # for empty repository we cannot check for current branch, we rely on
1353 # c.commit.branch instead
1360 # c.commit.branch instead
1354 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1361 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1355 else:
1362 else:
1356 _branch_name, _sha_commit_id, is_head = \
1363 _branch_name, _sha_commit_id, is_head = \
1357 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1364 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1358 landing_ref=self.db_repo.landing_ref_name)
1365 landing_ref=self.db_repo.landing_ref_name)
1359
1366
1360 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1367 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1361 self.check_branch_permission(_branch_name, commit_id=commit_id)
1368 self.check_branch_permission(_branch_name, commit_id=commit_id)
1362
1369
1363 c.default_message = (_('Added file via RhodeCode Enterprise')) \
1370 c.default_message = (_('Added file via RhodeCode Enterprise')) \
1364 if not upload_binary else (_('Edited file {} via RhodeCode Enterprise').format(f_path))
1371 if not upload_binary else (_('Edited file {} via RhodeCode Enterprise').format(f_path))
1365 c.f_path = f_path.lstrip('/') # ensure not relative path
1372 c.f_path = f_path.lstrip('/') # ensure not relative path
1366 c.replace_binary = upload_binary
1373 c.replace_binary = upload_binary
1367
1374
1368 return self._get_template_context(c)
1375 return self._get_template_context(c)
1369
1376
1370 @LoginRequired()
1377 @LoginRequired()
1371 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1378 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1372 @CSRFRequired()
1379 @CSRFRequired()
1373 def repo_files_create_file(self):
1380 def repo_files_create_file(self):
1374 _ = self.request.translate
1381 _ = self.request.translate
1375 c = self.load_default_context()
1382 c = self.load_default_context()
1376 commit_id, f_path = self._get_commit_and_path()
1383 commit_id, f_path = self._get_commit_and_path()
1377
1384
1378 self._ensure_not_locked()
1385 self._ensure_not_locked()
1379
1386
1380 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1387 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1381 if c.commit is None:
1388 if c.commit is None:
1382 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1389 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1383
1390
1384 # calculate redirect URL
1391 # calculate redirect URL
1385 if self.rhodecode_vcs_repo.is_empty():
1392 if self.rhodecode_vcs_repo.is_empty():
1386 default_redirect_url = h.route_path(
1393 default_redirect_url = h.route_path(
1387 'repo_summary', repo_name=self.db_repo_name)
1394 'repo_summary', repo_name=self.db_repo_name)
1388 else:
1395 else:
1389 default_redirect_url = h.route_path(
1396 default_redirect_url = h.route_path(
1390 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1397 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1391
1398
1392 if self.rhodecode_vcs_repo.is_empty():
1399 if self.rhodecode_vcs_repo.is_empty():
1393 # for empty repository we cannot check for current branch, we rely on
1400 # for empty repository we cannot check for current branch, we rely on
1394 # c.commit.branch instead
1401 # c.commit.branch instead
1395 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1402 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1396 else:
1403 else:
1397 _branch_name, _sha_commit_id, is_head = \
1404 _branch_name, _sha_commit_id, is_head = \
1398 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1405 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1399 landing_ref=self.db_repo.landing_ref_name)
1406 landing_ref=self.db_repo.landing_ref_name)
1400
1407
1401 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1408 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1402 self.check_branch_permission(_branch_name, commit_id=commit_id)
1409 self.check_branch_permission(_branch_name, commit_id=commit_id)
1403
1410
1404 c.default_message = (_('Added file via RhodeCode Enterprise'))
1411 c.default_message = (_('Added file via RhodeCode Enterprise'))
1405 c.f_path = f_path
1412 c.f_path = f_path
1406
1413
1407 r_post = self.request.POST
1414 r_post = self.request.POST
1408 message = r_post.get('message') or c.default_message
1415 message = r_post.get('message') or c.default_message
1409 filename = r_post.get('filename')
1416 filename = r_post.get('filename')
1410 unix_mode = 0
1417 unix_mode = 0
1411
1418
1412 if not filename:
1419 if not filename:
1413 # If there's no commit, redirect to repo summary
1420 # If there's no commit, redirect to repo summary
1414 if type(c.commit) is EmptyCommit:
1421 if type(c.commit) is EmptyCommit:
1415 redirect_url = h.route_path(
1422 redirect_url = h.route_path(
1416 'repo_summary', repo_name=self.db_repo_name)
1423 'repo_summary', repo_name=self.db_repo_name)
1417 else:
1424 else:
1418 redirect_url = default_redirect_url
1425 redirect_url = default_redirect_url
1419 h.flash(_('No filename specified'), category='warning')
1426 h.flash(_('No filename specified'), category='warning')
1420 raise HTTPFound(redirect_url)
1427 raise HTTPFound(redirect_url)
1421
1428
1422 root_path = f_path
1429 root_path = f_path
1423 pure_path = self.create_pure_path(root_path, filename)
1430 pure_path = self.create_pure_path(root_path, filename)
1424 node_path = pure_path.as_posix().lstrip('/')
1431 node_path = pure_path.as_posix().lstrip('/')
1425
1432
1426 author = self._rhodecode_db_user.full_contact
1433 author = self._rhodecode_db_user.full_contact
1427 content = convert_line_endings(r_post.get('content', ''), unix_mode)
1434 content = convert_line_endings(r_post.get('content', ''), unix_mode)
1428 nodes = {
1435 nodes = {
1429 safe_bytes(node_path): {
1436 safe_bytes(node_path): {
1430 'content': safe_bytes(content)
1437 'content': safe_bytes(content)
1431 }
1438 }
1432 }
1439 }
1433
1440
1434 try:
1441 try:
1435
1442
1436 commit = ScmModel().create_nodes(
1443 commit = ScmModel().create_nodes(
1437 user=self._rhodecode_db_user.user_id,
1444 user=self._rhodecode_db_user.user_id,
1438 repo=self.db_repo,
1445 repo=self.db_repo,
1439 message=message,
1446 message=message,
1440 nodes=nodes,
1447 nodes=nodes,
1441 parent_commit=c.commit,
1448 parent_commit=c.commit,
1442 author=author,
1449 author=author,
1443 )
1450 )
1444
1451
1445 h.flash(_('Successfully committed new file `{}`').format(
1452 h.flash(_('Successfully committed new file `{}`').format(
1446 h.escape(node_path)), category='success')
1453 h.escape(node_path)), category='success')
1447
1454
1448 default_redirect_url = h.route_path(
1455 default_redirect_url = h.route_path(
1449 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1456 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1450
1457
1451 except NonRelativePathError:
1458 except NonRelativePathError:
1452 log.exception('Non Relative path found')
1459 log.exception('Non Relative path found')
1453 h.flash(_('The location specified must be a relative path and must not '
1460 h.flash(_('The location specified must be a relative path and must not '
1454 'contain .. in the path'), category='warning')
1461 'contain .. in the path'), category='warning')
1455 raise HTTPFound(default_redirect_url)
1462 raise HTTPFound(default_redirect_url)
1456 except (NodeError, NodeAlreadyExistsError) as e:
1463 except (NodeError, NodeAlreadyExistsError) as e:
1457 h.flash(h.escape(safe_str(e)), category='error')
1464 h.flash(h.escape(safe_str(e)), category='error')
1458 except Exception:
1465 except Exception:
1459 log.exception('Error occurred during commit')
1466 log.exception('Error occurred during commit')
1460 h.flash(_('Error occurred during commit'), category='error')
1467 h.flash(_('Error occurred during commit'), category='error')
1461
1468
1462 raise HTTPFound(default_redirect_url)
1469 raise HTTPFound(default_redirect_url)
1463
1470
1464 @LoginRequired()
1471 @LoginRequired()
1465 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1472 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1466 @CSRFRequired()
1473 @CSRFRequired()
1467 def repo_files_upload_file(self):
1474 def repo_files_upload_file(self):
1468 _ = self.request.translate
1475 _ = self.request.translate
1469 c = self.load_default_context()
1476 c = self.load_default_context()
1470 commit_id, f_path = self._get_commit_and_path()
1477 commit_id, f_path = self._get_commit_and_path()
1471
1478
1472 self._ensure_not_locked()
1479 self._ensure_not_locked()
1473
1480
1474 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1481 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1475 if c.commit is None:
1482 if c.commit is None:
1476 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1483 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1477
1484
1478 # calculate redirect URL
1485 # calculate redirect URL
1479 if self.rhodecode_vcs_repo.is_empty():
1486 if self.rhodecode_vcs_repo.is_empty():
1480 default_redirect_url = h.route_path(
1487 default_redirect_url = h.route_path(
1481 'repo_summary', repo_name=self.db_repo_name)
1488 'repo_summary', repo_name=self.db_repo_name)
1482 else:
1489 else:
1483 default_redirect_url = h.route_path(
1490 default_redirect_url = h.route_path(
1484 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1491 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1485
1492
1486 if self.rhodecode_vcs_repo.is_empty():
1493 if self.rhodecode_vcs_repo.is_empty():
1487 # for empty repository we cannot check for current branch, we rely on
1494 # for empty repository we cannot check for current branch, we rely on
1488 # c.commit.branch instead
1495 # c.commit.branch instead
1489 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1496 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1490 else:
1497 else:
1491 _branch_name, _sha_commit_id, is_head = \
1498 _branch_name, _sha_commit_id, is_head = \
1492 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1499 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1493 landing_ref=self.db_repo.landing_ref_name)
1500 landing_ref=self.db_repo.landing_ref_name)
1494
1501
1495 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1502 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1496 if error:
1503 if error:
1497 return {
1504 return {
1498 'error': error,
1505 'error': error,
1499 'redirect_url': default_redirect_url
1506 'redirect_url': default_redirect_url
1500 }
1507 }
1501 error = self.check_branch_permission(_branch_name, json_mode=True)
1508 error = self.check_branch_permission(_branch_name, json_mode=True)
1502 if error:
1509 if error:
1503 return {
1510 return {
1504 'error': error,
1511 'error': error,
1505 'redirect_url': default_redirect_url
1512 'redirect_url': default_redirect_url
1506 }
1513 }
1507
1514
1508 c.default_message = (_('Added file via RhodeCode Enterprise'))
1515 c.default_message = (_('Added file via RhodeCode Enterprise'))
1509 c.f_path = f_path
1516 c.f_path = f_path
1510
1517
1511 r_post = self.request.POST
1518 r_post = self.request.POST
1512
1519
1513 message = c.default_message
1520 message = c.default_message
1514 user_message = r_post.getall('message')
1521 user_message = r_post.getall('message')
1515 if isinstance(user_message, list) and user_message:
1522 if isinstance(user_message, list) and user_message:
1516 # we take the first from duplicated results if it's not empty
1523 # we take the first from duplicated results if it's not empty
1517 message = user_message[0] if user_message[0] else message
1524 message = user_message[0] if user_message[0] else message
1518
1525
1519 nodes = {}
1526 nodes = {}
1520
1527
1521 for file_obj in r_post.getall('files_upload') or []:
1528 for file_obj in r_post.getall('files_upload') or []:
1522 content = file_obj.file
1529 content = file_obj.file
1523 filename = file_obj.filename
1530 filename = file_obj.filename
1524
1531
1525 root_path = f_path
1532 root_path = f_path
1526 pure_path = self.create_pure_path(root_path, filename)
1533 pure_path = self.create_pure_path(root_path, filename)
1527 node_path = pure_path.as_posix().lstrip('/')
1534 node_path = pure_path.as_posix().lstrip('/')
1528
1535
1529 nodes[safe_bytes(node_path)] = {
1536 nodes[safe_bytes(node_path)] = {
1530 'content': content
1537 'content': content
1531 }
1538 }
1532
1539
1533 if not nodes:
1540 if not nodes:
1534 error = 'missing files'
1541 error = 'missing files'
1535 return {
1542 return {
1536 'error': error,
1543 'error': error,
1537 'redirect_url': default_redirect_url
1544 'redirect_url': default_redirect_url
1538 }
1545 }
1539
1546
1540 author = self._rhodecode_db_user.full_contact
1547 author = self._rhodecode_db_user.full_contact
1541
1548
1542 try:
1549 try:
1543 commit = ScmModel().create_nodes(
1550 commit = ScmModel().create_nodes(
1544 user=self._rhodecode_db_user.user_id,
1551 user=self._rhodecode_db_user.user_id,
1545 repo=self.db_repo,
1552 repo=self.db_repo,
1546 message=message,
1553 message=message,
1547 nodes=nodes,
1554 nodes=nodes,
1548 parent_commit=c.commit,
1555 parent_commit=c.commit,
1549 author=author,
1556 author=author,
1550 )
1557 )
1551 if len(nodes) == 1:
1558 if len(nodes) == 1:
1552 flash_message = _('Successfully committed {} new files').format(len(nodes))
1559 flash_message = _('Successfully committed {} new files').format(len(nodes))
1553 else:
1560 else:
1554 flash_message = _('Successfully committed 1 new file')
1561 flash_message = _('Successfully committed 1 new file')
1555
1562
1556 h.flash(flash_message, category='success')
1563 h.flash(flash_message, category='success')
1557
1564
1558 default_redirect_url = h.route_path(
1565 default_redirect_url = h.route_path(
1559 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1566 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1560
1567
1561 except NonRelativePathError:
1568 except NonRelativePathError:
1562 log.exception('Non Relative path found')
1569 log.exception('Non Relative path found')
1563 error = _('The location specified must be a relative path and must not '
1570 error = _('The location specified must be a relative path and must not '
1564 'contain .. in the path')
1571 'contain .. in the path')
1565 h.flash(error, category='warning')
1572 h.flash(error, category='warning')
1566
1573
1567 return {
1574 return {
1568 'error': error,
1575 'error': error,
1569 'redirect_url': default_redirect_url
1576 'redirect_url': default_redirect_url
1570 }
1577 }
1571 except (NodeError, NodeAlreadyExistsError) as e:
1578 except (NodeError, NodeAlreadyExistsError) as e:
1572 error = h.escape(e)
1579 error = h.escape(e)
1573 h.flash(error, category='error')
1580 h.flash(error, category='error')
1574
1581
1575 return {
1582 return {
1576 'error': error,
1583 'error': error,
1577 'redirect_url': default_redirect_url
1584 'redirect_url': default_redirect_url
1578 }
1585 }
1579 except Exception:
1586 except Exception:
1580 log.exception('Error occurred during commit')
1587 log.exception('Error occurred during commit')
1581 error = _('Error occurred during commit')
1588 error = _('Error occurred during commit')
1582 h.flash(error, category='error')
1589 h.flash(error, category='error')
1583 return {
1590 return {
1584 'error': error,
1591 'error': error,
1585 'redirect_url': default_redirect_url
1592 'redirect_url': default_redirect_url
1586 }
1593 }
1587
1594
1588 return {
1595 return {
1589 'error': None,
1596 'error': None,
1590 'redirect_url': default_redirect_url
1597 'redirect_url': default_redirect_url
1591 }
1598 }
1592
1599
1593 @LoginRequired()
1600 @LoginRequired()
1594 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1601 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1595 @CSRFRequired()
1602 @CSRFRequired()
1596 def repo_files_replace_file(self):
1603 def repo_files_replace_file(self):
1597 _ = self.request.translate
1604 _ = self.request.translate
1598 c = self.load_default_context()
1605 c = self.load_default_context()
1599 commit_id, f_path = self._get_commit_and_path()
1606 commit_id, f_path = self._get_commit_and_path()
1600
1607
1601 self._ensure_not_locked()
1608 self._ensure_not_locked()
1602
1609
1603 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1610 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1604 if c.commit is None:
1611 if c.commit is None:
1605 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1612 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1606
1613
1607 if self.rhodecode_vcs_repo.is_empty():
1614 if self.rhodecode_vcs_repo.is_empty():
1608 default_redirect_url = h.route_path(
1615 default_redirect_url = h.route_path(
1609 'repo_summary', repo_name=self.db_repo_name)
1616 'repo_summary', repo_name=self.db_repo_name)
1610 else:
1617 else:
1611 default_redirect_url = h.route_path(
1618 default_redirect_url = h.route_path(
1612 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1619 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1613
1620
1614 if self.rhodecode_vcs_repo.is_empty():
1621 if self.rhodecode_vcs_repo.is_empty():
1615 # for empty repository we cannot check for current branch, we rely on
1622 # for empty repository we cannot check for current branch, we rely on
1616 # c.commit.branch instead
1623 # c.commit.branch instead
1617 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1624 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1618 else:
1625 else:
1619 _branch_name, _sha_commit_id, is_head = \
1626 _branch_name, _sha_commit_id, is_head = \
1620 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1627 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1621 landing_ref=self.db_repo.landing_ref_name)
1628 landing_ref=self.db_repo.landing_ref_name)
1622
1629
1623 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1630 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1624 if error:
1631 if error:
1625 return {
1632 return {
1626 'error': error,
1633 'error': error,
1627 'redirect_url': default_redirect_url
1634 'redirect_url': default_redirect_url
1628 }
1635 }
1629 error = self.check_branch_permission(_branch_name, json_mode=True)
1636 error = self.check_branch_permission(_branch_name, json_mode=True)
1630 if error:
1637 if error:
1631 return {
1638 return {
1632 'error': error,
1639 'error': error,
1633 'redirect_url': default_redirect_url
1640 'redirect_url': default_redirect_url
1634 }
1641 }
1635
1642
1636 c.default_message = (_('Edited file {} via RhodeCode Enterprise').format(f_path))
1643 c.default_message = (_('Edited file {} via RhodeCode Enterprise').format(f_path))
1637 c.f_path = f_path
1644 c.f_path = f_path
1638
1645
1639 r_post = self.request.POST
1646 r_post = self.request.POST
1640
1647
1641 message = c.default_message
1648 message = c.default_message
1642 user_message = r_post.getall('message')
1649 user_message = r_post.getall('message')
1643 if isinstance(user_message, list) and user_message:
1650 if isinstance(user_message, list) and user_message:
1644 # we take the first from duplicated results if it's not empty
1651 # we take the first from duplicated results if it's not empty
1645 message = user_message[0] if user_message[0] else message
1652 message = user_message[0] if user_message[0] else message
1646
1653
1647 data_for_replacement = r_post.getall('files_upload') or []
1654 data_for_replacement = r_post.getall('files_upload') or []
1648 if (objects_count := len(data_for_replacement)) > 1:
1655 if (objects_count := len(data_for_replacement)) > 1:
1649 return {
1656 return {
1650 'error': 'too many files for replacement',
1657 'error': 'too many files for replacement',
1651 'redirect_url': default_redirect_url
1658 'redirect_url': default_redirect_url
1652 }
1659 }
1653 elif not objects_count:
1660 elif not objects_count:
1654 return {
1661 return {
1655 'error': 'missing files',
1662 'error': 'missing files',
1656 'redirect_url': default_redirect_url
1663 'redirect_url': default_redirect_url
1657 }
1664 }
1658
1665
1659 content = data_for_replacement[0].file
1666 content = data_for_replacement[0].file
1660 retrieved_filename = data_for_replacement[0].filename
1667 retrieved_filename = data_for_replacement[0].filename
1661
1668
1662 if retrieved_filename.split('.')[-1] != f_path.split('.')[-1]:
1669 if retrieved_filename.split('.')[-1] != f_path.split('.')[-1]:
1663 return {
1670 return {
1664 'error': 'file extension of uploaded file doesn\'t match an original file\'s extension',
1671 'error': 'file extension of uploaded file doesn\'t match an original file\'s extension',
1665 'redirect_url': default_redirect_url
1672 'redirect_url': default_redirect_url
1666 }
1673 }
1667
1674
1668 author = self._rhodecode_db_user.full_contact
1675 author = self._rhodecode_db_user.full_contact
1669
1676
1670 try:
1677 try:
1671 commit = ScmModel().update_binary_node(
1678 commit = ScmModel().update_binary_node(
1672 user=self._rhodecode_db_user.user_id,
1679 user=self._rhodecode_db_user.user_id,
1673 repo=self.db_repo,
1680 repo=self.db_repo,
1674 message=message,
1681 message=message,
1675 node={
1682 node={
1676 'content': content,
1683 'content': content,
1677 'file_path': f_path.encode(),
1684 'file_path': f_path.encode(),
1678 },
1685 },
1679 parent_commit=c.commit,
1686 parent_commit=c.commit,
1680 author=author,
1687 author=author,
1681 )
1688 )
1682
1689
1683 h.flash(_('Successfully committed 1 new file'), category='success')
1690 h.flash(_('Successfully committed 1 new file'), category='success')
1684
1691
1685 default_redirect_url = h.route_path(
1692 default_redirect_url = h.route_path(
1686 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1693 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1687
1694
1688 except (NodeError, NodeAlreadyExistsError) as e:
1695 except (NodeError, NodeAlreadyExistsError) as e:
1689 error = h.escape(e)
1696 error = h.escape(e)
1690 h.flash(error, category='error')
1697 h.flash(error, category='error')
1691
1698
1692 return {
1699 return {
1693 'error': error,
1700 'error': error,
1694 'redirect_url': default_redirect_url
1701 'redirect_url': default_redirect_url
1695 }
1702 }
1696 except Exception:
1703 except Exception:
1697 log.exception('Error occurred during commit')
1704 log.exception('Error occurred during commit')
1698 error = _('Error occurred during commit')
1705 error = _('Error occurred during commit')
1699 h.flash(error, category='error')
1706 h.flash(error, category='error')
1700 return {
1707 return {
1701 'error': error,
1708 'error': error,
1702 'redirect_url': default_redirect_url
1709 'redirect_url': default_redirect_url
1703 }
1710 }
1704
1711
1705 return {
1712 return {
1706 'error': None,
1713 'error': None,
1707 'redirect_url': default_redirect_url
1714 'redirect_url': default_redirect_url
1708 }
1715 }
@@ -1,201 +1,205 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import os
19 import os
20 import tempfile
20 import tempfile
21 import logging
21 import logging
22
22
23 from pyramid.settings import asbool
23 from pyramid.settings import asbool
24
24
25 from rhodecode.config.settings_maker import SettingsMaker
25 from rhodecode.config.settings_maker import SettingsMaker
26 from rhodecode.config import utils as config_utils
26 from rhodecode.config import utils as config_utils
27
27
28 log = logging.getLogger(__name__)
28 log = logging.getLogger(__name__)
29
29
30
30
31 def sanitize_settings_and_apply_defaults(global_config, settings):
31 def sanitize_settings_and_apply_defaults(global_config, settings):
32 """
32 """
33 Applies settings defaults and does all type conversion.
33 Applies settings defaults and does all type conversion.
34
34
35 We would move all settings parsing and preparation into this place, so that
35 We would move all settings parsing and preparation into this place, so that
36 we have only one place left which deals with this part. The remaining parts
36 we have only one place left which deals with this part. The remaining parts
37 of the application would start to rely fully on well-prepared settings.
37 of the application would start to rely fully on well-prepared settings.
38
38
39 This piece would later be split up per topic to avoid a big fat monster
39 This piece would later be split up per topic to avoid a big fat monster
40 function.
40 function.
41 """
41 """
42 jn = os.path.join
42 jn = os.path.join
43
43
44 global_settings_maker = SettingsMaker(global_config)
44 global_settings_maker = SettingsMaker(global_config)
45 global_settings_maker.make_setting('debug', default=False, parser='bool')
45 global_settings_maker.make_setting('debug', default=False, parser='bool')
46 debug_enabled = asbool(global_config.get('debug'))
46 debug_enabled = asbool(global_config.get('debug'))
47
47
48 settings_maker = SettingsMaker(settings)
48 settings_maker = SettingsMaker(settings)
49
49
50 settings_maker.make_setting(
50 settings_maker.make_setting(
51 'logging.autoconfigure',
51 'logging.autoconfigure',
52 default=False,
52 default=False,
53 parser='bool')
53 parser='bool')
54
54
55 logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini')
55 logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini')
56 settings_maker.enable_logging(logging_conf, level='INFO' if debug_enabled else 'DEBUG')
56 settings_maker.enable_logging(logging_conf, level='INFO' if debug_enabled else 'DEBUG')
57
57
58 # Default includes, possible to change as a user
58 # Default includes, possible to change as a user
59 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
59 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
60 log.debug(
60 log.debug(
61 "Using the following pyramid.includes: %s",
61 "Using the following pyramid.includes: %s",
62 pyramid_includes)
62 pyramid_includes)
63
63
64 settings_maker.make_setting('rhodecode.edition', 'Community Edition')
64 settings_maker.make_setting('rhodecode.edition', 'Community Edition')
65 settings_maker.make_setting('rhodecode.edition_id', 'CE')
65 settings_maker.make_setting('rhodecode.edition_id', 'CE')
66
66
67 if 'mako.default_filters' not in settings:
67 if 'mako.default_filters' not in settings:
68 # set custom default filters if we don't have it defined
68 # set custom default filters if we don't have it defined
69 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
69 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
70 settings['mako.default_filters'] = 'h_filter'
70 settings['mako.default_filters'] = 'h_filter'
71
71
72 if 'mako.directories' not in settings:
72 if 'mako.directories' not in settings:
73 mako_directories = settings.setdefault('mako.directories', [
73 mako_directories = settings.setdefault('mako.directories', [
74 # Base templates of the original application
74 # Base templates of the original application
75 'rhodecode:templates',
75 'rhodecode:templates',
76 ])
76 ])
77 log.debug(
77 log.debug(
78 "Using the following Mako template directories: %s",
78 "Using the following Mako template directories: %s",
79 mako_directories)
79 mako_directories)
80
80
81 # NOTE(marcink): fix redis requirement for schema of connection since 3.X
81 # NOTE(marcink): fix redis requirement for schema of connection since 3.X
82 if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis':
82 if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis':
83 raw_url = settings['beaker.session.url']
83 raw_url = settings['beaker.session.url']
84 if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
84 if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
85 settings['beaker.session.url'] = 'redis://' + raw_url
85 settings['beaker.session.url'] = 'redis://' + raw_url
86
86
87 settings_maker.make_setting('__file__', global_config.get('__file__'))
87 settings_maker.make_setting('__file__', global_config.get('__file__'))
88
88
89 # TODO: johbo: Re-think this, usually the call to config.include
89 # TODO: johbo: Re-think this, usually the call to config.include
90 # should allow to pass in a prefix.
90 # should allow to pass in a prefix.
91 settings_maker.make_setting('rhodecode.api.url', '/_admin/api')
91 settings_maker.make_setting('rhodecode.api.url', '/_admin/api')
92
92
93 # Sanitize generic settings.
93 # Sanitize generic settings.
94 settings_maker.make_setting('default_encoding', 'UTF-8', parser='list')
94 settings_maker.make_setting('default_encoding', 'UTF-8', parser='list')
95 settings_maker.make_setting('gzip_responses', False, parser='bool')
95 settings_maker.make_setting('gzip_responses', False, parser='bool')
96 settings_maker.make_setting('startup.import_repos', 'false', parser='bool')
96 settings_maker.make_setting('startup.import_repos', 'false', parser='bool')
97
97
98 # statsd
98 # statsd
99 settings_maker.make_setting('statsd.enabled', False, parser='bool')
99 settings_maker.make_setting('statsd.enabled', False, parser='bool')
100 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
100 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
101 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
101 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
102 settings_maker.make_setting('statsd.statsd_prefix', '')
102 settings_maker.make_setting('statsd.statsd_prefix', '')
103 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
103 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
104
104
105 settings_maker.make_setting('vcs.svn.compatible_version', '')
105 settings_maker.make_setting('vcs.svn.compatible_version', '')
106 settings_maker.make_setting('vcs.svn.proxy.enabled', True, parser='bool')
106 settings_maker.make_setting('vcs.svn.proxy.enabled', True, parser='bool')
107 settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090', parser='string')
107 settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090', parser='string')
108 settings_maker.make_setting('vcs.hooks.protocol', 'http')
108 settings_maker.make_setting('vcs.hooks.protocol', 'http')
109 settings_maker.make_setting('vcs.hooks.host', '*')
109 settings_maker.make_setting('vcs.hooks.host', '*')
110 settings_maker.make_setting('vcs.scm_app_implementation', 'http')
110 settings_maker.make_setting('vcs.scm_app_implementation', 'http')
111 settings_maker.make_setting('vcs.server', '')
111 settings_maker.make_setting('vcs.server', '')
112 settings_maker.make_setting('vcs.server.protocol', 'http')
112 settings_maker.make_setting('vcs.server.protocol', 'http')
113 settings_maker.make_setting('vcs.server.enable', 'true', parser='bool')
113 settings_maker.make_setting('vcs.server.enable', 'true', parser='bool')
114 settings_maker.make_setting('vcs.hooks.direct_calls', 'false', parser='bool')
114 settings_maker.make_setting('vcs.hooks.direct_calls', 'false', parser='bool')
115 settings_maker.make_setting('vcs.start_server', 'false', parser='bool')
115 settings_maker.make_setting('vcs.start_server', 'false', parser='bool')
116 settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list')
116 settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list')
117 settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int')
117 settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int')
118
118
119 settings_maker.make_setting('vcs.methods.cache', True, parser='bool')
119 settings_maker.make_setting('vcs.methods.cache', True, parser='bool')
120
120
121 # repo_store path
121 # repo_store path
122 settings_maker.make_setting('repo_store.path', '/var/opt/rhodecode_repo_store')
122 settings_maker.make_setting('repo_store.path', '/var/opt/rhodecode_repo_store')
123 # Support legacy values of vcs.scm_app_implementation. Legacy
123 # Support legacy values of vcs.scm_app_implementation. Legacy
124 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or
124 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or
125 # disabled since 4.13 'vcsserver.scm_app' which is now mapped to 'http'.
125 # disabled since 4.13 'vcsserver.scm_app' which is now mapped to 'http'.
126 scm_app_impl = settings['vcs.scm_app_implementation']
126 scm_app_impl = settings['vcs.scm_app_implementation']
127 if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
127 if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
128 settings['vcs.scm_app_implementation'] = 'http'
128 settings['vcs.scm_app_implementation'] = 'http'
129
129
130 settings_maker.make_setting('appenlight', False, parser='bool')
130 settings_maker.make_setting('appenlight', False, parser='bool')
131
131
132 temp_store = tempfile.gettempdir()
132 temp_store = tempfile.gettempdir()
133 tmp_cache_dir = jn(temp_store, 'rc_cache')
133 tmp_cache_dir = jn(temp_store, 'rc_cache')
134
134
135 # save default, cache dir, and use it for all backends later.
135 # save default, cache dir, and use it for all backends later.
136 default_cache_dir = settings_maker.make_setting(
136 default_cache_dir = settings_maker.make_setting(
137 'cache_dir',
137 'cache_dir',
138 default=tmp_cache_dir, default_when_empty=True,
138 default=tmp_cache_dir, default_when_empty=True,
139 parser='dir:ensured')
139 parser='dir:ensured')
140
140
141 # exception store cache
141 # exception store cache
142 settings_maker.make_setting(
142 settings_maker.make_setting(
143 'exception_tracker.store_path',
143 'exception_tracker.store_path',
144 default=jn(default_cache_dir, 'exc_store'), default_when_empty=True,
144 default=jn(default_cache_dir, 'exc_store'), default_when_empty=True,
145 parser='dir:ensured'
145 parser='dir:ensured'
146 )
146 )
147
147
148 settings_maker.make_setting(
148 settings_maker.make_setting(
149 'celerybeat-schedule.path',
149 'celerybeat-schedule.path',
150 default=jn(default_cache_dir, 'celerybeat_schedule', 'celerybeat-schedule.db'), default_when_empty=True,
150 default=jn(default_cache_dir, 'celerybeat_schedule', 'celerybeat-schedule.db'), default_when_empty=True,
151 parser='file:ensured'
151 parser='file:ensured'
152 )
152 )
153
153
154 settings_maker.make_setting('exception_tracker.send_email', False, parser='bool')
154 settings_maker.make_setting('exception_tracker.send_email', False, parser='bool')
155 settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True)
155 settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True)
156
156
157 # sessions, ensure file since no-value is memory
157 # sessions, ensure file since no-value is memory
158 settings_maker.make_setting('beaker.session.type', 'file')
158 settings_maker.make_setting('beaker.session.type', 'file')
159 settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data'))
159 settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data'))
160
160
161 # cache_general
161 # cache_general
162 settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace')
162 settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace')
163 settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int')
163 settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int')
164 settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db'))
164 settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db'))
165
165
166 # cache_perms
166 # cache_perms
167 settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace')
167 settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace')
168 settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int')
168 settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int')
169 settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db'))
169 settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db'))
170
170
171 # cache_repo
171 # cache_repo
172 settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace')
172 settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace')
173 settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int')
173 settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int')
174 settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db'))
174 settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db'))
175
175
176 # cache_license
176 # cache_license
177 settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace')
177 settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace')
178 settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int')
178 settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int')
179 settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db'))
179 settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db'))
180
180
181 # cache_repo_longterm memory, 96H
181 # cache_repo_longterm memory, 96H
182 settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru')
182 settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru')
183 settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int')
183 settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int')
184 settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int')
184 settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int')
185
185
186 # sql_cache_short
186 # sql_cache_short
187 settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru')
187 settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru')
188 settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int')
188 settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int')
189 settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int')
189 settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int')
190
190
191 # archive_cache
191 # archive_cache
192 settings_maker.make_setting('archive_cache.store_dir', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
192 settings_maker.make_setting('archive_cache.locking.url', 'redis://redis:6379/1')
193 settings_maker.make_setting('archive_cache.cache_size_gb', 10, parser='float')
193 settings_maker.make_setting('archive_cache.backend.type', 'filesystem')
194 settings_maker.make_setting('archive_cache.cache_shards', 10, parser='int')
194
195 settings_maker.make_setting('archive_cache.filesystem.store_dir', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
196 settings_maker.make_setting('archive_cache.filesystem.cache_size_gb', 10, parser='float')
197 settings_maker.make_setting('archive_cache.filesystem.cache_shards', 8, parser='int')
198 settings_maker.make_setting('archive_cache.filesystem.eviction_policy', 'least-recently-stored')
195
199
196 settings_maker.env_expand()
200 settings_maker.env_expand()
197
201
198 # configure instance id
202 # configure instance id
199 config_utils.set_instance_id(settings)
203 config_utils.set_instance_id(settings)
200
204
201 return settings
205 return settings
@@ -1,88 +1,264 b''
1 # Copyright (C) 2015-2023 RhodeCode GmbH
1 # Copyright (C) 2015-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import logging
19 import codecs
20 import contextlib
21 import functools
20 import os
22 import os
21 import diskcache
23 import logging
22 from diskcache import RLock
24 import time
25 import typing
26 import zlib
27
28 from rhodecode.lib.ext_json import json
29 from .lock import GenerationLock
23
30
24 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
25
32
26 cache_meta = None
33 cache_meta = None
27
34
35 UNKNOWN = -241
36 NO_VAL = -917
28
37
29 class ReentrantLock(RLock):
38 MODE_BINARY = 'BINARY'
30 def __enter__(self):
39
31 reentrant_lock_key = self._key
40
41 class FileSystemCache:
42
43 def __init__(self, index, directory, **settings):
44 self._index = index
45 self._directory = directory
46
47 def _write_file(self, full_path, iterator, mode, encoding=None):
48 full_dir, _ = os.path.split(full_path)
49
50 for count in range(1, 11):
51 with contextlib.suppress(OSError):
52 os.makedirs(full_dir)
53
54 try:
55 # Another cache may have deleted the directory before
56 # the file could be opened.
57 writer = open(full_path, mode, encoding=encoding)
58 except OSError:
59 if count == 10:
60 # Give up after 10 tries to open the file.
61 raise
62 continue
63
64 with writer:
65 size = 0
66 for chunk in iterator:
67 size += len(chunk)
68 writer.write(chunk)
69 return size
70
71 def _get_keyfile(self, key):
72 return os.path.join(self._directory, f'{key}.key')
73
74 def store(self, key, value_reader, metadata):
75 filename, full_path = self.random_filename()
76 key_file = self._get_keyfile(key)
77
78 # STORE METADATA
79 _metadata = {
80 "version": "v1",
81 "timestamp": time.time(),
82 "filename": filename,
83 "full_path": full_path,
84 "key_file": key_file,
85 }
86 if metadata:
87 _metadata.update(metadata)
88
89 reader = functools.partial(value_reader.read, 2**22)
90
91 iterator = iter(reader, b'')
92 size = self._write_file(full_path, iterator, 'xb')
93
94 # after archive is finished, we create a key to save the presence of the binary file
95 with open(key_file, 'wb') as f:
96 f.write(json.dumps(_metadata))
97
98 return key, size, MODE_BINARY, filename, _metadata
99
100 def fetch(self, key) -> tuple[typing.BinaryIO, dict]:
101 if key not in self:
102 raise KeyError(key)
103
104 key_file = self._get_keyfile(key)
105 with open(key_file, 'rb') as f:
106 metadata = json.loads(f.read())
107
108 filename = metadata['filename']
109
110 return open(os.path.join(self._directory, filename), 'rb'), metadata
111
112 def random_filename(self):
113 """Return filename and full-path tuple for file storage.
114
115 Filename will be a randomly generated 28 character hexadecimal string
116 with ".archive_cache" suffixed. Two levels of sub-directories will be used to
117 reduce the size of directories. On older filesystems, lookups in
118 directories with many files may be slow.
119 """
32
120
33 log.debug('Acquire ReentrantLock(key=%s) for archive cache generation...', reentrant_lock_key)
121 hex_name = codecs.encode(os.urandom(16), 'hex').decode('utf-8')
34 #self.acquire()
122 sub_dir = os.path.join(hex_name[:2], hex_name[2:4])
35 log.debug('Lock for key=%s acquired', reentrant_lock_key)
123 name = hex_name[4:] + '.archive_cache'
124 filename = os.path.join(sub_dir, name)
125 full_path = os.path.join(self._directory, filename)
126 return filename, full_path
127
128 def hash(self, key):
129 """Compute portable hash for `key`.
130
131 :param key: key to hash
132 :return: hash value
133
134 """
135 mask = 0xFFFFFFFF
136 return zlib.adler32(key.encode('utf-8')) & mask # noqa
137
138 def __contains__(self, key):
139 """Return `True` if `key` matching item is found in cache.
140
141 :param key: key matching item
142 :return: True if key matching item
143
144 """
145 key_file = self._get_keyfile(key)
146 return os.path.exists(key_file)
147
148
149 class FanoutCache:
150 """Cache that shards keys and values."""
151
152 def __init__(
153 self, directory=None, **settings
154 ):
155 """Initialize cache instance.
156
157 :param str directory: cache directory
158 :param settings: settings dict
159
160 """
161 if directory is None:
162 raise ValueError('directory cannot be None')
163
164 directory = str(directory)
165 directory = os.path.expanduser(directory)
166 directory = os.path.expandvars(directory)
167 self._directory = directory
36
168
37 def __exit__(self, *exc_info):
169 self._count = settings.pop('cache_shards')
38 #self.release()
170 self._locking_url = settings.pop('locking_url')
39 pass
171
172 self._shards = tuple(
173 FileSystemCache(
174 index=num,
175 directory=os.path.join(directory, 'shard_%03d' % num),
176 **settings,
177 )
178 for num in range(self._count)
179 )
180 self._hash = self._shards[0].hash
181
182 def get_lock(self, lock_key):
183 return GenerationLock(lock_key, self._locking_url)
184
185 def _get_shard(self, key) -> FileSystemCache:
186 index = self._hash(key) % self._count
187 shard = self._shards[index]
188 return shard
189
190 def store(self, key, value_reader, metadata=None):
191 shard = self._get_shard(key)
192 return shard.store(key, value_reader, metadata)
193
194 def fetch(self, key):
195 """Return file handle corresponding to `key` from cache.
196 """
197 shard = self._get_shard(key)
198 return shard.fetch(key)
199
200 def has_key(self, key):
201 """Return `True` if `key` matching item is found in cache.
202
203 :param key: key for item
204 :return: True if key is found
205
206 """
207 shard = self._get_shard(key)
208 return key in shard
209
210 def __contains__(self, item):
211 return self.has_key(item)
212
213 def evict(self):
214 """Remove old items based on the conditions"""
215 # TODO: Implement this...
216 return
40
217
41
218
42 def get_archival_config(config):
219 def get_archival_config(config):
43
220
44 final_config = {
221 final_config = {
45 'archive_cache.eviction_policy': 'least-frequently-used'
222
46 }
223 }
47
224
48 for k, v in config.items():
225 for k, v in config.items():
49 if k.startswith('archive_cache'):
226 if k.startswith('archive_cache'):
50 final_config[k] = v
227 final_config[k] = v
51
228
52 return final_config
229 return final_config
53
230
54
231
55 def get_archival_cache_store(config):
232 def get_archival_cache_store(config):
56
233
57 global cache_meta
234 global cache_meta
58 if cache_meta is not None:
235 if cache_meta is not None:
59 return cache_meta
236 return cache_meta
60
237
61 config = get_archival_config(config)
238 config = get_archival_config(config)
239 backend = config['archive_cache.backend.type']
240 if backend != 'filesystem':
241 raise ValueError('archive_cache.backend.type only supports "filesystem"')
62
242
63 archive_cache_dir = config['archive_cache.store_dir']
243 archive_cache_locking_url = config['archive_cache.locking.url']
64 archive_cache_size_gb = config['archive_cache.cache_size_gb']
244 archive_cache_dir = config['archive_cache.filesystem.store_dir']
65 archive_cache_shards = config['archive_cache.cache_shards']
245 archive_cache_size_gb = config['archive_cache.filesystem.cache_size_gb']
66 archive_cache_eviction_policy = config['archive_cache.eviction_policy']
246 archive_cache_shards = config['archive_cache.filesystem.cache_shards']
247 archive_cache_eviction_policy = config['archive_cache.filesystem.eviction_policy']
67
248
68 log.debug('Initializing archival cache instance under %s', archive_cache_dir)
249 log.debug('Initializing archival cache instance under %s', archive_cache_dir)
69
250
70 # check if it's ok to write, and re-create the archive cache
251 # check if it's ok to write, and re-create the archive cache
71 if not os.path.isdir(archive_cache_dir):
252 if not os.path.isdir(archive_cache_dir):
72 os.makedirs(archive_cache_dir, exist_ok=True)
253 os.makedirs(archive_cache_dir, exist_ok=True)
73
254
74 d_cache = diskcache.FanoutCache(
255 d_cache = FanoutCache(
75 archive_cache_dir, shards=archive_cache_shards,
256 archive_cache_dir,
76 cull_limit=0, # manual eviction required
257 locking_url=archive_cache_locking_url,
77 size_limit=archive_cache_size_gb * 1024 * 1024 * 1024,
258 cache_shards=archive_cache_shards,
78 eviction_policy=archive_cache_eviction_policy,
259 cache_size_limit=archive_cache_size_gb * 1024 * 1024 * 1024,
79 timeout=30
260 cache_eviction_policy=archive_cache_eviction_policy
80 )
261 )
81 cache_meta = d_cache
262 cache_meta = d_cache
82 return cache_meta
263 return cache_meta
83
264
84
85 def includeme(config):
86 # init our cache at start
87 settings = config.get_settings()
88 get_archival_cache_store(settings)
@@ -1,844 +1,851 b''
1 # Copyright (C) 2017-2023 RhodeCode GmbH
1 # Copyright (C) 2017-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 import os
20 import os
21 import sys
21 import sys
22 import time
22 import time
23 import platform
23 import platform
24 import collections
24 import collections
25 import psutil
25 import psutil
26 from functools import wraps
26 from functools import wraps
27
27
28 import pkg_resources
28 import pkg_resources
29 import logging
29 import logging
30 import resource
30 import resource
31
31
32 import configparser
32 import configparser
33
33
34 from rc_license.models import LicenseModel
34 from rc_license.models import LicenseModel
35 from rhodecode.lib.str_utils import safe_str
35 from rhodecode.lib.str_utils import safe_str
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 _NA = 'NOT AVAILABLE'
40 _NA = 'NOT AVAILABLE'
41 _NA_FLOAT = 0.0
41 _NA_FLOAT = 0.0
42
42
43 STATE_OK = 'ok'
43 STATE_OK = 'ok'
44 STATE_ERR = 'error'
44 STATE_ERR = 'error'
45 STATE_WARN = 'warning'
45 STATE_WARN = 'warning'
46
46
47 STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK}
47 STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK}
48
48
49
49
50 registered_helpers = {}
50 registered_helpers = {}
51
51
52
52
53 def register_sysinfo(func):
53 def register_sysinfo(func):
54 """
54 """
55 @register_helper
55 @register_helper
56 def db_check():
56 def db_check():
57 pass
57 pass
58
58
59 db_check == registered_helpers['db_check']
59 db_check == registered_helpers['db_check']
60 """
60 """
61 global registered_helpers
61 global registered_helpers
62 registered_helpers[func.__name__] = func
62 registered_helpers[func.__name__] = func
63
63
64 @wraps(func)
64 @wraps(func)
65 def _wrapper(*args, **kwargs):
65 def _wrapper(*args, **kwargs):
66 return func(*args, **kwargs)
66 return func(*args, **kwargs)
67 return _wrapper
67 return _wrapper
68
68
69
69
70 # HELPERS
70 # HELPERS
71 def percentage(part: (int, float), whole: (int, float)):
71 def percentage(part: (int, float), whole: (int, float)):
72 whole = float(whole)
72 whole = float(whole)
73 if whole > 0:
73 if whole > 0:
74 return round(100 * float(part) / whole, 1)
74 return round(100 * float(part) / whole, 1)
75 return 0.0
75 return 0.0
76
76
77
77
78 def get_storage_size(storage_path):
78 def get_storage_size(storage_path):
79 sizes = []
79 sizes = []
80 for file_ in os.listdir(storage_path):
80 for file_ in os.listdir(storage_path):
81 storage_file = os.path.join(storage_path, file_)
81 storage_file = os.path.join(storage_path, file_)
82 if os.path.isfile(storage_file):
82 if os.path.isfile(storage_file):
83 try:
83 try:
84 sizes.append(os.path.getsize(storage_file))
84 sizes.append(os.path.getsize(storage_file))
85 except OSError:
85 except OSError:
86 log.exception('Failed to get size of storage file %s', storage_file)
86 log.exception('Failed to get size of storage file %s', storage_file)
87 pass
87 pass
88
88
89 return sum(sizes)
89 return sum(sizes)
90
90
91
91
92 def get_resource(resource_type):
92 def get_resource(resource_type):
93 try:
93 try:
94 return resource.getrlimit(resource_type)
94 return resource.getrlimit(resource_type)
95 except Exception:
95 except Exception:
96 return 'NOT_SUPPORTED'
96 return 'NOT_SUPPORTED'
97
97
98
98
99 def get_cert_path(ini_path):
99 def get_cert_path(ini_path):
100 default = '/etc/ssl/certs/ca-certificates.crt'
100 default = '/etc/ssl/certs/ca-certificates.crt'
101 control_ca_bundle = os.path.join(
101 control_ca_bundle = os.path.join(
102 os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
102 os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
103 '.rccontrol-profile/etc/ca-bundle.crt')
103 '.rccontrol-profile/etc/ca-bundle.crt')
104 if os.path.isfile(control_ca_bundle):
104 if os.path.isfile(control_ca_bundle):
105 default = control_ca_bundle
105 default = control_ca_bundle
106
106
107 return default
107 return default
108
108
109
109
110 class SysInfoRes(object):
110 class SysInfoRes(object):
111 def __init__(self, value, state=None, human_value=None):
111 def __init__(self, value, state=None, human_value=None):
112 self.value = value
112 self.value = value
113 self.state = state or STATE_OK_DEFAULT
113 self.state = state or STATE_OK_DEFAULT
114 self.human_value = human_value or value
114 self.human_value = human_value or value
115
115
116 def __json__(self):
116 def __json__(self):
117 return {
117 return {
118 'value': self.value,
118 'value': self.value,
119 'state': self.state,
119 'state': self.state,
120 'human_value': self.human_value,
120 'human_value': self.human_value,
121 }
121 }
122
122
123 def get_value(self):
123 def get_value(self):
124 return self.__json__()
124 return self.__json__()
125
125
126 def __str__(self):
126 def __str__(self):
127 return f'<SysInfoRes({self.__json__()})>'
127 return f'<SysInfoRes({self.__json__()})>'
128
128
129
129
130 class SysInfo(object):
130 class SysInfo(object):
131
131
132 def __init__(self, func_name, **kwargs):
132 def __init__(self, func_name, **kwargs):
133 self.function_name = func_name
133 self.function_name = func_name
134 self.value = _NA
134 self.value = _NA
135 self.state = None
135 self.state = None
136 self.kwargs = kwargs or {}
136 self.kwargs = kwargs or {}
137
137
138 def __call__(self):
138 def __call__(self):
139 computed = self.compute(**self.kwargs)
139 computed = self.compute(**self.kwargs)
140 if not isinstance(computed, SysInfoRes):
140 if not isinstance(computed, SysInfoRes):
141 raise ValueError(
141 raise ValueError(
142 'computed value for {} is not instance of '
142 'computed value for {} is not instance of '
143 '{}, got {} instead'.format(
143 '{}, got {} instead'.format(
144 self.function_name, SysInfoRes, type(computed)))
144 self.function_name, SysInfoRes, type(computed)))
145 return computed.__json__()
145 return computed.__json__()
146
146
147 def __str__(self):
147 def __str__(self):
148 return f'<SysInfo({self.function_name})>'
148 return f'<SysInfo({self.function_name})>'
149
149
150 def compute(self, **kwargs):
150 def compute(self, **kwargs):
151 return self.function_name(**kwargs)
151 return self.function_name(**kwargs)
152
152
153
153
154 # SysInfo functions
154 # SysInfo functions
155 @register_sysinfo
155 @register_sysinfo
156 def python_info():
156 def python_info():
157 value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}',
157 value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}',
158 executable=sys.executable)
158 executable=sys.executable)
159 return SysInfoRes(value=value)
159 return SysInfoRes(value=value)
160
160
161
161
162 @register_sysinfo
162 @register_sysinfo
163 def py_modules():
163 def py_modules():
164 mods = dict([(p.project_name, {'version': p.version, 'location': p.location})
164 mods = dict([(p.project_name, {'version': p.version, 'location': p.location})
165 for p in pkg_resources.working_set])
165 for p in pkg_resources.working_set])
166
166
167 value = sorted(mods.items(), key=lambda k: k[0].lower())
167 value = sorted(mods.items(), key=lambda k: k[0].lower())
168 return SysInfoRes(value=value)
168 return SysInfoRes(value=value)
169
169
170
170
171 @register_sysinfo
171 @register_sysinfo
172 def platform_type():
172 def platform_type():
173 from rhodecode.lib.utils import generate_platform_uuid
173 from rhodecode.lib.utils import generate_platform_uuid
174
174
175 value = dict(
175 value = dict(
176 name=safe_str(platform.platform()),
176 name=safe_str(platform.platform()),
177 uuid=generate_platform_uuid()
177 uuid=generate_platform_uuid()
178 )
178 )
179 return SysInfoRes(value=value)
179 return SysInfoRes(value=value)
180
180
181
181
182 @register_sysinfo
182 @register_sysinfo
183 def locale_info():
183 def locale_info():
184 import locale
184 import locale
185
185
186 def safe_get_locale(locale_name):
186 def safe_get_locale(locale_name):
187 try:
187 try:
188 locale.getlocale(locale_name)
188 locale.getlocale(locale_name)
189 except TypeError:
189 except TypeError:
190 return f'FAILED_LOCALE_GET:{locale_name}'
190 return f'FAILED_LOCALE_GET:{locale_name}'
191
191
192 value = dict(
192 value = dict(
193 locale_default=locale.getlocale(),
193 locale_default=locale.getlocale(),
194 locale_lc_all=safe_get_locale(locale.LC_ALL),
194 locale_lc_all=safe_get_locale(locale.LC_ALL),
195 locale_lc_ctype=safe_get_locale(locale.LC_CTYPE),
195 locale_lc_ctype=safe_get_locale(locale.LC_CTYPE),
196 lang_env=os.environ.get('LANG'),
196 lang_env=os.environ.get('LANG'),
197 lc_all_env=os.environ.get('LC_ALL'),
197 lc_all_env=os.environ.get('LC_ALL'),
198 local_archive_env=os.environ.get('LOCALE_ARCHIVE'),
198 local_archive_env=os.environ.get('LOCALE_ARCHIVE'),
199 )
199 )
200 human_value = \
200 human_value = \
201 f"LANG: {value['lang_env']}, \
201 f"LANG: {value['lang_env']}, \
202 locale LC_ALL: {value['locale_lc_all']}, \
202 locale LC_ALL: {value['locale_lc_all']}, \
203 locale LC_CTYPE: {value['locale_lc_ctype']}, \
203 locale LC_CTYPE: {value['locale_lc_ctype']}, \
204 Default locales: {value['locale_default']}"
204 Default locales: {value['locale_default']}"
205
205
206 return SysInfoRes(value=value, human_value=human_value)
206 return SysInfoRes(value=value, human_value=human_value)
207
207
208
208
209 @register_sysinfo
209 @register_sysinfo
210 def ulimit_info():
210 def ulimit_info():
211 data = collections.OrderedDict([
211 data = collections.OrderedDict([
212 ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)),
212 ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)),
213 ('file size', get_resource(resource.RLIMIT_FSIZE)),
213 ('file size', get_resource(resource.RLIMIT_FSIZE)),
214 ('stack size', get_resource(resource.RLIMIT_STACK)),
214 ('stack size', get_resource(resource.RLIMIT_STACK)),
215 ('core file size', get_resource(resource.RLIMIT_CORE)),
215 ('core file size', get_resource(resource.RLIMIT_CORE)),
216 ('address space size', get_resource(resource.RLIMIT_AS)),
216 ('address space size', get_resource(resource.RLIMIT_AS)),
217 ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)),
217 ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)),
218 ('heap size', get_resource(resource.RLIMIT_DATA)),
218 ('heap size', get_resource(resource.RLIMIT_DATA)),
219 ('rss size', get_resource(resource.RLIMIT_RSS)),
219 ('rss size', get_resource(resource.RLIMIT_RSS)),
220 ('number of processes', get_resource(resource.RLIMIT_NPROC)),
220 ('number of processes', get_resource(resource.RLIMIT_NPROC)),
221 ('open files', get_resource(resource.RLIMIT_NOFILE)),
221 ('open files', get_resource(resource.RLIMIT_NOFILE)),
222 ])
222 ])
223
223
224 text = ', '.join(f'{k}:{v}' for k, v in data.items())
224 text = ', '.join(f'{k}:{v}' for k, v in data.items())
225
225
226 value = {
226 value = {
227 'limits': data,
227 'limits': data,
228 'text': text,
228 'text': text,
229 }
229 }
230 return SysInfoRes(value=value)
230 return SysInfoRes(value=value)
231
231
232
232
233 @register_sysinfo
233 @register_sysinfo
234 def uptime():
234 def uptime():
235 from rhodecode.lib.helpers import age, time_to_datetime
235 from rhodecode.lib.helpers import age, time_to_datetime
236 from rhodecode.translation import TranslationString
236 from rhodecode.translation import TranslationString
237
237
238 value = dict(boot_time=0, uptime=0, text='')
238 value = dict(boot_time=0, uptime=0, text='')
239 state = STATE_OK_DEFAULT
239 state = STATE_OK_DEFAULT
240
240
241 boot_time = psutil.boot_time()
241 boot_time = psutil.boot_time()
242 value['boot_time'] = boot_time
242 value['boot_time'] = boot_time
243 value['uptime'] = time.time() - boot_time
243 value['uptime'] = time.time() - boot_time
244
244
245 date_or_age = age(time_to_datetime(boot_time))
245 date_or_age = age(time_to_datetime(boot_time))
246 if isinstance(date_or_age, TranslationString):
246 if isinstance(date_or_age, TranslationString):
247 date_or_age = date_or_age.interpolate()
247 date_or_age = date_or_age.interpolate()
248
248
249 human_value = value.copy()
249 human_value = value.copy()
250 human_value['boot_time'] = time_to_datetime(boot_time)
250 human_value['boot_time'] = time_to_datetime(boot_time)
251 human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False)
251 human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False)
252
252
253 human_value['text'] = f'Server started {date_or_age}'
253 human_value['text'] = f'Server started {date_or_age}'
254 return SysInfoRes(value=value, human_value=human_value)
254 return SysInfoRes(value=value, human_value=human_value)
255
255
256
256
257 @register_sysinfo
257 @register_sysinfo
258 def memory():
258 def memory():
259 from rhodecode.lib.helpers import format_byte_size_binary
259 from rhodecode.lib.helpers import format_byte_size_binary
260 value = dict(available=0, used=0, used_real=0, cached=0, percent=0,
260 value = dict(available=0, used=0, used_real=0, cached=0, percent=0,
261 percent_used=0, free=0, inactive=0, active=0, shared=0,
261 percent_used=0, free=0, inactive=0, active=0, shared=0,
262 total=0, buffers=0, text='')
262 total=0, buffers=0, text='')
263
263
264 state = STATE_OK_DEFAULT
264 state = STATE_OK_DEFAULT
265
265
266 value.update(dict(psutil.virtual_memory()._asdict()))
266 value.update(dict(psutil.virtual_memory()._asdict()))
267 value['used_real'] = value['total'] - value['available']
267 value['used_real'] = value['total'] - value['available']
268 value['percent_used'] = psutil._common.usage_percent(value['used_real'], value['total'], 1)
268 value['percent_used'] = psutil._common.usage_percent(value['used_real'], value['total'], 1)
269
269
270 human_value = value.copy()
270 human_value = value.copy()
271 human_value['text'] = '{}/{}, {}% used'.format(
271 human_value['text'] = '{}/{}, {}% used'.format(
272 format_byte_size_binary(value['used_real']),
272 format_byte_size_binary(value['used_real']),
273 format_byte_size_binary(value['total']),
273 format_byte_size_binary(value['total']),
274 value['percent_used'])
274 value['percent_used'])
275
275
276 keys = list(value.keys())[::]
276 keys = list(value.keys())[::]
277 keys.pop(keys.index('percent'))
277 keys.pop(keys.index('percent'))
278 keys.pop(keys.index('percent_used'))
278 keys.pop(keys.index('percent_used'))
279 keys.pop(keys.index('text'))
279 keys.pop(keys.index('text'))
280 for k in keys:
280 for k in keys:
281 human_value[k] = format_byte_size_binary(value[k])
281 human_value[k] = format_byte_size_binary(value[k])
282
282
283 if state['type'] == STATE_OK and value['percent_used'] > 90:
283 if state['type'] == STATE_OK and value['percent_used'] > 90:
284 msg = 'Critical: your available RAM memory is very low.'
284 msg = 'Critical: your available RAM memory is very low.'
285 state = {'message': msg, 'type': STATE_ERR}
285 state = {'message': msg, 'type': STATE_ERR}
286
286
287 elif state['type'] == STATE_OK and value['percent_used'] > 70:
287 elif state['type'] == STATE_OK and value['percent_used'] > 70:
288 msg = 'Warning: your available RAM memory is running low.'
288 msg = 'Warning: your available RAM memory is running low.'
289 state = {'message': msg, 'type': STATE_WARN}
289 state = {'message': msg, 'type': STATE_WARN}
290
290
291 return SysInfoRes(value=value, state=state, human_value=human_value)
291 return SysInfoRes(value=value, state=state, human_value=human_value)
292
292
293
293
294 @register_sysinfo
294 @register_sysinfo
295 def machine_load():
295 def machine_load():
296 value = {'1_min': _NA_FLOAT, '5_min': _NA_FLOAT, '15_min': _NA_FLOAT, 'text': ''}
296 value = {'1_min': _NA_FLOAT, '5_min': _NA_FLOAT, '15_min': _NA_FLOAT, 'text': ''}
297 state = STATE_OK_DEFAULT
297 state = STATE_OK_DEFAULT
298
298
299 # load averages
299 # load averages
300 if hasattr(psutil.os, 'getloadavg'):
300 if hasattr(psutil.os, 'getloadavg'):
301 value.update(dict(
301 value.update(dict(
302 list(zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
302 list(zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
303 ))
303 ))
304
304
305 human_value = value.copy()
305 human_value = value.copy()
306 human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format(
306 human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format(
307 value['1_min'], value['5_min'], value['15_min'])
307 value['1_min'], value['5_min'], value['15_min'])
308
308
309 if state['type'] == STATE_OK and value['15_min'] > 5.0:
309 if state['type'] == STATE_OK and value['15_min'] > 5.0:
310 msg = 'Warning: your machine load is very high.'
310 msg = 'Warning: your machine load is very high.'
311 state = {'message': msg, 'type': STATE_WARN}
311 state = {'message': msg, 'type': STATE_WARN}
312
312
313 return SysInfoRes(value=value, state=state, human_value=human_value)
313 return SysInfoRes(value=value, state=state, human_value=human_value)
314
314
315
315
316 @register_sysinfo
316 @register_sysinfo
317 def cpu():
317 def cpu():
318 value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []}
318 value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []}
319 state = STATE_OK_DEFAULT
319 state = STATE_OK_DEFAULT
320
320
321 value['cpu'] = psutil.cpu_percent(0.5)
321 value['cpu'] = psutil.cpu_percent(0.5)
322 value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True)
322 value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True)
323 value['cpu_count'] = psutil.cpu_count()
323 value['cpu_count'] = psutil.cpu_count()
324
324
325 human_value = value.copy()
325 human_value = value.copy()
326 human_value['text'] = '{} cores at {} %'.format(value['cpu_count'], value['cpu'])
326 human_value['text'] = '{} cores at {} %'.format(value['cpu_count'], value['cpu'])
327
327
328 return SysInfoRes(value=value, state=state, human_value=human_value)
328 return SysInfoRes(value=value, state=state, human_value=human_value)
329
329
330
330
331 @register_sysinfo
331 @register_sysinfo
332 def storage():
332 def storage():
333 from rhodecode.lib.helpers import format_byte_size_binary
333 from rhodecode.lib.helpers import format_byte_size_binary
334 from rhodecode.lib.utils import get_rhodecode_repo_store_path
334 from rhodecode.lib.utils import get_rhodecode_repo_store_path
335 path = get_rhodecode_repo_store_path()
335 path = get_rhodecode_repo_store_path()
336
336
337 value = dict(percent=0, used=0, total=0, path=path, text='')
337 value = dict(percent=0, used=0, total=0, path=path, text='')
338 state = STATE_OK_DEFAULT
338 state = STATE_OK_DEFAULT
339
339
340 try:
340 try:
341 value.update(dict(psutil.disk_usage(path)._asdict()))
341 value.update(dict(psutil.disk_usage(path)._asdict()))
342 except Exception as e:
342 except Exception as e:
343 log.exception('Failed to fetch disk info')
343 log.exception('Failed to fetch disk info')
344 state = {'message': str(e), 'type': STATE_ERR}
344 state = {'message': str(e), 'type': STATE_ERR}
345
345
346 human_value = value.copy()
346 human_value = value.copy()
347 human_value['used'] = format_byte_size_binary(value['used'])
347 human_value['used'] = format_byte_size_binary(value['used'])
348 human_value['total'] = format_byte_size_binary(value['total'])
348 human_value['total'] = format_byte_size_binary(value['total'])
349 human_value['text'] = "{}/{}, {}% used".format(
349 human_value['text'] = "{}/{}, {}% used".format(
350 format_byte_size_binary(value['used']),
350 format_byte_size_binary(value['used']),
351 format_byte_size_binary(value['total']),
351 format_byte_size_binary(value['total']),
352 value['percent'])
352 value['percent'])
353
353
354 if state['type'] == STATE_OK and value['percent'] > 90:
354 if state['type'] == STATE_OK and value['percent'] > 90:
355 msg = 'Critical: your disk space is very low.'
355 msg = 'Critical: your disk space is very low.'
356 state = {'message': msg, 'type': STATE_ERR}
356 state = {'message': msg, 'type': STATE_ERR}
357
357
358 elif state['type'] == STATE_OK and value['percent'] > 70:
358 elif state['type'] == STATE_OK and value['percent'] > 70:
359 msg = 'Warning: your disk space is running low.'
359 msg = 'Warning: your disk space is running low.'
360 state = {'message': msg, 'type': STATE_WARN}
360 state = {'message': msg, 'type': STATE_WARN}
361
361
362 return SysInfoRes(value=value, state=state, human_value=human_value)
362 return SysInfoRes(value=value, state=state, human_value=human_value)
363
363
364
364
365 @register_sysinfo
365 @register_sysinfo
366 def storage_inodes():
366 def storage_inodes():
367 from rhodecode.lib.utils import get_rhodecode_repo_store_path
367 from rhodecode.lib.utils import get_rhodecode_repo_store_path
368 path = get_rhodecode_repo_store_path()
368 path = get_rhodecode_repo_store_path()
369
369
370 value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='')
370 value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='')
371 state = STATE_OK_DEFAULT
371 state = STATE_OK_DEFAULT
372
372
373 try:
373 try:
374 i_stat = os.statvfs(path)
374 i_stat = os.statvfs(path)
375 value['free'] = i_stat.f_ffree
375 value['free'] = i_stat.f_ffree
376 value['used'] = i_stat.f_files-i_stat.f_favail
376 value['used'] = i_stat.f_files-i_stat.f_favail
377 value['total'] = i_stat.f_files
377 value['total'] = i_stat.f_files
378 value['percent'] = percentage(value['used'], value['total'])
378 value['percent'] = percentage(value['used'], value['total'])
379 except Exception as e:
379 except Exception as e:
380 log.exception('Failed to fetch disk inodes info')
380 log.exception('Failed to fetch disk inodes info')
381 state = {'message': str(e), 'type': STATE_ERR}
381 state = {'message': str(e), 'type': STATE_ERR}
382
382
383 human_value = value.copy()
383 human_value = value.copy()
384 human_value['text'] = "{}/{}, {}% used".format(
384 human_value['text'] = "{}/{}, {}% used".format(
385 value['used'], value['total'], value['percent'])
385 value['used'], value['total'], value['percent'])
386
386
387 if state['type'] == STATE_OK and value['percent'] > 90:
387 if state['type'] == STATE_OK and value['percent'] > 90:
388 msg = 'Critical: your disk free inodes are very low.'
388 msg = 'Critical: your disk free inodes are very low.'
389 state = {'message': msg, 'type': STATE_ERR}
389 state = {'message': msg, 'type': STATE_ERR}
390
390
391 elif state['type'] == STATE_OK and value['percent'] > 70:
391 elif state['type'] == STATE_OK and value['percent'] > 70:
392 msg = 'Warning: your disk free inodes are running low.'
392 msg = 'Warning: your disk free inodes are running low.'
393 state = {'message': msg, 'type': STATE_WARN}
393 state = {'message': msg, 'type': STATE_WARN}
394
394
395 return SysInfoRes(value=value, state=state, human_value=human_value)
395 return SysInfoRes(value=value, state=state, human_value=human_value)
396
396
397
397
398 @register_sysinfo
398 @register_sysinfo
399 def storage_archives():
399 def storage_archives():
400 import rhodecode
400 import rhodecode
401 from rhodecode.lib.utils import safe_str
401 from rhodecode.lib.utils import safe_str
402 from rhodecode.lib.helpers import format_byte_size_binary
402 from rhodecode.lib.helpers import format_byte_size_binary
403
403
404 msg = 'Archive cache storage is controlled by ' \
404 storage_type = rhodecode.ConfigGet().get_str('archive_cache.backend.type')
405 'archive_cache.store_dir=/path/to/cache option in the .ini file'
405 storage_key = 'archive_cache.filesystem.store_dir'
406 path = safe_str(rhodecode.CONFIG.get('archive_cache.store_dir', msg))
406
407 default_msg = 'Archive cache storage is controlled by '\
408 f'{storage_key}=/path/to/cache option in the .ini file'
409 path = rhodecode.ConfigGet().get_str(storage_key, missing=default_msg)
407
410
408 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
411 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
409 state = STATE_OK_DEFAULT
412 state = STATE_OK_DEFAULT
410 try:
413 try:
414 if storage_type != 'filesystem':
415 # raise Exc to stop reporting on different type
416 raise ValueError('Storage type must be "filesystem"')
417
411 items_count = 0
418 items_count = 0
412 used = 0
419 used = 0
413 for root, dirs, files in os.walk(path):
420 for root, dirs, files in os.walk(path):
414 if root == path:
421 if root == path:
415 items_count = len(dirs)
422 items_count = len(dirs)
416
423
417 for f in files:
424 for f in files:
418 try:
425 try:
419 used += os.path.getsize(os.path.join(root, f))
426 used += os.path.getsize(os.path.join(root, f))
420 except OSError:
427 except OSError:
421 pass
428 pass
422 value.update({
429 value.update({
423 'percent': 100,
430 'percent': 100,
424 'used': used,
431 'used': used,
425 'total': used,
432 'total': used,
426 'items': items_count
433 'items': items_count
427 })
434 })
428
435
429 except Exception as e:
436 except Exception as e:
430 log.exception('failed to fetch archive cache storage')
437 log.exception('failed to fetch archive cache storage')
431 state = {'message': str(e), 'type': STATE_ERR}
438 state = {'message': str(e), 'type': STATE_ERR}
432
439
433 human_value = value.copy()
440 human_value = value.copy()
434 human_value['used'] = format_byte_size_binary(value['used'])
441 human_value['used'] = format_byte_size_binary(value['used'])
435 human_value['total'] = format_byte_size_binary(value['total'])
442 human_value['total'] = format_byte_size_binary(value['total'])
436 human_value['text'] = "{} ({} items)".format(
443 human_value['text'] = "{} ({} items)".format(
437 human_value['used'], value['items'])
444 human_value['used'], value['items'])
438
445
439 return SysInfoRes(value=value, state=state, human_value=human_value)
446 return SysInfoRes(value=value, state=state, human_value=human_value)
440
447
441
448
442 @register_sysinfo
449 @register_sysinfo
443 def storage_gist():
450 def storage_gist():
444 from rhodecode.model.gist import GIST_STORE_LOC
451 from rhodecode.model.gist import GIST_STORE_LOC
445 from rhodecode.lib.utils import safe_str, get_rhodecode_repo_store_path
452 from rhodecode.lib.utils import safe_str, get_rhodecode_repo_store_path
446 from rhodecode.lib.helpers import format_byte_size_binary
453 from rhodecode.lib.helpers import format_byte_size_binary
447 path = safe_str(os.path.join(
454 path = safe_str(os.path.join(
448 get_rhodecode_repo_store_path(), GIST_STORE_LOC))
455 get_rhodecode_repo_store_path(), GIST_STORE_LOC))
449
456
450 # gist storage
457 # gist storage
451 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
458 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
452 state = STATE_OK_DEFAULT
459 state = STATE_OK_DEFAULT
453
460
454 try:
461 try:
455 items_count = 0
462 items_count = 0
456 used = 0
463 used = 0
457 for root, dirs, files in os.walk(path):
464 for root, dirs, files in os.walk(path):
458 if root == path:
465 if root == path:
459 items_count = len(dirs)
466 items_count = len(dirs)
460
467
461 for f in files:
468 for f in files:
462 try:
469 try:
463 used += os.path.getsize(os.path.join(root, f))
470 used += os.path.getsize(os.path.join(root, f))
464 except OSError:
471 except OSError:
465 pass
472 pass
466 value.update({
473 value.update({
467 'percent': 100,
474 'percent': 100,
468 'used': used,
475 'used': used,
469 'total': used,
476 'total': used,
470 'items': items_count
477 'items': items_count
471 })
478 })
472 except Exception as e:
479 except Exception as e:
473 log.exception('failed to fetch gist storage items')
480 log.exception('failed to fetch gist storage items')
474 state = {'message': str(e), 'type': STATE_ERR}
481 state = {'message': str(e), 'type': STATE_ERR}
475
482
476 human_value = value.copy()
483 human_value = value.copy()
477 human_value['used'] = format_byte_size_binary(value['used'])
484 human_value['used'] = format_byte_size_binary(value['used'])
478 human_value['total'] = format_byte_size_binary(value['total'])
485 human_value['total'] = format_byte_size_binary(value['total'])
479 human_value['text'] = "{} ({} items)".format(
486 human_value['text'] = "{} ({} items)".format(
480 human_value['used'], value['items'])
487 human_value['used'], value['items'])
481
488
482 return SysInfoRes(value=value, state=state, human_value=human_value)
489 return SysInfoRes(value=value, state=state, human_value=human_value)
483
490
484
491
485 @register_sysinfo
492 @register_sysinfo
486 def storage_temp():
493 def storage_temp():
487 import tempfile
494 import tempfile
488 from rhodecode.lib.helpers import format_byte_size_binary
495 from rhodecode.lib.helpers import format_byte_size_binary
489
496
490 path = tempfile.gettempdir()
497 path = tempfile.gettempdir()
491 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
498 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
492 state = STATE_OK_DEFAULT
499 state = STATE_OK_DEFAULT
493
500
494 if not psutil:
501 if not psutil:
495 return SysInfoRes(value=value, state=state)
502 return SysInfoRes(value=value, state=state)
496
503
497 try:
504 try:
498 value.update(dict(psutil.disk_usage(path)._asdict()))
505 value.update(dict(psutil.disk_usage(path)._asdict()))
499 except Exception as e:
506 except Exception as e:
500 log.exception('Failed to fetch temp dir info')
507 log.exception('Failed to fetch temp dir info')
501 state = {'message': str(e), 'type': STATE_ERR}
508 state = {'message': str(e), 'type': STATE_ERR}
502
509
503 human_value = value.copy()
510 human_value = value.copy()
504 human_value['used'] = format_byte_size_binary(value['used'])
511 human_value['used'] = format_byte_size_binary(value['used'])
505 human_value['total'] = format_byte_size_binary(value['total'])
512 human_value['total'] = format_byte_size_binary(value['total'])
506 human_value['text'] = "{}/{}, {}% used".format(
513 human_value['text'] = "{}/{}, {}% used".format(
507 format_byte_size_binary(value['used']),
514 format_byte_size_binary(value['used']),
508 format_byte_size_binary(value['total']),
515 format_byte_size_binary(value['total']),
509 value['percent'])
516 value['percent'])
510
517
511 return SysInfoRes(value=value, state=state, human_value=human_value)
518 return SysInfoRes(value=value, state=state, human_value=human_value)
512
519
513
520
@register_sysinfo
def search_info():
    """Report the configured full-text search backend and index location."""
    import rhodecode
    from rhodecode.lib.index import searcher_from_config

    backend = rhodecode.CONFIG.get('search.module', '')
    location = rhodecode.CONFIG.get('search.location', '')

    # best-effort: search may be unconfigured or misconfigured
    try:
        searcher = searcher_from_config(rhodecode.CONFIG).__class__.__name__
    except Exception:
        searcher = None

    value = dict(
        backend=backend, searcher=searcher, location=location, text='')
    state = STATE_OK_DEFAULT

    human_value = value.copy()
    human_value['text'] = "backend:`{}`".format(backend)

    return SysInfoRes(value=value, state=state, human_value=human_value)
536
543
537
544
@register_sysinfo
def git_info():
    """Report the git version as discovered via the VCSServer."""
    from rhodecode.lib.vcs.backends import git

    state = STATE_OK_DEFAULT
    value = ''
    human_value = ''
    try:
        value = git.discover_git_version(raise_on_exc=True)
    except Exception as e:
        state = {'message': str(e), 'type': STATE_ERR}
    else:
        human_value = f'version reported from VCSServer: {value}'

    return SysInfoRes(value=value, state=state, human_value=human_value)
550
557
551
558
@register_sysinfo
def hg_info():
    """Report the Mercurial version as discovered via the VCSServer."""
    from rhodecode.lib.vcs.backends import hg

    state = STATE_OK_DEFAULT
    value = ''
    human_value = ''
    try:
        value = hg.discover_hg_version(raise_on_exc=True)
    except Exception as e:
        state = {'message': str(e), 'type': STATE_ERR}
    else:
        human_value = f'version reported from VCSServer: {value}'
    return SysInfoRes(value=value, state=state, human_value=human_value)
563
570
564
571
@register_sysinfo
def svn_info():
    """Report the Subversion version as discovered via the VCSServer."""
    from rhodecode.lib.vcs.backends import svn

    state = STATE_OK_DEFAULT
    value = ''
    human_value = ''
    try:
        value = svn.discover_svn_version(raise_on_exc=True)
    except Exception as e:
        state = {'message': str(e), 'type': STATE_ERR}
    else:
        human_value = f'version reported from VCSServer: {value}'
    return SysInfoRes(value=value, state=state, human_value=human_value)
576
583
577
584
@register_sysinfo
def vcs_backends():
    """Report the enabled VCS backends, in configured order.

    Fix: ``rhodecode.CONFIG.get('vcs.backends')`` returns ``None`` when the
    key is absent, and ``','.join(None)`` raised a TypeError; join over an
    empty list instead so the probe degrades gracefully.
    """
    import rhodecode
    value = rhodecode.CONFIG.get('vcs.backends')
    human_value = 'Enabled backends in order: {}'.format(','.join(value or []))
    return SysInfoRes(value=value, human_value=human_value)
584
591
585
592
@register_sysinfo
def vcs_server():
    """Report VCSServer connectivity, reported version and worker count."""
    import rhodecode
    from rhodecode.lib.vcs.backends import get_vcsserver_service_data

    server_url = rhodecode.CONFIG.get('vcs.server')
    enabled = rhodecode.CONFIG.get('vcs.server.enable')
    protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http'
    state = STATE_OK_DEFAULT
    version = None
    workers = 0

    try:
        data = get_vcsserver_service_data()
        if data and 'version' in data:
            version = data['version']
        if data and 'config' in data:
            # worker count comes from the remote service config
            workers = data['config'].get('workers', 'NOT AVAILABLE')
        connection = 'connected'
    except Exception as e:
        connection = 'failed'
        state = {'message': str(e), 'type': STATE_ERR}

    value = dict(
        url=server_url,
        enabled=enabled,
        protocol=protocol,
        connection=connection,
        version=version,
        text='',
    )

    human_value = value.copy()
    human_value['text'] = (
        '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'
        .format(url=server_url, ver=version, workers=workers, mode=protocol,
                conn=connection))

    return SysInfoRes(value=value, state=state, human_value=human_value)
628
635
629
636
@register_sysinfo
def vcs_server_config():
    """Expose the application config as reported by the VCSServer."""
    from rhodecode.lib.vcs.backends import get_vcsserver_service_data

    state = STATE_OK_DEFAULT
    value = {}
    try:
        value = get_vcsserver_service_data()['app_config']
    except Exception as e:
        state = {'message': str(e), 'type': STATE_ERR}

    human_value = value.copy()
    human_value['text'] = 'VCS Server config'

    return SysInfoRes(value=value, state=state, human_value=human_value)
646
653
647
654
@register_sysinfo
def rhodecode_app_info():
    """Report the running RhodeCode version, edition and library path."""
    import rhodecode

    edition = rhodecode.CONFIG.get('rhodecode.edition')
    value = dict(
        rhodecode_version=rhodecode.__version__,
        rhodecode_lib_path=os.path.abspath(rhodecode.__file__),
        text='',
    )
    human_value = value.copy()
    human_value['text'] = 'RhodeCode {edition}, version {ver}'.format(
        edition=edition, ver=value['rhodecode_version'])
    return SysInfoRes(value=value, human_value=human_value)
663
670
664
671
@register_sysinfo
def rhodecode_config():
    """Expose the parsed .ini configuration with all secrets obfuscated."""
    import rhodecode

    path = rhodecode.CONFIG.get('__file__')
    rhodecode_ini_safe = rhodecode.CONFIG.copy()
    cert_path = get_cert_path(path)

    try:
        config = configparser.ConfigParser()
        config.read(path)
        parsed_ini = config
        if parsed_ini.has_section('server:main'):
            parsed_ini = dict(parsed_ini.items('server:main'))
    except Exception:
        log.exception('Failed to read .ini file for display')
        parsed_ini = {}

    rhodecode_ini_safe['server:main'] = parsed_ini

    # plain keys are dropped; (section, key) tuples blank the whole section,
    # since the section may embed the secret value
    blacklist = [
        f'rhodecode_{LicenseModel.LICENSE_DB_KEY}',
        'routes.map',
        'sqlalchemy.db1.url',
        'channelstream.secret',
        'beaker.session.secret',
        'rhodecode.encrypted_values.secret',
        'rhodecode_auth_github_consumer_key',
        'rhodecode_auth_github_consumer_secret',
        'rhodecode_auth_google_consumer_key',
        'rhodecode_auth_google_consumer_secret',
        'rhodecode_auth_bitbucket_consumer_secret',
        'rhodecode_auth_bitbucket_consumer_key',
        'rhodecode_auth_twitter_consumer_secret',
        'rhodecode_auth_twitter_consumer_key',

        'rhodecode_auth_twitter_secret',
        'rhodecode_auth_github_secret',
        'rhodecode_auth_google_secret',
        'rhodecode_auth_bitbucket_secret',

        'appenlight.api_key',
        ('app_conf', 'sqlalchemy.db1.url')
    ]
    for entry in blacklist:
        if isinstance(entry, tuple):
            section, _key = entry
            if section in rhodecode_ini_safe:
                rhodecode_ini_safe[section] = '**OBFUSCATED**'
        else:
            rhodecode_ini_safe.pop(entry, None)

    # TODO: maybe put some CONFIG checks here ?
    return SysInfoRes(value={'config': rhodecode_ini_safe,
                             'path': path, 'cert_path': cert_path})
719
726
720
727
@register_sysinfo
def database_info():
    """Report DB backend type/version, URL and schema migration state."""
    import rhodecode
    from sqlalchemy.engine import url as engine_url
    from rhodecode.model import meta
    from rhodecode.model.meta import Session
    from rhodecode.model.db import DbMigrateVersion

    state = STATE_OK_DEFAULT

    db_migrate = DbMigrateVersion.query().filter(
        DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()

    db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])

    try:
        engine = meta.get_engine()
        version_info = engine.dialect._get_server_version_info(
            Session.connection(bind=engine))
        db_version = '.'.join(map(str, version_info))
    except Exception:
        log.exception('failed to fetch db version')
        db_version = 'UNKNOWN'

    db_info = dict(
        migrate_version=db_migrate.version,
        type=db_url_obj.get_backend_name(),
        version=db_version,
        url=repr(db_url_obj)
    )

    # flag schema drift between the code and the database
    current_version = db_migrate.version
    expected_version = rhodecode.__dbversion__
    if state['type'] == STATE_OK and current_version != expected_version:
        msg = 'Critical: database schema mismatch, ' \
              'expected version {}, got {}. ' \
              'Please run migrations on your database.'.format(
                  expected_version, current_version)
        state = {'message': msg, 'type': STATE_ERR}

    human_value = db_info.copy()
    human_value['url'] = "{} @ migration version: {}".format(
        db_info['url'], db_info['migrate_version'])
    human_value['version'] = "{} {}".format(db_info['type'], db_info['version'])
    return SysInfoRes(value=db_info, state=state, human_value=human_value)
765
772
766
773
@register_sysinfo
def server_info(environ):
    """Report this server's ip:port and configured instance id."""
    import rhodecode
    from rhodecode.lib.base import get_server_ip_addr, get_server_port

    ip_addr = get_server_ip_addr(environ, log_errors=False)
    port = get_server_port(environ)
    value = {
        'server_ip': '{}:{}'.format(ip_addr, port),
        'server_id': rhodecode.CONFIG.get('instance_id'),
    }
    return SysInfoRes(value=value)
780
787
781
788
@register_sysinfo
def usage_info():
    """Report user and repository counts, broken down by repository type."""
    from rhodecode.model.db import User, Repository, true

    def _count_repos(repo_type):
        # one count query per backend type
        return Repository.query().filter(
            Repository.repo_type == repo_type).count()

    value = {
        'users': User.query().count(),
        'users_active': User.query().filter(User.active == true()).count(),
        'repositories': Repository.query().count(),
        'repository_types': {
            'hg': _count_repos('hg'),
            'git': _count_repos('git'),
            'svn': _count_repos('svn'),
        },
    }
    return SysInfoRes(value=value)
799
806
800
807
def get_system_info(environ):
    """Run every sysinfo probe and return their results keyed by topic.

    The probe order is preserved (dict insertion order) so downstream
    renderers display sections in the same sequence as before.
    """
    environ = environ or {}
    # (key, prepared probe) pairs; server_info is the only probe that
    # needs the WSGI environ
    probes = [
        ('rhodecode_app', SysInfo(rhodecode_app_info)),
        ('rhodecode_config', SysInfo(rhodecode_config)),
        ('rhodecode_usage', SysInfo(usage_info)),
        ('python', SysInfo(python_info)),
        ('py_modules', SysInfo(py_modules)),

        ('platform', SysInfo(platform_type)),
        ('locale', SysInfo(locale_info)),
        ('server', SysInfo(server_info, environ=environ)),
        ('database', SysInfo(database_info)),
        ('ulimit', SysInfo(ulimit_info)),
        ('storage', SysInfo(storage)),
        ('storage_inodes', SysInfo(storage_inodes)),
        ('storage_archive', SysInfo(storage_archives)),
        ('storage_gist', SysInfo(storage_gist)),
        ('storage_temp', SysInfo(storage_temp)),

        ('search', SysInfo(search_info)),

        ('uptime', SysInfo(uptime)),
        ('load', SysInfo(machine_load)),
        ('cpu', SysInfo(cpu)),
        ('memory', SysInfo(memory)),

        ('vcs_backends', SysInfo(vcs_backends)),
        ('vcs_server', SysInfo(vcs_server)),

        ('vcs_server_config', SysInfo(vcs_server_config)),

        ('git', SysInfo(git_info)),
        ('hg', SysInfo(hg_info)),
        ('svn', SysInfo(svn_info)),
    ]
    return {key: probe() for key, probe in probes}
837
844
838
845
def load_system_info(key):
    """Run a single registered sysinfo probe, selected by key.

    Example::

        load_system_info('vcs_server')
        load_system_info('database')
    """
    helper = registered_helpers[key]
    return SysInfo(helper)()
General Comments 0
You need to be logged in to leave comments. Login now