fix(ssh): add an alternative SshWrapper and the changes needed to support it, plus a service API. Fixes: RCCE-6
ilin.s
r5314:585ee450 default
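For reference: the new service API is served from the same JSON-RPC endpoint as the regular API (`rhodecode.api.url = /_admin/api` in the configs below), but is authenticated with the `app.service_api.token` secret instead of a user auth token, as the tests in this changeset show. Below is a minimal sketch of how a caller such as the SSH wrapper might invoke one of the new methods; the host and token values are placeholders and the use of the `requests` library is an assumption, while the method name and arguments come straight from this diff.

    # Minimal sketch, not part of the changeset.
    import requests

    SERVICE_API_HOST = 'http://rhodecode.local:10020'     # app.service_api.host
    SERVICE_API_TOKEN = 'REPLACE_WITH_app.service_api.token'

    payload = {
        'id': 1,
        'auth_token': SERVICE_API_TOKEN,
        'method': 'service_get_repo_name_by_id',
        'args': {'repo_id': '1'},
    }
    response = requests.post(f'{SERVICE_API_HOST}/_admin/api', json=payload)
    # On success the result carries the repo name, e.g.
    # {'id': 1, 'result': {'repo_name': 'vcs_test_git'}, 'error': None}
    print(response.json())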
@@ -0,0 +1,55 @@
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
20 import pytest
21
22 from rhodecode.api.tests.utils import (
23 build_data, api_call)
24
25
26 @pytest.mark.usefixtures("app")
27 class TestServiceApi:
28
29 def test_service_api_with_wrong_secret(self):
30 id, payload = build_data("wrong_api_key", 'service_get_repo_name_by_id')
31 response = api_call(self.app, payload)
32
33 assert 'Invalid API KEY' == response.json['error']
34
35 def test_service_api_with_legit_secret(self):
36 id, payload = build_data(self.app.app.config.get_settings()['app.service_api.token'],
37 'service_get_repo_name_by_id', repo_id='1')
38 response = api_call(self.app, payload)
39 assert not response.json['error']
40
41 def test_service_api_not_a_part_of_public_api_suggestions(self):
42 id, payload = build_data("secret", 'some_random_guess_method')
43 response = api_call(self.app, payload)
44 assert 'service_' not in response.json['error']
45
46 def test_service_get_data_for_ssh_wrapper_output(self):
47 id, payload = build_data(
48 self.app.app.config.get_settings()['app.service_api.token'],
49 'service_get_data_for_ssh_wrapper',
50 user_id=1,
51 repo_name='vcs_test_git')
52 response = api_call(self.app, payload)
53
54 assert ['branch_permissions', 'repo_permissions', 'repos_path', 'user_id', 'username']\
55 == list(response.json['result'].keys())
@@ -0,0 +1,125 @@
1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import logging
20 import datetime
21 from collections import defaultdict
22
23 from sqlalchemy import Table
24 from rhodecode.api import jsonrpc_method, SERVICE_API_IDENTIFIER
25
26
27 log = logging.getLogger(__name__)
28
29
30 @jsonrpc_method()
31 def service_get_data_for_ssh_wrapper(request, apiuser, user_id, repo_name, key_id=None):
32 from rhodecode.model.db import User
33 from rhodecode.model.scm import ScmModel
34 from rhodecode.model.meta import raw_query_executor, Base
35
36 if key_id:
37 table = Table('user_ssh_keys', Base.metadata, autoload=False)
38 atime = datetime.datetime.utcnow()
39 stmt = (
40 table.update()
41 .where(table.c.ssh_key_id == key_id)
42 .values(accessed_on=atime)
43 )
44
45 res_count = None
46 with raw_query_executor() as session:
47 result = session.execute(stmt)
48 if result.rowcount:
49 res_count = result.rowcount
50
51 if res_count:
52 log.debug(f'Updated access time for key id:{key_id}')
53 db_user = User.get(user_id)
54 if not db_user:
55 return None
56 auth_user = db_user.AuthUser()
57
58 return {
59 'user_id': db_user.user_id,
60 'username': db_user.username,
61 'repo_permissions': auth_user.permissions['repositories'],
62 "branch_permissions": auth_user.get_branch_permissions(repo_name),
63 "repos_path": ScmModel().repos_path
64 }
65
66
67 @jsonrpc_method()
68 def service_get_repo_name_by_id(request, apiuser, repo_id):
69 from rhodecode.model.repo import RepoModel
70 by_id_match = RepoModel().get_repo_by_id(repo_id)
71 if by_id_match:
72 repo_name = by_id_match.repo_name
73 return {
74 'repo_name': repo_name
75 }
76 return None
77
78
79 @jsonrpc_method()
80 def service_mark_for_invalidation(request, apiuser, repo_name):
81 from rhodecode.model.scm import ScmModel
82 ScmModel().mark_for_invalidation(repo_name)
83 return {'msg': "Applied"}
84
85
86 @jsonrpc_method()
87 def service_config_to_hgrc(request, apiuser, cli_flags, repo_name):
88 from rhodecode.model.db import RhodeCodeUi
89 from rhodecode.model.settings import VcsSettingsModel
90
91 ui_sections = defaultdict(list)
92 ui = VcsSettingsModel(repo=repo_name).get_ui_settings(section=None, key=None)
93
94 default_hooks = [
95 ('pretxnchangegroup.ssh_auth', 'python:vcsserver.hooks.pre_push_ssh_auth'),
96 ('pretxnchangegroup.ssh', 'python:vcsserver.hooks.pre_push_ssh'),
97 ('changegroup.ssh', 'python:vcsserver.hooks.post_push_ssh'),
98
99 ('preoutgoing.ssh', 'python:vcsserver.hooks.pre_pull_ssh'),
100 ('outgoing.ssh', 'python:vcsserver.hooks.post_pull_ssh'),
101 ]
102
103 for k, v in default_hooks:
104 ui_sections['hooks'].append((k, v))
105
106 for entry in ui:
107 if not entry.active:
108 continue
109 sec = entry.section
110 key = entry.key
111
112 if sec in cli_flags:
113 # we want only custom hooks, so we skip builtins
114 if sec == 'hooks' and key in RhodeCodeUi.HOOKS_BUILTIN:
115 continue
116
117 ui_sections[sec].append([key, entry.value])
118
119 flags = []
120 for _sec, key_val in ui_sections.items():
121 flags.append(' ')
122 flags.append(f'[{_sec}]')
123 for key, val in key_val:
124 flags.append(f'{key}= {val}')
125 return {'flags': flags}
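The `flags` list returned by `service_config_to_hgrc` above is already shaped as hgrc-style content: a section header like `[hooks]` followed by `key= value` lines. Here is a hedged sketch of how a consumer (for example, the new SSH wrapper) might turn that result into config text; the consumer and the sample result value are illustrative, not part of this changeset.

    # Illustrative only: a possible consumer of the service_config_to_hgrc result.
    result = {'flags': [' ', '[hooks]',
                        'pretxnchangegroup.ssh_auth= python:vcsserver.hooks.pre_push_ssh_auth']}
    hgrc_text = '\n'.join(result['flags'])
    # hgrc_text now holds one entry per line:
    #
    # [hooks]
    # pretxnchangegroup.ssh_auth= python:vcsserver.hooks.pre_push_ssh_auth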
@@ -0,0 +1,72 @@
1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import os
20 import sys
21 import time
22 import logging
23
24 import click
25
26 from pyramid.paster import setup_logging
27
28 from rhodecode.lib.statsd_client import StatsdClient
29 from .backends import SshWrapperStandalone
30 from .ssh_wrapper_v1 import setup_custom_logging
31
32 log = logging.getLogger(__name__)
33
34
35 @click.command()
36 @click.argument('ini_path', type=click.Path(exists=True))
37 @click.option(
38 '--mode', '-m', required=False, default='auto',
39 type=click.Choice(['auto', 'vcs', 'git', 'hg', 'svn', 'test']),
40 help='mode of operation')
41 @click.option('--user', help='Username for which the command will be executed')
42 @click.option('--user-id', help='User ID for which the command will be executed')
43 @click.option('--key-id', help='ID of the key from the database')
44 @click.option('--shell', '-s', is_flag=True, help='Allow Shell')
45 @click.option('--debug', is_flag=True, help='Enable detailed output logging')
46 def main(ini_path, mode, user, user_id, key_id, shell, debug):
47 setup_custom_logging(ini_path, debug)
48
49 command = os.environ.get('SSH_ORIGINAL_COMMAND', '')
50 if not command and mode not in ['test']:
51 raise ValueError(
52 'Unable to fetch SSH_ORIGINAL_COMMAND from environment. '
53 'Please make sure this is set and available during execution '
54 'of this script.')
55 connection_info = os.environ.get('SSH_CONNECTION', '')
56 time_start = time.time()
57 env = {'RC_CMD_SSH_WRAPPER': '1'}
58 statsd = StatsdClient.statsd
59 try:
60 ssh_wrapper = SshWrapperStandalone(
61 command, connection_info, mode,
62 user, user_id, key_id, shell, ini_path, env)
63 except Exception:
64 log.exception('Failed to execute SshWrapper')
65 sys.exit(-5)
66 return_code = ssh_wrapper.wrap()
67 operation_took = time.time() - time_start
68 if statsd:
69 operation_took_ms = round(1000.0 * operation_took)
70 statsd.timing("rhodecode_ssh_wrapper_timing.histogram", operation_took_ms,
71 use_decimals=False)
72 sys.exit(return_code)
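A hedged sketch of exercising the click entrypoint above in `--mode test`, using click's own test runner. The import path is hypothetical (the module location is not fully visible in this diff), and the ini path must point to an existing RhodeCode .ini file because the argument is declared with `click.Path(exists=True)`.

    from click.testing import CliRunner

    # Hypothetical import path; adjust to wherever the `main` command above lives.
    from rhodecode.apps.ssh_support.lib.ssh_wrapper_v2 import main

    runner = CliRunner()
    # `--mode test` skips the SSH_ORIGINAL_COMMAND requirement checked above.
    result = runner.invoke(main, [
        '--mode', 'test', '--user-id', '1', '--debug',
        '/path/to/rhodecode.ini',   # must exist on disk
    ])
    print(result.exit_code, result.output)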
@@ -1,865 +1,871 @@
1 1
2 2 ; #########################################
3 3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 4 ; #########################################
5 5
6 6 [DEFAULT]
7 7 ; Debug flag sets all loggers to debug, and enables request tracking
8 8 debug = true
9 9
10 10 ; ########################################################################
11 11 ; EMAIL CONFIGURATION
12 12 ; These settings will be used by the RhodeCode mailing system
13 13 ; ########################################################################
14 14
15 15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 16 #email_prefix = [RhodeCode]
17 17
18 18 ; email FROM address all mails will be sent
19 19 #app_email_from = rhodecode-noreply@localhost
20 20
21 21 #smtp_server = mail.server.com
22 22 #smtp_username =
23 23 #smtp_password =
24 24 #smtp_port =
25 25 #smtp_use_tls = false
26 26 #smtp_use_ssl = true
27 27
28 28 [server:main]
29 29 ; COMMON HOST/IP CONFIG. This applies mostly to a development setup;
30 30 ; host and port for gunicorn are controlled by gunicorn_conf.py
31 31 host = 127.0.0.1
32 32 port = 10020
33 33
34 34 ; ##################################################
35 35 ; WAITRESS WSGI SERVER - Recommended for Development
36 36 ; ##################################################
37 37
38 38 ; use server type
39 39 use = egg:waitress#main
40 40
41 41 ; number of worker threads
42 42 threads = 5
43 43
44 44 ; MAX BODY SIZE 100GB
45 45 max_request_body_size = 107374182400
46 46
47 47 ; Use poll instead of select; fixes file descriptor limit problems.
48 48 ; May not work on old windows systems.
49 49 asyncore_use_poll = true
50 50
51 51
52 52 ; ###########################
53 53 ; GUNICORN APPLICATION SERVER
54 54 ; ###########################
55 55
56 56 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
57 57
58 58 ; Module to use, this setting shouldn't be changed
59 59 #use = egg:gunicorn#main
60 60
61 61 ; Prefix middleware for RhodeCode.
62 62 ; recommended when using a proxy setup.
63 63 ; allows setting RhodeCode under a prefix on the server.
64 64 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
65 65 ; And set your prefix like: `prefix = /custom_prefix`
66 66 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
67 67 ; to make your cookies only work on prefix url
68 68 [filter:proxy-prefix]
69 69 use = egg:PasteDeploy#prefix
70 70 prefix = /
71 71
72 72 [app:main]
73 73 ; The %(here)s variable will be replaced with the absolute path of parent directory
74 74 ; of this file
75 75 ; Each option in the app:main section can be overridden by an environment variable
76 76 ;
77 77 ;To override an option:
78 78 ;
79 79 ;RC_<KeyName>
80 80 ;Everything should be uppercase, . and - should be replaced by _.
81 81 ;For example, if you have these configuration settings:
82 82 ;rc_cache.repo_object.backend = foo
83 83 ;can be overridden by
84 84 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
85 85
86 86 use = egg:rhodecode-enterprise-ce
87 87
88 88 ; enable proxy prefix middleware, defined above
89 89 #filter-with = proxy-prefix
90 90
91 91 ; #############
92 92 ; DEBUG OPTIONS
93 93 ; #############
94 94
95 95 pyramid.reload_templates = true
96 96
97 97 # During development we want to have the debug toolbar enabled
98 98 pyramid.includes =
99 99 pyramid_debugtoolbar
100 100
101 101 debugtoolbar.hosts = 0.0.0.0/0
102 102 debugtoolbar.exclude_prefixes =
103 103 /css
104 104 /fonts
105 105 /images
106 106 /js
107 107
108 108 ## RHODECODE PLUGINS ##
109 109 rhodecode.includes =
110 110 rhodecode.api
111 111
112 112
113 113 # api prefix url
114 114 rhodecode.api.url = /_admin/api
115 115
116 116 ; enable debug style page
117 117 debug_style = true
118 118
119 119 ; #################
120 120 ; END DEBUG OPTIONS
121 121 ; #################
122 122
123 123 ; encryption key used to encrypt social plugin tokens,
124 124 ; remote_urls with credentials etc, if not set it defaults to
125 125 ; `beaker.session.secret`
126 126 #rhodecode.encrypted_values.secret =
127 127
128 128 ; decryption strict mode (enabled by default). It controls if decryption raises
129 129 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
130 130 #rhodecode.encrypted_values.strict = false
131 131
132 132 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
133 133 ; fernet is safer, and we strongly recommend switching to it.
134 134 ; Due to backward compatibility aes is used as default.
135 135 #rhodecode.encrypted_values.algorithm = fernet
136 136
137 137 ; Return gzipped responses from RhodeCode (static files/application)
138 138 gzip_responses = false
139 139
140 140 ; Auto-generate javascript routes file on startup
141 141 generate_js_files = false
142 142
143 143 ; System global default language.
144 144 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
145 145 lang = en
146 146
147 147 ; Perform a full repository scan and import on each server start.
148 148 ; Setting this to true could lead to a very long startup time.
149 149 startup.import_repos = false
150 150
151 151 ; URL at which the application is running. This is used for Bootstrapping
152 152 ; requests in context when no web request is available. Used in ishell, or
153 153 ; SSH calls. Set this for events to receive proper url for SSH calls.
154 154 app.base_url = http://rhodecode.local
155 155
156 ; Host at which the Service API is running.
157 app.service_api.host = http://rhodecode.local:10020
158
159 ; Secret for Service API authentication.
160 app.service_api.token =
161
156 162 ; Unique application ID. Should be a random unique string for security.
157 163 app_instance_uuid = rc-production
158 164
159 165 ; Cut off limit for large diffs (size in bytes). If overall diff size on
160 166 ; commit, or pull request exceeds this limit this diff will be displayed
161 167 ; partially. E.g 512000 == 512Kb
162 168 cut_off_limit_diff = 512000
163 169
164 170 ; Cut off limit for large files inside diffs (size in bytes). Each individual
165 171 ; file inside diff which exceeds this limit will be displayed partially.
166 172 ; E.g 128000 == 128Kb
167 173 cut_off_limit_file = 128000
168 174
169 175 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
170 176 vcs_full_cache = true
171 177
172 178 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
173 179 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
174 180 force_https = false
175 181
176 182 ; use Strict-Transport-Security headers
177 183 use_htsts = false
178 184
179 185 ; Set to true if your repos are exposed using the dumb protocol
180 186 git_update_server_info = false
181 187
182 188 ; RSS/ATOM feed options
183 189 rss_cut_off_limit = 256000
184 190 rss_items_per_page = 10
185 191 rss_include_diff = false
186 192
187 193 ; gist URL alias, used to create nicer urls for gist. This should be an
188 194 ; url that does rewrites to _admin/gists/{gistid}.
189 195 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
190 196 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
191 197 gist_alias_url =
192 198
193 199 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
194 200 ; used for access.
195 201 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
196 202 ; came from the logged-in user who owns this authentication token.
197 203 ; Additionally, @TOKEN syntax can be used to bind the view to a specific
198 204 ; authentication token. Such a view would only be accessible when used together
199 205 ; with this authentication token
200 206 ; list of all views can be found under `/_admin/permissions/auth_token_access`
201 207 ; The list should be "," separated and on a single line.
202 208 ; Most common views to enable:
203 209
204 210 # RepoCommitsView:repo_commit_download
205 211 # RepoCommitsView:repo_commit_patch
206 212 # RepoCommitsView:repo_commit_raw
207 213 # RepoCommitsView:repo_commit_raw@TOKEN
208 214 # RepoFilesView:repo_files_diff
209 215 # RepoFilesView:repo_archivefile
210 216 # RepoFilesView:repo_file_raw
211 217 # GistView:*
212 218 api_access_controllers_whitelist =
213 219
214 220 ; Default encoding used to convert from and to unicode
215 221 ; can be also a comma separated list of encoding in case of mixed encodings
216 222 default_encoding = UTF-8
217 223
218 224 ; instance-id prefix
219 225 ; a prefix key for this instance used for cache invalidation when running
220 226 ; multiple instances of RhodeCode, make sure it's globally unique for
221 227 ; all running RhodeCode instances. Leave empty if you don't use it
222 228 instance_id =
223 229
224 230 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
225 231 ; of an authentication plugin even if it is disabled by its settings.
226 232 ; This could be useful if you are unable to log in to the system due to broken
227 233 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
228 234 ; module to log in again and fix the settings.
229 235 ; Available builtin plugin IDs (hash is part of the ID):
230 236 ; egg:rhodecode-enterprise-ce#rhodecode
231 237 ; egg:rhodecode-enterprise-ce#pam
232 238 ; egg:rhodecode-enterprise-ce#ldap
233 239 ; egg:rhodecode-enterprise-ce#jasig_cas
234 240 ; egg:rhodecode-enterprise-ce#headers
235 241 ; egg:rhodecode-enterprise-ce#crowd
236 242
237 243 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
238 244
239 245 ; Flag to control loading of legacy plugins in py:/path format
240 246 auth_plugin.import_legacy_plugins = true
241 247
242 248 ; alternative return HTTP header for failed authentication. Default HTTP
243 249 ; response is 401 HTTPUnauthorized. Currently HG clients have trouble
244 250 ; handling that, causing a series of failed authentication calls.
245 251 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
246 252 ; This will be served instead of default 401 on bad authentication
247 253 auth_ret_code =
248 254
249 255 ; use special detection method when serving auth_ret_code, instead of serving
250 256 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
251 257 ; and then serve auth_ret_code to clients
252 258 auth_ret_code_detection = false
253 259
254 260 ; locking return code. When repository is locked return this HTTP code. 2XX
255 261 ; codes don't break the transactions while 4XX codes do
256 262 lock_ret_code = 423
257 263
258 264 ; allows changing the repository location on the settings page
259 265 allow_repo_location_change = true
260 266
261 267 ; allows setting up custom hooks on the settings page
262 268 allow_custom_hooks_settings = true
263 269
264 270 ; Generated license token required for EE edition license.
265 271 ; New generated token value can be found in Admin > settings > license page.
266 272 license_token =
267 273
268 274 ; This flag hides sensitive information on the license page such as token, and license data
269 275 license.hide_license_info = false
270 276
271 277 ; supervisor connection uri, for managing supervisor and logs.
272 278 supervisor.uri =
273 279
274 280 ; supervisord group name/id we only want this RC instance to handle
275 281 supervisor.group_id = dev
276 282
277 283 ; Display extended labs settings
278 284 labs_settings_active = true
279 285
280 286 ; Custom exception store path, defaults to TMPDIR
281 287 ; This is used to store exceptions from RhodeCode in a shared directory
282 288 #exception_tracker.store_path =
283 289
284 290 ; Send email with exception details when it happens
285 291 #exception_tracker.send_email = false
286 292
287 293 ; Comma separated list of recipients for exception emails,
288 294 ; e.g admin@rhodecode.com,devops@rhodecode.com
289 295 ; Can be left empty, then emails will be sent to ALL super-admins
290 296 #exception_tracker.send_email_recipients =
291 297
292 298 ; optional prefix to Add to email Subject
293 299 #exception_tracker.email_prefix = [RHODECODE ERROR]
294 300
295 301 ; File store configuration. This is used to store and serve uploaded files
296 302 file_store.enabled = true
297 303
298 304 ; Storage backend, available options are: local
299 305 file_store.backend = local
300 306
301 307 ; path to store the uploaded binaries
302 308 file_store.storage_path = %(here)s/data/file_store
303 309
304 310 ; Uncomment and set this path to control settings for archive download cache.
305 311 ; Generated repo archives will be cached at this location
306 312 ; and served from the cache during subsequent requests for the same archive of
307 313 ; the repository. It is important that this path is shared across filesystems, between
308 314 ; RhodeCode and vcsserver
309 315
310 316 ; Default is $cache_dir/archive_cache if not set
311 317 archive_cache.store_dir = %(here)s/data/archive_cache
312 318
313 319 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
314 320 archive_cache.cache_size_gb = 10
315 321
316 322 ; By default the cache uses a sharding technique; this specifies how many shards there are
317 323 archive_cache.cache_shards = 10
318 324
319 325 ; #############
320 326 ; CELERY CONFIG
321 327 ; #############
322 328
323 329 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
324 330
325 331 use_celery = false
326 332
327 333 ; path to store schedule database
328 334 #celerybeat-schedule.path =
329 335
330 336 ; connection url to the message broker (default redis)
331 337 celery.broker_url = redis://redis:6379/8
332 338
333 339 ; results backend to get results for (default redis)
334 340 celery.result_backend = redis://redis:6379/8
335 341
336 342 ; rabbitmq example
337 343 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
338 344
339 345 ; maximum tasks to execute before worker restart
340 346 celery.max_tasks_per_child = 20
341 347
342 348 ; tasks will never be sent to the queue, but executed locally instead.
343 349 celery.task_always_eager = false
344 350
345 351 ; #############
346 352 ; DOGPILE CACHE
347 353 ; #############
348 354
349 355 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
350 356 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
351 357 cache_dir = %(here)s/data
352 358
353 359 ; *********************************************
354 360 ; `sql_cache_short` cache for heavy SQL queries
355 361 ; Only supported backend is `memory_lru`
356 362 ; *********************************************
357 363 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
358 364 rc_cache.sql_cache_short.expiration_time = 30
359 365
360 366
361 367 ; *****************************************************
362 368 ; `cache_repo_longterm` cache for repo object instances
363 369 ; Only supported backend is `memory_lru`
364 370 ; *****************************************************
365 371 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
366 372 ; by default we use 30 Days, cache is still invalidated on push
367 373 rc_cache.cache_repo_longterm.expiration_time = 2592000
368 374 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
369 375 rc_cache.cache_repo_longterm.max_size = 10000
370 376
371 377
372 378 ; *********************************************
373 379 ; `cache_general` cache for general purpose use
374 380 ; for simplicity use rc.file_namespace backend,
375 381 ; for performance and scale use rc.redis
376 382 ; *********************************************
377 383 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
378 384 rc_cache.cache_general.expiration_time = 43200
379 385 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
380 386 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
381 387
382 388 ; alternative `cache_general` redis backend with distributed lock
383 389 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
384 390 #rc_cache.cache_general.expiration_time = 300
385 391
386 392 ; redis_expiration_time needs to be greater than expiration_time
387 393 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
388 394
389 395 #rc_cache.cache_general.arguments.host = localhost
390 396 #rc_cache.cache_general.arguments.port = 6379
391 397 #rc_cache.cache_general.arguments.db = 0
392 398 #rc_cache.cache_general.arguments.socket_timeout = 30
393 399 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
394 400 #rc_cache.cache_general.arguments.distributed_lock = true
395 401
396 402 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
397 403 #rc_cache.cache_general.arguments.lock_auto_renewal = true
398 404
399 405 ; *************************************************
400 406 ; `cache_perms` cache for permission tree, auth TTL
401 407 ; for simplicity use rc.file_namespace backend,
402 408 ; for performance and scale use rc.redis
403 409 ; *************************************************
404 410 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
405 411 rc_cache.cache_perms.expiration_time = 3600
406 412 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
407 413 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
408 414
409 415 ; alternative `cache_perms` redis backend with distributed lock
410 416 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
411 417 #rc_cache.cache_perms.expiration_time = 300
412 418
413 419 ; redis_expiration_time needs to be greater than expiration_time
414 420 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
415 421
416 422 #rc_cache.cache_perms.arguments.host = localhost
417 423 #rc_cache.cache_perms.arguments.port = 6379
418 424 #rc_cache.cache_perms.arguments.db = 0
419 425 #rc_cache.cache_perms.arguments.socket_timeout = 30
420 426 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
421 427 #rc_cache.cache_perms.arguments.distributed_lock = true
422 428
423 429 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
424 430 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
425 431
426 432 ; ***************************************************
427 433 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
428 434 ; for simplicity use rc.file_namespace backend,
429 435 ; for performance and scale use rc.redis
430 436 ; ***************************************************
431 437 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
432 438 rc_cache.cache_repo.expiration_time = 2592000
433 439 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
434 440 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
435 441
436 442 ; alternative `cache_repo` redis backend with distributed lock
437 443 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
438 444 #rc_cache.cache_repo.expiration_time = 2592000
439 445
440 446 ; redis_expiration_time needs to be greater than expiration_time
441 447 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
442 448
443 449 #rc_cache.cache_repo.arguments.host = localhost
444 450 #rc_cache.cache_repo.arguments.port = 6379
445 451 #rc_cache.cache_repo.arguments.db = 1
446 452 #rc_cache.cache_repo.arguments.socket_timeout = 30
447 453 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
448 454 #rc_cache.cache_repo.arguments.distributed_lock = true
449 455
450 456 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
451 457 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
452 458
453 459 ; ##############
454 460 ; BEAKER SESSION
455 461 ; ##############
456 462
457 463 ; beaker.session.type is the type of storage used for logged-in users' sessions. Currently allowed
458 464 ; types are file, ext:redis, ext:database, ext:memcached
459 465 ; Fastest ones are ext:redis and ext:database; DO NOT use the memory type for sessions
460 466 beaker.session.type = file
461 467 beaker.session.data_dir = %(here)s/data/sessions
462 468
463 469 ; Redis based sessions
464 470 #beaker.session.type = ext:redis
465 471 #beaker.session.url = redis://127.0.0.1:6379/2
466 472
467 473 ; DB based session, fast, and allows easy management over logged in users
468 474 #beaker.session.type = ext:database
469 475 #beaker.session.table_name = db_session
470 476 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
471 477 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
472 478 #beaker.session.sa.pool_recycle = 3600
473 479 #beaker.session.sa.echo = false
474 480
475 481 beaker.session.key = rhodecode
476 482 beaker.session.secret = develop-rc-uytcxaz
477 483 beaker.session.lock_dir = %(here)s/data/sessions/lock
478 484
479 485 ; Secure encrypted cookie. Requires AES and AES python libraries
480 486 ; you must disable beaker.session.secret to use this
481 487 #beaker.session.encrypt_key = key_for_encryption
482 488 #beaker.session.validate_key = validation_key
483 489
484 490 ; Sets session as invalid (also logging out the user) if it has not been
485 491 ; accessed for a given amount of time, in seconds
486 492 beaker.session.timeout = 2592000
487 493 beaker.session.httponly = true
488 494
489 495 ; Path to use for the cookie. Set to prefix if you use prefix middleware
490 496 #beaker.session.cookie_path = /custom_prefix
491 497
492 498 ; Set https secure cookie
493 499 beaker.session.secure = false
494 500
495 501 ; default cookie expiration time in seconds, set to `true` to expire
496 502 ; at browser close
497 503 #beaker.session.cookie_expires = 3600
498 504
499 505 ; #############################
500 506 ; SEARCH INDEXING CONFIGURATION
501 507 ; #############################
502 508
503 509 ; Full text search indexer is available in rhodecode-tools under
504 510 ; `rhodecode-tools index` command
505 511
506 512 ; WHOOSH Backend, doesn't require additional services to run
507 513 ; it works well with a few dozen repos
508 514 search.module = rhodecode.lib.index.whoosh
509 515 search.location = %(here)s/data/index
510 516
511 517 ; ####################
512 518 ; CHANNELSTREAM CONFIG
513 519 ; ####################
514 520
515 521 ; channelstream enables persistent connections and live notifications
516 522 ; in the system. It's also used by the chat system
517 523
518 524 channelstream.enabled = false
519 525
520 526 ; server address for channelstream server on the backend
521 527 channelstream.server = 127.0.0.1:9800
522 528
523 529 ; location of the channelstream server from outside world
524 530 ; use ws:// for http or wss:// for https. This address needs to be handled
525 531 ; by external HTTP server such as Nginx or Apache
526 532 ; see Nginx/Apache configuration examples in our docs
527 533 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
528 534 channelstream.secret = secret
529 535 channelstream.history.location = %(here)s/channelstream_history
530 536
531 537 ; Internal application path that Javascript uses to connect into.
532 538 ; If you use proxy-prefix the prefix should be added before /_channelstream
533 539 channelstream.proxy_path = /_channelstream
534 540
535 541
536 542 ; ##############################
537 543 ; MAIN RHODECODE DATABASE CONFIG
538 544 ; ##############################
539 545
540 546 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
541 547 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
542 548 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
543 549 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
544 550 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
545 551
546 552 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
547 553
548 554 ; see sqlalchemy docs for other advanced settings
549 555 ; print the sql statements to output
550 556 sqlalchemy.db1.echo = false
551 557
552 558 ; recycle the connections after this amount of seconds
553 559 sqlalchemy.db1.pool_recycle = 3600
554 560
555 561 ; the number of connections to keep open inside the connection pool.
556 562 ; 0 indicates no limit
557 563 ; the general calculus with gevent is:
558 564 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
559 565 ; then increase pool size + max overflow so that they add up to 500.
560 566 #sqlalchemy.db1.pool_size = 5
561 567
562 568 ; The number of connections to allow in connection pool "overflow", that is
563 569 ; connections that can be opened above and beyond the pool_size setting,
564 570 ; which defaults to five.
565 571 #sqlalchemy.db1.max_overflow = 10
566 572
567 573 ; Connection check ping, used to detect broken database connections
568 574 ; could be enabled to better handle cases if MySQL has gone away errors
569 575 #sqlalchemy.db1.ping_connection = true
570 576
571 577 ; ##########
572 578 ; VCS CONFIG
573 579 ; ##########
574 580 vcs.server.enable = true
575 581 vcs.server = localhost:9900
576 582
577 583 ; Web server connectivity protocol, responsible for web based VCS operations
578 584 ; Available protocols are:
579 585 ; `http` - use http-rpc backend (default)
580 586 vcs.server.protocol = http
581 587
582 588 ; Push/Pull operations protocol, available options are:
583 589 ; `http` - use http-rpc backend (default)
584 590 vcs.scm_app_implementation = http
585 591
586 592 ; Push/Pull operations hooks protocol, available options are:
587 593 ; `http` - use http-rpc backend (default)
588 594 vcs.hooks.protocol = http
589 595
590 596 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
591 597 ; accessible via network.
592 598 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
593 599 vcs.hooks.host = *
594 600
595 601 ; Start VCSServer with this instance as a subprocess, useful for development
596 602 vcs.start_server = false
597 603
598 604 ; List of enabled VCS backends, available options are:
599 605 ; `hg` - mercurial
600 606 ; `git` - git
601 607 ; `svn` - subversion
602 608 vcs.backends = hg, git, svn
603 609
604 610 ; Wait this number of seconds before killing connection to the vcsserver
605 611 vcs.connection_timeout = 3600
606 612
607 613 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
608 614 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
609 615 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
610 616 #vcs.svn.compatible_version = 1.8
611 617
612 618 ; Cache flag to cache vcsserver remote calls locally
613 619 ; It uses cache_region `cache_repo`
614 620 vcs.methods.cache = true
615 621
616 622 ; ####################################################
617 623 ; Subversion proxy support (mod_dav_svn)
618 624 ; Maps RhodeCode repo groups into SVN paths for Apache
619 625 ; ####################################################
620 626
621 627 ; Enable or disable the config file generation.
622 628 svn.proxy.generate_config = false
623 629
624 630 ; Generate config file with `SVNListParentPath` set to `On`.
625 631 svn.proxy.list_parent_path = true
626 632
627 633 ; Set location and file name of generated config file.
628 634 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
629 635
630 636 ; alternative mod_dav config template. This needs to be a valid mako template
631 637 ; Example template can be found in the source code:
632 638 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
633 639 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
634 640
635 641 ; Used as a prefix to the `Location` block in the generated config file.
636 642 ; In most cases it should be set to `/`.
637 643 svn.proxy.location_root = /
638 644
639 645 ; Command to reload the mod dav svn configuration on change.
640 646 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
641 647 ; Make sure user who runs RhodeCode process is allowed to reload Apache
642 648 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
643 649
644 650 ; If the timeout expires before the reload command finishes, the command will
645 651 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
646 652 #svn.proxy.reload_timeout = 10
647 653
648 654 ; ####################
649 655 ; SSH Support Settings
650 656 ; ####################
651 657
652 658 ; Defines if a custom authorized_keys file should be created and written on
653 659 ; any change of user SSH keys. Setting this to false also disables the possibility
654 660 ; of users adding SSH keys from the web interface. Super admins can still
655 661 ; manage SSH Keys.
656 662 ssh.generate_authorized_keyfile = false
657 663
658 664 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
659 665 # ssh.authorized_keys_ssh_opts =
660 666
661 667 ; Path to the authorized_keys file where the generated entries are placed.
662 668 ; It is possible to have multiple key files specified in `sshd_config` e.g.
663 669 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
664 670 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
665 671
666 672 ; Command to execute the SSH wrapper. The binary is available in the
667 673 ; RhodeCode installation directory.
668 674 ; e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
669 675 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
670 676
671 677 ; Allow shell when executing the ssh-wrapper command
672 678 ssh.wrapper_cmd_allow_shell = false
673 679
674 680 ; Enables logging, and detailed output sent back to the client during SSH
675 681 ; operations. Useful for debugging, shouldn't be used in production.
676 682 ssh.enable_debug_logging = true
677 683
678 684 ; Paths to binary executables; by default these are just the binary names, but we can
679 685 ; override them if we want to use custom ones
680 686 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
681 687 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
682 688 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
683 689
684 690 ; Enables SSH key generator web interface. Disabling this still allows users
685 691 ; to add their own keys.
686 692 ssh.enable_ui_key_generator = true
687 693
688 694
689 695 ; #################
690 696 ; APPENLIGHT CONFIG
691 697 ; #################
692 698
693 699 ; Appenlight is tailored to work with RhodeCode, see
694 700 ; http://appenlight.rhodecode.com for details how to obtain an account
695 701
696 702 ; Appenlight integration enabled
697 703 #appenlight = false
698 704
699 705 #appenlight.server_url = https://api.appenlight.com
700 706 #appenlight.api_key = YOUR_API_KEY
701 707 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
702 708
703 709 ; used for JS client
704 710 #appenlight.api_public_key = YOUR_API_PUBLIC_KEY
705 711
706 712 ; TWEAK AMOUNT OF INFO SENT HERE
707 713
708 714 ; enables 404 error logging (default False)
709 715 #appenlight.report_404 = false
710 716
711 717 ; time in seconds after which a request is considered slow (default 1)
712 718 #appenlight.slow_request_time = 1
713 719
714 720 ; record slow requests in application
715 721 ; (needs to be enabled for slow datastore recording and time tracking)
716 722 #appenlight.slow_requests = true
717 723
718 724 ; enable hooking to application loggers
719 725 #appenlight.logging = true
720 726
721 727 ; minimum log level for log capture
722 728 #appenlight.logging.level = WARNING
723 729
724 730 ; send logs only from erroneous/slow requests
725 731 ; (saves API quota for intensive logging)
726 732 #appenlight.logging_on_error = false
727 733
728 734 ; list of additional keywords that should be grabbed from environ object
729 735 ; can be string with comma separated list of words in lowercase
730 736 ; (by default client will always send following info:
731 737 ; 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
732 738 ; start with HTTP*); this list can be extended with additional keywords here
733 739 #appenlight.environ_keys_whitelist =
734 740
735 741 ; list of keywords that should be blanked from request object
736 742 ; can be string with comma separated list of words in lowercase
737 743 ; (by default client will always blank keys that contain following words
738 744 ; 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
739 745 ; this list can be extended with additional keywords set here
740 746 #appenlight.request_keys_blacklist =
741 747
742 748 ; list of namespaces that should be ignored when gathering log entries
743 749 ; can be string with comma separated list of namespaces
744 750 ; (by default the client ignores own entries: appenlight_client.client)
745 751 #appenlight.log_namespace_blacklist =
746 752
747 753 ; Statsd client config, this is used to send metrics to statsd
748 754 ; We recommend exporting the statsd metrics and scraping them using Prometheus
749 755 #statsd.enabled = false
750 756 #statsd.statsd_host = 0.0.0.0
751 757 #statsd.statsd_port = 8125
752 758 #statsd.statsd_prefix =
753 759 #statsd.statsd_ipv6 = false
754 760
755 761 ; configure logging automatically at server startup; set to false
756 762 ; to use the custom logging config below.
757 763 ; RC_LOGGING_FORMATTER
758 764 ; RC_LOGGING_LEVEL
759 765 ; env variables can control the settings for logging in case of autoconfigure
760 766
761 767 #logging.autoconfigure = true
762 768
763 769 ; specify your own custom logging config file to configure logging
764 770 #logging.logging_conf_file = /path/to/custom_logging.ini
765 771
766 772 ; Dummy marker to add new entries after.
767 773 ; Add any custom entries below. Please don't remove this marker.
768 774 custom.conf = 1
769 775
770 776
771 777 ; #####################
772 778 ; LOGGING CONFIGURATION
773 779 ; #####################
774 780
775 781 [loggers]
776 782 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
777 783
778 784 [handlers]
779 785 keys = console, console_sql
780 786
781 787 [formatters]
782 788 keys = generic, json, color_formatter, color_formatter_sql
783 789
784 790 ; #######
785 791 ; LOGGERS
786 792 ; #######
787 793 [logger_root]
788 794 level = NOTSET
789 795 handlers = console
790 796
791 797 [logger_sqlalchemy]
792 798 level = INFO
793 799 handlers = console_sql
794 800 qualname = sqlalchemy.engine
795 801 propagate = 0
796 802
797 803 [logger_beaker]
798 804 level = DEBUG
799 805 handlers =
800 806 qualname = beaker.container
801 807 propagate = 1
802 808
803 809 [logger_rhodecode]
804 810 level = DEBUG
805 811 handlers =
806 812 qualname = rhodecode
807 813 propagate = 1
808 814
809 815 [logger_ssh_wrapper]
810 816 level = DEBUG
811 817 handlers =
812 818 qualname = ssh_wrapper
813 819 propagate = 1
814 820
815 821 [logger_celery]
816 822 level = DEBUG
817 823 handlers =
818 824 qualname = celery
819 825
820 826
821 827 ; ########
822 828 ; HANDLERS
823 829 ; ########
824 830
825 831 [handler_console]
826 832 class = StreamHandler
827 833 args = (sys.stderr, )
828 834 level = DEBUG
829 835 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
830 836 ; This allows sending properly formatted logs to grafana loki or elasticsearch
831 837 formatter = color_formatter
832 838
833 839 [handler_console_sql]
834 840 ; "level = DEBUG" logs SQL queries and results.
835 841 ; "level = INFO" logs SQL queries.
836 842 ; "level = WARN" logs neither. (Recommended for production systems.)
837 843 class = StreamHandler
838 844 args = (sys.stderr, )
839 845 level = WARN
840 846 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
841 847 ; This allows sending properly formatted logs to grafana loki or elasticsearch
842 848 formatter = color_formatter_sql
843 849
844 850 ; ##########
845 851 ; FORMATTERS
846 852 ; ##########
847 853
848 854 [formatter_generic]
849 855 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
850 856 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
851 857 datefmt = %Y-%m-%d %H:%M:%S
852 858
853 859 [formatter_color_formatter]
854 860 class = rhodecode.lib.logging_formatter.ColorFormatter
855 861 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
856 862 datefmt = %Y-%m-%d %H:%M:%S
857 863
858 864 [formatter_color_formatter_sql]
859 865 class = rhodecode.lib.logging_formatter.ColorFormatterSql
860 866 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
861 867 datefmt = %Y-%m-%d %H:%M:%S
862 868
863 869 [formatter_json]
864 870 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
865 871 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
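Since the `[app:main]` section above documents that any option can be overridden via an `RC_<KeyName>` environment variable (uppercase, with `.` and `-` replaced by `_`), the new `app.service_api.token` setting can presumably be supplied the same way instead of being stored in the .ini file. A small sketch, assuming the convention applies to this key like any other; the token value is a placeholder:

    import os

    # Overrides app.service_api.token per the RC_<KeyName> convention
    # documented in [app:main].
    os.environ['RC_APP_SERVICE_API_TOKEN'] = 'a-long-random-secret'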
@@ -1,816 +1,822 @@
1 1
2 2 ; #########################################
3 3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 4 ; #########################################
5 5
6 6 [DEFAULT]
7 7 ; Debug flag sets all loggers to debug, and enables request tracking
8 8 debug = false
9 9
10 10 ; ########################################################################
11 11 ; EMAIL CONFIGURATION
12 12 ; These settings will be used by the RhodeCode mailing system
13 13 ; ########################################################################
14 14
15 15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 16 #email_prefix = [RhodeCode]
17 17
18 18 ; email FROM address all mails will be sent
19 19 #app_email_from = rhodecode-noreply@localhost
20 20
21 21 #smtp_server = mail.server.com
22 22 #smtp_username =
23 23 #smtp_password =
24 24 #smtp_port =
25 25 #smtp_use_tls = false
26 26 #smtp_use_ssl = true
27 27
28 28 [server:main]
29 29 ; COMMON HOST/IP CONFIG. This applies mostly to a development setup;
30 30 ; host and port for gunicorn are controlled by gunicorn_conf.py
31 31 host = 127.0.0.1
32 32 port = 10020
33 33
34 34
35 35 ; ###########################
36 36 ; GUNICORN APPLICATION SERVER
37 37 ; ###########################
38 38
39 39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
40 40
41 41 ; Module to use, this setting shouldn't be changed
42 42 use = egg:gunicorn#main
43 43
44 44 ; Prefix middleware for RhodeCode.
45 45 ; recommended when using proxy setup.
46 46 ; allows to set RhodeCode under a prefix in server.
47 47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
48 48 ; And set your prefix like: `prefix = /custom_prefix`
49 49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
50 50 ; to make your cookies only work on prefix url
51 51 [filter:proxy-prefix]
52 52 use = egg:PasteDeploy#prefix
53 53 prefix = /
54 54
55 55 [app:main]
56 56 ; The %(here)s variable will be replaced with the absolute path of parent directory
57 57 ; of this file
58 58 ; Each option in the app:main section can be overridden by an environment variable
59 59 ;
60 60 ;To override an option:
61 61 ;
62 62 ;RC_<KeyName>
63 63 ;Everything should be uppercase, . and - should be replaced by _.
64 64 ;For example, if you have these configuration settings:
65 65 ;rc_cache.repo_object.backend = foo
66 66 ;can be overridden by
67 67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
68 68
69 69 use = egg:rhodecode-enterprise-ce
70 70
71 71 ; enable proxy prefix middleware, defined above
72 72 #filter-with = proxy-prefix
73 73
74 74 ; encryption key used to encrypt social plugin tokens,
75 75 ; remote_urls with credentials etc, if not set it defaults to
76 76 ; `beaker.session.secret`
77 77 #rhodecode.encrypted_values.secret =
78 78
79 79 ; decryption strict mode (enabled by default). It controls if decryption raises
80 80 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
81 81 #rhodecode.encrypted_values.strict = false
82 82
83 83 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
84 84 ; fernet is safer, and we strongly recommend switching to it.
85 85 ; Due to backward compatibility aes is used as default.
86 86 #rhodecode.encrypted_values.algorithm = fernet
87 87
88 88 ; Return gzipped responses from RhodeCode (static files/application)
89 89 gzip_responses = false
90 90
91 91 ; Auto-generate javascript routes file on startup
92 92 generate_js_files = false
93 93
94 94 ; System global default language.
95 95 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
96 96 lang = en
97 97
98 98 ; Perform a full repository scan and import on each server start.
99 99 ; Setting this to true could lead to a very long startup time.
100 100 startup.import_repos = false
101 101
102 102 ; URL at which the application is running. This is used for Bootstrapping
103 103 ; requests in context when no web request is available. Used in ishell, or
104 104 ; SSH calls. Set this for events to receive proper url for SSH calls.
105 105 app.base_url = http://rhodecode.local
106 106
107 ; Host at which the Service API is running.
108 app.service_api.host = http://rhodecode.local:10020
109
110 ; Secret for Service API authentication.
111 app.service_api.token =
112
107 113 ; Unique application ID. Should be a random unique string for security.
108 114 app_instance_uuid = rc-production
109 115
110 116 ; Cut off limit for large diffs (size in bytes). If overall diff size on
111 117 ; commit, or pull request exceeds this limit this diff will be displayed
112 118 ; partially. E.g 512000 == 512Kb
113 119 cut_off_limit_diff = 512000
114 120
115 121 ; Cut off limit for large files inside diffs (size in bytes). Each individual
116 122 ; file inside diff which exceeds this limit will be displayed partially.
117 123 ; E.g 128000 == 128Kb
118 124 cut_off_limit_file = 128000
119 125
120 126 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
121 127 vcs_full_cache = true
122 128
123 129 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
124 130 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
125 131 force_https = false
126 132
127 133 ; use Strict-Transport-Security headers
128 134 use_htsts = false
129 135
130 136 ; Set to true if your repos are exposed using the dumb protocol
131 137 git_update_server_info = false
132 138
133 139 ; RSS/ATOM feed options
134 140 rss_cut_off_limit = 256000
135 141 rss_items_per_page = 10
136 142 rss_include_diff = false
137 143
138 144 ; gist URL alias, used to create nicer urls for gist. This should be an
139 145 ; url that does rewrites to _admin/gists/{gistid}.
140 146 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
141 147 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
142 148 gist_alias_url =
143 149
144 150 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
145 151 ; used for access.
146 152 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
147 153 ; came from the logged-in user who owns this authentication token.
148 154 ; Additionally, @TOKEN syntax can be used to bind the view to a specific
149 155 ; authentication token. Such a view would only be accessible when used together
150 156 ; with this authentication token
151 157 ; list of all views can be found under `/_admin/permissions/auth_token_access`
152 158 ; The list should be "," separated and on a single line.
153 159 ; Most common views to enable:
154 160
155 161 # RepoCommitsView:repo_commit_download
156 162 # RepoCommitsView:repo_commit_patch
157 163 # RepoCommitsView:repo_commit_raw
158 164 # RepoCommitsView:repo_commit_raw@TOKEN
159 165 # RepoFilesView:repo_files_diff
160 166 # RepoFilesView:repo_archivefile
161 167 # RepoFilesView:repo_file_raw
162 168 # GistView:*
163 169 api_access_controllers_whitelist =
164 170
165 171 ; Default encoding used to convert from and to unicode
166 172 ; can be also a comma separated list of encoding in case of mixed encodings
167 173 default_encoding = UTF-8
168 174
169 175 ; instance-id prefix
170 176 ; a prefix key for this instance used for cache invalidation when running
171 177 ; multiple instances of RhodeCode, make sure it's globally unique for
172 178 ; all running RhodeCode instances. Leave empty if you don't use it
173 179 instance_id =
174 180
175 181 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
176 182 ; of an authentication plugin even if it is disabled by its settings.
177 183 ; This could be useful if you are unable to log in to the system due to broken
178 184 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
179 185 ; module to log in again and fix the settings.
180 186 ; Available builtin plugin IDs (hash is part of the ID):
181 187 ; egg:rhodecode-enterprise-ce#rhodecode
182 188 ; egg:rhodecode-enterprise-ce#pam
183 189 ; egg:rhodecode-enterprise-ce#ldap
184 190 ; egg:rhodecode-enterprise-ce#jasig_cas
185 191 ; egg:rhodecode-enterprise-ce#headers
186 192 ; egg:rhodecode-enterprise-ce#crowd
187 193
188 194 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
189 195
190 196 ; Flag to control loading of legacy plugins in py:/path format
191 197 auth_plugin.import_legacy_plugins = true
192 198
193 199 ; alternative return HTTP header for failed authentication. Default HTTP
194 200 ; response is 401 HTTPUnauthorized. Currently HG clients have trouble
195 201 ; handling that, causing a series of failed authentication calls.
196 202 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
197 203 ; This will be served instead of default 401 on bad authentication
198 204 auth_ret_code =
199 205
200 206 ; use special detection method when serving auth_ret_code, instead of serving
201 207 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
202 208 ; and then serve auth_ret_code to clients
203 209 auth_ret_code_detection = false
204 210
205 211 ; locking return code. When repository is locked return this HTTP code. 2XX
206 212 ; codes don't break the transactions while 4XX codes do
207 213 lock_ret_code = 423
208 214
209 215 ; allows changing the repository location on the settings page
210 216 allow_repo_location_change = true
211 217
212 218 ; allows setting up custom hooks on the settings page
213 219 allow_custom_hooks_settings = true
214 220
215 221 ; Generated license token required for EE edition license.
216 222 ; New generated token value can be found in Admin > settings > license page.
217 223 license_token =
218 224
219 225 ; This flag hides sensitive information on the license page such as token, and license data
220 226 license.hide_license_info = false
221 227
222 228 ; supervisor connection uri, for managing supervisor and logs.
223 229 supervisor.uri =
224 230
225 231 ; supervisord group name/id we only want this RC instance to handle
226 232 supervisor.group_id = prod
227 233
228 234 ; Display extended labs settings
229 235 labs_settings_active = true
230 236
231 237 ; Custom exception store path, defaults to TMPDIR
232 238 ; This is used to store exceptions from RhodeCode in a shared directory
233 239 #exception_tracker.store_path =
234 240
235 241 ; Send email with exception details when it happens
236 242 #exception_tracker.send_email = false
237 243
238 244 ; Comma separated list of recipients for exception emails,
239 245 ; e.g admin@rhodecode.com,devops@rhodecode.com
240 246 ; Can be left empty, then emails will be sent to ALL super-admins
241 247 #exception_tracker.send_email_recipients =
242 248
243 249 ; optional prefix to add to the email subject
244 250 #exception_tracker.email_prefix = [RHODECODE ERROR]
245 251
246 252 ; File store configuration. This is used to store and serve uploaded files
247 253 file_store.enabled = true
248 254
249 255 ; Storage backend, available options are: local
250 256 file_store.backend = local
251 257
252 258 ; path to store the uploaded binaries
253 259 file_store.storage_path = %(here)s/data/file_store
254 260
255 261 ; Uncomment and set this path to control settings for archive download cache.
256 262 ; Generated repo archives will be cached at this location
257 263 ; and served from the cache during subsequent requests for the same archive of
258 264 ; the repository. It is important that this path is shared across filesystems and
259 265 ; accessible to both RhodeCode and vcsserver
260 266
261 267 ; Default is $cache_dir/archive_cache if not set
262 268 archive_cache.store_dir = %(here)s/data/archive_cache
263 269
264 270 ; The limit in GB sets how much data we cache before recycling the least recently used entries, defaults to 10 GB
265 271 archive_cache.cache_size_gb = 40
266 272
267 273 ; By default the cache uses a sharding technique, this specifies how many shards there are
268 274 archive_cache.cache_shards = 4
269 275
270 276 ; #############
271 277 ; CELERY CONFIG
272 278 ; #############
273 279
274 280 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
275 281
276 282 use_celery = false
277 283
278 284 ; path to store schedule database
279 285 #celerybeat-schedule.path =
280 286
281 287 ; connection url to the message broker (default redis)
282 288 celery.broker_url = redis://redis:6379/8
283 289
284 290 ; results backend to get results for (default redis)
285 291 celery.result_backend = redis://redis:6379/8
286 292
287 293 ; rabbitmq example
288 294 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
289 295
290 296 ; maximum tasks to execute before worker restart
291 297 celery.max_tasks_per_child = 20
292 298
293 299 ; tasks will never be sent to the queue, but executed locally instead.
294 300 celery.task_always_eager = false
295 301
296 302 ; #############
297 303 ; DOGPILE CACHE
298 304 ; #############
299 305
300 306 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
301 307 ; e.g. /tmpfs/data_ramdisk, however this directory might require a large amount of space
302 308 cache_dir = %(here)s/data
303 309
304 310 ; *********************************************
305 311 ; `sql_cache_short` cache for heavy SQL queries
306 312 ; Only supported backend is `memory_lru`
307 313 ; *********************************************
308 314 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
309 315 rc_cache.sql_cache_short.expiration_time = 30
310 316
311 317
312 318 ; *****************************************************
313 319 ; `cache_repo_longterm` cache for repo object instances
314 320 ; Only supported backend is `memory_lru`
315 321 ; *****************************************************
316 322 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
317 323 ; by default we use 30 days, the cache is still invalidated on push
318 324 rc_cache.cache_repo_longterm.expiration_time = 2592000
319 325 ; max items in the LRU cache, set to a smaller number to save memory and expire the least recently used entries
320 326 rc_cache.cache_repo_longterm.max_size = 10000
321 327
322 328
323 329 ; *********************************************
324 330 ; `cache_general` cache for general purpose use
325 331 ; for simplicity use rc.file_namespace backend,
326 332 ; for performance and scale use rc.redis
327 333 ; *********************************************
328 334 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
329 335 rc_cache.cache_general.expiration_time = 43200
330 336 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
331 337 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
332 338
333 339 ; alternative `cache_general` redis backend with distributed lock
334 340 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
335 341 #rc_cache.cache_general.expiration_time = 300
336 342
337 343 ; redis_expiration_time needs to be greater than expiration_time
338 344 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
339 345
340 346 #rc_cache.cache_general.arguments.host = localhost
341 347 #rc_cache.cache_general.arguments.port = 6379
342 348 #rc_cache.cache_general.arguments.db = 0
343 349 #rc_cache.cache_general.arguments.socket_timeout = 30
344 350 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
345 351 #rc_cache.cache_general.arguments.distributed_lock = true
346 352
347 353 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
348 354 #rc_cache.cache_general.arguments.lock_auto_renewal = true
349 355
350 356 ; *************************************************
351 357 ; `cache_perms` cache for permission tree, auth TTL
352 358 ; for simplicity use rc.file_namespace backend,
353 359 ; for performance and scale use rc.redis
354 360 ; *************************************************
355 361 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
356 362 rc_cache.cache_perms.expiration_time = 3600
357 363 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
358 364 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
359 365
360 366 ; alternative `cache_perms` redis backend with distributed lock
361 367 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
362 368 #rc_cache.cache_perms.expiration_time = 300
363 369
364 370 ; redis_expiration_time needs to be greater than expiration_time
365 371 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
366 372
367 373 #rc_cache.cache_perms.arguments.host = localhost
368 374 #rc_cache.cache_perms.arguments.port = 6379
369 375 #rc_cache.cache_perms.arguments.db = 0
370 376 #rc_cache.cache_perms.arguments.socket_timeout = 30
371 377 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
372 378 #rc_cache.cache_perms.arguments.distributed_lock = true
373 379
374 380 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
375 381 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
376 382
377 383 ; ***************************************************
378 384 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
379 385 ; for simplicity use rc.file_namespace backend,
380 386 ; for performance and scale use rc.redis
381 387 ; ***************************************************
382 388 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
383 389 rc_cache.cache_repo.expiration_time = 2592000
384 390 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
385 391 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
386 392
387 393 ; alternative `cache_repo` redis backend with distributed lock
388 394 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
389 395 #rc_cache.cache_repo.expiration_time = 2592000
390 396
391 397 ; redis_expiration_time needs to be greater than expiration_time
392 398 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
393 399
394 400 #rc_cache.cache_repo.arguments.host = localhost
395 401 #rc_cache.cache_repo.arguments.port = 6379
396 402 #rc_cache.cache_repo.arguments.db = 1
397 403 #rc_cache.cache_repo.arguments.socket_timeout = 30
398 404 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
399 405 #rc_cache.cache_repo.arguments.distributed_lock = true
400 406
401 407 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
402 408 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
403 409
404 410 ; ##############
405 411 ; BEAKER SESSION
406 412 ; ##############
407 413
408 414 ; beaker.session.type is the type of storage used for the logged-in users' sessions. Currently allowed
409 415 ; types are file, ext:redis, ext:database, ext:memcached
410 416 ; The fastest ones are ext:redis and ext:database, DO NOT use the memory type for sessions
411 417 beaker.session.type = file
412 418 beaker.session.data_dir = %(here)s/data/sessions
413 419
414 420 ; Redis based sessions
415 421 #beaker.session.type = ext:redis
416 422 #beaker.session.url = redis://127.0.0.1:6379/2
417 423
418 424 ; DB based session, fast, and allows easy management over logged in users
419 425 #beaker.session.type = ext:database
420 426 #beaker.session.table_name = db_session
421 427 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
422 428 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
423 429 #beaker.session.sa.pool_recycle = 3600
424 430 #beaker.session.sa.echo = false
425 431
426 432 beaker.session.key = rhodecode
427 433 beaker.session.secret = production-rc-uytcxaz
428 434 beaker.session.lock_dir = %(here)s/data/sessions/lock
429 435
430 436 ; Secure encrypted cookie. Requires AES and AES python libraries
431 437 ; you must disable beaker.session.secret to use this
432 438 #beaker.session.encrypt_key = key_for_encryption
433 439 #beaker.session.validate_key = validation_key
434 440
435 441 ; Sets the session as invalid (also logging out the user) if it has not been
436 442 ; accessed for the given amount of time in seconds
437 443 beaker.session.timeout = 2592000
438 444 beaker.session.httponly = true
439 445
440 446 ; Path to use for the cookie. Set to prefix if you use prefix middleware
441 447 #beaker.session.cookie_path = /custom_prefix
442 448
443 449 ; Set https secure cookie
444 450 beaker.session.secure = false
445 451
446 452 ; default cookie expiration time in seconds, set to `true` to expire the cookie
447 453 ; at browser close
448 454 #beaker.session.cookie_expires = 3600
449 455
450 456 ; #############################
451 457 ; SEARCH INDEXING CONFIGURATION
452 458 ; #############################
453 459
454 460 ; Full text search indexer is available in rhodecode-tools under
455 461 ; `rhodecode-tools index` command
456 462
457 463 ; WHOOSH Backend, doesn't require additional services to run
458 464 ; it works well with a few dozen repos
459 465 search.module = rhodecode.lib.index.whoosh
460 466 search.location = %(here)s/data/index
461 467
462 468 ; ####################
463 469 ; CHANNELSTREAM CONFIG
464 470 ; ####################
465 471
466 472 ; channelstream enables persistent connections and live notifications
467 473 ; in the system. It's also used by the chat system
468 474
469 475 channelstream.enabled = false
470 476
471 477 ; server address for channelstream server on the backend
472 478 channelstream.server = 127.0.0.1:9800
473 479
474 480 ; location of the channelstream server from outside world
475 481 ; use ws:// for http or wss:// for https. This address needs to be handled
476 482 ; by external HTTP server such as Nginx or Apache
477 483 ; see Nginx/Apache configuration examples in our docs
478 484 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
479 485 channelstream.secret = secret
480 486 channelstream.history.location = %(here)s/channelstream_history
481 487
482 488 ; Internal application path that JavaScript uses to connect to.
483 489 ; If you use proxy-prefix the prefix should be added before /_channelstream
484 490 channelstream.proxy_path = /_channelstream
485 491
486 492
487 493 ; ##############################
488 494 ; MAIN RHODECODE DATABASE CONFIG
489 495 ; ##############################
490 496
491 497 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
492 498 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
493 499 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
494 500 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
495 501 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
496 502
497 503 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
498 504
499 505 ; see sqlalchemy docs for other advanced settings
500 506 ; print the sql statements to output
501 507 sqlalchemy.db1.echo = false
502 508
503 509 ; recycle the connections after this amount of seconds
504 510 sqlalchemy.db1.pool_recycle = 3600
505 511
506 512 ; the number of connections to keep open inside the connection pool.
507 513 ; 0 indicates no limit
508 514 ; the general calculation with gevent is:
509 515 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
510 516 ; then increase pool size + max overflow so that they add up to 500.
511 517 #sqlalchemy.db1.pool_size = 5
512 518
513 519 ; The number of connections to allow in connection pool "overflow", that is
514 520 ; connections that can be opened above and beyond the pool_size setting,
515 521 ; which defaults to five.
516 522 #sqlalchemy.db1.max_overflow = 10
517 523
518 524 ; Connection check ping, used to detect broken database connections
519 525 ; can be enabled to better handle 'MySQL has gone away' errors
520 526 #sqlalchemy.db1.ping_connection = true
521 527
522 528 ; ##########
523 529 ; VCS CONFIG
524 530 ; ##########
525 531 vcs.server.enable = true
526 532 vcs.server = localhost:9900
527 533
528 534 ; Web server connectivity protocol, responsible for web based VCS operations
529 535 ; Available protocols are:
530 536 ; `http` - use http-rpc backend (default)
531 537 vcs.server.protocol = http
532 538
533 539 ; Push/Pull operations protocol, available options are:
534 540 ; `http` - use http-rpc backend (default)
535 541 vcs.scm_app_implementation = http
536 542
537 543 ; Push/Pull operations hooks protocol, available options are:
538 544 ; `http` - use http-rpc backend (default)
539 545 vcs.hooks.protocol = http
540 546
541 547 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
542 548 ; accessible via network.
543 549 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
544 550 vcs.hooks.host = *
545 551
546 552 ; Start VCSServer with this instance as a subprocess, useful for development
547 553 vcs.start_server = false
548 554
549 555 ; List of enabled VCS backends, available options are:
550 556 ; `hg` - mercurial
551 557 ; `git` - git
552 558 ; `svn` - subversion
553 559 vcs.backends = hg, git, svn
554 560
555 561 ; Wait this number of seconds before killing connection to the vcsserver
556 562 vcs.connection_timeout = 3600
557 563
558 564 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
559 565 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
560 566 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
561 567 #vcs.svn.compatible_version = 1.8
562 568
563 569 ; Cache flag to cache vcsserver remote calls locally
564 570 ; It uses cache_region `cache_repo`
565 571 vcs.methods.cache = true
566 572
567 573 ; ####################################################
568 574 ; Subversion proxy support (mod_dav_svn)
569 575 ; Maps RhodeCode repo groups into SVN paths for Apache
570 576 ; ####################################################
571 577
572 578 ; Enable or disable the config file generation.
573 579 svn.proxy.generate_config = false
574 580
575 581 ; Generate config file with `SVNListParentPath` set to `On`.
576 582 svn.proxy.list_parent_path = true
577 583
578 584 ; Set location and file name of generated config file.
579 585 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
580 586
581 587 ; alternative mod_dav config template. This needs to be a valid mako template
582 588 ; Example template can be found in the source code:
583 589 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
584 590 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
585 591
586 592 ; Used as a prefix to the `Location` block in the generated config file.
587 593 ; In most cases it should be set to `/`.
588 594 svn.proxy.location_root = /
589 595
590 596 ; Command to reload the mod dav svn configuration on change.
591 597 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
592 598 ; Make sure user who runs RhodeCode process is allowed to reload Apache
593 599 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
594 600
595 601 ; If the timeout expires before the reload command finishes, the command will
596 602 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
597 603 #svn.proxy.reload_timeout = 10
598 604
599 605 ; ####################
600 606 ; SSH Support Settings
601 607 ; ####################
602 608
603 609 ; Defines if a custom authorized_keys file should be created and written on
604 610 ; any change of user SSH keys. Setting this to false also disables the possibility
605 611 ; of users adding SSH keys from the web interface. Super admins can still
606 612 ; manage SSH Keys.
607 613 ssh.generate_authorized_keyfile = false
608 614
609 615 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
610 616 # ssh.authorized_keys_ssh_opts =
611 617
612 618 ; Path to the authorized_keys file where the generated entries are placed.
613 619 ; It is possible to have multiple key files specified in `sshd_config` e.g.
614 620 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
615 621 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
616 622
617 623 ; Command to execute the SSH wrapper. The binary is available in the
618 624 ; RhodeCode installation directory.
619 625 ; e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
620 626 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
621 627
622 628 ; Allow shell when executing the ssh-wrapper command
623 629 ssh.wrapper_cmd_allow_shell = false
624 630
625 631 ; Enables logging, and detailed output sent back to the client during SSH
626 632 ; operations. Useful for debugging, shouldn't be used in production.
627 633 ssh.enable_debug_logging = false
628 634
629 635 ; Paths to binary executables, by default they are just the binary names, but we can
630 636 ; override them if we want to use custom ones
631 637 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
632 638 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
633 639 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
634 640
635 641 ; Enables SSH key generator web interface. Disabling this still allows users
636 642 ; to add their own keys.
637 643 ssh.enable_ui_key_generator = true
638 644
639 645
640 646 ; #################
641 647 ; APPENLIGHT CONFIG
642 648 ; #################
643 649
644 650 ; Appenlight is tailored to work with RhodeCode, see
645 651 ; http://appenlight.rhodecode.com for details on how to obtain an account
646 652
647 653 ; Appenlight integration enabled
648 654 #appenlight = false
649 655
650 656 #appenlight.server_url = https://api.appenlight.com
651 657 #appenlight.api_key = YOUR_API_KEY
652 658 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
653 659
654 660 ; used for JS client
655 661 #appenlight.api_public_key = YOUR_API_PUBLIC_KEY
656 662
657 663 ; TWEAK AMOUNT OF INFO SENT HERE
658 664
659 665 ; enables 404 error logging (default False)
660 666 #appenlight.report_404 = false
661 667
662 668 ; time in seconds after which a request is considered slow (default 1)
663 669 #appenlight.slow_request_time = 1
664 670
665 671 ; record slow requests in application
666 672 ; (needs to be enabled for slow datastore recording and time tracking)
667 673 #appenlight.slow_requests = true
668 674
669 675 ; enable hooking to application loggers
670 676 #appenlight.logging = true
671 677
672 678 ; minimum log level for log capture
673 679 #appenlight.logging.level = WARNING
674 680
675 681 ; send logs only from erroneous/slow requests
676 682 ; (saves API quota for intensive logging)
677 683 #appenlight.logging_on_error = false
678 684
679 685 ; list of additional keywords that should be grabbed from environ object
680 686 ; can be string with comma separated list of words in lowercase
681 687 ; (by default client will always send following info:
682 688 ; 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
683 689 ; start with HTTP*); this list can be extended with additional keywords here
684 690 #appenlight.environ_keys_whitelist =
685 691
686 692 ; list of keywords that should be blanked from request object
687 693 ; can be string with comma separated list of words in lowercase
688 694 ; (by default client will always blank keys that contain following words
689 695 ; 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
690 696 ; this list can be extended with additional keywords set here)
691 697 #appenlight.request_keys_blacklist =
692 698
693 699 ; list of namespaces that should be ignored when gathering log entries
694 700 ; can be string with comma separated list of namespaces
695 701 ; (by default the client ignores own entries: appenlight_client.client)
696 702 #appenlight.log_namespace_blacklist =
697 703
698 704 ; Statsd client config, this is used to send metrics to statsd
699 705 ; We recommend setting up statsd_exporter and scraping the metrics using Prometheus
700 706 #statsd.enabled = false
701 707 #statsd.statsd_host = 0.0.0.0
702 708 #statsd.statsd_port = 8125
703 709 #statsd.statsd_prefix =
704 710 #statsd.statsd_ipv6 = false
705 711
706 712 ; configure logging automatically at server startup, set to false
707 713 ; to use the below custom logging config.
708 714 ; RC_LOGGING_FORMATTER
709 715 ; RC_LOGGING_LEVEL
710 716 ; these env variables can control the logging settings when autoconfigure is used
711 717
712 718 #logging.autoconfigure = true
713 719
714 720 ; specify your own custom logging config file to configure logging
715 721 #logging.logging_conf_file = /path/to/custom_logging.ini
716 722
717 723 ; Dummy marker to add new entries after.
718 724 ; Add any custom entries below. Please don't remove this marker.
719 725 custom.conf = 1
720 726
721 727
722 728 ; #####################
723 729 ; LOGGING CONFIGURATION
724 730 ; #####################
725 731
726 732 [loggers]
727 733 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
728 734
729 735 [handlers]
730 736 keys = console, console_sql
731 737
732 738 [formatters]
733 739 keys = generic, json, color_formatter, color_formatter_sql
734 740
735 741 ; #######
736 742 ; LOGGERS
737 743 ; #######
738 744 [logger_root]
739 745 level = NOTSET
740 746 handlers = console
741 747
742 748 [logger_sqlalchemy]
743 749 level = INFO
744 750 handlers = console_sql
745 751 qualname = sqlalchemy.engine
746 752 propagate = 0
747 753
748 754 [logger_beaker]
749 755 level = DEBUG
750 756 handlers =
751 757 qualname = beaker.container
752 758 propagate = 1
753 759
754 760 [logger_rhodecode]
755 761 level = DEBUG
756 762 handlers =
757 763 qualname = rhodecode
758 764 propagate = 1
759 765
760 766 [logger_ssh_wrapper]
761 767 level = DEBUG
762 768 handlers =
763 769 qualname = ssh_wrapper
764 770 propagate = 1
765 771
766 772 [logger_celery]
767 773 level = DEBUG
768 774 handlers =
769 775 qualname = celery
770 776
771 777
772 778 ; ########
773 779 ; HANDLERS
774 780 ; ########
775 781
776 782 [handler_console]
777 783 class = StreamHandler
778 784 args = (sys.stderr, )
779 785 level = INFO
780 786 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
781 787 ; This allows sending properly formatted logs to grafana loki or elasticsearch
782 788 formatter = generic
783 789
784 790 [handler_console_sql]
785 791 ; "level = DEBUG" logs SQL queries and results.
786 792 ; "level = INFO" logs SQL queries.
787 793 ; "level = WARN" logs neither. (Recommended for production systems.)
788 794 class = StreamHandler
789 795 args = (sys.stderr, )
790 796 level = WARN
791 797 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
792 798 ; This allows sending properly formatted logs to grafana loki or elasticsearch
793 799 formatter = generic
794 800
795 801 ; ##########
796 802 ; FORMATTERS
797 803 ; ##########
798 804
799 805 [formatter_generic]
800 806 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
801 807 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
802 808 datefmt = %Y-%m-%d %H:%M:%S
803 809
804 810 [formatter_color_formatter]
805 811 class = rhodecode.lib.logging_formatter.ColorFormatter
806 812 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
807 813 datefmt = %Y-%m-%d %H:%M:%S
808 814
809 815 [formatter_color_formatter_sql]
810 816 class = rhodecode.lib.logging_formatter.ColorFormatterSql
811 817 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
812 818 datefmt = %Y-%m-%d %H:%M:%S
813 819
814 820 [formatter_json]
815 821 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
816 822 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
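Since this ini, together with the `app.service_api.token` setting referenced in the JSON-RPC API module below, is what the new service API authenticates against, here is a minimal sketch of a service-API call driven by such a configuration. It is illustrative only: the `call_service_method` helper, the `[app:main]` section name, and the host are assumptions, while the payload keys (`id`, `auth_token`, `method`, `args`) and the default `/_admin/apiv2` route come from the API module shown below.

import json
import configparser
import urllib.request

def call_service_method(ini_path, host, method, args):
    # Hypothetical helper: reads the shared service secret from the ini and
    # posts a JSON-RPC body in the shape that setup_request() (below) expects.
    parser = configparser.ConfigParser()
    parser.read(ini_path)
    # the '[app:main]' section name is an assumption about the ini layout
    token = parser['app:main']['app.service_api.token']

    payload = {
        'id': 1,
        'auth_token': token,  # service_* methods are matched against app.service_api.token
        'method': method,     # e.g. 'service_get_repo_name_by_id'
        'args': args,         # must be a dict
    }
    request = urllib.request.Request(
        f'{host}/_admin/apiv2',  # DEFAULT_URL of the JSON-RPC API module below
        data=json.dumps(payload).encode('utf-8'),
        headers={'Content-Type': 'application/json'},
    )
    with urllib.request.urlopen(request) as response:
        return json.loads(response.read())['result']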
@@ -1,573 +1,582 b''
1 1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import itertools
20 20 import logging
21 21 import sys
22 22 import fnmatch
23 23
24 24 import decorator
25 25 import typing
26 26 import venusian
27 27 from collections import OrderedDict
28 28
29 29 from pyramid.exceptions import ConfigurationError
30 30 from pyramid.renderers import render
31 31 from pyramid.response import Response
32 32 from pyramid.httpexceptions import HTTPNotFound
33 33
34 34 from rhodecode.api.exc import (
35 35 JSONRPCBaseError, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
36 36 from rhodecode.apps._base import TemplateArgs
37 37 from rhodecode.lib.auth import AuthUser
38 38 from rhodecode.lib.base import get_ip_addr, attach_context_attributes
39 39 from rhodecode.lib.exc_tracking import store_exception
40 40 from rhodecode.lib import ext_json
41 41 from rhodecode.lib.utils2 import safe_str
42 42 from rhodecode.lib.plugins.utils import get_plugin_settings
43 43 from rhodecode.model.db import User, UserApiKeys
44 44
45 45 log = logging.getLogger(__name__)
46 46
47 47 DEFAULT_RENDERER = 'jsonrpc_renderer'
48 48 DEFAULT_URL = '/_admin/apiv2'
49 SERVICE_API_IDENTIFIER = 'service_'
49 50
50 51
51 52 def find_methods(jsonrpc_methods, pattern):
52 53 matches = OrderedDict()
53 54 if not isinstance(pattern, (list, tuple)):
54 55 pattern = [pattern]
55 56
56 57 for single_pattern in pattern:
57 for method_name, method in jsonrpc_methods.items():
58 for method_name, method in filter(
59 lambda x: not x[0].startswith(SERVICE_API_IDENTIFIER), jsonrpc_methods.items()
60 ):
58 61 if fnmatch.fnmatch(method_name, single_pattern):
59 62 matches[method_name] = method
60 63 return matches
61 64
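The filter added to find_methods above is what keeps service methods out of the "Similar methods" hint produced for unknown method names. A small illustrative check, assuming this module is importable as rhodecode.api:

from collections import OrderedDict
from rhodecode.api import find_methods  # module path is an assumption

# Both names match the '*repo*' pattern, but the service_ method is filtered
# out by the SERVICE_API_IDENTIFIER check, so it is never suggested.
methods = OrderedDict([
    ('get_repo', lambda: None),
    ('service_get_repo_name_by_id', lambda: None),
])
assert list(find_methods(methods, '*repo*')) == ['get_repo']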
62 65
63 66 class ExtJsonRenderer(object):
64 67 """
65 68 Custom renderer that makes use of our ext_json lib
66 69
67 70 """
68 71
69 72 def __init__(self):
70 73 self.serializer = ext_json.formatted_json
71 74
72 75 def __call__(self, info):
73 76 """ Returns a plain JSON-encoded string with content-type
74 77 ``application/json``. The content-type may be overridden by
75 78 setting ``request.response.content_type``."""
76 79
77 80 def _render(value, system):
78 81 request = system.get('request')
79 82 if request is not None:
80 83 response = request.response
81 84 ct = response.content_type
82 85 if ct == response.default_content_type:
83 86 response.content_type = 'application/json'
84 87
85 88 return self.serializer(value)
86 89
87 90 return _render
88 91
89 92
90 93 def jsonrpc_response(request, result):
91 94 rpc_id = getattr(request, 'rpc_id', None)
92 95
93 96 ret_value = ''
94 97 if rpc_id:
95 98 ret_value = {'id': rpc_id, 'result': result, 'error': None}
96 99
97 100 # fetch deprecation warnings, and store it inside results
98 101 deprecation = getattr(request, 'rpc_deprecation', None)
99 102 if deprecation:
100 103 ret_value['DEPRECATION_WARNING'] = deprecation
101 104
102 105 raw_body = render(DEFAULT_RENDERER, ret_value, request=request)
103 106 content_type = 'application/json'
104 107 content_type_header = 'Content-Type'
105 108 headers = {
106 109 content_type_header: content_type
107 110 }
108 111 return Response(
109 112 body=raw_body,
110 113 content_type=content_type,
111 114 headerlist=[(k, v) for k, v in headers.items()]
112 115 )
113 116
114 117
115 118 def jsonrpc_error(request, message, retid=None, code: int | None = None, headers: dict | None = None):
116 119 """
117 120 Generate a Response object with a JSON-RPC error body
118 121 """
119 122 headers = headers or {}
120 123 content_type = 'application/json'
121 124 content_type_header = 'Content-Type'
122 125 if content_type_header not in headers:
123 126 headers[content_type_header] = content_type
124 127
125 128 err_dict = {'id': retid, 'result': None, 'error': message}
126 129 raw_body = render(DEFAULT_RENDERER, err_dict, request=request)
127 130
128 131 return Response(
129 132 body=raw_body,
130 133 status=code,
131 134 content_type=content_type,
132 135 headerlist=[(k, v) for k, v in headers.items()]
133 136 )
134 137
135 138
136 139 def exception_view(exc, request):
137 140 rpc_id = getattr(request, 'rpc_id', None)
138 141
139 142 if isinstance(exc, JSONRPCError):
140 143 fault_message = safe_str(exc)
141 144 log.debug('json-rpc error rpc_id:%s "%s"', rpc_id, fault_message)
142 145 elif isinstance(exc, JSONRPCValidationError):
143 146 colander_exc = exc.colander_exception
144 147 # TODO(marcink): think maybe of nicer way to serialize errors ?
145 148 fault_message = colander_exc.asdict()
146 149 log.debug('json-rpc colander error rpc_id:%s "%s"', rpc_id, fault_message)
147 150 elif isinstance(exc, JSONRPCForbidden):
148 151 fault_message = 'Access was denied to this resource.'
149 152 log.warning('json-rpc forbidden call rpc_id:%s "%s"', rpc_id, fault_message)
150 153 elif isinstance(exc, HTTPNotFound):
151 154 method = request.rpc_method
152 155 log.debug('json-rpc method `%s` not found in list of '
153 156 'api calls: %s, rpc_id:%s',
154 157 method, list(request.registry.jsonrpc_methods.keys()), rpc_id)
155 158
156 159 similar = 'none'
157 160 try:
158 161 similar_paterns = [f'*{x}*' for x in method.split('_')]
159 162 similar_found = find_methods(
160 163 request.registry.jsonrpc_methods, similar_paterns)
161 164 similar = ', '.join(similar_found.keys()) or similar
162 165 except Exception:
163 166 # make the whole above block safe
164 167 pass
165 168
166 169 fault_message = f"No such method: {method}. Similar methods: {similar}"
167 170 else:
168 171 fault_message = 'undefined error'
169 172 exc_info = exc.exc_info()
170 173 store_exception(id(exc_info), exc_info, prefix='rhodecode-api')
171 174
172 175 statsd = request.registry.statsd
173 176 if statsd:
174 177 exc_type = f"{exc.__class__.__module__}.{exc.__class__.__name__}"
175 178 statsd.incr('rhodecode_exception_total',
176 179 tags=["exc_source:api", f"type:{exc_type}"])
177 180
178 181 return jsonrpc_error(request, fault_message, rpc_id)
179 182
180 183
181 184 def request_view(request):
182 185 """
183 186 Main request handling method. It handles all logic to call a specific
184 187 exposed method
185 188 """
186 189 # cython compatible inspect
187 190 from rhodecode.config.patches import inspect_getargspec
188 191 inspect = inspect_getargspec()
189 192
190 193 # check if we can find this session using api_key, get_by_auth_token
191 194 # search not expired tokens only
192 195 try:
193 api_user = User.get_by_auth_token(request.rpc_api_key)
196 if not request.rpc_method.startswith(SERVICE_API_IDENTIFIER):
197 api_user = User.get_by_auth_token(request.rpc_api_key)
194 198
195 if api_user is None:
196 return jsonrpc_error(
197 request, retid=request.rpc_id, message='Invalid API KEY')
199 if api_user is None:
200 return jsonrpc_error(
201 request, retid=request.rpc_id, message='Invalid API KEY')
198 202
199 if not api_user.active:
200 return jsonrpc_error(
201 request, retid=request.rpc_id,
202 message='Request from this user not allowed')
203 if not api_user.active:
204 return jsonrpc_error(
205 request, retid=request.rpc_id,
206 message='Request from this user not allowed')
203 207
204 # check if we are allowed to use this IP
205 auth_u = AuthUser(
206 api_user.user_id, request.rpc_api_key, ip_addr=request.rpc_ip_addr)
207 if not auth_u.ip_allowed:
208 return jsonrpc_error(
209 request, retid=request.rpc_id,
210 message='Request from IP:{} not allowed'.format(
211 request.rpc_ip_addr))
212 else:
213 log.info('Access for IP:%s allowed', request.rpc_ip_addr)
208 # check if we are allowed to use this IP
209 auth_u = AuthUser(
210 api_user.user_id, request.rpc_api_key, ip_addr=request.rpc_ip_addr)
211 if not auth_u.ip_allowed:
212 return jsonrpc_error(
213 request, retid=request.rpc_id,
214 message='Request from IP:{} not allowed'.format(
215 request.rpc_ip_addr))
216 else:
217 log.info('Access for IP:%s allowed', request.rpc_ip_addr)
218
219 # register our auth-user
220 request.rpc_user = auth_u
221 request.environ['rc_auth_user_id'] = str(auth_u.user_id)
214 222
215 # register our auth-user
216 request.rpc_user = auth_u
217 request.environ['rc_auth_user_id'] = str(auth_u.user_id)
223 # now check if token is valid for API
224 auth_token = request.rpc_api_key
225 token_match = api_user.authenticate_by_token(
226 auth_token, roles=[UserApiKeys.ROLE_API])
227 invalid_token = not token_match
218 228
219 # now check if token is valid for API
220 auth_token = request.rpc_api_key
221 token_match = api_user.authenticate_by_token(
222 auth_token, roles=[UserApiKeys.ROLE_API])
223 invalid_token = not token_match
224
225 log.debug('Checking if API KEY is valid with proper role')
226 if invalid_token:
227 return jsonrpc_error(
228 request, retid=request.rpc_id,
229 message='API KEY invalid or, has bad role for an API call')
229 log.debug('Checking if API KEY is valid with proper role')
230 if invalid_token:
231 return jsonrpc_error(
232 request, retid=request.rpc_id,
233 message='API KEY invalid or, has bad role for an API call')
234 else:
235 auth_u = 'service'
236 if request.rpc_api_key != request.registry.settings['app.service_api.token']:
237 raise Exception("Provided service secret is not recognized!")
230 238
231 239 except Exception:
232 240 log.exception('Error on API AUTH')
233 241 return jsonrpc_error(
234 242 request, retid=request.rpc_id, message='Invalid API KEY')
235 243
236 244 method = request.rpc_method
237 245 func = request.registry.jsonrpc_methods[method]
238 246
239 247 # now that we have a method, add request._req_params to
240 248 # self.kwargs and dispatch control to WSGIController
241 249
242 250 argspec = inspect.getargspec(func)
243 251 arglist = argspec[0]
244 252 defs = argspec[3] or []
245 253 defaults = [type(a) for a in defs]
246 254 default_empty = type(NotImplemented)
247 255
248 256 # kw arguments required by this method
249 257 func_kwargs = dict(itertools.zip_longest(
250 258 reversed(arglist), reversed(defaults), fillvalue=default_empty))
251 259
252 260 # This attribute will need to be first param of a method that uses
253 261 # api_key, which is translated to instance of user at that name
254 262 user_var = 'apiuser'
255 263 request_var = 'request'
256 264
257 265 for arg in [user_var, request_var]:
258 266 if arg not in arglist:
259 267 return jsonrpc_error(
260 268 request,
261 269 retid=request.rpc_id,
262 270 message='This method [%s] does not support '
263 271 'required parameter `%s`' % (func.__name__, arg))
264 272
265 273 # get our arglist and check if we provided them as args
266 274 for arg, default in func_kwargs.items():
267 275 if arg in [user_var, request_var]:
268 276 # user_var and request_var are pre-hardcoded parameters and we
269 277 # don't need to do any translation
270 278 continue
271 279
272 280 # skip the required param check if its default value is
273 281 # NotImplementedType (default_empty)
274 282 if default == default_empty and arg not in request.rpc_params:
275 283 return jsonrpc_error(
276 284 request,
277 285 retid=request.rpc_id,
278 286 message=('Missing non optional `%s` arg in JSON DATA' % arg)
279 287 )
280 288
281 289 # sanitize extra passed arguments
282 290 for k in list(request.rpc_params.keys()):
283 291 if k not in func_kwargs:
284 292 del request.rpc_params[k]
285 293
286 294 call_params = request.rpc_params
287 295 call_params.update({
288 296 'request': request,
289 297 'apiuser': auth_u
290 298 })
291 299
292 300 # register some common functions for usage
293 attach_context_attributes(TemplateArgs(), request, request.rpc_user.user_id)
301 rpc_user = request.rpc_user.user_id if hasattr(request, 'rpc_user') else None
302 attach_context_attributes(TemplateArgs(), request, rpc_user)
294 303
295 304 statsd = request.registry.statsd
296 305
297 306 try:
298 307 ret_value = func(**call_params)
299 308 resp = jsonrpc_response(request, ret_value)
300 309 if statsd:
301 310 statsd.incr('rhodecode_api_call_success_total')
302 311 return resp
303 312 except JSONRPCBaseError:
304 313 raise
305 314 except Exception:
306 315 log.exception('Unhandled exception occurred on api call: %s', func)
307 316 exc_info = sys.exc_info()
308 317 exc_id, exc_type_name = store_exception(
309 318 id(exc_info), exc_info, prefix='rhodecode-api')
310 319 error_headers = {
311 320 'RhodeCode-Exception-Id': str(exc_id),
312 321 'RhodeCode-Exception-Type': str(exc_type_name)
313 322 }
314 323 err_resp = jsonrpc_error(
315 324 request, retid=request.rpc_id, message='Internal server error',
316 325 headers=error_headers)
317 326 if statsd:
318 327 statsd.incr('rhodecode_api_call_fail_total')
319 328 return err_resp
320 329
321 330
322 331 def setup_request(request):
323 332 """
324 333 Parse a JSON-RPC request body. It's used inside the predicates method
325 334 to validate and bootstrap requests for usage in rpc calls.
326 335
327 336 We need to raise JSONRPCError here if we want to return some errors back to
328 337 user.
329 338 """
330 339
331 340 log.debug('Executing setup request: %r', request)
332 341 request.rpc_ip_addr = get_ip_addr(request.environ)
333 342 # TODO(marcink): deprecate GET at some point
334 343 if request.method not in ['POST', 'GET']:
335 344 log.debug('unsupported request method "%s"', request.method)
336 345 raise JSONRPCError(
337 346 'unsupported request method "%s". Please use POST' % request.method)
338 347
339 348 if 'CONTENT_LENGTH' not in request.environ:
340 349 log.debug("No Content-Length")
341 350 raise JSONRPCError("Empty body, No Content-Length in request")
342 351
343 352 else:
344 353 length = request.environ['CONTENT_LENGTH']
345 354 log.debug('Content-Length: %s', length)
346 355
347 356 if length == 0:
348 357 log.debug("Content-Length is 0")
349 358 raise JSONRPCError("Content-Length is 0")
350 359
351 360 raw_body = request.body
352 361 log.debug("Loading JSON body now")
353 362 try:
354 363 json_body = ext_json.json.loads(raw_body)
355 364 except ValueError as e:
356 365 # catch JSON errors Here
357 366 raise JSONRPCError(f"JSON parse error ERR:{e} RAW:{raw_body!r}")
358 367
359 368 request.rpc_id = json_body.get('id')
360 369 request.rpc_method = json_body.get('method')
361 370
362 371 # check required base parameters
363 372 try:
364 373 api_key = json_body.get('api_key')
365 374 if not api_key:
366 375 api_key = json_body.get('auth_token')
367 376
368 377 if not api_key:
369 378 raise KeyError('api_key or auth_token')
370 379
371 380 # TODO(marcink): support passing in token in request header
372 381
373 382 request.rpc_api_key = api_key
374 383 request.rpc_id = json_body['id']
375 384 request.rpc_method = json_body['method']
376 385 request.rpc_params = json_body['args'] \
377 386 if isinstance(json_body['args'], dict) else {}
378 387
379 388 log.debug('method: %s, params: %.10240r', request.rpc_method, request.rpc_params)
380 389 except KeyError as e:
381 390 raise JSONRPCError(f'Incorrect JSON data. Missing {e}')
382 391
383 392 log.debug('setup complete, now handling method:%s rpcid:%s',
384 393 request.rpc_method, request.rpc_id, )
385 394
386 395
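For reference, a minimal body that setup_request accepts, shown with the service method used elsewhere in this commit; 'api_key' is accepted as a legacy alias for 'auth_token', and 'args' must be a dict or it is replaced with an empty one:

example_body = {
    'id': 1,                                   # becomes request.rpc_id
    'auth_token': '<secret-or-user-token>',    # becomes request.rpc_api_key
    'method': 'service_get_repo_name_by_id',   # becomes request.rpc_method
    'args': {'repo_id': '<repo id>'},          # becomes request.rpc_params (must be a dict)
}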
387 396 class RoutePredicate(object):
388 397 def __init__(self, val, config):
389 398 self.val = val
390 399
391 400 def text(self):
392 401 return f'jsonrpc route = {self.val}'
393 402
394 403 phash = text
395 404
396 405 def __call__(self, info, request):
397 406 if self.val:
398 407 # potentially setup and bootstrap our call
399 408 setup_request(request)
400 409
401 410 # Always return True so that even if it isn't a valid RPC it
402 411 # will fall through to the underlying handlers like notfound_view
403 412 return True
404 413
405 414
406 415 class NotFoundPredicate(object):
407 416 def __init__(self, val, config):
408 417 self.val = val
409 418 self.methods = config.registry.jsonrpc_methods
410 419
411 420 def text(self):
412 421 return f'jsonrpc method not found = {self.val}'
413 422
414 423 phash = text
415 424
416 425 def __call__(self, info, request):
417 426 return hasattr(request, 'rpc_method')
418 427
419 428
420 429 class MethodPredicate(object):
421 430 def __init__(self, val, config):
422 431 self.method = val
423 432
424 433 def text(self):
425 434 return f'jsonrpc method = {self.method}'
426 435
427 436 phash = text
428 437
429 438 def __call__(self, context, request):
430 439 # we need to explicitly return False here, so pyramid doesn't try to
431 440 # execute our view directly. We need our main handler to execute things
432 441 return getattr(request, 'rpc_method') == self.method
433 442
434 443
435 444 def add_jsonrpc_method(config, view, **kwargs):
436 445 # pop the method name
437 446 method = kwargs.pop('method', None)
438 447
439 448 if method is None:
440 449 raise ConfigurationError(
441 450 'Cannot register a JSON-RPC method without specifying the "method"')
442 451
443 452 # we define a custom predicate to enable detection of conflicting methods,
444 453 # those predicates are a kind of "translation" from the decorator variables
445 454 # to internal predicate names
446 455
447 456 kwargs['jsonrpc_method'] = method
448 457
449 458 # register our view into global view store for validation
450 459 config.registry.jsonrpc_methods[method] = view
451 460
452 461 # we're using our main request_view handler, here, so each method
453 462 # has a unified handler for itself
454 463 config.add_view(request_view, route_name='apiv2', **kwargs)
455 464
456 465
457 466 class jsonrpc_method(object):
458 467 """
459 468 decorator that works similar to @add_view_config decorator,
460 469 but tailored for our JSON RPC
461 470 """
462 471
463 472 venusian = venusian # for testing injection
464 473
465 474 def __init__(self, method=None, **kwargs):
466 475 self.method = method
467 476 self.kwargs = kwargs
468 477
469 478 def __call__(self, wrapped):
470 479 kwargs = self.kwargs.copy()
471 480 kwargs['method'] = self.method or wrapped.__name__
472 481 depth = kwargs.pop('_depth', 0)
473 482
474 483 def callback(context, name, ob):
475 484 config = context.config.with_package(info.module)
476 485 config.add_jsonrpc_method(view=ob, **kwargs)
477 486
478 487 info = venusian.attach(wrapped, callback, category='pyramid',
479 488 depth=depth + 1)
480 489 if info.scope == 'class':
481 490 # ensure that attr is set if decorating a class method
482 491 kwargs.setdefault('attr', wrapped.__name__)
483 492
484 493 kwargs['_info'] = info.codeinfo # fbo action_method
485 494 return wrapped
486 495
487 496
488 497 class jsonrpc_deprecated_method(object):
489 498 """
490 499 Marks method as deprecated, adds log.warning, and injects a special key into
491 500 the request variable to mark the method as deprecated.
492 501 Also injects special docstring that extract_docs will catch to mark
493 502 method as deprecated.
494 503
495 504 :param use_method: specify which method should be used instead of
496 505 the decorated one
497 506
498 507 Use like::
499 508
500 509 @jsonrpc_method()
501 510 @jsonrpc_deprecated_method(use_method='new_func', deprecated_at_version='3.0.0')
502 511 def old_func(request, apiuser, arg1, arg2):
503 512 ...
504 513 """
505 514
506 515 def __init__(self, use_method, deprecated_at_version):
507 516 self.use_method = use_method
508 517 self.deprecated_at_version = deprecated_at_version
509 518 self.deprecated_msg = ''
510 519
511 520 def __call__(self, func):
512 521 self.deprecated_msg = 'Please use method `{method}` instead.'.format(
513 522 method=self.use_method)
514 523
515 524 docstring = """\n
516 525 .. deprecated:: {version}
517 526
518 527 {deprecation_message}
519 528
520 529 {original_docstring}
521 530 """
522 531 func.__doc__ = docstring.format(
523 532 version=self.deprecated_at_version,
524 533 deprecation_message=self.deprecated_msg,
525 534 original_docstring=func.__doc__)
526 535 return decorator.decorator(self.__wrapper, func)
527 536
528 537 def __wrapper(self, func, *fargs, **fkwargs):
529 538 log.warning('DEPRECATED API CALL on function %s, please '
530 539 'use `%s` instead', func, self.use_method)
531 540 # alter function docstring to mark as deprecated, this is picked up
532 541 # via fabric file that generates API DOC.
533 542 result = func(*fargs, **fkwargs)
534 543
535 544 request = fargs[0]
536 545 request.rpc_deprecation = 'DEPRECATED METHOD ' + self.deprecated_msg
537 546 return result
538 547
539 548
540 549 def add_api_methods(config):
541 550 from rhodecode.api.views import (
542 551 deprecated_api, gist_api, pull_request_api, repo_api, repo_group_api,
543 552 server_api, search_api, testing_api, user_api, user_group_api)
544 553
545 554 config.scan('rhodecode.api.views')
546 555
547 556
548 557 def includeme(config):
549 558 plugin_module = 'rhodecode.api'
550 559 plugin_settings = get_plugin_settings(
551 560 plugin_module, config.registry.settings)
552 561
553 562 if not hasattr(config.registry, 'jsonrpc_methods'):
554 563 config.registry.jsonrpc_methods = OrderedDict()
555 564
556 565 # match filter by given method only
557 566 config.add_view_predicate('jsonrpc_method', MethodPredicate)
558 567 config.add_view_predicate('jsonrpc_method_not_found', NotFoundPredicate)
559 568
560 569 config.add_renderer(DEFAULT_RENDERER, ExtJsonRenderer())
561 570 config.add_directive('add_jsonrpc_method', add_jsonrpc_method)
562 571
563 572 config.add_route_predicate(
564 573 'jsonrpc_call', RoutePredicate)
565 574
566 575 config.add_route(
567 576 'apiv2', plugin_settings.get('url', DEFAULT_URL), jsonrpc_call=True)
568 577
569 578 # register some exception handling view
570 579 config.add_view(exception_view, context=JSONRPCBaseError)
571 580 config.add_notfound_view(exception_view, jsonrpc_method_not_found=True)
572 581
573 582 add_api_methods(config)
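To tie the pieces together, a hypothetical service method definition (not part of this commit): any method whose name starts with SERVICE_API_IDENTIFIER is authenticated in request_view against registry.settings['app.service_api.token'] instead of a user token, is hidden from find_methods suggestions, and still has to accept the mandatory request and apiuser parameters.

from rhodecode.api import jsonrpc_method  # module path is an assumption

@jsonrpc_method()
def service_example_ping(request, apiuser):
    # 'request' and 'apiuser' are required by request_view(); for service_*
    # calls apiuser is the literal string 'service' rather than an AuthUser.
    return {'pong': True}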
@@ -1,263 +1,392 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import re
21 21 import logging
22 22 import datetime
23 23 import configparser
24 24 from sqlalchemy import Table
25 25
26 from rhodecode.lib.utils import call_service_api
26 27 from rhodecode.lib.utils2 import AttributeDict
27 28 from rhodecode.model.scm import ScmModel
28 29
29 30 from .hg import MercurialServer
30 31 from .git import GitServer
31 32 from .svn import SubversionServer
32 33 log = logging.getLogger(__name__)
33 34
34 35
35 36 class SshWrapper(object):
36 37 hg_cmd_pat = re.compile(r'^hg\s+\-R\s+(\S+)\s+serve\s+\-\-stdio$')
37 38 git_cmd_pat = re.compile(r'^git-(receive-pack|upload-pack)\s\'[/]?(\S+?)(|\.git)\'$')
38 39 svn_cmd_pat = re.compile(r'^svnserve -t')
39 40
40 41 def __init__(self, command, connection_info, mode,
41 42 user, user_id, key_id: int, shell, ini_path: str, env):
42 43 self.command = command
43 44 self.connection_info = connection_info
44 45 self.mode = mode
45 46 self.username = user
46 47 self.user_id = user_id
47 48 self.key_id = key_id
48 49 self.shell = shell
49 50 self.ini_path = ini_path
50 51 self.env = env
51 52
52 53 self.config = self.parse_config(ini_path)
53 54 self.server_impl = None
54 55
55 56 def parse_config(self, config_path):
56 57 parser = configparser.ConfigParser()
57 58 parser.read(config_path)
58 59 return parser
59 60
60 61 def update_key_access_time(self, key_id):
61 62 from rhodecode.model.meta import raw_query_executor, Base
62 63
63 64 table = Table('user_ssh_keys', Base.metadata, autoload=False)
64 65 atime = datetime.datetime.utcnow()
65 66 stmt = (
66 67 table.update()
67 68 .where(table.c.ssh_key_id == key_id)
68 69 .values(accessed_on=atime)
69 70 # no MySQL Support for .returning :((
70 71 #.returning(table.c.accessed_on, table.c.ssh_key_fingerprint)
71 72 )
72 73
73 74 res_count = None
74 75 with raw_query_executor() as session:
75 76 result = session.execute(stmt)
76 77 if result.rowcount:
77 78 res_count = result.rowcount
78 79
79 80 if res_count:
80 81 log.debug('Update key id:`%s` access time', key_id)
81 82
82 83 def get_user(self, user_id):
83 84 user = AttributeDict()
84 85 # lazy load db imports
85 86 from rhodecode.model.db import User
86 87 dbuser = User.get(user_id)
87 88 if not dbuser:
88 89 return None
89 90 user.user_id = dbuser.user_id
90 91 user.username = dbuser.username
91 92 user.auth_user = dbuser.AuthUser()
92 93 return user
93 94
94 95 def get_connection_info(self):
95 96 """
96 97 connection_info
97 98
98 99 Identifies the client and server ends of the connection.
99 100 The variable contains four space-separated values: client IP address,
100 101 client port number, server IP address, and server port number.
101 102 """
102 103 conn = dict(
103 104 client_ip=None,
104 105 client_port=None,
105 106 server_ip=None,
106 107 server_port=None,
107 108 )
108 109
109 110 info = self.connection_info.split(' ')
110 111 if len(info) == 4:
111 112 conn['client_ip'] = info[0]
112 113 conn['client_port'] = info[1]
113 114 conn['server_ip'] = info[2]
114 115 conn['server_port'] = info[3]
115 116
116 117 return conn
117 118
118 119 def maybe_translate_repo_uid(self, repo_name):
119 120 _org_name = repo_name
120 121 if _org_name.startswith('_'):
121 122 # remove format of _ID/subrepo
122 123 _org_name = _org_name.split('/', 1)[0]
123 124
124 125 if repo_name.startswith('_'):
125 126 from rhodecode.model.repo import RepoModel
126 127 org_repo_name = repo_name
127 128 log.debug('translating UID repo %s', org_repo_name)
128 129 by_id_match = RepoModel().get_repo_by_id(repo_name)
129 130 if by_id_match:
130 131 repo_name = by_id_match.repo_name
131 132 log.debug('translation of UID repo %s got `%s`', org_repo_name, repo_name)
132 133
133 134 return repo_name, _org_name
134 135
135 136 def get_repo_details(self, mode):
136 137 vcs_type = mode if mode in ['svn', 'hg', 'git'] else None
137 138 repo_name = None
138 139
139 140 hg_match = self.hg_cmd_pat.match(self.command)
140 141 if hg_match is not None:
141 142 vcs_type = 'hg'
142 143 repo_id = hg_match.group(1).strip('/')
143 144 repo_name, org_name = self.maybe_translate_repo_uid(repo_id)
144 145 return vcs_type, repo_name, mode
145 146
146 147 git_match = self.git_cmd_pat.match(self.command)
147 148 if git_match is not None:
148 149 mode = git_match.group(1)
149 150 vcs_type = 'git'
150 151 repo_id = git_match.group(2).strip('/')
151 152 repo_name, org_name = self.maybe_translate_repo_uid(repo_id)
152 153 return vcs_type, repo_name, mode
153 154
154 155 svn_match = self.svn_cmd_pat.match(self.command)
155 156 if svn_match is not None:
156 157 vcs_type = 'svn'
157 158 # Repo name should be extracted from the input stream, we're unable to
158 159 # extract it at this point in execution
159 160 return vcs_type, repo_name, mode
160 161
161 162 return vcs_type, repo_name, mode
162 163
163 164 def serve(self, vcs, repo, mode, user, permissions, branch_permissions):
164 165 store = ScmModel().repos_path
165 166
166 167 check_branch_perms = False
167 168 detect_force_push = False
168 169
169 170 if branch_permissions:
170 171 check_branch_perms = True
171 172 detect_force_push = True
172 173
173 174 log.debug(
174 175 'VCS detected:`%s` mode: `%s` repo_name: %s, branch_permission_checks:%s',
175 176 vcs, mode, repo, check_branch_perms)
176 177
177 178 # detect if we have to check branch permissions
178 179 extras = {
179 180 'detect_force_push': detect_force_push,
180 181 'check_branch_perms': check_branch_perms,
181 182 'config': self.ini_path
182 183 }
183 184
184 185 if vcs == 'hg':
185 186 server = MercurialServer(
186 187 store=store, ini_path=self.ini_path,
187 188 repo_name=repo, user=user,
188 189 user_permissions=permissions, config=self.config, env=self.env)
189 190 self.server_impl = server
190 191 return server.run(tunnel_extras=extras)
191 192
192 193 elif vcs == 'git':
193 194 server = GitServer(
194 195 store=store, ini_path=self.ini_path,
195 196 repo_name=repo, repo_mode=mode, user=user,
196 197 user_permissions=permissions, config=self.config, env=self.env)
197 198 self.server_impl = server
198 199 return server.run(tunnel_extras=extras)
199 200
200 201 elif vcs == 'svn':
201 202 server = SubversionServer(
202 203 store=store, ini_path=self.ini_path,
203 204 repo_name=None, user=user,
204 205 user_permissions=permissions, config=self.config, env=self.env)
205 206 self.server_impl = server
206 207 return server.run(tunnel_extras=extras)
207 208
208 209 else:
209 210 raise Exception(f'Unrecognised VCS: {vcs}')
210 211
211 212 def wrap(self):
212 213 mode = self.mode
213 214 username = self.username
214 215 user_id = self.user_id
215 216 key_id = self.key_id
216 217 shell = self.shell
217 218
218 219 scm_detected, scm_repo, scm_mode = self.get_repo_details(mode)
219 220
220 221 log.debug(
221 222 'Mode: `%s` User: `name:%s : id:%s` Shell: `%s` SSH Command: `\"%s\"` '
222 223 'SCM_DETECTED: `%s` SCM Mode: `%s` SCM Repo: `%s`',
223 224 mode, username, user_id, shell, self.command,
224 225 scm_detected, scm_mode, scm_repo)
225 226
226 227 log.debug('SSH Connection info %s', self.get_connection_info())
227 228
228 229 # update last access time for this key
229 230 if key_id:
230 231 self.update_key_access_time(key_id)
231 232
232 233 if shell and self.command is None:
233 234 log.info('Dropping to shell, no command given and shell is allowed')
234 235 os.execl('/bin/bash', '-l')
235 236 exit_code = 1
236 237
237 238 elif scm_detected:
238 239 user = self.get_user(user_id)
239 240 if not user:
240 241 log.warning('User with id %s not found', user_id)
241 242 exit_code = -1
242 243 return exit_code
243 244
244 245 auth_user = user.auth_user
245 246 permissions = auth_user.permissions['repositories']
246 247 repo_branch_permissions = auth_user.get_branch_permissions(scm_repo)
247 248 try:
248 249 exit_code, is_updated = self.serve(
249 250 scm_detected, scm_repo, scm_mode, user, permissions,
250 251 repo_branch_permissions)
251 252 except Exception:
252 253 log.exception('Error occurred during execution of SshWrapper')
253 254 exit_code = -1
254 255
255 256 elif self.command is None and shell is False:
256 257 log.error('No Command given.')
257 258 exit_code = -1
258 259
259 260 else:
260 261 log.error('Unhandled Command: "%s" Aborting.', self.command)
261 262 exit_code = -1
262 263
263 264 return exit_code
265
266
267 class SshWrapperStandalone(SshWrapper):
268 """
269 New version of SshWrapper designed to depend only on the service API
270 """
271 repos_path = None
272
273 @staticmethod
274 def parse_user_related_data(user_data):
275 user = AttributeDict()
276 user.user_id = user_data['user_id']
277 user.username = user_data['username']
278 user.repo_permissions = user_data['repo_permissions']
279 user.branch_permissions = user_data['branch_permissions']
280 return user
281
282 def wrap(self):
283 mode = self.mode
284 username = self.username
285 user_id = self.user_id
286 shell = self.shell
287
288 scm_detected, scm_repo, scm_mode = self.get_repo_details(mode)
289
290 log.debug(
291 'Mode: `%s` User: `name:%s : id:%s` Shell: `%s` SSH Command: `\"%s\"` '
292 'SCM_DETECTED: `%s` SCM Mode: `%s` SCM Repo: `%s`',
293 mode, username, user_id, shell, self.command,
294 scm_detected, scm_mode, scm_repo)
295
296 log.debug('SSH Connection info %s', self.get_connection_info())
297
298 if shell and self.command is None:
299 log.info('Dropping to shell, no command given and shell is allowed')
300 os.execl('/bin/bash', '-l')
301 exit_code = 1
302
303 elif scm_detected:
304 data = call_service_api(self.ini_path, {
305 "method": "service_get_data_for_ssh_wrapper",
306 "args": {"user_id": user_id, "repo_name": scm_repo, "key_id": self.key_id}
307 })
308 user = self.parse_user_related_data(data)
309 if not user:
310 log.warning('User with id %s not found', user_id)
311 exit_code = -1
312 return exit_code
313 self.repos_path = data['repos_path']
314 permissions = user.repo_permissions
315 repo_branch_permissions = user.branch_permissions
316 try:
317 exit_code, is_updated = self.serve(
318 scm_detected, scm_repo, scm_mode, user, permissions,
319 repo_branch_permissions)
320 except Exception:
321 log.exception('Error occurred during execution of SshWrapper')
322 exit_code = -1
323
324 elif self.command is None and shell is False:
325 log.error('No Command given.')
326 exit_code = -1
327
328 else:
329 log.error('Unhandled Command: "%s" Aborting.', self.command)
330 exit_code = -1
331
332 return exit_code
333
334 def maybe_translate_repo_uid(self, repo_name):
335 _org_name = repo_name
336 if _org_name.startswith('_'):
337 _org_name = _org_name.split('/', 1)[0]
338
339 if repo_name.startswith('_'):
340 org_repo_name = repo_name
341 log.debug('translating UID repo %s', org_repo_name)
342 by_id_match = call_service_api(self.ini_path, {
343 'method': 'service_get_repo_name_by_id',
344 "args": {"repo_id": repo_name}
345 })
346 if by_id_match:
347 repo_name = by_id_match['repo_name']
348 log.debug('translation of UID repo %s got `%s`', org_repo_name, repo_name)
349
350 return repo_name, _org_name
351
352 def serve(self, vcs, repo, mode, user, permissions, branch_permissions):
353 store = self.repos_path
354
355 check_branch_perms = False
356 detect_force_push = False
357
358 if branch_permissions:
359 check_branch_perms = True
360 detect_force_push = True
361
362 log.debug(
363 'VCS detected:`%s` mode: `%s` repo_name: %s, branch_permission_checks:%s',
364 vcs, mode, repo, check_branch_perms)
365
366 # detect if we have to check branch permissions
367 extras = {
368 'detect_force_push': detect_force_push,
369 'check_branch_perms': check_branch_perms,
370 'config': self.ini_path
371 }
372
373 match vcs:
374 case 'hg':
375 server = MercurialServer(
376 store=store, ini_path=self.ini_path,
377 repo_name=repo, user=user,
378 user_permissions=permissions, config=self.config, env=self.env)
379 case 'git':
380 server = GitServer(
381 store=store, ini_path=self.ini_path,
382 repo_name=repo, repo_mode=mode, user=user,
383 user_permissions=permissions, config=self.config, env=self.env)
384 case 'svn':
385 server = SubversionServer(
386 store=store, ini_path=self.ini_path,
387 repo_name=None, user=user,
388 user_permissions=permissions, config=self.config, env=self.env)
389 case _:
390 raise Exception(f'Unrecognised VCS: {vcs}')
391 self.server_impl = server
392 return server.run(tunnel_extras=extras)
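
For reference, a minimal sketch of the service API round trip that SshWrapperStandalone relies on instead of direct database access. The ini path and numeric ids are placeholders; the method name and fields mirror the ones consumed by parse_user_related_data() and wrap() above.

    # Hedged sketch: placeholder ini path and ids, not taken from this change.
    from rhodecode.lib.utils import call_service_api

    data = call_service_api('/etc/rhodecode/rhodecode.ini', {
        "method": "service_get_data_for_ssh_wrapper",
        "args": {"user_id": 2, "repo_name": "vcs_test_git", "key_id": 1},
    })
    # Expected keys, per the wrapper code above:
    # user_id, username, repo_permissions, branch_permissions, repos_path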
@@ -1,161 +1,174 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import sys
21 21 import logging
22 22
23 23 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
24 24 from rhodecode.lib.ext_json import sjson as json
25 25 from rhodecode.lib.vcs.conf import settings as vcs_settings
26 from rhodecode.lib.utils import call_service_api
26 27 from rhodecode.model.scm import ScmModel
27 28
28 29 log = logging.getLogger(__name__)
29 30
30 31
31 32 class VcsServer(object):
32 33 repo_user_agent = None # set in child classes
33 34 _path = None # set executable path for hg/git/svn binary
34 35 backend = None # set in child classes
35 36 tunnel = None # subprocess handling tunnel
36 37 write_perms = ['repository.admin', 'repository.write']
37 38 read_perms = ['repository.read', 'repository.admin', 'repository.write']
38 39
39 40 def __init__(self, user, user_permissions, config, env):
40 41 self.user = user
41 42 self.user_permissions = user_permissions
42 43 self.config = config
43 44 self.env = env
44 45 self.stdin = sys.stdin
45 46
46 47 self.repo_name = None
47 48 self.repo_mode = None
48 49 self.store = ''
49 50 self.ini_path = ''
51 self.hooks_protocol = None
50 52
51 53 def _invalidate_cache(self, repo_name):
52 54 """
53 55 Sets the cache for this repository for invalidation on next access
54 56
55 57 :param repo_name: full repo name, also a cache key
56 58 """
57 ScmModel().mark_for_invalidation(repo_name)
59 # Todo: Leave only "celery" case after transition.
60 match self.hooks_protocol:
61 case 'http':
62 ScmModel().mark_for_invalidation(repo_name)
63 case 'celery':
64 call_service_api(self.ini_path, {
65 "method": "service_mark_for_invalidation",
66 "args": {"repo_name": repo_name}
67 })
58 68
59 69 def has_write_perm(self):
60 70 permission = self.user_permissions.get(self.repo_name)
61 71 if permission in ['repository.write', 'repository.admin']:
62 72 return True
63 73
64 74 return False
65 75
66 76 def _check_permissions(self, action):
67 77 permission = self.user_permissions.get(self.repo_name)
78 user_info = f'{self.user["user_id"]}:{self.user["username"]}'
68 79 log.debug('permission for %s on %s are: %s',
69 self.user, self.repo_name, permission)
80 user_info, self.repo_name, permission)
70 81
71 82 if not permission:
72 83 log.error('user `%s` permissions to repo:%s are empty. Forbidding access.',
73 self.user, self.repo_name)
84 user_info, self.repo_name)
74 85 return -2
75 86
76 87 if action == 'pull':
77 88 if permission in self.read_perms:
78 89 log.info(
79 90 'READ Permissions for User "%s" detected to repo "%s"!',
80 self.user, self.repo_name)
91 user_info, self.repo_name)
81 92 return 0
82 93 else:
83 94 if permission in self.write_perms:
84 95 log.info(
85 96 'WRITE, or Higher Permissions for User "%s" detected to repo "%s"!',
86 self.user, self.repo_name)
97 user_info, self.repo_name)
87 98 return 0
88 99
89 100 log.error('Cannot properly fetch or verify user `%s` permissions. '
90 101 'Permissions: %s, vcs action: %s',
91 self.user, permission, action)
102 user_info, permission, action)
92 103 return -2
93 104
94 105 def update_environment(self, action, extras=None):
95 106
96 107 scm_data = {
97 108 'ip': os.environ['SSH_CLIENT'].split()[0],
98 109 'username': self.user.username,
99 110 'user_id': self.user.user_id,
100 111 'action': action,
101 112 'repository': self.repo_name,
102 113 'scm': self.backend,
103 114 'config': self.ini_path,
104 115 'repo_store': self.store,
105 116 'make_lock': None,
106 117 'locked_by': [None, None],
107 118 'server_url': None,
108 119 'user_agent': f'{self.repo_user_agent}/ssh-user-agent',
109 120 'hooks': ['push', 'pull'],
110 121 'hooks_module': 'rhodecode.lib.hooks_daemon',
111 122 'is_shadow_repo': False,
112 123 'detect_force_push': False,
113 124 'check_branch_perms': False,
114 125
115 126 'SSH': True,
116 127 'SSH_PERMISSIONS': self.user_permissions.get(self.repo_name),
117 128 }
118 129 if extras:
119 130 scm_data.update(extras)
120 131 os.putenv("RC_SCM_DATA", json.dumps(scm_data))
121 132 return scm_data
122 133
123 134 def get_root_store(self):
124 135 root_store = self.store
125 136 if not root_store.endswith('/'):
126 137 # always append trailing slash
127 138 root_store = root_store + '/'
128 139 return root_store
129 140
130 141 def _handle_tunnel(self, extras):
131 142 # pre-auth
132 143 action = 'pull'
133 144 exit_code = self._check_permissions(action)
134 145 if exit_code:
135 146 return exit_code, False
136 147
137 req = self.env['request']
138 server_url = req.host_url + req.script_name
139 extras['server_url'] = server_url
148 req = self.env.get('request')
149 if req:
150 server_url = req.host_url + req.script_name
151 extras['server_url'] = server_url
140 152
141 153 log.debug('Using %s binaries from path %s', self.backend, self._path)
142 154 exit_code = self.tunnel.run(extras)
143 155
144 156 return exit_code, action == "push"
145 157
146 158 def run(self, tunnel_extras=None):
159 self.hooks_protocol = self.config.get('app:main', 'vcs.hooks.protocol')
147 160 tunnel_extras = tunnel_extras or {}
148 161 extras = {}
149 162 extras.update(tunnel_extras)
150 163
151 164 callback_daemon, extras = prepare_callback_daemon(
152 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
165 extras, protocol=self.hooks_protocol,
153 166 host=vcs_settings.HOOKS_HOST)
154 167
155 168 with callback_daemon:
156 169 try:
157 170 return self._handle_tunnel(extras)
158 171 finally:
159 172 log.debug('Running cleanup with cache invalidation')
160 173 if self.repo_name:
161 174 self._invalidate_cache(self.repo_name)
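
The updated run() reads vcs.hooks.protocol from the [app:main] section instead of the static vcs_settings constant, and call_service_api() (added in rhodecode/lib/utils.py below) additionally expects app.service_api.host, app.service_api.token and rhodecode.api.url. A hedged sketch of a minimal test-style configuration follows; the option names come from this change, the values are placeholders rather than shipped defaults.

    # Placeholder values; only the option names are taken from this change.
    import configparser

    conf = configparser.ConfigParser()
    conf.add_section('app:main')
    conf.set('app:main', 'vcs.hooks.protocol', 'celery')  # or 'http' during transition
    conf.set('app:main', 'app.service_api.host', 'http://localhost:10020')
    conf.set('app:main', 'app.service_api.token', 'secret4')
    conf.set('app:main', 'rhodecode.api.url', '/_admin/api')

    with open('ssh_wrapper_test.ini', 'w') as f:
        conf.write(f)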
@@ -1,146 +1,155 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import sys
21 21 import logging
22 22 import tempfile
23 23 import textwrap
24 24 import collections
25 25 from .base import VcsServer
26 from rhodecode.lib.utils import call_service_api
26 27 from rhodecode.model.db import RhodeCodeUi
27 28 from rhodecode.model.settings import VcsSettingsModel
28 29
29 30 log = logging.getLogger(__name__)
30 31
31 32
32 33 class MercurialTunnelWrapper(object):
33 34 process = None
34 35
35 36 def __init__(self, server):
36 37 self.server = server
37 38 self.stdin = sys.stdin
38 39 self.stdout = sys.stdout
39 40 self.hooks_env_fd, self.hooks_env_path = tempfile.mkstemp(prefix='hgrc_rhodecode_')
40 41
41 42 def create_hooks_env(self):
42 43 repo_name = self.server.repo_name
43 44 hg_flags = self.server.config_to_hgrc(repo_name)
44 45
45 46 content = textwrap.dedent(
46 47 '''
47 48 # RhodeCode SSH hooks version=2.0.0
48 49 {custom}
49 50 '''
50 51 ).format(custom='\n'.join(hg_flags))
51 52
52 53 root = self.server.get_root_store()
53 54 hgrc_custom = os.path.join(root, repo_name, '.hg', 'hgrc_rhodecode')
54 55 hgrc_main = os.path.join(root, repo_name, '.hg', 'hgrc')
55 56
56 57 # cleanup custom hgrc file
57 58 if os.path.isfile(hgrc_custom):
58 59 with open(hgrc_custom, 'wb') as f:
59 60 f.write(b'')  # file is opened in binary mode, so write bytes
60 61 log.debug('Cleanup custom hgrc file under %s', hgrc_custom)
61 62
62 63 # write temp
63 64 with os.fdopen(self.hooks_env_fd, 'w') as hooks_env_file:
64 65 hooks_env_file.write(content)
65 66
66 67 return self.hooks_env_path
67 68
68 69 def remove_configs(self):
69 70 os.remove(self.hooks_env_path)
70 71
71 72 def command(self, hgrc_path):
72 73 root = self.server.get_root_store()
73 74
74 75 command = (
75 76 "cd {root}; HGRCPATH={hgrc} {hg_path} -R {root}{repo_name} "
76 77 "serve --stdio".format(
77 78 root=root, hg_path=self.server.hg_path,
78 79 repo_name=self.server.repo_name, hgrc=hgrc_path))
79 80 log.debug("Final CMD: %s", command)
80 81 return command
81 82
82 83 def run(self, extras):
83 84 # at this point we cannot tell; we do further ACL checks
84 85 # inside the hooks
85 86 action = '?'
86 87 # permissions are checked via the `pre_push_ssh_auth` hook
87 88 self.server.update_environment(action=action, extras=extras)
88 89 custom_hgrc_file = self.create_hooks_env()
89 90
90 91 try:
91 92 return os.system(self.command(custom_hgrc_file))
92 93 finally:
93 94 self.remove_configs()
94 95
95 96
96 97 class MercurialServer(VcsServer):
97 98 backend = 'hg'
98 99 repo_user_agent = 'mercurial'
99 100 cli_flags = ['phases', 'largefiles', 'extensions', 'experimental', 'hooks']
100 101
101 102 def __init__(self, store, ini_path, repo_name, user, user_permissions, config, env):
102 103 super().__init__(user, user_permissions, config, env)
103 104
104 105 self.store = store
105 106 self.ini_path = ini_path
106 107 self.repo_name = repo_name
107 108 self._path = self.hg_path = config.get('app:main', 'ssh.executable.hg')
108 109 self.tunnel = MercurialTunnelWrapper(server=self)
109 110
110 111 def config_to_hgrc(self, repo_name):
112 # Todo: once the transition is done, only the service api call should remain
113 if self.hooks_protocol == 'celery':
114 data = call_service_api(self.ini_path, {
115 "method": "service_config_to_hgrc",
116 "args": {"cli_flags": self.cli_flags, "repo_name": repo_name}
117 })
118 return data['flags']
119
111 120 ui_sections = collections.defaultdict(list)
112 121 ui = VcsSettingsModel(repo=repo_name).get_ui_settings(section=None, key=None)
113 122
114 123 # write default hooks
115 124 default_hooks = [
116 125 ('pretxnchangegroup.ssh_auth', 'python:vcsserver.hooks.pre_push_ssh_auth'),
117 126 ('pretxnchangegroup.ssh', 'python:vcsserver.hooks.pre_push_ssh'),
118 127 ('changegroup.ssh', 'python:vcsserver.hooks.post_push_ssh'),
119 128
120 129 ('preoutgoing.ssh', 'python:vcsserver.hooks.pre_pull_ssh'),
121 130 ('outgoing.ssh', 'python:vcsserver.hooks.post_pull_ssh'),
122 131 ]
123 132
124 133 for k, v in default_hooks:
125 134 ui_sections['hooks'].append((k, v))
126 135
127 136 for entry in ui:
128 137 if not entry.active:
129 138 continue
130 139 sec = entry.section
131 140 key = entry.key
132 141
133 142 if sec in self.cli_flags:
134 143 # we want only custom hooks, so we skip builtins
135 144 if sec == 'hooks' and key in RhodeCodeUi.HOOKS_BUILTIN:
136 145 continue
137 146
138 147 ui_sections[sec].append([key, entry.value])
139 148
140 149 flags = []
141 150 for _sec, key_val in ui_sections.items():
142 151 flags.append(' ')
143 152 flags.append(f'[{_sec}]')
144 153 for key, val in key_val:
145 154 flags.append(f'{key}= {val}')
146 155 return flags
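
For illustration, the list returned by config_to_hgrc() is a flat sequence of hgrc lines; create_hooks_env() joins it into the temporary file passed via HGRCPATH to `hg serve --stdio`. A rough sketch of the shape, using only the default hooks registered above:

    flags = [
        ' ',
        '[hooks]',
        'pretxnchangegroup.ssh_auth= python:vcsserver.hooks.pre_push_ssh_auth',
        'pretxnchangegroup.ssh= python:vcsserver.hooks.pre_push_ssh',
        'changegroup.ssh= python:vcsserver.hooks.post_push_ssh',
        'preoutgoing.ssh= python:vcsserver.hooks.pre_pull_ssh',
        'outgoing.ssh= python:vcsserver.hooks.post_pull_ssh',
    ]
    hgrc_body = '\n'.join(flags)  # written into the hgrc_rhodecode_* temp file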
1 NO CONTENT: file renamed from rhodecode/apps/ssh_support/lib/ssh_wrapper.py to rhodecode/apps/ssh_support/lib/ssh_wrapper_v1.py
@@ -1,68 +1,68 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import pytest
21 21 import configparser
22 22
23 from rhodecode.apps.ssh_support.lib.ssh_wrapper import SshWrapper
23 from rhodecode.apps.ssh_support.lib.ssh_wrapper_v1 import SshWrapper
24 24 from rhodecode.lib.utils2 import AttributeDict
25 25
26 26
27 27 @pytest.fixture()
28 28 def dummy_conf_file(tmpdir):
29 29 conf = configparser.ConfigParser()
30 30 conf.add_section('app:main')
31 31 conf.set('app:main', 'ssh.executable.hg', '/usr/bin/hg')
32 32 conf.set('app:main', 'ssh.executable.git', '/usr/bin/git')
33 33 conf.set('app:main', 'ssh.executable.svn', '/usr/bin/svnserve')
34 34
35 35 f_path = os.path.join(str(tmpdir), 'ssh_wrapper_test.ini')
36 36 with open(f_path, 'wt') as f:
37 37 conf.write(f)
38 38
39 39 return os.path.join(f_path)
40 40
41 41
42 42 def plain_dummy_env():
43 43 return {
44 44 'request':
45 45 AttributeDict(host_url='http://localhost', script_name='/')
46 46 }
47 47
48 48
49 49 @pytest.fixture()
50 50 def dummy_env():
51 51 return plain_dummy_env()
52 52
53 53
54 54 def plain_dummy_user():
55 55 return AttributeDict(username='test_user')
56 56
57 57
58 58 @pytest.fixture()
59 59 def dummy_user():
60 60 return plain_dummy_user()
61 61
62 62
63 63 @pytest.fixture()
64 64 def ssh_wrapper(app, dummy_conf_file, dummy_env):
65 65 conn_info = '127.0.0.1 22 10.0.0.1 443'
66 66 return SshWrapper(
67 67 'random command', conn_info, 'auto', 'admin', '1', key_id='1',
68 68 shell=False, ini_path=dummy_conf_file, env=dummy_env)
@@ -1,823 +1,851 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Utilities library for RhodeCode
21 21 """
22 22
23 23 import datetime
24 24 import decorator
25 25 import logging
26 26 import os
27 27 import re
28 28 import sys
29 29 import shutil
30 30 import socket
31 31 import tempfile
32 32 import traceback
33 33 import tarfile
34 34 import warnings
35 35 from functools import wraps
36 36 from os.path import join as jn
37 from configparser import NoOptionError
37 38
38 39 import paste
39 40 import pkg_resources
40 41 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
41 42
42 43 from mako import exceptions
43 44
44 45 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
45 46 from rhodecode.lib.type_utils import AttributeDict
46 47 from rhodecode.lib.str_utils import safe_bytes, safe_str
47 48 from rhodecode.lib.vcs.backends.base import Config
48 49 from rhodecode.lib.vcs.exceptions import VCSError
49 50 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
50 51 from rhodecode.lib.ext_json import sjson as json
51 52 from rhodecode.model import meta
52 53 from rhodecode.model.db import (
53 54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
54 55 from rhodecode.model.meta import Session
56 from rhodecode.lib.pyramid_utils import get_config
57 from rhodecode.lib.vcs import CurlSession
58 from rhodecode.lib.vcs.exceptions import ImproperlyConfiguredError
55 59
56 60
57 61 log = logging.getLogger(__name__)
58 62
59 63 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
60 64
61 65 # String which contains characters that are not allowed in slug names for
62 66 # repositories or repository groups. It is properly escaped to use it in
63 67 # regular expressions.
64 68 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
65 69
66 70 # Regex that matches forbidden characters in repo/group slugs.
67 71 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
68 72
69 73 # Regex that matches allowed characters in repo/group slugs.
70 74 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
71 75
72 76 # Regex that matches whole repo/group slugs.
73 77 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
74 78
75 79 _license_cache = None
76 80
77 81
78 82 def adopt_for_celery(func):
79 83 """
80 84 Decorator designed to adopt hooks (from rhodecode.lib.hooks_base)
81 85 for further usage as a celery tasks.
82 86 """
83 87 @wraps(func)
84 88 def wrapper(extras):
85 89 extras = AttributeDict(extras)
86 90 # HooksResponse implements to_json method which must be used there.
87 91 return func(extras).to_json()
88 92 return wrapper
89 93
90 94
91 95 def repo_name_slug(value):
92 96 """
93 97 Return slug of name of repository
94 98 This function is called on each creation/modification
95 99 of repository to prevent bad names in repo
96 100 """
97 101
98 102 replacement_char = '-'
99 103
100 104 slug = strip_tags(value)
101 105 slug = convert_accented_entities(slug)
102 106 slug = convert_misc_entities(slug)
103 107
104 108 slug = SLUG_BAD_CHAR_RE.sub('', slug)
105 109 slug = re.sub(r'[\s]+', '-', slug)
106 110 slug = collapse(slug, replacement_char)
107 111
108 112 return slug
109 113
110 114
111 115 #==============================================================================
112 116 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
113 117 #==============================================================================
114 118 def get_repo_slug(request):
115 119 _repo = ''
116 120
117 121 if hasattr(request, 'db_repo_name'):
118 122 # if our request has a db reference set, use it for the name; this
119 123 # translates the example.com/_<id> into proper repo names
120 124 _repo = request.db_repo_name
121 125 elif getattr(request, 'matchdict', None):
122 126 # pyramid
123 127 _repo = request.matchdict.get('repo_name')
124 128
125 129 if _repo:
126 130 _repo = _repo.rstrip('/')
127 131 return _repo
128 132
129 133
130 134 def get_repo_group_slug(request):
131 135 _group = ''
132 136 if hasattr(request, 'db_repo_group'):
133 137 # if our request has a db reference set, use it for the name; this
134 138 # translates the example.com/_<id> into proper repo group names
135 139 _group = request.db_repo_group.group_name
136 140 elif getattr(request, 'matchdict', None):
137 141 # pyramid
138 142 _group = request.matchdict.get('repo_group_name')
139 143
140 144 if _group:
141 145 _group = _group.rstrip('/')
142 146 return _group
143 147
144 148
145 149 def get_user_group_slug(request):
146 150 _user_group = ''
147 151
148 152 if hasattr(request, 'db_user_group'):
149 153 _user_group = request.db_user_group.users_group_name
150 154 elif getattr(request, 'matchdict', None):
151 155 # pyramid
152 156 _user_group = request.matchdict.get('user_group_id')
153 157 _user_group_name = request.matchdict.get('user_group_name')
154 158 try:
155 159 if _user_group:
156 160 _user_group = UserGroup.get(_user_group)
157 161 elif _user_group_name:
158 162 _user_group = UserGroup.get_by_group_name(_user_group_name)
159 163
160 164 if _user_group:
161 165 _user_group = _user_group.users_group_name
162 166 except Exception:
163 167 log.exception('Failed to get user group by id and name')
164 168 # catch all failures here
165 169 return None
166 170
167 171 return _user_group
168 172
169 173
170 174 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
171 175 """
172 176 Scans the given path for repos and returns (name, (type, path)) tuples
173 177
174 178 :param path: path to scan for repositories
175 179 :param recursive: recursive search and return names with subdirs in front
176 180 """
177 181
178 182 # remove ending slash for better results
179 183 path = path.rstrip(os.sep)
180 184 log.debug('now scanning in %s location recursive:%s...', path, recursive)
181 185
182 186 def _get_repos(p):
183 187 dirpaths = get_dirpaths(p)
184 188 if not _is_dir_writable(p):
185 189 log.warning('repo path without write access: %s', p)
186 190
187 191 for dirpath in dirpaths:
188 192 if os.path.isfile(os.path.join(p, dirpath)):
189 193 continue
190 194 cur_path = os.path.join(p, dirpath)
191 195
192 196 # skip removed repos
193 197 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
194 198 continue
195 199
196 200 #skip .<somethin> dirs
197 201 if dirpath.startswith('.'):
198 202 continue
199 203
200 204 try:
201 205 scm_info = get_scm(cur_path)
202 206 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
203 207 except VCSError:
204 208 if not recursive:
205 209 continue
206 210 # check if this dir contains other repos for recursive scan
207 211 rec_path = os.path.join(p, dirpath)
208 212 if os.path.isdir(rec_path):
209 213 yield from _get_repos(rec_path)
210 214
211 215 return _get_repos(path)
212 216
213 217
214 218 def get_dirpaths(p: str) -> list:
215 219 try:
216 220 # OS-independent way of checking if we have at least read-only
217 221 # access or not.
218 222 dirpaths = os.listdir(p)
219 223 except OSError:
220 224 log.warning('ignoring repo path without read access: %s', p)
221 225 return []
222 226
223 227 # os.listdir has a tweak: if a unicode path is passed into it, then it tries to
224 228 # decode paths and suddenly returns unicode objects itself. The items it
225 229 # cannot decode are returned as strings and cause issues.
226 230 #
227 231 # Those paths are ignored here until a solid solution for path handling has
228 232 # been built.
229 233 expected_type = type(p)
230 234
231 235 def _has_correct_type(item):
232 236 if type(item) is not expected_type:
233 237 log.error(
234 238 "Ignoring path %s since it cannot be decoded into str.",
235 239 # Using "repr" to make sure that we see the byte value in case
236 240 # of support.
237 241 repr(item))
238 242 return False
239 243 return True
240 244
241 245 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
242 246
243 247 return dirpaths
244 248
245 249
246 250 def _is_dir_writable(path):
247 251 """
248 252 Probe if `path` is writable.
249 253
250 254 Due to trouble on Cygwin / Windows, this is actually probing if it is
251 255 possible to create a file inside of `path`, stat does not produce reliable
252 256 results in this case.
253 257 """
254 258 try:
255 259 with tempfile.TemporaryFile(dir=path):
256 260 pass
257 261 except OSError:
258 262 return False
259 263 return True
260 264
261 265
262 266 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
263 267 """
264 268 Returns True if given path is a valid repository False otherwise.
265 269 If expect_scm param is given also, compare if given scm is the same
266 270 as expected from scm parameter. If explicit_scm is given don't try to
267 271 detect the scm, just use the given one to check if repo is valid
268 272
269 273 :param repo_name:
270 274 :param base_path:
271 275 :param expect_scm:
272 276 :param explicit_scm:
273 277 :param config:
274 278
275 279 :return True: if given path is a valid repository
276 280 """
277 281 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
278 282 log.debug('Checking if `%s` is a valid path for repository. '
279 283 'Explicit type: %s', repo_name, explicit_scm)
280 284
281 285 try:
282 286 if explicit_scm:
283 287 detected_scms = [get_scm_backend(explicit_scm)(
284 288 full_path, config=config).alias]
285 289 else:
286 290 detected_scms = get_scm(full_path)
287 291
288 292 if expect_scm:
289 293 return detected_scms[0] == expect_scm
290 294 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
291 295 return True
292 296 except VCSError:
293 297 log.debug('path: %s is not a valid repo !', full_path)
294 298 return False
295 299
296 300
297 301 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
298 302 """
299 303 Returns True if a given path is a repository group, False otherwise
300 304
301 305 :param repo_group_name:
302 306 :param base_path:
303 307 """
304 308 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
305 309 log.debug('Checking if `%s` is a valid path for repository group',
306 310 repo_group_name)
307 311
308 312 # check if it's not a repo
309 313 if is_valid_repo(repo_group_name, base_path):
310 314 log.debug('Repo called %s exist, it is not a valid repo group', repo_group_name)
311 315 return False
312 316
313 317 try:
314 318 # we need to check bare git repos at higher level
315 319 # since we might match branches/hooks/info/objects or possible
316 320 # other things inside bare git repo
317 321 maybe_repo = os.path.dirname(full_path)
318 322 if maybe_repo == base_path:
319 323 # skip root level repo check; we know root location CANNOT BE a repo group
320 324 return False
321 325
322 326 scm_ = get_scm(maybe_repo)
323 327 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
324 328 return False
325 329 except VCSError:
326 330 pass
327 331
328 332 # check if it's a valid path
329 333 if skip_path_check or os.path.isdir(full_path):
330 334 log.debug('path: %s is a valid repo group !', full_path)
331 335 return True
332 336
333 337 log.debug('path: %s is not a valid repo group !', full_path)
334 338 return False
335 339
336 340
337 341 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
338 342 while True:
339 343 ok = input(prompt)
340 344 if ok.lower() in ('y', 'ye', 'yes'):
341 345 return True
342 346 if ok.lower() in ('n', 'no', 'nop', 'nope'):
343 347 return False
344 348 retries = retries - 1
345 349 if retries < 0:
346 350 raise OSError
347 351 print(complaint)
348 352
349 353 # propagated from mercurial documentation
350 354 ui_sections = [
351 355 'alias', 'auth',
352 356 'decode/encode', 'defaults',
353 357 'diff', 'email',
354 358 'extensions', 'format',
355 359 'merge-patterns', 'merge-tools',
356 360 'hooks', 'http_proxy',
357 361 'smtp', 'patch',
358 362 'paths', 'profiling',
359 363 'server', 'trusted',
360 364 'ui', 'web', ]
361 365
362 366
363 367 def config_data_from_db(clear_session=True, repo=None):
364 368 """
365 369 Read the configuration data from the database and return configuration
366 370 tuples.
367 371 """
368 372 from rhodecode.model.settings import VcsSettingsModel
369 373
370 374 config = []
371 375
372 376 sa = meta.Session()
373 377 settings_model = VcsSettingsModel(repo=repo, sa=sa)
374 378
375 379 ui_settings = settings_model.get_ui_settings()
376 380
377 381 ui_data = []
378 382 for setting in ui_settings:
379 383 if setting.active:
380 384 ui_data.append((setting.section, setting.key, setting.value))
381 385 config.append((
382 386 safe_str(setting.section), safe_str(setting.key),
383 387 safe_str(setting.value)))
384 388 if setting.key == 'push_ssl':
385 389 # force set push_ssl requirement to False, rhodecode
386 390 # handles that
387 391 config.append((
388 392 safe_str(setting.section), safe_str(setting.key), False))
389 393 log.debug(
390 394 'settings ui from db@repo[%s]: %s',
391 395 repo,
392 396 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
393 397 if clear_session:
394 398 meta.Session.remove()
395 399
396 400 # TODO: mikhail: probably it makes no sense to re-read hooks information.
397 401 # It's already there and activated/deactivated
398 402 skip_entries = []
399 403 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
400 404 if 'pull' not in enabled_hook_classes:
401 405 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
402 406 if 'push' not in enabled_hook_classes:
403 407 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
404 408 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
405 409 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
406 410
407 411 config = [entry for entry in config if entry[:2] not in skip_entries]
408 412
409 413 return config
410 414
411 415
412 416 def make_db_config(clear_session=True, repo=None):
413 417 """
414 418 Create a :class:`Config` instance based on the values in the database.
415 419 """
416 420 config = Config()
417 421 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
418 422 for section, option, value in config_data:
419 423 config.set(section, option, value)
420 424 return config
421 425
422 426
423 427 def get_enabled_hook_classes(ui_settings):
424 428 """
425 429 Return the enabled hook classes.
426 430
427 431 :param ui_settings: List of ui_settings as returned
428 432 by :meth:`VcsSettingsModel.get_ui_settings`
429 433
430 434 :return: a list with the enabled hook classes. The order is not guaranteed.
431 435 :rtype: list
432 436 """
433 437 enabled_hooks = []
434 438 active_hook_keys = [
435 439 key for section, key, value, active in ui_settings
436 440 if section == 'hooks' and active]
437 441
438 442 hook_names = {
439 443 RhodeCodeUi.HOOK_PUSH: 'push',
440 444 RhodeCodeUi.HOOK_PULL: 'pull',
441 445 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
442 446 }
443 447
444 448 for key in active_hook_keys:
445 449 hook = hook_names.get(key)
446 450 if hook:
447 451 enabled_hooks.append(hook)
448 452
449 453 return enabled_hooks
450 454
451 455
452 456 def set_rhodecode_config(config):
453 457 """
454 458 Updates pyramid config with new settings from database
455 459
456 460 :param config:
457 461 """
458 462 from rhodecode.model.settings import SettingsModel
459 463 app_settings = SettingsModel().get_all_settings()
460 464
461 465 for k, v in list(app_settings.items()):
462 466 config[k] = v
463 467
464 468
465 469 def get_rhodecode_realm():
466 470 """
467 471 Return the rhodecode realm from database.
468 472 """
469 473 from rhodecode.model.settings import SettingsModel
470 474 realm = SettingsModel().get_setting_by_name('realm')
471 475 return safe_str(realm.app_settings_value)
472 476
473 477
474 478 def get_rhodecode_base_path():
475 479 """
476 480 Returns the base path. The base path is the filesystem path which points
477 481 to the repository store.
478 482 """
479 483
480 484 import rhodecode
481 485 return rhodecode.CONFIG['default_base_path']
482 486
483 487
484 488 def map_groups(path):
485 489 """
486 490 Given a full path to a repository, create all nested groups that this
487 491 repo is inside. This function creates parent-child relationships between
488 492 groups and creates default perms for all new groups.
489 493
490 494 :param paths: full path to repository
491 495 """
492 496 from rhodecode.model.repo_group import RepoGroupModel
493 497 sa = meta.Session()
494 498 groups = path.split(Repository.NAME_SEP)
495 499 parent = None
496 500 group = None
497 501
498 502 # last element is repo in nested groups structure
499 503 groups = groups[:-1]
500 504 rgm = RepoGroupModel(sa)
501 505 owner = User.get_first_super_admin()
502 506 for lvl, group_name in enumerate(groups):
503 507 group_name = '/'.join(groups[:lvl] + [group_name])
504 508 group = RepoGroup.get_by_group_name(group_name)
505 509 desc = '%s group' % group_name
506 510
507 511 # skip folders that are now removed repos
508 512 if REMOVED_REPO_PAT.match(group_name):
509 513 break
510 514
511 515 if group is None:
512 516 log.debug('creating group level: %s group_name: %s',
513 517 lvl, group_name)
514 518 group = RepoGroup(group_name, parent)
515 519 group.group_description = desc
516 520 group.user = owner
517 521 sa.add(group)
518 522 perm_obj = rgm._create_default_perms(group)
519 523 sa.add(perm_obj)
520 524 sa.flush()
521 525
522 526 parent = group
523 527 return group
524 528
525 529
526 530 def repo2db_mapper(initial_repo_list, remove_obsolete=False, force_hooks_rebuild=False):
527 531 """
528 532 Maps all repos given in initial_repo_list; non-existing repositories
529 533 are created. If remove_obsolete is True, it also checks for db entries
530 534 that are not in initial_repo_list and removes them.
531 535
532 536 :param initial_repo_list: list of repositories found by scanning methods
533 537 :param remove_obsolete: check for obsolete entries in database
534 538 """
535 539 from rhodecode.model.repo import RepoModel
536 540 from rhodecode.model.repo_group import RepoGroupModel
537 541 from rhodecode.model.settings import SettingsModel
538 542
539 543 sa = meta.Session()
540 544 repo_model = RepoModel()
541 545 user = User.get_first_super_admin()
542 546 added = []
543 547
544 548 # creation defaults
545 549 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
546 550 enable_statistics = defs.get('repo_enable_statistics')
547 551 enable_locking = defs.get('repo_enable_locking')
548 552 enable_downloads = defs.get('repo_enable_downloads')
549 553 private = defs.get('repo_private')
550 554
551 555 for name, repo in list(initial_repo_list.items()):
552 556 group = map_groups(name)
553 557 str_name = safe_str(name)
554 558 db_repo = repo_model.get_by_repo_name(str_name)
555 559
556 560 # found repo that is on filesystem not in RhodeCode database
557 561 if not db_repo:
558 562 log.info('repository `%s` not found in the database, creating now', name)
559 563 added.append(name)
560 564 desc = (repo.description
561 565 if repo.description != 'unknown'
562 566 else '%s repository' % name)
563 567
564 568 db_repo = repo_model._create_repo(
565 569 repo_name=name,
566 570 repo_type=repo.alias,
567 571 description=desc,
568 572 repo_group=getattr(group, 'group_id', None),
569 573 owner=user,
570 574 enable_locking=enable_locking,
571 575 enable_downloads=enable_downloads,
572 576 enable_statistics=enable_statistics,
573 577 private=private,
574 578 state=Repository.STATE_CREATED
575 579 )
576 580 sa.commit()
577 581 # we just added that repo; make sure we update the server info
578 582 if db_repo.repo_type == 'git':
579 583 git_repo = db_repo.scm_instance()
580 584 # update repository server-info
581 585 log.debug('Running update server info')
582 586 git_repo._update_server_info(force=True)
583 587
584 588 db_repo.update_commit_cache()
585 589
586 590 config = db_repo._config
587 591 config.set('extensions', 'largefiles', '')
588 592 repo = db_repo.scm_instance(config=config)
589 593 repo.install_hooks(force=force_hooks_rebuild)
590 594
591 595 removed = []
592 596 if remove_obsolete:
593 597 # remove from database those repositories that are not in the filesystem
594 598 for repo in sa.query(Repository).all():
595 599 if repo.repo_name not in list(initial_repo_list.keys()):
596 600 log.debug("Removing non-existing repository found in db `%s`",
597 601 repo.repo_name)
598 602 try:
599 603 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
600 604 sa.commit()
601 605 removed.append(repo.repo_name)
602 606 except Exception:
603 607 # don't hold further removals on error
604 608 log.error(traceback.format_exc())
605 609 sa.rollback()
606 610
607 611 def splitter(full_repo_name):
608 612 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
609 613 gr_name = None
610 614 if len(_parts) == 2:
611 615 gr_name = _parts[0]
612 616 return gr_name
613 617
614 618 initial_repo_group_list = [splitter(x) for x in
615 619 list(initial_repo_list.keys()) if splitter(x)]
616 620
617 621 # remove from database those repository groups that are not in the
618 622 # filesystem; due to parent-child relationships we need to delete them
619 623 # in a specific order, most nested first
620 624 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
621 625 def nested_sort(gr):
622 626 return len(gr.split('/'))
623 627 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
624 628 if group_name not in initial_repo_group_list:
625 629 repo_group = RepoGroup.get_by_group_name(group_name)
626 630 if (repo_group.children.all() or
627 631 not RepoGroupModel().check_exist_filesystem(
628 632 group_name=group_name, exc_on_failure=False)):
629 633 continue
630 634
631 635 log.info(
632 636 'Removing non-existing repository group found in db `%s`',
633 637 group_name)
634 638 try:
635 639 RepoGroupModel(sa).delete(group_name, fs_remove=False)
636 640 sa.commit()
637 641 removed.append(group_name)
638 642 except Exception:
639 643 # don't hold further removals on error
640 644 log.exception(
641 645 'Unable to remove repository group `%s`',
642 646 group_name)
643 647 sa.rollback()
644 648 raise
645 649
646 650 return added, removed
647 651
648 652
649 653 def load_rcextensions(root_path):
650 654 import rhodecode
651 655 from rhodecode.config import conf
652 656
653 657 path = os.path.join(root_path)
654 658 sys.path.append(path)
655 659
656 660 try:
657 661 rcextensions = __import__('rcextensions')
658 662 except ImportError:
659 663 if os.path.isdir(os.path.join(path, 'rcextensions')):
660 664 log.warning('Unable to load rcextensions from %s', path)
661 665 rcextensions = None
662 666
663 667 if rcextensions:
664 668 log.info('Loaded rcextensions from %s...', rcextensions)
665 669 rhodecode.EXTENSIONS = rcextensions
666 670
667 671 # Additional mappings that are not present in the pygments lexers
668 672 conf.LANGUAGES_EXTENSIONS_MAP.update(
669 673 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
670 674
671 675
672 676 def get_custom_lexer(extension):
673 677 """
674 678 returns a custom lexer if it is defined in rcextensions module, or None
675 679 if there's no custom lexer defined
676 680 """
677 681 import rhodecode
678 682 from pygments import lexers
679 683
680 684 # custom override made by RhodeCode
681 685 if extension in ['mako']:
682 686 return lexers.get_lexer_by_name('html+mako')
683 687
684 688 # check if we didn't define this extension as another lexer
685 689 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
686 690 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
687 691 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
688 692 return lexers.get_lexer_by_name(_lexer_name)
689 693
690 694
691 695 #==============================================================================
692 696 # TEST FUNCTIONS AND CREATORS
693 697 #==============================================================================
694 698 def create_test_index(repo_location, config):
695 699 """
696 700 Makes default test index.
697 701 """
698 702 try:
699 703 import rc_testdata
700 704 except ImportError:
701 705 raise ImportError('Failed to import rc_testdata, '
702 706 'please make sure this package is installed from requirements_test.txt')
703 707 rc_testdata.extract_search_index(
704 708 'vcs_search_index', os.path.dirname(config['search.location']))
705 709
706 710
707 711 def create_test_directory(test_path):
708 712 """
709 713 Create test directory if it doesn't exist.
710 714 """
711 715 if not os.path.isdir(test_path):
712 716 log.debug('Creating testdir %s', test_path)
713 717 os.makedirs(test_path)
714 718
715 719
716 720 def create_test_database(test_path, config):
717 721 """
718 722 Makes a fresh database.
719 723 """
720 724 from rhodecode.lib.db_manage import DbManage
721 725 from rhodecode.lib.utils2 import get_encryption_key
722 726
723 727 # PART ONE create db
724 728 dbconf = config['sqlalchemy.db1.url']
725 729 enc_key = get_encryption_key(config)
726 730
727 731 log.debug('making test db %s', dbconf)
728 732
729 733 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
730 734 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
731 735 dbmanage.create_tables(override=True)
732 736 dbmanage.set_db_version()
733 737 # for tests dynamically set new root paths based on generated content
734 738 dbmanage.create_settings(dbmanage.config_prompt(test_path))
735 739 dbmanage.create_default_user()
736 740 dbmanage.create_test_admin_and_users()
737 741 dbmanage.create_permissions()
738 742 dbmanage.populate_default_permissions()
739 743 Session().commit()
740 744
741 745
742 746 def create_test_repositories(test_path, config):
743 747 """
744 748 Creates test repositories in the temporary directory. Repositories are
745 749 extracted from archives within the rc_testdata package.
746 750 """
747 751 import rc_testdata
748 752 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
749 753
750 754 log.debug('making test vcs repositories')
751 755
752 756 idx_path = config['search.location']
753 757 data_path = config['cache_dir']
754 758
755 759 # clean index and data
756 760 if idx_path and os.path.exists(idx_path):
757 761 log.debug('remove %s', idx_path)
758 762 shutil.rmtree(idx_path)
759 763
760 764 if data_path and os.path.exists(data_path):
761 765 log.debug('remove %s', data_path)
762 766 shutil.rmtree(data_path)
763 767
764 768 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
765 769 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
766 770
767 771 # Note: Subversion is in the process of being integrated with the system;
768 772 # until we have a properly packed version of the test svn repository, this
769 773 # tries to copy over the repo from a package "rc_testdata"
770 774 svn_repo_path = rc_testdata.get_svn_repo_archive()
771 775 with tarfile.open(svn_repo_path) as tar:
772 776 tar.extractall(jn(test_path, SVN_REPO))
773 777
774 778
775 779 def password_changed(auth_user, session):
776 780 # Never report password change in case of default user or anonymous user.
777 781 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
778 782 return False
779 783
780 784 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
781 785 rhodecode_user = session.get('rhodecode_user', {})
782 786 session_password_hash = rhodecode_user.get('password', '')
783 787 return password_hash != session_password_hash
784 788
785 789
786 790 def read_opensource_licenses():
787 791 global _license_cache
788 792
789 793 if not _license_cache:
790 794 licenses = pkg_resources.resource_string(
791 795 'rhodecode', 'config/licenses.json')
792 796 _license_cache = json.loads(licenses)
793 797
794 798 return _license_cache
795 799
796 800
797 801 def generate_platform_uuid():
798 802 """
799 803 Generates platform UUID based on it's name
800 804 """
801 805 import platform
802 806
803 807 try:
804 808 uuid_list = [platform.platform()]
805 809 return sha256_safe(':'.join(uuid_list))
806 810 except Exception as e:
807 811 log.error('Failed to generate host uuid: %s', e)
808 812 return 'UNDEFINED'
809 813
810 814
811 815 def send_test_email(recipients, email_body='TEST EMAIL'):
812 816 """
813 817 Simple code for generating test emails.
814 818 Usage::
815 819
816 820 from rhodecode.lib import utils
817 821 utils.send_test_email(['user@example.com'])
818 822 """
819 823 from rhodecode.lib.celerylib import tasks, run_task
820 824
821 825 email_body = email_body_plaintext = email_body
822 826 subject = f'SUBJECT FROM: {socket.gethostname()}'
823 827 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
828
829
830 def call_service_api(ini_path, payload):
831 config = get_config(ini_path)
832 try:
833 host = config.get('app:main', 'app.service_api.host')
834 except NoOptionError:
835 raise ImproperlyConfiguredError(
836 "app.service_api.host is missing. "
837 "Please ensure that app.service_api.host and app.service_api.token are "
838 "defined inside of .ini configuration file."
839 )
840 api_url = config.get('app:main', 'rhodecode.api.url')
841 payload.update({
842 'id': 'service',
843 'auth_token': config.get('app:main', 'app.service_api.token')
844 })
845
846 response = CurlSession().post(f'{host}{api_url}', json.dumps(payload))
847
848 if response.status_code != 200:
849 raise Exception("Service API responded with error")
850
851 return json.loads(response.content)['result']
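
A short usage sketch of call_service_api(), mirroring maybe_translate_repo_uid() in the new wrapper; the ini path is a placeholder. Note that 'id' and 'auth_token' are injected into the payload before the POST, and only the 'result' part of the JSON-RPC response is returned.

    # Placeholder ini path; method/args mirror the SSH wrapper code above.
    result = call_service_api('/etc/rhodecode/rhodecode.ini', {
        'method': 'service_get_repo_name_by_id',
        'args': {'repo_id': '_1'},
    })
    if result:
        repo_name = result['repo_name']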
@@ -1,232 +1,236 b''
1 1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Custom vcs exceptions module.
21 21 """
22 22 import logging
23 23 import functools
24 24 import urllib.error
25 25 import urllib.parse
26 26 import rhodecode
27 27
28 28 log = logging.getLogger(__name__)
29 29
30 30
31 31 class VCSCommunicationError(Exception):
32 32 pass
33 33
34 34
35 35 class HttpVCSCommunicationError(VCSCommunicationError):
36 36 pass
37 37
38 38
39 39 class VCSError(Exception):
40 40 pass
41 41
42 42
43 43 class RepositoryError(VCSError):
44 44 pass
45 45
46 46
47 47 class RepositoryRequirementError(RepositoryError):
48 48 pass
49 49
50 50
51 51 class UnresolvedFilesInRepo(RepositoryError):
52 52 pass
53 53
54 54
55 55 class VCSBackendNotSupportedError(VCSError):
56 56 """
57 57 Exception raised when VCSServer does not support requested backend
58 58 """
59 59
60 60
61 61 class EmptyRepositoryError(RepositoryError):
62 62 pass
63 63
64 64
65 65 class TagAlreadyExistError(RepositoryError):
66 66 pass
67 67
68 68
69 69 class TagDoesNotExistError(RepositoryError):
70 70 pass
71 71
72 72
73 73 class BranchAlreadyExistError(RepositoryError):
74 74 pass
75 75
76 76
77 77 class BranchDoesNotExistError(RepositoryError):
78 78 pass
79 79
80 80
81 81 class CommitError(RepositoryError):
82 82 """
83 83 Exceptions related to an existing commit
84 84 """
85 85
86 86
87 87 class CommitDoesNotExistError(CommitError):
88 88 pass
89 89
90 90
91 91 class CommittingError(RepositoryError):
92 92 """
93 93 Exceptions happening while creating a new commit
94 94 """
95 95
96 96
97 97 class NothingChangedError(CommittingError):
98 98 pass
99 99
100 100
101 101 class NodeError(VCSError):
102 102 pass
103 103
104 104
105 105 class RemovedFileNodeError(NodeError):
106 106 pass
107 107
108 108
109 109 class NodeAlreadyExistsError(CommittingError):
110 110 pass
111 111
112 112
113 113 class NodeAlreadyChangedError(CommittingError):
114 114 pass
115 115
116 116
117 117 class NodeDoesNotExistError(CommittingError):
118 118 pass
119 119
120 120
121 121 class NodeNotChangedError(CommittingError):
122 122 pass
123 123
124 124
125 125 class NodeAlreadyAddedError(CommittingError):
126 126 pass
127 127
128 128
129 129 class NodeAlreadyRemovedError(CommittingError):
130 130 pass
131 131
132 132
133 133 class SubrepoMergeError(RepositoryError):
134 134 """
135 135 This happens if we try to merge a repository which contains subrepos and
136 136 the subrepos cannot be merged. The subrepos are not merged themselves, but
137 137 their references in the root repo are merged.
138 138 """
139 139
140 140
141 141 class ImproperArchiveTypeError(VCSError):
142 142 pass
143 143
144 144
145 145 class CommandError(VCSError):
146 146 pass
147 147
148 148
149 class ImproperlyConfiguredError(Exception):
150 pass
151
152
149 153 class UnhandledException(VCSError):
150 154 """
151 155 Signals that something unexpected went wrong.
152 156
153 157 This usually means we have a programming error on the side of the VCSServer
154 158 and should inspect the logfile of the VCSServer to find more details.
155 159 """
156 160
157 161
158 162 _EXCEPTION_MAP = {
159 163 'abort': RepositoryError,
160 164 'archive': ImproperArchiveTypeError,
161 165 'error': RepositoryError,
162 166 'lookup': CommitDoesNotExistError,
163 167 'repo_locked': RepositoryError,
164 168 'requirement': RepositoryRequirementError,
165 169 'unhandled': UnhandledException,
166 170 # TODO: johbo: Define our own exception for this and stop abusing
167 171 # urllib's exception class.
168 172 'url_error': urllib.error.URLError,
169 173 'subrepo_merge_error': SubrepoMergeError,
170 174 }
171 175
172 176
173 177 def map_vcs_exceptions(func):
174 178 """
175 179 Utility to decorate functions so that plain exceptions are translated.
176 180
177 181 The translation is based on `exc_map` which maps a `str` indicating
178 182 the error type into an exception class representing this error inside
179 183 of the vcs layer.
180 184 """
181 185
182 186 @functools.wraps(func)
183 187 def wrapper(*args, **kwargs):
184 188 try:
185 189 return func(*args, **kwargs)
186 190 except Exception as e:
187 191 debug = rhodecode.ConfigGet().get_bool('debug')
188 192
189 193 # The error middleware adds information if it finds
190 194 # __traceback_info__ in a frame object. This way the remote
191 195 # traceback information is made available in error reports.
192 196
193 197 remote_tb = getattr(e, '_vcs_server_traceback', None)
194 198 org_remote_tb = getattr(e, '_vcs_server_org_exc_tb', '')
195 199 __traceback_info__ = None
196 200 if remote_tb:
197 201 if isinstance(remote_tb, str):
198 202 remote_tb = [remote_tb]
199 203 __traceback_info__ = (
200 204 'Found VCSServer remote traceback information:\n'
201 205 '{}\n'
202 206 '+++ BEG SOURCE EXCEPTION +++\n\n'
203 207 '{}\n'
204 208 '+++ END SOURCE EXCEPTION +++\n'
205 209 ''.format('\n'.join(remote_tb), org_remote_tb)
206 210 )
207 211
208 212 # Avoid that remote_tb also appears in the frame
209 213 del remote_tb
210 214
211 215 # Special vcs errors had an attribute "_vcs_kind" which is used
212 216 # to translate them to the proper exception class in the vcs
213 217 # client layer.
214 218 kind = getattr(e, '_vcs_kind', None)
215 219 exc_name = getattr(e, '_vcs_server_org_exc_name', None)
216 220
217 221 if kind:
218 222 if any(e.args):
219 223 _args = [a for a in e.args]
220 224 # replace the first argument with a prefix exc name
221 225 args = ['{}:{}'.format(exc_name, _args[0] if _args else '?')] + _args[1:]
222 226 else:
223 227 args = [__traceback_info__ or f'{exc_name}: UnhandledException']
224 228 if debug or __traceback_info__ and kind not in ['unhandled', 'lookup']:
225 229 # for other than unhandled errors also log the traceback
226 230 # can be useful for debugging
227 231 log.error(__traceback_info__)
228 232
229 233 raise _EXCEPTION_MAP[kind](*args)
230 234 else:
231 235 raise
232 236 return wrapper
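
As a quick illustration of the mapping described above, here is a minimal sketch of the decorator in use. It assumes the module path rhodecode.lib.vcs.exceptions and a configured RhodeCode environment (the wrapper consults rhodecode.ConfigGet() for the debug flag); ExampleRemote and its method are hypothetical and exist only for this example.

    from rhodecode.lib.vcs.exceptions import (
        map_vcs_exceptions, CommitDoesNotExistError)

    class ExampleRemote:
        @map_vcs_exceptions
        def get_commit(self, commit_id):
            # emulate a VCSServer-side failure: any exception carrying
            # _vcs_kind == 'lookup' is re-raised by the decorator as
            # CommitDoesNotExistError via _EXCEPTION_MAP
            err = Exception(f'commit {commit_id} not found')
            err._vcs_kind = 'lookup'
            raise err

    try:
        ExampleRemote().get_commit('deadbeef')
    except CommitDoesNotExistError as exc:
        print(f'translated: {exc}')
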
@@ -1,222 +1,223 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import pytest
21 21
22 22 from rhodecode.lib.pyramid_utils import get_app_config
23 23 from rhodecode.tests.fixture import TestINI
24 24 from rhodecode.tests.server_utils import RcVCSServer
25 25
26 26
27 27 @pytest.fixture(scope='session')
28 28 def vcsserver(request, vcsserver_port, vcsserver_factory):
29 29 """
30 30 Session scope VCSServer.
31 31
32 32 Tests which need the VCSServer have to rely on this fixture in order
33 33 to ensure it will be running.
34 34
35 35 For specific needs, the fixture vcsserver_factory can be used. It allows
36 36 adjusting the configuration file for the test run.
37 37
38 38 Command line args:
39 39
40 40 --without-vcsserver: Allows switching this fixture off. You then have to
41 41 start the server manually.
42 42
43 43 --vcsserver-port: Will expect the VCSServer to listen on this port.
44 44 """
45 45
46 46 if not request.config.getoption('with_vcsserver'):
47 47 return None
48 48
49 49 return vcsserver_factory(
50 50 request, vcsserver_port=vcsserver_port)
51 51
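
A hypothetical test that needs the server simply requests the fixture by name; the test below is illustrative only and not part of this changeset.

    import pytest

    def test_needs_vcsserver(vcsserver):
        # session-scoped: the server is started once and reused for the whole run;
        # the fixture returns None when started with --without-vcsserver
        if vcsserver is None:
            pytest.skip('vcsserver disabled via --without-vcsserver')
        # ... exercise code that talks to the running server here ...
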
52 52
53 53 @pytest.fixture(scope='session')
54 54 def vcsserver_factory(tmpdir_factory):
55 55 """
56 56 Use this if you need a running vcsserver with a special configuration.
57 57 """
58 58
59 59 def factory(request, overrides=(), vcsserver_port=None,
60 60 log_file=None, workers='2'):
61 61
62 62 if vcsserver_port is None:
63 63 vcsserver_port = get_available_port()
64 64
65 65 overrides = list(overrides)
66 66 overrides.append({'server:main': {'port': vcsserver_port}})
67 67
68 68 option_name = 'vcsserver_config_http'
69 69 override_option_name = 'vcsserver_config_override'
70 70 config_file = get_config(
71 71 request.config, option_name=option_name,
72 72 override_option_name=override_option_name, overrides=overrides,
73 73 basetemp=tmpdir_factory.getbasetemp().strpath,
74 74 prefix='test_vcs_')
75 75
76 76 server = RcVCSServer(config_file, log_file, workers)
77 77 server.start()
78 78
79 79 @request.addfinalizer
80 80 def cleanup():
81 81 server.shutdown()
82 82
83 83 server.wait_until_ready()
84 84 return server
85 85
86 86 return factory
87 87
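
When non-default settings are needed, the factory can be called directly. A minimal sketch follows; the override section and key are assumptions made purely for illustration.

    def test_with_custom_vcsserver(request, vcsserver_factory):
        # starts an additional server on a free port with an extra INI override;
        # the finalizer registered inside the factory shuts it down at session end
        server = vcsserver_factory(
            request,
            overrides=[{'app:main': {'locale': 'en_US.UTF-8'}}],  # assumed key, illustration only
        )
        # the returned RcVCSServer is already started and ready at this point
        ...
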
88 88
89 89 def _use_log_level(config):
90 90 level = config.getoption('test_loglevel') or 'critical'
91 91 return level.upper()
92 92
93 93
94 94 @pytest.fixture(scope='session')
95 95 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
96 96 option_name = 'pyramid_config'
97 97 log_level = _use_log_level(request.config)
98 98
99 99 overrides = [
100 100 {'server:main': {'port': rcserver_port}},
101 101 {'app:main': {
102 102 'cache_dir': '%(here)s/rc_data',
103 103 'vcs.server': f'localhost:{vcsserver_port}',
104 104 # johbo: We will always start the VCSServer on our own based on the
105 105 # fixtures of the test cases. For the test run it must always be
106 106 # off in the INI file.
107 107 'vcs.start_server': 'false',
108 108
109 109 'vcs.server.protocol': 'http',
110 110 'vcs.scm_app_implementation': 'http',
111 111 'vcs.hooks.protocol': 'http',
112 112 'vcs.hooks.host': '*',
113 'app.service_api.token': 'service_secret_token',
113 114 }},
114 115
115 116 {'handler_console': {
116 117 'class': 'StreamHandler',
117 118 'args': '(sys.stderr,)',
118 119 'level': log_level,
119 120 }},
120 121
121 122 ]
122 123
123 124 filename = get_config(
124 125 request.config, option_name=option_name,
125 126 override_option_name='{}_override'.format(option_name),
126 127 overrides=overrides,
127 128 basetemp=tmpdir_factory.getbasetemp().strpath,
128 129 prefix='test_rce_')
129 130 return filename
130 131
131 132
132 133 @pytest.fixture(scope='session')
133 134 def ini_settings(ini_config):
134 135 ini_path = ini_config
135 136 return get_app_config(ini_path)
136 137
137 138
138 139 def get_available_port(min_port=40000, max_port=55555):
139 140 from rhodecode.lib.utils2 import get_available_port as _get_port
140 141 return _get_port(min_port, max_port)
141 142
142 143
143 144 @pytest.fixture(scope='session')
144 145 def rcserver_port(request):
145 146 port = get_available_port()
146 147 print(f'Using rhodecode port {port}')
147 148 return port
148 149
149 150
150 151 @pytest.fixture(scope='session')
151 152 def vcsserver_port(request):
152 153 port = request.config.getoption('--vcsserver-port')
153 154 if port is None:
154 155 port = get_available_port()
155 156 print(f'Using vcsserver port {port}')
156 157 return port
157 158
158 159
159 160 @pytest.fixture(scope='session')
160 161 def available_port_factory():
161 162 """
162 163 Returns a callable which returns free port numbers.
163 164 """
164 165 return get_available_port
165 166
166 167
167 168 @pytest.fixture()
168 169 def available_port(available_port_factory):
169 170 """
170 171 Gives you one free port for the current test.
171 172
172 173 Uses "available_port_factory" to retrieve the port.
173 174 """
174 175 return available_port_factory()
175 176
176 177
177 178 @pytest.fixture(scope='session')
178 179 def testini_factory(tmpdir_factory, ini_config):
179 180 """
180 181 Factory to create an INI file based on TestINI.
181 182
182 183 It will make sure to place the INI file in the correct directory.
183 184 """
184 185 basetemp = tmpdir_factory.getbasetemp().strpath
185 186 return TestIniFactory(basetemp, ini_config)
186 187
187 188
188 189 class TestIniFactory(object):
189 190
190 191 def __init__(self, basetemp, template_ini):
191 192 self._basetemp = basetemp
192 193 self._template_ini = template_ini
193 194
194 195 def __call__(self, ini_params, new_file_prefix='test'):
195 196 ini_file = TestINI(
196 197 self._template_ini, ini_params=ini_params,
197 198 new_file_prefix=new_file_prefix, dir=self._basetemp)
198 199 result = ini_file.create()
199 200 return result
200 201
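
Typical usage of the factory from a test might look like the sketch below; the section and key passed as overrides are assumptions for illustration only.

    def test_with_custom_ini(testini_factory):
        # writes a new INI file into the test base directory, derived from the
        # session template with the given overrides applied
        custom_ini = testini_factory(
            [{'app:main': {'some.setting': 'some-value'}}],  # assumed key, illustration only
            new_file_prefix='test_custom',
        )
        # ... point the code under test at ``custom_ini`` here ...
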
201 202
202 203 def get_config(
203 204 config, option_name, override_option_name, overrides=None,
204 205 basetemp=None, prefix='test'):
205 206 """
206 207 Find a configuration file and apply overrides for the given `prefix`.
207 208 """
208 209 config_file = (
209 210 config.getoption(option_name) or config.getini(option_name))
210 211 if not config_file:
211 212 pytest.exit(
212 213 "Configuration error, could not extract {}.".format(option_name))
213 214
214 215 overrides = overrides or []
215 216 config_override = config.getoption(override_option_name)
216 217 if config_override:
217 218 overrides.append(config_override)
218 219 temp_ini_file = TestINI(
219 220 config_file, ini_params=overrides, new_file_prefix=prefix,
220 221 dir=basetemp)
221 222
222 223 return temp_ini_file.create()
@@ -1,206 +1,207 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 # Import early to make sure things are patched up properly
20 20 from setuptools import setup, find_packages, Extension
21 21
22 22 import os
23 23 import re
24 24 import sys
25 25 import pkgutil
26 26 import platform
27 27 import codecs
28 28
29 29 import pip
30 30
31 31 pip_major_version = int(pip.__version__.split(".")[0])
32 32 if pip_major_version >= 20:
33 33 from pip._internal.req import parse_requirements
34 34 from pip._internal.network.session import PipSession
35 35 elif pip_major_version >= 10:
36 36 from pip._internal.req import parse_requirements
37 37 from pip._internal.download import PipSession
38 38 else:
39 39 from pip.req import parse_requirements
40 40 from pip.download import PipSession
41 41
42 42
43 43 def get_package_name(req_object):
44 44 package_name = None
45 45 try:
46 46 from pip._internal.req.constructors import install_req_from_parsed_requirement
47 47 except ImportError:
48 48 install_req_from_parsed_requirement = None
49 49
50 50 # In pip 20.1, the requirements object changed
51 51 if hasattr(req_object, 'req'):
52 52 package_name = req_object.req.name
53 53
54 54 if package_name is None:
55 55 if install_req_from_parsed_requirement:
56 56 package = install_req_from_parsed_requirement(req_object)
57 57 package_name = package.req.name
58 58
59 59 if package_name is None:
60 60 # fallback for older pip
61 61 package_name = re.split('===|<=|!=|==|>=|~=|<|>', req_object.requirement)[0]
62 62
63 63 return package_name
64 64
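
The regex fallback above can be exercised in isolation; the requirement string below is a made-up example.

    import re

    # splitting on any version-comparison operator leaves just the distribution name
    name = re.split('===|<=|!=|==|>=|~=|<|>', 'SQLAlchemy==1.4.52')[0]
    print(name)  # -> 'SQLAlchemy'
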
65 65
66 66 if sys.version_info < (3, 10):
67 67 raise Exception('RhodeCode requires Python 3.10 or later')
68 68
69 69 here = os.path.abspath(os.path.dirname(__file__))
70 70
71 71 # defines current platform
72 72 __platform__ = platform.system()
73 73 __license__ = 'AGPLv3, and Commercial License'
74 74 __author__ = 'RhodeCode GmbH'
75 75 __url__ = 'https://code.rhodecode.com'
76 76 is_windows = __platform__ in ('Windows',)
77 77
78 78
79 79 def _get_requirements(req_filename, exclude=None, extras=None):
80 80 extras = extras or []
81 81 exclude = exclude or []
82 82
83 83 try:
84 84 parsed = parse_requirements(
85 85 os.path.join(here, req_filename), session=PipSession())
86 86 except TypeError:
87 87 # try pip < 6.0.0, that doesn't support session
88 88 parsed = parse_requirements(os.path.join(here, req_filename))
89 89
90 90 requirements = []
91 91 for int_req in parsed:
92 92 req_name = get_package_name(int_req)
93 93 if req_name not in exclude:
94 94 requirements.append(req_name)
95 95 return requirements + extras
96 96
97 97
98 98 # requirements extract
99 99 setup_requirements = ['PasteScript']
100 100 install_requirements = _get_requirements(
101 101 'requirements.txt', exclude=['setuptools', 'entrypoints'])
102 102 test_requirements = _get_requirements(
103 103 'requirements_test.txt')
104 104
105 105
106 106 def get_version():
107 107 here = os.path.abspath(os.path.dirname(__file__))
108 108 ver_file = os.path.join(here, "rhodecode", "VERSION")
109 109 with open(ver_file, "rt") as f:
110 110 version = f.read().strip()
111 111
112 112 return version
113 113
114 114
115 115 # additional files that go into the package itself
116 116 package_data = {
117 117 '': ['*.txt', '*.rst'],
118 118 'configs': ['*.ini'],
119 119 'rhodecode': ['VERSION', 'i18n/*/LC_MESSAGES/*.mo', ],
120 120 }
121 121
122 122 description = 'Source Code Management Platform'
123 123 keywords = ' '.join([
124 124 'rhodecode', 'mercurial', 'git', 'svn',
125 125 'code review',
126 126 'repo groups', 'ldap', 'repository management', 'hgweb',
127 127 'hgwebdir', 'gitweb', 'serving hgweb',
128 128 ])
129 129
130 130
131 131 # README/DESCRIPTION generation
132 132 readme_file = 'README.rst'
133 133 changelog_file = 'CHANGES.rst'
134 134 try:
135 135 long_description = codecs.open(readme_file).read() + '\n\n' + \
136 136 codecs.open(changelog_file).read()
137 137 except IOError as err:
138 138 sys.stderr.write(
139 139 "[WARNING] Cannot find file specified as long_description (%s)\n "
140 140 "or changelog (%s) skipping that file" % (readme_file, changelog_file))
141 141 long_description = description
142 142
143 143
144 144 setup(
145 145 name='rhodecode-enterprise-ce',
146 146 version=get_version(),
147 147 description=description,
148 148 long_description=long_description,
149 149 keywords=keywords,
150 150 license=__license__,
151 151 author=__author__,
152 152 author_email='support@rhodecode.com',
153 153 url=__url__,
154 154 setup_requires=setup_requirements,
155 155 install_requires=install_requirements,
156 156 tests_require=test_requirements,
157 157 zip_safe=False,
158 158 packages=find_packages(exclude=["docs", "tests*"]),
159 159 package_data=package_data,
160 160 include_package_data=True,
161 161 classifiers=[
162 162 'Development Status :: 6 - Mature',
163 163 'Environment :: Web Environment',
164 164 'Intended Audience :: Developers',
165 165 'Operating System :: OS Independent',
166 166 'Topic :: Software Development :: Version Control',
167 167 'License :: OSI Approved :: Affero GNU General Public License v3 or later (AGPLv3+)',
168 168 'Programming Language :: Python :: 3.10',
169 169 ],
170 170 message_extractors={
171 171 'rhodecode': [
172 172 ('**.py', 'python', None),
173 173 ('**.js', 'javascript', None),
174 174 ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
175 175 ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}),
176 176 ('public/**', 'ignore', None),
177 177 ]
178 178 },
179 179 paster_plugins=['PasteScript'],
180 180 entry_points={
181 181 'paste.app_factory': [
182 182 'main=rhodecode.config.middleware:make_pyramid_app',
183 183 ],
184 184 'paste.global_paster_command': [
185 185 'ishell=rhodecode.lib.paster_commands.ishell:Command',
186 186 'upgrade-db=rhodecode.lib.paster_commands.upgrade_db:UpgradeDb',
187 187
188 188 'setup-rhodecode=rhodecode.lib.paster_commands.deprecated.setup_rhodecode:Command',
189 189 'celeryd=rhodecode.lib.paster_commands.deprecated.celeryd:Command',
190 190 ],
191 191 'pyramid.pshell_runner': [
192 192 'ipython = rhodecode.lib.pyramid_shell:ipython_shell_runner',
193 193 ],
194 194 'console_scripts': [
195 195 'rc-setup-app=rhodecode.lib.rc_commands.setup_rc:main',
196 196 'rc-upgrade-db=rhodecode.lib.rc_commands.upgrade_db:main',
197 197 'rc-ishell=rhodecode.lib.rc_commands.ishell:main',
198 198 'rc-add-artifact=rhodecode.lib.rc_commands.add_artifact:main',
199 'rc-ssh-wrapper=rhodecode.apps.ssh_support.lib.ssh_wrapper:main',
199 'rc-ssh-wrapper=rhodecode.apps.ssh_support.lib.ssh_wrapper_v1:main',
200 'rc-ssh-wrapper-v2=rhodecode.apps.ssh_support.lib.ssh_wrapper_v2:main',
200 201 ],
201 202 'beaker.backends': [
202 203 'memorylru_base=rhodecode.lib.memory_lru_dict:MemoryLRUNamespaceManagerBase',
203 204 'memorylru_debug=rhodecode.lib.memory_lru_dict:MemoryLRUNamespaceManagerDebug'
204 205 ]
205 206 },
206 207 )