diff --git a/.bumpversion.cfg b/.bumpversion.cfg
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 5.0.3
+current_version = 5.1.0
message = release: Bump version {current_version} to {new_version}
[bumpversion:file:rhodecode/VERSION]
diff --git a/Makefile b/Makefile
--- a/Makefile
+++ b/Makefile
@@ -37,31 +37,6 @@ test-only:
--cov=rhodecode rhodecode
-.PHONY: test-only-mysql
-## run tests against mysql
-test-only-mysql:
- PYTHONHASHSEED=random \
- py.test -x -vv -r xw -p no:sugar \
- --cov-report=term-missing --cov-report=html \
- --ini-config-override='{"app:main": {"sqlalchemy.db1.url": "mysql://root:qweqwe@localhost/rhodecode_test?charset=utf8"}}' \
- --cov=rhodecode rhodecode
-
-
-.PHONY: test-only-postgres
-## run tests against postgres
-test-only-postgres:
- PYTHONHASHSEED=random \
- py.test -x -vv -r xw -p no:sugar \
- --cov-report=term-missing --cov-report=html \
- --ini-config-override='{"app:main": {"sqlalchemy.db1.url": "postgresql://postgres:qweqwe@localhost/rhodecode_test"}}' \
- --cov=rhodecode rhodecode
-
-.PHONY: ruff-check
-## run a ruff analysis
-ruff-check:
- ruff check --ignore F401 --ignore I001 --ignore E402 --ignore E501 --ignore F841 --exclude rhodecode/lib/dbmigrate --exclude .eggs --exclude .dev .
-
-
.PHONY: docs
## build docs
docs:
@@ -88,6 +63,10 @@ web-build:
./rhodecode/tests/scripts/static-file-check.sh rhodecode/public/
rm -rf node_modules
+.PHONY: ruff-check
+## run a ruff analysis
+ruff-check:
+ ruff check --ignore F401 --ignore I001 --ignore E402 --ignore E501 --ignore F841 --exclude rhodecode/lib/dbmigrate --exclude .eggs --exclude .dev .
.PHONY: pip-packages
## Show outdated packages
@@ -109,8 +88,9 @@ dev-sh:
sudo apt-get install -y zsh carapace-bin
rm -rf /home/rhodecode/.oh-my-zsh
curl https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh | sh
- echo "source <(carapace _carapace)" > /home/rhodecode/.zsrc
- PROMPT='%(?.%F{green}√.%F{red}?%?)%f %B%F{240}%1~%f%b %# ' zsh
+ @echo "source <(carapace _carapace)" > /home/rhodecode/.zsrc
+ @echo "${RC_DEV_CMD_HELP}"
+ @PROMPT='%(?.%F{green}√.%F{red}?%?)%f %B%F{240}%1~%f%b %# ' zsh
.PHONY: dev-cleanup
@@ -122,7 +102,9 @@ dev-cleanup:
.PHONY: dev-env
## make dev-env based on the requirements files and install develop of packages
+## Cleanup: pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y
dev-env:
+ sudo -u root chown rhodecode:rhodecode /home/rhodecode/.cache/pip/
pip install build virtualenv
pushd ../rhodecode-vcsserver/ && make dev-env && popd
pip wheel --wheel-dir=/home/rhodecode/.cache/pip/wheels -r requirements.txt -r requirements_rc_tools.txt -r requirements_test.txt -r requirements_debug.txt
@@ -137,16 +119,13 @@ sh:
make dev-sh
-.PHONY: dev-srv
-## run develop server instance, docker exec -it $(docker ps -q --filter 'name=dev-enterprise-ce') /bin/bash
-dev-srv:
- pserve --reload .dev/dev.ini
+## Allows changing the number of workers, e.g. make dev-srv workers=2
+workers?=1
-
-.PHONY: dev-srv-g
-## run gunicorn multi process workers
-dev-srv-g:
- gunicorn --paste .dev/dev.ini --bind=0.0.0.0:10020 --config=.dev/gunicorn_config.py --timeout=120 --reload
+.PHONY: dev-srv
+## run gunicorn web server with reloader, use workers=N to set multiworker mode
+dev-srv:
+ gunicorn --paste=.dev/dev.ini --bind=0.0.0.0:10020 --config=.dev/gunicorn_config.py --timeout=120 --reload --workers=$(workers)
# Default command on calling make
diff --git a/configs/development.ini b/configs/development.ini
--- a/configs/development.ini
+++ b/configs/development.ini
@@ -31,32 +31,15 @@ debug = true
host = 127.0.0.1
port = 10020
-; ##################################################
-; WAITRESS WSGI SERVER - Recommended for Development
-; ##################################################
-
-; use server type
-use = egg:waitress#main
-
-; number of worker threads
-threads = 5
-
-; MAX BODY SIZE 100GB
-max_request_body_size = 107374182400
-
-; Use poll instead of select, fixes file descriptors limits problems.
-; May not work on old windows systems.
-asyncore_use_poll = true
-
; ###########################
; GUNICORN APPLICATION SERVER
; ###########################
-; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
+; run with gunicorn --config gunicorn_conf.py --paste rhodecode.ini
; Module to use, this setting shouldn't be changed
-#use = egg:gunicorn#main
+use = egg:gunicorn#main
; Prefix middleware for RhodeCode.
; recommended when using proxy setup.
@@ -153,6 +136,12 @@ startup.import_repos = false
; SSH calls. Set this for events to receive proper url for SSH calls.
app.base_url = http://rhodecode.local
+; Host at which the Service API is running.
+app.service_api.host = http://rhodecode.local:10020
+
+; Secret for Service API authentication.
+app.service_api.token =
+
; Unique application ID. Should be a random unique string for security.
app_instance_uuid = rc-production
@@ -255,8 +244,8 @@ auth_ret_code_detection = false
; codes don't break the transactions while 4XX codes do
lock_ret_code = 423
-; allows to change the repository location in settings page
-allow_repo_location_change = true
+; Filesystem location where repositories should be stored
+repo_store.path = /var/opt/rhodecode_repo_store
; allows to setup custom hooks in settings page
allow_custom_hooks_settings = true
@@ -298,23 +287,72 @@ file_store.enabled = true
; Storage backend, available options are: local
file_store.backend = local
-; path to store the uploaded binaries
-file_store.storage_path = %(here)s/data/file_store
+; path to store the uploaded binaries and artifacts
+file_store.storage_path = /var/opt/rhodecode_data/file_store
+
+
+; Redis url to acquire/check generation of archives locks
+archive_cache.locking.url = redis://redis:6379/1
+
+; Storage backend, only 'filesystem' and 'objectstore' are available now
+archive_cache.backend.type = filesystem
+
+; url for s3 compatible storage that allows uploading artifacts
+; e.g http://minio:9000
+archive_cache.objectstore.url = http://s3-minio:9000
+
+; key for s3 auth
+archive_cache.objectstore.key = key
+
+; secret for s3 auth
+archive_cache.objectstore.secret = secret
-; Uncomment and set this path to control settings for archive download cache.
+; region for s3 storage
+archive_cache.objectstore.region = eu-central-1
+
+; number of sharded buckets to create to distribute archives across
+; default is 8 shards
+archive_cache.objectstore.bucket_shards = 8
+
+; a top-level bucket to put all other shards in
+; objects will be stored in rhodecode-archive-cache/shard-N based on the bucket_shards number
+archive_cache.objectstore.bucket = rhodecode-archive-cache
+
+; if true, this cache will retry fetching up to retry_attempts=N times, waiting retry_backoff seconds between tries
+archive_cache.objectstore.retry = false
+
+; number of seconds to wait before the next retry attempt
+archive_cache.objectstore.retry_backoff = 1
+
+; how many times to retry a fetch from this backend
+archive_cache.objectstore.retry_attempts = 10
+
+; Default is $cache_dir/archive_cache if not set
; Generated repo archives will be cached at this location
; and served from the cache during subsequent requests for the same archive of
; the repository. This path is important to be shared across filesystems and with
; RhodeCode and vcsserver
-
-; Default is $cache_dir/archive_cache if not set
-archive_cache.store_dir = %(here)s/data/archive_cache
+archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache
; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
-archive_cache.cache_size_gb = 10
+archive_cache.filesystem.cache_size_gb = 1
+
+; Eviction policy used to clear out after cache_size_gb limit is reached
+archive_cache.filesystem.eviction_policy = least-recently-stored
; By default cache uses sharding technique, this specifies how many shards are there
-archive_cache.cache_shards = 10
+; default is 8 shards
+archive_cache.filesystem.cache_shards = 8
+
+; if true, this cache will retry fetching up to retry_attempts=N times, waiting retry_backoff seconds between tries
+archive_cache.filesystem.retry = false
+
+; number of seconds to wait before the next retry attempt
+archive_cache.filesystem.retry_backoff = 1
+
+; how many times to retry a fetch from this backend
+archive_cache.filesystem.retry_attempts = 10
+
; #############
; CELERY CONFIG
@@ -322,7 +360,7 @@ archive_cache.cache_shards = 10
; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
-use_celery = false
+use_celery = true
; path to store schedule database
#celerybeat-schedule.path =
@@ -348,7 +386,7 @@ celery.task_always_eager = false
; Default cache dir for caches. Putting this into a ramdisk can boost performance.
; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
-cache_dir = %(here)s/data
+cache_dir = /var/opt/rhodecode_data
; *********************************************
; `sql_cache_short` cache for heavy SQL queries
@@ -457,12 +495,12 @@ rc_cache.cache_repo.expiration_time = 25
; beaker.session.type is type of storage options for the logged users sessions. Current allowed
; types are file, ext:redis, ext:database, ext:memcached
; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
-beaker.session.type = file
-beaker.session.data_dir = %(here)s/data/sessions
+#beaker.session.type = file
+#beaker.session.data_dir = %(here)s/data/sessions
; Redis based sessions
-#beaker.session.type = ext:redis
-#beaker.session.url = redis://127.0.0.1:6379/2
+beaker.session.type = ext:redis
+beaker.session.url = redis://redis:6379/2
; DB based session, fast, and allows easy management over logged in users
#beaker.session.type = ext:database
@@ -474,7 +512,7 @@ beaker.session.data_dir = %(here)s/data/
beaker.session.key = rhodecode
beaker.session.secret = develop-rc-uytcxaz
-beaker.session.lock_dir = %(here)s/data/sessions/lock
+beaker.session.lock_dir = /data_ramdisk/lock
; Secure encrypted cookie. Requires AES and AES python libraries
; you must disable beaker.session.secret to use this
@@ -515,18 +553,18 @@ search.location = %(here)s/data/index
; channelstream enables persistent connections and live notification
; in the system. It's also used by the chat system
-channelstream.enabled = false
+channelstream.enabled = true
; server address for channelstream server on the backend
-channelstream.server = 127.0.0.1:9800
+channelstream.server = channelstream:9800
; location of the channelstream server from outside world
; use ws:// for http or wss:// for https. This address needs to be handled
; by external HTTP server such as Nginx or Apache
; see Nginx/Apache configuration examples in our docs
channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
-channelstream.secret = secret
-channelstream.history.location = %(here)s/channelstream_history
+channelstream.secret = ENV_GENERATED
+channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
; Internal application path that Javascript uses to connect into.
; If you use proxy-prefix the prefix should be added before /_channelstream
@@ -572,7 +610,7 @@ sqlalchemy.db1.pool_recycle = 3600
; VCS CONFIG
; ##########
vcs.server.enable = true
-vcs.server = localhost:9900
+vcs.server = vcsserver:10010
; Web server connectivity protocol, responsible for web based VCS operations
; Available protocols are:
@@ -585,6 +623,7 @@ vcs.scm_app_implementation = http
; Push/Pull operations hooks protocol, available options are:
; `http` - use http-rpc backend (default)
+; `celery` - use celery based hooks
vcs.hooks.protocol = http
; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
@@ -604,11 +643,6 @@ vcs.backends = hg, git, svn
; Wait this number of seconds before killing connection to the vcsserver
vcs.connection_timeout = 3600
-; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
-; Set a numeric version for your current SVN e.g 1.8, or 1.12
-; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
-#vcs.svn.compatible_version = 1.8
-
; Cache flag to cache vcsserver remote calls locally
; It uses cache_region `cache_repo`
vcs.methods.cache = true
@@ -618,14 +652,29 @@ vcs.methods.cache = true
; Maps RhodeCode repo groups into SVN paths for Apache
; ####################################################
+; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
+; Set a numeric version for your current SVN e.g 1.8, or 1.12
+; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
+#vcs.svn.compatible_version = 1.8
+
+; Redis connection settings for svn integrations logic
+; This connection string needs to be the same on ce and vcsserver
+vcs.svn.redis_conn = redis://redis:6379/0
+
+; Enable SVN proxy of requests over HTTP
+vcs.svn.proxy.enabled = true
+
+; host to connect to running SVN subsystem
+vcs.svn.proxy.host = http://svn:8090
+
; Enable or disable the config file generation.
-svn.proxy.generate_config = false
+svn.proxy.generate_config = true
; Generate config file with `SVNListParentPath` set to `On`.
svn.proxy.list_parent_path = true
; Set location and file name of generated config file.
-svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
+svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
; alternative mod_dav config template. This needs to be a valid mako template
; Example template can be found in the source code:
@@ -653,7 +702,7 @@ svn.proxy.location_root = /
; any change user ssh keys. Setting this to false also disables possibility
; of adding SSH keys by users from web interface. Super admins can still
; manage SSH Keys.
-ssh.generate_authorized_keyfile = false
+ssh.generate_authorized_keyfile = true
; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
# ssh.authorized_keys_ssh_opts =
@@ -661,12 +710,13 @@ ssh.generate_authorized_keyfile = false
; Path to the authorized_keys file where the generate entries are placed.
; It is possible to have multiple key files specified in `sshd_config` e.g.
; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
-ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
+ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
; Command to execute the SSH wrapper. The binary is available in the
; RhodeCode installation directory.
-; e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
-ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
+; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
+; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
+ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
; Allow shell when executing the ssh-wrapper command
ssh.wrapper_cmd_allow_shell = false
@@ -677,73 +727,14 @@ ssh.enable_debug_logging = true
; Paths to binary executable, by default they are the names, but we can
; override them if we want to use a custom one
-ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
-ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
-ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
+ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
+ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
+ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
; Enables SSH key generator web interface. Disabling this still allows users
; to add their own keys.
ssh.enable_ui_key_generator = true
-
-; #################
-; APPENLIGHT CONFIG
-; #################
-
-; Appenlight is tailored to work with RhodeCode, see
-; http://appenlight.rhodecode.com for details how to obtain an account
-
-; Appenlight integration enabled
-#appenlight = false
-
-#appenlight.server_url = https://api.appenlight.com
-#appenlight.api_key = YOUR_API_KEY
-#appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
-
-; used for JS client
-#appenlight.api_public_key = YOUR_API_PUBLIC_KEY
-
-; TWEAK AMOUNT OF INFO SENT HERE
-
-; enables 404 error logging (default False)
-#appenlight.report_404 = false
-
-; time in seconds after request is considered being slow (default 1)
-#appenlight.slow_request_time = 1
-
-; record slow requests in application
-; (needs to be enabled for slow datastore recording and time tracking)
-#appenlight.slow_requests = true
-
-; enable hooking to application loggers
-#appenlight.logging = true
-
-; minimum log level for log capture
-#ppenlight.logging.level = WARNING
-
-; send logs only from erroneous/slow requests
-; (saves API quota for intensive logging)
-#appenlight.logging_on_error = false
-
-; list of additional keywords that should be grabbed from environ object
-; can be string with comma separated list of words in lowercase
-; (by default client will always send following info:
-; 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
-; start with HTTP* this list be extended with additional keywords here
-#appenlight.environ_keys_whitelist =
-
-; list of keywords that should be blanked from request object
-; can be string with comma separated list of words in lowercase
-; (by default client will always blank keys that contain following words
-; 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
-; this list be extended with additional keywords set here
-#appenlight.request_keys_blacklist =
-
-; list of namespaces that should be ignores when gathering log entries
-; can be string with comma separated list of namespaces
-; (by default the client ignores own entries: appenlight_client.client)
-#appenlight.log_namespace_blacklist =
-
; Statsd client config, this is used to send metrics to statsd
; We recommend setting statsd_exported and scrape them using Prometheus
#statsd.enabled = false
diff --git a/configs/production.ini b/configs/production.ini
--- a/configs/production.ini
+++ b/configs/production.ini
@@ -36,7 +36,7 @@ port = 10020
; GUNICORN APPLICATION SERVER
; ###########################
-; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
+; run with gunicorn --config gunicorn_conf.py --paste rhodecode.ini
; Module to use, this setting shouldn't be changed
use = egg:gunicorn#main
@@ -104,6 +104,12 @@ startup.import_repos = false
; SSH calls. Set this for events to receive proper url for SSH calls.
app.base_url = http://rhodecode.local
+; Host at which the Service API is running.
+app.service_api.host = http://rhodecode.local:10020
+
+; Secret for Service API authentication.
+app.service_api.token =
+
; Unique application ID. Should be a random unique string for security.
app_instance_uuid = rc-production
@@ -206,8 +212,8 @@ auth_ret_code_detection = false
; codes don't break the transactions while 4XX codes do
lock_ret_code = 423
-; allows to change the repository location in settings page
-allow_repo_location_change = true
+; Filesystem location where repositories should be stored
+repo_store.path = /var/opt/rhodecode_repo_store
; allows to setup custom hooks in settings page
allow_custom_hooks_settings = true
@@ -249,23 +255,72 @@ file_store.enabled = true
; Storage backend, available options are: local
file_store.backend = local
-; path to store the uploaded binaries
-file_store.storage_path = %(here)s/data/file_store
+; path to store the uploaded binaries and artifacts
+file_store.storage_path = /var/opt/rhodecode_data/file_store
+
+
+; Redis url to acquire/check generation of archives locks
+archive_cache.locking.url = redis://redis:6379/1
+
+; Storage backend, only 'filesystem' and 'objectstore' are available now
+archive_cache.backend.type = filesystem
+
+; url for s3 compatible storage that allows uploading artifacts
+; e.g http://minio:9000
+archive_cache.objectstore.url = http://s3-minio:9000
+
+; key for s3 auth
+archive_cache.objectstore.key = key
+
+; secret for s3 auth
+archive_cache.objectstore.secret = secret
-; Uncomment and set this path to control settings for archive download cache.
+; region for s3 storage
+archive_cache.objectstore.region = eu-central-1
+
+; number of sharded buckets to create to distribute archives across
+; default is 8 shards
+archive_cache.objectstore.bucket_shards = 8
+
+; a top-level bucket to put all other shards in
+; objects will be stored in rhodecode-archive-cache/shard-N based on the bucket_shards number
+archive_cache.objectstore.bucket = rhodecode-archive-cache
+
+; if true, this cache will retry fetching up to retry_attempts=N times, waiting retry_backoff seconds between tries
+archive_cache.objectstore.retry = false
+
+; number of seconds to wait before the next retry attempt
+archive_cache.objectstore.retry_backoff = 1
+
+; how many times to retry a fetch from this backend
+archive_cache.objectstore.retry_attempts = 10
+
+; Default is $cache_dir/archive_cache if not set
; Generated repo archives will be cached at this location
; and served from the cache during subsequent requests for the same archive of
; the repository. This path is important to be shared across filesystems and with
; RhodeCode and vcsserver
-
-; Default is $cache_dir/archive_cache if not set
-archive_cache.store_dir = %(here)s/data/archive_cache
+archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache
; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
-archive_cache.cache_size_gb = 40
+archive_cache.filesystem.cache_size_gb = 40
+
+; Eviction policy used to clear out after cache_size_gb limit is reached
+archive_cache.filesystem.eviction_policy = least-recently-stored
; By default cache uses sharding technique, this specifies how many shards are there
-archive_cache.cache_shards = 4
+; default is 8 shards
+archive_cache.filesystem.cache_shards = 8
+
+; if true, this cache will retry fetching up to retry_attempts=N times, waiting retry_backoff seconds between tries
+archive_cache.filesystem.retry = false
+
+; number of seconds to wait before the next retry attempt
+archive_cache.filesystem.retry_backoff = 1
+
+; how many times to retry a fetch from this backend
+archive_cache.filesystem.retry_attempts = 10
+
; #############
; CELERY CONFIG
@@ -273,7 +328,7 @@ archive_cache.cache_shards = 4
; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
-use_celery = false
+use_celery = true
; path to store schedule database
#celerybeat-schedule.path =
@@ -299,7 +354,7 @@ celery.task_always_eager = false
; Default cache dir for caches. Putting this into a ramdisk can boost performance.
; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
-cache_dir = %(here)s/data
+cache_dir = /var/opt/rhodecode_data
; *********************************************
; `sql_cache_short` cache for heavy SQL queries
@@ -408,12 +463,12 @@ rc_cache.cache_repo.expiration_time = 25
; beaker.session.type is type of storage options for the logged users sessions. Current allowed
; types are file, ext:redis, ext:database, ext:memcached
; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
-beaker.session.type = file
-beaker.session.data_dir = %(here)s/data/sessions
+#beaker.session.type = file
+#beaker.session.data_dir = %(here)s/data/sessions
; Redis based sessions
-#beaker.session.type = ext:redis
-#beaker.session.url = redis://127.0.0.1:6379/2
+beaker.session.type = ext:redis
+beaker.session.url = redis://redis:6379/2
; DB based session, fast, and allows easy management over logged in users
#beaker.session.type = ext:database
@@ -425,7 +480,7 @@ beaker.session.data_dir = %(here)s/data/
beaker.session.key = rhodecode
beaker.session.secret = production-rc-uytcxaz
-beaker.session.lock_dir = %(here)s/data/sessions/lock
+beaker.session.lock_dir = /data_ramdisk/lock
; Secure encrypted cookie. Requires AES and AES python libraries
; you must disable beaker.session.secret to use this
@@ -466,18 +521,18 @@ search.location = %(here)s/data/index
; channelstream enables persistent connections and live notification
; in the system. It's also used by the chat system
-channelstream.enabled = false
+channelstream.enabled = true
; server address for channelstream server on the backend
-channelstream.server = 127.0.0.1:9800
+channelstream.server = channelstream:9800
; location of the channelstream server from outside world
; use ws:// for http or wss:// for https. This address needs to be handled
; by external HTTP server such as Nginx or Apache
; see Nginx/Apache configuration examples in our docs
channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
-channelstream.secret = secret
-channelstream.history.location = %(here)s/channelstream_history
+channelstream.secret = ENV_GENERATED
+channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
; Internal application path that Javascript uses to connect into.
; If you use proxy-prefix the prefix should be added before /_channelstream
@@ -523,7 +578,7 @@ sqlalchemy.db1.pool_recycle = 3600
; VCS CONFIG
; ##########
vcs.server.enable = true
-vcs.server = localhost:9900
+vcs.server = vcsserver:10010
; Web server connectivity protocol, responsible for web based VCS operations
; Available protocols are:
@@ -536,6 +591,7 @@ vcs.scm_app_implementation = http
; Push/Pull operations hooks protocol, available options are:
; `http` - use http-rpc backend (default)
+; `celery` - use celery based hooks
vcs.hooks.protocol = http
; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
@@ -555,11 +611,6 @@ vcs.backends = hg, git, svn
; Wait this number of seconds before killing connection to the vcsserver
vcs.connection_timeout = 3600
-; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
-; Set a numeric version for your current SVN e.g 1.8, or 1.12
-; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
-#vcs.svn.compatible_version = 1.8
-
; Cache flag to cache vcsserver remote calls locally
; It uses cache_region `cache_repo`
vcs.methods.cache = true
@@ -569,14 +620,29 @@ vcs.methods.cache = true
; Maps RhodeCode repo groups into SVN paths for Apache
; ####################################################
+; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
+; Set a numeric version for your current SVN e.g 1.8, or 1.12
+; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
+#vcs.svn.compatible_version = 1.8
+
+; Redis connection settings for svn integrations logic
+; This connection string needs to be the same on ce and vcsserver
+vcs.svn.redis_conn = redis://redis:6379/0
+
+; Enable SVN proxy of requests over HTTP
+vcs.svn.proxy.enabled = true
+
+; host to connect to running SVN subsystem
+vcs.svn.proxy.host = http://svn:8090
+
; Enable or disable the config file generation.
-svn.proxy.generate_config = false
+svn.proxy.generate_config = true
; Generate config file with `SVNListParentPath` set to `On`.
svn.proxy.list_parent_path = true
; Set location and file name of generated config file.
-svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
+svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
; alternative mod_dav config template. This needs to be a valid mako template
; Example template can be found in the source code:
@@ -604,7 +670,7 @@ svn.proxy.location_root = /
; any change user ssh keys. Setting this to false also disables possibility
; of adding SSH keys by users from web interface. Super admins can still
; manage SSH Keys.
-ssh.generate_authorized_keyfile = false
+ssh.generate_authorized_keyfile = true
; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
# ssh.authorized_keys_ssh_opts =
@@ -612,12 +678,13 @@ ssh.generate_authorized_keyfile = false
; Path to the authorized_keys file where the generate entries are placed.
; It is possible to have multiple key files specified in `sshd_config` e.g.
; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
-ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
+ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
; Command to execute the SSH wrapper. The binary is available in the
; RhodeCode installation directory.
-; e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
-ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
+; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
+; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
+ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
; Allow shell when executing the ssh-wrapper command
ssh.wrapper_cmd_allow_shell = false
@@ -628,73 +695,14 @@ ssh.enable_debug_logging = false
; Paths to binary executable, by default they are the names, but we can
; override them if we want to use a custom one
-ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
-ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
-ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
+ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
+ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
+ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
; Enables SSH key generator web interface. Disabling this still allows users
; to add their own keys.
ssh.enable_ui_key_generator = true
-
-; #################
-; APPENLIGHT CONFIG
-; #################
-
-; Appenlight is tailored to work with RhodeCode, see
-; http://appenlight.rhodecode.com for details how to obtain an account
-
-; Appenlight integration enabled
-#appenlight = false
-
-#appenlight.server_url = https://api.appenlight.com
-#appenlight.api_key = YOUR_API_KEY
-#appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
-
-; used for JS client
-#appenlight.api_public_key = YOUR_API_PUBLIC_KEY
-
-; TWEAK AMOUNT OF INFO SENT HERE
-
-; enables 404 error logging (default False)
-#appenlight.report_404 = false
-
-; time in seconds after request is considered being slow (default 1)
-#appenlight.slow_request_time = 1
-
-; record slow requests in application
-; (needs to be enabled for slow datastore recording and time tracking)
-#appenlight.slow_requests = true
-
-; enable hooking to application loggers
-#appenlight.logging = true
-
-; minimum log level for log capture
-#ppenlight.logging.level = WARNING
-
-; send logs only from erroneous/slow requests
-; (saves API quota for intensive logging)
-#appenlight.logging_on_error = false
-
-; list of additional keywords that should be grabbed from environ object
-; can be string with comma separated list of words in lowercase
-; (by default client will always send following info:
-; 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
-; start with HTTP* this list be extended with additional keywords here
-#appenlight.environ_keys_whitelist =
-
-; list of keywords that should be blanked from request object
-; can be string with comma separated list of words in lowercase
-; (by default client will always blank keys that contain following words
-; 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
-; this list be extended with additional keywords set here
-#appenlight.request_keys_blacklist =
-
-; list of namespaces that should be ignores when gathering log entries
-; can be string with comma separated list of namespaces
-; (by default the client ignores own entries: appenlight_client.client)
-#appenlight.log_namespace_blacklist =
-
; Statsd client config, this is used to send metrics to statsd
; We recommend setting statsd_exported and scrape them using Prometheus
#statsd.enabled = false
diff --git a/docs/admin/lab-settings.rst b/docs/admin/lab-settings.rst
--- a/docs/admin/lab-settings.rst
+++ b/docs/admin/lab-settings.rst
@@ -8,7 +8,7 @@ level of support to optimize for product
use the following instructions:
1. Open the |RCE| configuration file,
- :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini`
+ :file:`config/_shared/rhodecode.ini`
2. Add the following configuration option in the ``[app:main]`` section.
diff --git a/docs/admin/sec-x-frame.rst b/docs/admin/sec-x-frame.rst
--- a/docs/admin/sec-x-frame.rst
+++ b/docs/admin/sec-x-frame.rst
@@ -42,7 +42,7 @@ information see the :ref:`apache-ws-ref`
|RCE| can also be configured to force strict *https* connections and Strict
Transport Security. To set this, configure the following options to ``true``
-in the :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file.
+in the :file:`config/_shared/rhodecode.ini` file.
.. code-block:: ini
diff --git a/docs/admin/sec-your-server.rst b/docs/admin/sec-your-server.rst
--- a/docs/admin/sec-your-server.rst
+++ b/docs/admin/sec-your-server.rst
@@ -83,7 +83,7 @@ see the `OpenSSL PKI tutorial`_ site, or
If the network you are running is SSL/TLS encrypted, you can configure |RCE|
to always use secure connections using the ``force_https`` and ``use_htsts``
-options in the :file:`/home/user/.rccontrol/instance-id/rhodecode.ini` file.
+options in the :file:`config/_shared/rhodecode.ini` file.
For more details, see the :ref:`x-frame` section.
FireWalls and Ports
diff --git a/docs/admin/system-overview.rst b/docs/admin/system-overview.rst
--- a/docs/admin/system-overview.rst
+++ b/docs/admin/system-overview.rst
@@ -78,7 +78,7 @@ For example:
Configuration Files
-------------------
-* :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini`
+* :file:`config/_shared/rhodecode.ini`
* :file:`/home/{user}/.rccontrol/{instance-id}/search_mapping.ini`
* :file:`/home/{user}/.rccontrol/{vcsserver-id}/vcsserver.ini`
* :file:`/home/{user}/.rccontrol/supervisor/supervisord.ini`
diff --git a/docs/admin/system_admin/admin-tricks.rst b/docs/admin/system_admin/admin-tricks.rst
--- a/docs/admin/system_admin/admin-tricks.rst
+++ b/docs/admin/system_admin/admin-tricks.rst
@@ -188,7 +188,7 @@ Changing Default Language
^^^^^^^^^^^^^^^^^^^^^^^^^
To change the default language of a |RCE| instance, change the language code
-in the :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file. To
+in the :file:`config/_shared/rhodecode.ini` file. To
do this, use the following steps.
1. Open the :file:`rhodecode.ini` file and set the required language code.
diff --git a/docs/admin/system_admin/config-files-overview.rst b/docs/admin/system_admin/config-files-overview.rst
--- a/docs/admin/system_admin/config-files-overview.rst
+++ b/docs/admin/system_admin/config-files-overview.rst
@@ -11,7 +11,7 @@ sections.
\- **rhodecode.ini**
Default location:
- :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini`
+ :file:`config/_shared/rhodecode.ini`
This is the main |RCE| configuration file and controls much of its
default behaviour. It is also used to configure certain customer
diff --git a/docs/admin/system_admin/enable-debug.rst b/docs/admin/system_admin/enable-debug.rst
--- a/docs/admin/system_admin/enable-debug.rst
+++ b/docs/admin/system_admin/enable-debug.rst
@@ -14,7 +14,7 @@ track particular user logs only, and exc
simply grep by `req_id` uuid which you'll have to find for the individual request.
To enable debug mode on a |RCE| instance you need to set the debug property
-in the :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file. To
+in the :file:`config/_shared/rhodecode.ini` file. To
do this, use the following steps
1. Open the file and set the ``debug`` line to ``true``
@@ -38,7 +38,7 @@ Debug and Logging Configuration
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Further debugging and logging settings can also be set in the
-:file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file.
+:file:`config/_shared/rhodecode.ini` file.
In the logging section, the various packages that run with |RCE| can have
different debug levels set. If you want to increase the logging level change
diff --git a/docs/admin/system_admin/svn-http.rst b/docs/admin/system_admin/svn-http.rst
--- a/docs/admin/system_admin/svn-http.rst
+++ b/docs/admin/system_admin/svn-http.rst
@@ -134,7 +134,7 @@ 2. Go to the :menuselection:`Admin --> S
:guilabel:`Subversion HTTP Server URL`.
3. Open the |RCE| configuration file,
- :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini`
+ :file:`config/_shared/rhodecode.ini`
4. Add the following configuration option in the ``[app:main]``
section if you don't have it yet.
diff --git a/docs/admin/system_admin/tuning/tuning-change-encoding.rst b/docs/admin/system_admin/tuning/tuning-change-encoding.rst
--- a/docs/admin/system_admin/tuning/tuning-change-encoding.rst
+++ b/docs/admin/system_admin/tuning/tuning-change-encoding.rst
@@ -4,7 +4,7 @@ Change Default Encoding
-----------------------
|RCE| uses ``utf8`` encoding by default. You can change the default encoding
-in the :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file. To
+in the :file:`config/_shared/rhodecode.ini` file. To
change the default encoding used by |RCE|, set a new value for the
``default_encoding``.
diff --git a/docs/admin/system_admin/tuning/tuning-hg-auth-loop.rst b/docs/admin/system_admin/tuning/tuning-hg-auth-loop.rst
--- a/docs/admin/system_admin/tuning/tuning-hg-auth-loop.rst
+++ b/docs/admin/system_admin/tuning/tuning-hg-auth-loop.rst
@@ -7,7 +7,7 @@ When using external authentication tools
password retry loop in |hg| can result in users being locked out due to too
many failed password attempts. To prevent this from happening, add the
following setting to your
-:file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file, in the
+:file:`config/_shared/rhodecode.ini` file, in the
``[app:main]`` section.
diff --git a/docs/admin/system_admin/tuning/tuning-scale-horizontally-cluster.rst b/docs/admin/system_admin/tuning/tuning-scale-horizontally-cluster.rst
--- a/docs/admin/system_admin/tuning/tuning-scale-horizontally-cluster.rst
+++ b/docs/admin/system_admin/tuning/tuning-scale-horizontally-cluster.rst
@@ -100,7 +100,7 @@ Each one should already connect to share
1) Assuming our final url will be http://rc-node-1, Configure `instances_id`, `app.base_url`
-a) On **rc-node-2** find the following settings and edit :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini`
+a) On **rc-node-2** find the following settings and edit :file:`config/_shared/rhodecode.ini`
.. code-block:: ini
@@ -109,7 +109,7 @@ a) On **rc-node-2** find the following s
app.base_url = http://rc-node-1
-b) On **rc-node-3** find the following settings and edit :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini`
+b) On **rc-node-3** find the following settings and edit :file:`config/_shared/rhodecode.ini`
.. code-block:: ini
@@ -121,7 +121,7 @@ b) On **rc-node-3** find the following s
2) Configure `User Session` to use a shared database. Example config that should be
changed on both **rc-node-2** and **rc-node-3** .
- Edit :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini`
+ Edit :file:`config/_shared/rhodecode.ini`
.. code-block:: ini
@@ -163,7 +163,7 @@ 3) Configure stored cached/archive cache
4) Use shared exception store. Example config that should be
changed on both **rc-node-2** and **rc-node-3**, and also for VCSServer.
- Edit :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` and
+ Edit :file:`config/_shared/rhodecode.ini` and
:file:`/home/{user}/.rccontrol/{vcsserver-instance-id}/vcsserver.ini`
and add/change following setting.
diff --git a/docs/admin/system_admin/tuning/tuning-user-sessions-performance.rst b/docs/admin/system_admin/tuning/tuning-user-sessions-performance.rst
--- a/docs/admin/system_admin/tuning/tuning-user-sessions-performance.rst
+++ b/docs/admin/system_admin/tuning/tuning-user-sessions-performance.rst
@@ -15,7 +15,7 @@ scalability, and maintainability we reco
sessions to database-based user sessions or Redis based sessions.
To switch to database-based user sessions uncomment the following section in
-your :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file.
+your :file:`config/_shared/rhodecode.ini` file.
.. code-block:: ini
@@ -49,7 +49,7 @@ uses, or if required it can be a differe
To switch to redis-based user sessions uncomment the following section in
-your :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file.
+your :file:`config/_shared/rhodecode.ini` file.
.. code-block:: ini
diff --git a/docs/admin/system_admin/vcs-server.rst b/docs/admin/system_admin/vcs-server.rst
--- a/docs/admin/system_admin/vcs-server.rst
+++ b/docs/admin/system_admin/vcs-server.rst
@@ -52,7 +52,7 @@ To configure a |RCE| instance to use a V
The following list shows the available options on the |RCE| side of the
connection to the VCS Server. The settings are configured per
instance in the
-:file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file.
+:file:`config/_shared/rhodecode.ini` file.
.. rst-class:: dl-horizontal
diff --git a/docs/api/api.rst b/docs/api/api.rst
--- a/docs/api/api.rst
+++ b/docs/api/api.rst
@@ -27,7 +27,7 @@ of views that have API access enabled by
edit the |RCE| configuration ``.ini`` file. The default location is:
* |RCE| Pre-2.2.7 :file:`root/rhodecode/data/production.ini`
-* |RCE| 3.0 :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini`
+* |RCE| 3.0 :file:`config/_shared/rhodecode.ini`
To configure the white list, edit this section of the file. In this
configuration example, API access is granted to the patch/diff raw file and
diff --git a/docs/auth/ssh-connection.rst b/docs/auth/ssh-connection.rst
--- a/docs/auth/ssh-connection.rst
+++ b/docs/auth/ssh-connection.rst
@@ -87,7 +87,7 @@ 3. Set base_url for instance to enable p
Hostname is required for the integration to properly set the instance URL.
When your hostname is known (e.g https://code.rhodecode.com) please set it
- inside :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini`
+ inside :file:`config/_shared/rhodecode.ini`
add into `[app:main]` section the following configuration:
@@ -111,7 +111,7 @@ 4. Add the public key to your user accou
In case of connection problems please set
`ssh.enable_debug_logging = true` inside the SSH configuration of
- :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini`
+ :file:`config/_shared/rhodecode.ini`
Then add, remove your SSH key and try connecting again.
Debug logging will be printed to help find the problems on the server side.
diff --git a/docs/install/setup-email.rst b/docs/install/setup-email.rst
--- a/docs/install/setup-email.rst
+++ b/docs/install/setup-email.rst
@@ -4,7 +4,7 @@ Set up Email
------------
To setup email with your |RCE| instance, open the default
-:file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini`
+:file:`config/_shared/rhodecode.ini`
file and uncomment and configure the email section. If it is not there,
use the below example to insert it.
diff --git a/docs/release-notes/release-notes-5.0.0.rst b/docs/release-notes/release-notes-5.0.0.rst
--- a/docs/release-notes/release-notes-5.0.0.rst
+++ b/docs/release-notes/release-notes-5.0.0.rst
@@ -4,7 +4,8 @@
Release Date
^^^^^^^^^^^^
-- TBA
+
+- 2024-05-14
New Features
@@ -12,21 +13,20 @@ New Features
- Full support of Python3 and Python3.11
- Git repositories with LFS object are now pushing and pulling the LFS objects when remote sync is enabled.
-- Archive generation: implemented a new system for caching generated archive files that allows setting cache size limit
- see: `archive_cache.cache_size_gb=` option.
+- Archive generation: implemented a new system for caching generated archive files that allows setting a cache size limit, see the archive_cache.cache_size_gb= option.
- Introduced statsd metrics in various places for new monitoring stack to provide useful details on traffic and usage.
General
^^^^^^^
-- Upgraded all dependency libraries to their latest available versions
-- Dropped support for deprecated hgsubversion no longer available in python3
+- Upgraded all dependency libraries to their latest available versions for python3 compatibility
Security
^^^^^^^^
+- Fixed a few edge cases of permission invalidation on change of permissions
Performance
@@ -38,6 +38,7 @@ Performance
Fixes
^^^^^
+- Various small fixes and improvements found during python3 migration
Upgrade notes
diff --git a/docs/release-notes/release-notes-5.0.1.rst b/docs/release-notes/release-notes-5.0.1.rst
new file mode 100644
--- /dev/null
+++ b/docs/release-notes/release-notes-5.0.1.rst
@@ -0,0 +1,43 @@
+|RCE| 5.0.1 |RNS|
+-----------------
+
+Release Date
+^^^^^^^^^^^^
+
+- 2024-05-20
+
+
+New Features
+^^^^^^^^^^^^
+
+
+
+General
+^^^^^^^
+
+
+
+Security
+^^^^^^^^
+
+
+
+Performance
+^^^^^^^^^^^
+
+
+
+
+Fixes
+^^^^^
+
+- Fixed Celery serialization issues
+- Fixed problems with Celery startup signaling
+- Fixed SVN hooks binary dir paths, which in certain scenarios resulted in empty values preventing hooks from executing
+- Fixed an annotation bug for files without newlines or with mixed newlines
+
+
+Upgrade notes
+^^^^^^^^^^^^^
+
+- RhodeCode 5.0.1 is an unscheduled bugfix release addressing some of the issues found during the 4.X -> 5.X migration
diff --git a/docs/release-notes/release-notes-5.0.2.rst b/docs/release-notes/release-notes-5.0.2.rst
new file mode 100644
--- /dev/null
+++ b/docs/release-notes/release-notes-5.0.2.rst
@@ -0,0 +1,39 @@
+|RCE| 5.0.2 |RNS|
+-----------------
+
+Release Date
+^^^^^^^^^^^^
+
+- 2024-05-29
+
+
+New Features
+^^^^^^^^^^^^
+
+
+
+General
+^^^^^^^
+
+
+
+Security
+^^^^^^^^
+
+
+
+Performance
+^^^^^^^^^^^
+
+
+
+
+Fixes
+^^^^^
+
+- Fixed problems with saving branch permissions
+
+Upgrade notes
+^^^^^^^^^^^^^
+
+- RhodeCode 5.0.2 is an unscheduled bugfix release addressing some of the issues found during the 4.X -> 5.X migration
diff --git a/docs/release-notes/release-notes-5.0.3.rst b/docs/release-notes/release-notes-5.0.3.rst
new file mode 100644
--- /dev/null
+++ b/docs/release-notes/release-notes-5.0.3.rst
@@ -0,0 +1,39 @@
+|RCE| 5.0.3 |RNS|
+-----------------
+
+Release Date
+^^^^^^^^^^^^
+
+- 2024-06-17
+
+
+New Features
+^^^^^^^^^^^^
+
+
+
+General
+^^^^^^^
+
+
+
+Security
+^^^^^^^^
+
+
+
+Performance
+^^^^^^^^^^^
+
+
+
+
+Fixes
+^^^^^
+
+- Fixed problems with nested LDAP groups
+
+Upgrade notes
+^^^^^^^^^^^^^
+
+- RhodeCode 5.0.3 is an unscheduled bugfix release addressing some of the issues found during the 4.X -> 5.X migration
diff --git a/docs/release-notes/release-notes-5.1.0.rst b/docs/release-notes/release-notes-5.1.0.rst
new file mode 100644
--- /dev/null
+++ b/docs/release-notes/release-notes-5.1.0.rst
@@ -0,0 +1,59 @@
+|RCE| 5.1.0 |RNS|
+-----------------
+
+Release Date
+^^^^^^^^^^^^
+
+- 2024-07-18
+
+
+New Features
+^^^^^^^^^^^^
+
+- We've introduced 2FA for users. Alongside the existing external-auth 2FA support, RhodeCode now allows enabling 2FA directly for users.
+  2FA options are available for each user individually, or can be enforced via authentication plugins such as LDAP or the internal one.
+- Email-based log-in. RhodeCode now allows logging in with an email address as well as a username for the main authentication type.
+- Ability to replace a file using the web UI. An existing file can now be replaced directly from the web UI.
+- Git LFS sync automation. Remote push/pull commands can now also sync Git LFS objects.
+- Added ability to remove or close branches from the web UI
+- Added ability to delete a branch automatically after merging a PR, for git repositories
+- Added support for an S3-based archive_cache that allows storing cached archives in an S3-compatible object store.
+
+
+General
+^^^^^^^
+
+- Upgraded all dependency libraries to their latest available versions
+- Repository storage is no longer controlled via DB settings but via the .ini file, which allows easier automated deployments (see the example below).
+- Bumped mercurial to 6.7.4
+- Mercurial: enable httppostarguments for better support of large repositories with lots of heads.
+- Added explicit db-migrate step to update hooks for 5.X release.
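+
+For example, the shipped `development.ini` and `production.ini` now pin the repository store to a fixed path (adjust the path to your deployment):
+
+.. code-block:: ini
+
+    repo_store.path = /var/opt/rhodecode_repo_store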
+
+
+Security
+^^^^^^^^
+
+
+
+Performance
+^^^^^^^^^^^
+
+- Introduced a full rewrite of the ssh backend for performance. The result is a 2-5x speed improvement for operations over ssh.
+  Enable the new ssh wrapper by setting: `ssh.wrapper_cmd = /home/rhodecode/venv/bin/rc-ssh-wrapper-v2`
+- Introduced a new hooks subsystem that is more scalable and faster; enable it by setting: `vcs.hooks.protocol = celery` (see the snippet below)
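+
+A minimal sketch of the relevant `rhodecode.ini` settings; the wrapper path below is the "new rewrite" path referenced in the shipped configs and must be adjusted to your installation:
+
+.. code-block:: ini
+
+    ; new, faster ssh wrapper
+    ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
+
+    ; celery-based hooks subsystem
+    vcs.hooks.protocol = celery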
+
+
+Fixes
+^^^^^
+
+- Archives: fixed Zip archive downloads breaking when a gitmodules file is present
+- Branch permissions: fixed a bug that prevented specifying own rules from a 4.X install
+- SVN: refactored svn events, fixing support for them in a dockerized env
+- Fixed empty server URL in the PR link after a push from the CLI
+
+
+Upgrade notes
+^^^^^^^^^^^^^
+
+- RhodeCode 5.1.0 is a major feature release following the big 5.0.0 python3 migration. We're happy to ship the first
+  feature-rich release since that transition.
diff --git a/docs/release-notes/release-notes.rst b/docs/release-notes/release-notes.rst
--- a/docs/release-notes/release-notes.rst
+++ b/docs/release-notes/release-notes.rst
@@ -9,6 +9,11 @@ Release Notes
.. toctree::
:maxdepth: 1
+
+ release-notes-5.1.0.rst
+ release-notes-5.0.3.rst
+ release-notes-5.0.2.rst
+ release-notes-5.0.1.rst
release-notes-5.0.0.rst
diff --git a/docs/tutorials/multi-instance-setup.rst b/docs/tutorials/multi-instance-setup.rst
--- a/docs/tutorials/multi-instance-setup.rst
+++ b/docs/tutorials/multi-instance-setup.rst
@@ -27,7 +27,7 @@ 1. Install a new instance of |RCE|, choo
Once the new instance is installed you need to update the licence token and
database connection string in the
- :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` file.
+ :file:`config/_shared/rhodecode.ini` file.
.. code-block:: bash
diff --git a/package.json b/package.json
--- a/package.json
+++ b/package.json
@@ -47,7 +47,6 @@
"moment": "^2.18.1",
"mousetrap": "^1.6.1",
"polymer-webpack-loader": "^2.0.1",
- "qrious": "^4.0.2",
"raw-loader": "1.0.0-beta.0",
"sticky-sidebar": "3.3.1",
"style-loader": "^0.21.0",
diff --git a/pytest.ini b/pytest.ini
--- a/pytest.ini
+++ b/pytest.ini
@@ -21,3 +21,7 @@ markers =
skip_backends: Mark tests as skipped for given backends.
backends: Mark backends
dbs: database markers for running tests for given DB
+
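+# the `env` section below is provided by the pytest-env plugin (added in requirements_test.txt)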
+env =
+ RC_TEST=1
+ RUN_ENV=test
diff --git a/requirements.txt b/requirements.txt
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,9 +1,9 @@
# deps, generated via pipdeptree --exclude setuptools,wheel,pipdeptree,pip -f | tr '[:upper:]' '[:lower:]'
-alembic==1.12.1
+alembic==1.13.1
mako==1.2.4
markupsafe==2.1.2
- sqlalchemy==1.4.51
+ sqlalchemy==1.4.52
greenlet==3.0.3
typing_extensions==4.9.0
async-timeout==4.0.3
@@ -27,13 +27,13 @@ celery==5.3.6
vine==5.1.0
python-dateutil==2.8.2
six==1.16.0
- tzdata==2023.4
+ tzdata==2024.1
vine==5.1.0
channelstream==0.7.1
gevent==24.2.1
greenlet==3.0.3
zope.event==5.0.0
- zope.interface==6.1.0
+ zope.interface==6.3.0
itsdangerous==1.1.0
marshmallow==2.18.0
pyramid==2.0.2
@@ -46,9 +46,7 @@ channelstream==0.7.1
venusian==3.0.0
webob==1.8.7
zope.deprecation==5.0.0
- zope.interface==6.1.0
- pyramid-apispec==0.3.3
- apispec==1.3.3
+ zope.interface==6.3.0
pyramid-jinja2==2.10
jinja2==3.1.2
markupsafe==2.1.2
@@ -63,7 +61,7 @@ channelstream==0.7.1
venusian==3.0.0
webob==1.8.7
zope.deprecation==5.0.0
- zope.interface==6.1.0
+ zope.interface==6.3.0
zope.deprecation==5.0.0
python-dateutil==2.8.2
six==1.16.0
@@ -82,20 +80,20 @@ deform==2.0.15
peppercorn==0.6
translationstring==1.4
zope.deprecation==5.0.0
-diskcache==5.6.3
docutils==0.19
-dogpile.cache==1.3.0
+dogpile.cache==1.3.3
decorator==5.1.1
stevedore==5.1.0
pbr==5.11.1
formencode==2.1.0
six==1.16.0
+fsspec==2024.6.0
gunicorn==21.2.0
- packaging==23.1
+ packaging==24.0
gevent==24.2.1
greenlet==3.0.3
zope.event==5.0.0
- zope.interface==6.1.0
+ zope.interface==6.3.0
ipython==8.14.0
backcall==0.2.0
decorator==5.1.1
@@ -116,11 +114,11 @@ ipython==8.14.0
pure-eval==0.2.2
traitlets==5.9.0
markdown==3.4.3
-msgpack==1.0.7
+msgpack==1.0.8
mysqlclient==2.1.1
nbconvert==7.7.3
- beautifulsoup4==4.11.2
- soupsieve==2.4
+ beautifulsoup4==4.12.3
+ soupsieve==2.5
bleach==6.1.0
six==1.16.0
webencodings==0.5.1
@@ -165,20 +163,15 @@ nbconvert==7.7.3
platformdirs==3.10.0
traitlets==5.9.0
traitlets==5.9.0
- packaging==23.1
pandocfilters==1.5.0
pygments==2.15.1
tinycss2==1.2.1
webencodings==0.5.1
traitlets==5.9.0
-orjson==3.9.13
-pastescript==3.4.0
- paste==3.7.1
- six==1.16.0
- pastedeploy==3.1.0
- six==1.16.0
+orjson==3.10.3
+paste==3.10.1
premailer==3.10.0
- cachetools==5.3.2
+ cachetools==5.3.3
cssselect==1.2.0
cssutils==2.6.0
lxml==4.9.3
@@ -194,11 +187,11 @@ pycmarkgfm==1.2.0
cffi==1.16.0
pycparser==2.21
pycryptodome==3.17
-pycurl==7.45.2
+pycurl==7.45.3
pymysql==1.0.3
pyotp==2.8.0
pyparsing==3.1.1
-pyramid-debugtoolbar==4.11
+pyramid-debugtoolbar==4.12.1
pygments==2.15.1
pyramid==2.0.2
hupper==1.12
@@ -210,7 +203,7 @@ pyramid-debugtoolbar==4.11
venusian==3.0.0
webob==1.8.7
zope.deprecation==5.0.0
- zope.interface==6.1.0
+ zope.interface==6.3.0
pyramid-mako==1.1.0
mako==1.2.4
markupsafe==2.1.2
@@ -224,7 +217,7 @@ pyramid-debugtoolbar==4.11
venusian==3.0.0
webob==1.8.7
zope.deprecation==5.0.0
- zope.interface==6.1.0
+ zope.interface==6.3.0
pyramid-mailer==0.15.1
pyramid==2.0.2
hupper==1.12
@@ -236,13 +229,13 @@ pyramid-mailer==0.15.1
venusian==3.0.0
webob==1.8.7
zope.deprecation==5.0.0
- zope.interface==6.1.0
+ zope.interface==6.3.0
repoze.sendmail==4.4.1
transaction==3.1.0
- zope.interface==6.1.0
- zope.interface==6.1.0
+ zope.interface==6.3.0
+ zope.interface==6.3.0
transaction==3.1.0
- zope.interface==6.1.0
+ zope.interface==6.3.0
python-ldap==3.4.3
pyasn1==0.4.8
pyasn1-modules==0.2.8
@@ -257,39 +250,64 @@ python3-saml==1.15.0
xmlsec==1.3.13
lxml==4.9.3
pyyaml==6.0.1
-redis==5.0.1
+redis==5.0.4
+ async-timeout==4.0.3
regex==2022.10.31
routes==2.5.1
repoze.lru==0.7
six==1.16.0
-simplejson==3.19.1
+s3fs==2024.6.0
+ aiobotocore==2.13.0
+ aiohttp==3.9.5
+ aiosignal==1.3.1
+ frozenlist==1.4.1
+ attrs==22.2.0
+ frozenlist==1.4.1
+ multidict==6.0.5
+ yarl==1.9.4
+ idna==3.4
+ multidict==6.0.5
+ aioitertools==0.11.0
+ botocore==1.34.106
+ jmespath==1.0.1
+ python-dateutil==2.8.2
+ six==1.16.0
+ urllib3==1.26.14
+ wrapt==1.16.0
+ aiohttp==3.9.5
+ aiosignal==1.3.1
+ frozenlist==1.4.1
+ attrs==22.2.0
+ frozenlist==1.4.1
+ multidict==6.0.5
+ yarl==1.9.4
+ idna==3.4
+ multidict==6.0.5
+ fsspec==2024.6.0
+simplejson==3.19.2
sshpubkeys==3.3.1
cryptography==40.0.2
cffi==1.16.0
pycparser==2.21
ecdsa==0.18.0
six==1.16.0
-sqlalchemy==1.4.51
+sqlalchemy==1.4.52
greenlet==3.0.3
typing_extensions==4.9.0
supervisor==4.2.5
tzlocal==4.3
pytz-deprecation-shim==0.1.0.post0
- tzdata==2023.4
+ tzdata==2024.1
+tempita==0.5.2
unidecode==1.3.6
urlobject==2.4.3
waitress==3.0.0
-weberror==0.13.1
- paste==3.7.1
- six==1.16.0
- pygments==2.15.1
- tempita==0.5.2
- webob==1.8.7
-webhelpers2==2.0
+webhelpers2==2.1
markupsafe==2.1.2
six==1.16.0
whoosh==2.7.4
zope.cachedescriptors==5.0.0
+qrcode==7.4.2
## uncomment to add the debug libraries
#-r requirements_debug.txt
diff --git a/requirements_test.txt b/requirements_test.txt
--- a/requirements_test.txt
+++ b/requirements_test.txt
@@ -1,43 +1,45 @@
# test related requirements
-
-cov-core==1.15.0
- coverage==7.2.3
-mock==5.0.2
-py==1.11.0
-pytest-cov==4.0.0
- coverage==7.2.3
- pytest==7.3.1
- attrs==22.2.0
+mock==5.1.0
+pytest-cov==4.1.0
+ coverage==7.4.3
+ pytest==8.1.1
iniconfig==2.0.0
- packaging==23.1
- pluggy==1.0.0
-pytest-rerunfailures==12.0
+ packaging==24.0
+ pluggy==1.4.0
+pytest-env==1.1.3
+ pytest==8.1.1
+ iniconfig==2.0.0
+ packaging==24.0
+ pluggy==1.4.0
pytest-profiling==1.7.0
gprof2dot==2022.7.29
- pytest==7.3.1
- attrs==22.2.0
+ pytest==8.1.1
iniconfig==2.0.0
- packaging==23.1
- pluggy==1.0.0
+ packaging==24.0
+ pluggy==1.4.0
six==1.16.0
-pytest-runner==6.0.0
-pytest-sugar==0.9.7
- packaging==23.1
- pytest==7.3.1
- attrs==22.2.0
+pytest-rerunfailures==13.0
+ packaging==24.0
+ pytest==8.1.1
iniconfig==2.0.0
- packaging==23.1
- pluggy==1.0.0
- termcolor==2.3.0
-pytest-timeout==2.1.0
- pytest==7.3.1
- attrs==22.2.0
+ packaging==24.0
+ pluggy==1.4.0
+pytest-runner==6.0.1
+pytest-sugar==1.0.0
+ packaging==24.0
+ pytest==8.1.1
iniconfig==2.0.0
- packaging==23.1
- pluggy==1.0.0
+ packaging==24.0
+ pluggy==1.4.0
+ termcolor==2.4.0
+pytest-timeout==2.3.1
+ pytest==8.1.1
+ iniconfig==2.0.0
+ packaging==24.0
+ pluggy==1.4.0
webtest==3.0.0
- beautifulsoup4==4.11.2
- soupsieve==2.4
+ beautifulsoup4==4.12.3
+ soupsieve==2.5
waitress==3.0.0
webob==1.8.7
diff --git a/rhodecode/VERSION b/rhodecode/VERSION
--- a/rhodecode/VERSION
+++ b/rhodecode/VERSION
@@ -1,1 +1,1 @@
-5.0.3
\ No newline at end of file
+5.1.0
\ No newline at end of file
diff --git a/rhodecode/__init__.py b/rhodecode/__init__.py
--- a/rhodecode/__init__.py
+++ b/rhodecode/__init__.py
@@ -82,10 +82,10 @@ PYRAMID_SETTINGS = {}
EXTENSIONS = {}
__version__ = ('.'.join((str(each) for each in VERSION[:3])))
-__dbversion__ = 114 # defines current db version for migrations
+__dbversion__ = 115 # defines current db version for migrations
__license__ = 'AGPLv3, and Commercial License'
__author__ = 'RhodeCode GmbH'
__url__ = 'https://code.rhodecode.com'
-is_test = False
+is_test = os.getenv('RC_TEST', '0') == '1'
disable_error_handler = False
diff --git a/rhodecode/api/__init__.py b/rhodecode/api/__init__.py
--- a/rhodecode/api/__init__.py
+++ b/rhodecode/api/__init__.py
@@ -22,7 +22,6 @@ import sys
import fnmatch
import decorator
-import typing
import venusian
from collections import OrderedDict
@@ -45,7 +44,8 @@ from rhodecode.model.db import User, Use
log = logging.getLogger(__name__)
DEFAULT_RENDERER = 'jsonrpc_renderer'
-DEFAULT_URL = '/_admin/apiv2'
+DEFAULT_URL = '/_admin/api'
+SERVICE_API_IDENTIFIER = 'service_'
def find_methods(jsonrpc_methods, pattern):
@@ -54,7 +54,9 @@ def find_methods(jsonrpc_methods, patter
pattern = [pattern]
for single_pattern in pattern:
- for method_name, method in jsonrpc_methods.items():
+ for method_name, method in filter(
+ lambda x: not x[0].startswith(SERVICE_API_IDENTIFIER), jsonrpc_methods.items()
+ ):
if fnmatch.fnmatch(method_name, single_pattern):
matches[method_name] = method
return matches
@@ -190,43 +192,48 @@ def request_view(request):
# check if we can find this session using api_key, get_by_auth_token
# search not expired tokens only
try:
- api_user = User.get_by_auth_token(request.rpc_api_key)
+ if not request.rpc_method.startswith(SERVICE_API_IDENTIFIER):
+ api_user = User.get_by_auth_token(request.rpc_api_key)
- if api_user is None:
- return jsonrpc_error(
- request, retid=request.rpc_id, message='Invalid API KEY')
+ if api_user is None:
+ return jsonrpc_error(
+ request, retid=request.rpc_id, message='Invalid API KEY')
- if not api_user.active:
- return jsonrpc_error(
- request, retid=request.rpc_id,
- message='Request from this user not allowed')
+ if not api_user.active:
+ return jsonrpc_error(
+ request, retid=request.rpc_id,
+ message='Request from this user not allowed')
- # check if we are allowed to use this IP
- auth_u = AuthUser(
- api_user.user_id, request.rpc_api_key, ip_addr=request.rpc_ip_addr)
- if not auth_u.ip_allowed:
- return jsonrpc_error(
- request, retid=request.rpc_id,
- message='Request from IP:{} not allowed'.format(
- request.rpc_ip_addr))
- else:
- log.info('Access for IP:%s allowed', request.rpc_ip_addr)
+ # check if we are allowed to use this IP
+ auth_u = AuthUser(
+ api_user.user_id, request.rpc_api_key, ip_addr=request.rpc_ip_addr)
+ if not auth_u.ip_allowed:
+ return jsonrpc_error(
+ request, retid=request.rpc_id,
+ message='Request from IP:{} not allowed'.format(
+ request.rpc_ip_addr))
+ else:
+ log.info('Access for IP:%s allowed', request.rpc_ip_addr)
+
+ # register our auth-user
+ request.rpc_user = auth_u
+ request.environ['rc_auth_user_id'] = str(auth_u.user_id)
- # register our auth-user
- request.rpc_user = auth_u
- request.environ['rc_auth_user_id'] = str(auth_u.user_id)
+ # now check if token is valid for API
+ auth_token = request.rpc_api_key
+ token_match = api_user.authenticate_by_token(
+ auth_token, roles=[UserApiKeys.ROLE_API])
+ invalid_token = not token_match
- # now check if token is valid for API
- auth_token = request.rpc_api_key
- token_match = api_user.authenticate_by_token(
- auth_token, roles=[UserApiKeys.ROLE_API])
- invalid_token = not token_match
-
- log.debug('Checking if API KEY is valid with proper role')
- if invalid_token:
- return jsonrpc_error(
- request, retid=request.rpc_id,
- message='API KEY invalid or, has bad role for an API call')
+ log.debug('Checking if API KEY is valid with proper role')
+ if invalid_token:
+ return jsonrpc_error(
+ request, retid=request.rpc_id,
+ message='API KEY invalid or, has bad role for an API call')
+ else:
+ auth_u = 'service'
+ if request.rpc_api_key != request.registry.settings['app.service_api.token']:
+ raise Exception("Provided service secret is not recognized!")
except Exception:
log.exception('Error on API AUTH')
@@ -290,7 +297,8 @@ def request_view(request):
})
# register some common functions for usage
- attach_context_attributes(TemplateArgs(), request, request.rpc_user.user_id)
+ rpc_user = request.rpc_user.user_id if hasattr(request, 'rpc_user') else None
+ attach_context_attributes(TemplateArgs(), request, rpc_user)
statsd = request.registry.statsd
diff --git a/rhodecode/api/tests/test_create_repo.py b/rhodecode/api/tests/test_create_repo.py
--- a/rhodecode/api/tests/test_create_repo.py
+++ b/rhodecode/api/tests/test_create_repo.py
@@ -41,7 +41,7 @@ class TestCreateRepo(object):
@pytest.mark.parametrize('given, expected_name, expected_exc', [
('api repo-1', 'api-repo-1', False),
('api-repo 1-ąć', 'api-repo-1-ąć', False),
- (u'unicode-ąć', u'unicode-ąć', False),
+ ('unicode-ąć', u'unicode-ąć', False),
('some repo v1.2', 'some-repo-v1.2', False),
('v2.0', 'v2.0', False),
])
diff --git a/rhodecode/api/tests/test_create_repo_group.py b/rhodecode/api/tests/test_create_repo_group.py
--- a/rhodecode/api/tests/test_create_repo_group.py
+++ b/rhodecode/api/tests/test_create_repo_group.py
@@ -211,8 +211,8 @@ class TestCreateRepoGroup(object):
expected = {
'repo_group':
- u'You do not have the permission to store '
- u'repository groups in the root location.'}
+ 'You do not have the permission to store '
+ 'repository groups in the root location.'}
assert_error(id_, expected, given=response.body)
def test_api_create_repo_group_regular_user_no_parent_group_perms(self):
@@ -232,8 +232,8 @@ class TestCreateRepoGroup(object):
expected = {
'repo_group':
- u"You do not have the permissions to store "
- u"repository groups inside repository group `{}`".format(repo_group_name)}
+ "You do not have the permissions to store "
+ "repository groups inside repository group `{}`".format(repo_group_name)}
try:
assert_error(id_, expected, given=response.body)
finally:
diff --git a/rhodecode/api/tests/test_get_gist.py b/rhodecode/api/tests/test_get_gist.py
--- a/rhodecode/api/tests/test_get_gist.py
+++ b/rhodecode/api/tests/test_get_gist.py
@@ -76,8 +76,8 @@ class TestApiGetGist(object):
'url': 'http://%s/_admin/gists/%s' % (http_host_only_stub, gist_id,),
'acl_level': Gist.ACL_LEVEL_PUBLIC,
'content': {
- u'filename1.txt': u'hello world',
- u'filename1ą.txt': u'hello worldę'
+ 'filename1.txt': 'hello world',
+ 'filename1ą.txt': 'hello worldę'
},
}
diff --git a/rhodecode/api/tests/test_service_api.py b/rhodecode/api/tests/test_service_api.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/api/tests/test_service_api.py
@@ -0,0 +1,55 @@
+
+# Copyright (C) 2010-2023 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+import pytest
+
+from rhodecode.api.tests.utils import (
+ build_data, api_call)
+
+
+@pytest.mark.usefixtures("app")
+class TestServiceApi:
+
+ def test_service_api_with_wrong_secret(self):
+ id, payload = build_data("wrong_api_key", 'service_get_repo_name_by_id')
+ response = api_call(self.app, payload)
+
+ assert 'Invalid API KEY' == response.json['error']
+
+ def test_service_api_with_legit_secret(self):
+ id, payload = build_data(self.app.app.config.get_settings()['app.service_api.token'],
+ 'service_get_repo_name_by_id', repo_id='1')
+ response = api_call(self.app, payload)
+ assert not response.json['error']
+
+ def test_service_api_not_a_part_of_public_api_suggestions(self):
+ id, payload = build_data("secret", 'some_random_guess_method')
+ response = api_call(self.app, payload)
+ assert 'service_' not in response.json['error']
+
+ def test_service_get_data_for_ssh_wrapper_output(self):
+ id, payload = build_data(
+ self.app.app.config.get_settings()['app.service_api.token'],
+ 'service_get_data_for_ssh_wrapper',
+ user_id=1,
+ repo_name='vcs_test_git')
+ response = api_call(self.app, payload)
+
+ assert ['branch_permissions', 'repo_permissions', 'repos_path', 'user_id', 'username']\
+ == list(response.json['result'].keys())
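A minimal client-side sketch of how such a service_* call could look over plain HTTP, assuming the usual RhodeCode JSON-RPC envelope (id / auth_token / method / args); the URL and token values below are illustrative assumptions, not part of this change, and the endpoint path mirrors DEFAULT_URL above.

import requests

API_URL = "http://localhost:10020/_admin/api"    # assumed local instance; path matches DEFAULT_URL
SERVICE_TOKEN = "secret"                         # assumed: value of app.service_api.token from the .ini

payload = {
    "id": 1,
    "auth_token": SERVICE_TOKEN,                 # service_* methods check this against the service token
    "method": "service_get_repo_name_by_id",
    "args": {"repo_id": "1"},
}

response = requests.post(API_URL, json=payload, timeout=10)
print(response.json().get("result"))             # e.g. {'repo_name': '...'} or None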
diff --git a/rhodecode/api/views/server_api.py b/rhodecode/api/views/server_api.py
--- a/rhodecode/api/views/server_api.py
+++ b/rhodecode/api/views/server_api.py
@@ -25,7 +25,7 @@ from rhodecode.api import (
from rhodecode.api.utils import (
Optional, OAttr, has_superadmin_permission, get_user_or_error)
-from rhodecode.lib.utils import repo2db_mapper
+from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path
from rhodecode.lib import system_info
from rhodecode.lib import user_sessions
from rhodecode.lib import exc_tracking
@@ -33,7 +33,6 @@ from rhodecode.lib.ext_json import json
from rhodecode.lib.utils2 import safe_int
from rhodecode.model.db import UserIpMap
from rhodecode.model.scm import ScmModel
-from rhodecode.model.settings import VcsSettingsModel
from rhodecode.apps.file_store import utils
from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
FileOverSizeException
@@ -103,7 +102,7 @@ def get_repo_store(request, apiuser):
if not has_superadmin_permission(apiuser):
raise JSONRPCForbidden()
- path = VcsSettingsModel().get_repos_location()
+ path = get_rhodecode_repo_store_path()
return {"path": path}
diff --git a/rhodecode/api/views/service_api.py b/rhodecode/api/views/service_api.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/api/views/service_api.py
@@ -0,0 +1,125 @@
+# Copyright (C) 2011-2023 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+import logging
+import datetime
+from collections import defaultdict
+
+from sqlalchemy import Table
+from rhodecode.api import jsonrpc_method, SERVICE_API_IDENTIFIER
+
+
+log = logging.getLogger(__name__)
+
+
+@jsonrpc_method()
+def service_get_data_for_ssh_wrapper(request, apiuser, user_id, repo_name, key_id=None):
+ from rhodecode.model.db import User
+ from rhodecode.model.scm import ScmModel
+ from rhodecode.model.meta import raw_query_executor, Base
+
+ if key_id:
+ table = Table('user_ssh_keys', Base.metadata, autoload=False)
+ atime = datetime.datetime.utcnow()
+ stmt = (
+ table.update()
+ .where(table.c.ssh_key_id == key_id)
+ .values(accessed_on=atime)
+ )
+
+ res_count = None
+ with raw_query_executor() as session:
+ result = session.execute(stmt)
+ if result.rowcount:
+ res_count = result.rowcount
+
+ if res_count:
+ log.debug(f'Update key id:{key_id} access time')
+ db_user = User.get(user_id)
+ if not db_user:
+ return None
+ auth_user = db_user.AuthUser()
+
+ return {
+ 'user_id': db_user.user_id,
+ 'username': db_user.username,
+ 'repo_permissions': auth_user.permissions['repositories'],
+ "branch_permissions": auth_user.get_branch_permissions(repo_name),
+ "repos_path": ScmModel().repos_path
+ }
+
+
+@jsonrpc_method()
+def service_get_repo_name_by_id(request, apiuser, repo_id):
+ from rhodecode.model.repo import RepoModel
+ by_id_match = RepoModel().get_repo_by_id(repo_id)
+ if by_id_match:
+ repo_name = by_id_match.repo_name
+ return {
+ 'repo_name': repo_name
+ }
+ return None
+
+
+@jsonrpc_method()
+def service_mark_for_invalidation(request, apiuser, repo_name):
+ from rhodecode.model.scm import ScmModel
+ ScmModel().mark_for_invalidation(repo_name)
+ return {'msg': "Applied"}
+
+
+@jsonrpc_method()
+def service_config_to_hgrc(request, apiuser, cli_flags, repo_name):
+ from rhodecode.model.db import RhodeCodeUi
+ from rhodecode.model.settings import VcsSettingsModel
+
+ ui_sections = defaultdict(list)
+ ui = VcsSettingsModel(repo=repo_name).get_ui_settings(section=None, key=None)
+
+ default_hooks = [
+ ('pretxnchangegroup.ssh_auth', 'python:vcsserver.hooks.pre_push_ssh_auth'),
+ ('pretxnchangegroup.ssh', 'python:vcsserver.hooks.pre_push_ssh'),
+ ('changegroup.ssh', 'python:vcsserver.hooks.post_push_ssh'),
+
+ ('preoutgoing.ssh', 'python:vcsserver.hooks.pre_pull_ssh'),
+ ('outgoing.ssh', 'python:vcsserver.hooks.post_pull_ssh'),
+ ]
+
+ for k, v in default_hooks:
+ ui_sections['hooks'].append((k, v))
+
+ for entry in ui:
+ if not entry.active:
+ continue
+ sec = entry.section
+ key = entry.key
+
+ if sec in cli_flags:
+ # we want only custom hooks, so we skip builtins
+ if sec == 'hooks' and key in RhodeCodeUi.HOOKS_BUILTIN:
+ continue
+
+ ui_sections[sec].append([key, entry.value])
+
+ flags = []
+ for _sec, key_val in ui_sections.items():
+ flags.append(' ')
+ flags.append(f'[{_sec}]')
+ for key, val in key_val:
+ flags.append(f'{key}= {val}')
+ return {'flags': flags}
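For orientation, a rough sketch of the shape service_config_to_hgrc returns when only the default SSH hooks end up in the output (illustrative values derived from the code above, not captured output):

# hypothetical return value: section headers interleaved with "key= value" lines
example_flags = {
    'flags': [
        ' ',
        '[hooks]',
        'pretxnchangegroup.ssh_auth= python:vcsserver.hooks.pre_push_ssh_auth',
        'pretxnchangegroup.ssh= python:vcsserver.hooks.pre_push_ssh',
        'changegroup.ssh= python:vcsserver.hooks.post_push_ssh',
        'preoutgoing.ssh= python:vcsserver.hooks.pre_pull_ssh',
        'outgoing.ssh= python:vcsserver.hooks.post_pull_ssh',
    ]
}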
diff --git a/rhodecode/apps/_base/__init__.py b/rhodecode/apps/_base/__init__.py
--- a/rhodecode/apps/_base/__init__.py
+++ b/rhodecode/apps/_base/__init__.py
@@ -104,6 +104,11 @@ class TemplateArgs(StrictAttributeDict):
class BaseAppView(object):
+ DONT_CHECKOUT_VIEWS = ["channelstream_connect", "ops_ping"]
+ EXTRA_VIEWS_TO_IGNORE = ['login', 'register', 'logout']
+ SETUP_2FA_VIEW = 'setup_2fa'
+ VERIFY_2FA_VIEW = 'check_2fa'
+
def __init__(self, context, request):
self.request = request
self.context = context
@@ -117,13 +122,19 @@ class BaseAppView(object):
self._rhodecode_user = request.user # auth user
self._rhodecode_db_user = self._rhodecode_user.get_instance()
+ self.user_data = self._rhodecode_db_user.user_data if self._rhodecode_db_user else {}
self._maybe_needs_password_change(
request.matched_route.name, self._rhodecode_db_user
)
+ self._maybe_needs_2fa_configuration(
+ request.matched_route.name, self._rhodecode_db_user
+ )
+ self._maybe_needs_2fa_check(
+ request.matched_route.name, self._rhodecode_db_user
+ )
def _maybe_needs_password_change(self, view_name, user_obj):
- dont_check_views = ["channelstream_connect", "ops_ping"]
- if view_name in dont_check_views:
+ if view_name in self.DONT_CHECKOUT_VIEWS:
return
log.debug(
@@ -133,6 +144,7 @@ class BaseAppView(object):
skip_user_views = [
"logout",
"login",
+ "check_2fa",
"my_account_password",
"my_account_password_update",
]
@@ -144,7 +156,7 @@ class BaseAppView(object):
return
now = time.time()
- should_change = user_obj.user_data.get("force_password_change")
+ should_change = self.user_data.get("force_password_change")
change_after = safe_int(should_change) or 0
if should_change and now > change_after:
log.debug("User %s requires password change", user_obj)
@@ -157,6 +169,33 @@ class BaseAppView(object):
if view_name not in skip_user_views:
raise HTTPFound(self.request.route_path("my_account_password"))
+ def _maybe_needs_2fa_configuration(self, view_name, user_obj):
+ if view_name in self.DONT_CHECKOUT_VIEWS + self.EXTRA_VIEWS_TO_IGNORE:
+ return
+
+ if not user_obj:
+ return
+
+ if user_obj.needs_2fa_configure and view_name != self.SETUP_2FA_VIEW:
+ h.flash(
+ "You are required to configure 2FA",
+ "warning",
+ ignore_duplicate=False,
+ )
+            # Special case for users created "on the fly" (e.g. a new user coming from LDAP)
+ user_obj.check_2fa_required = False
+ raise HTTPFound(self.request.route_path(self.SETUP_2FA_VIEW))
+
+ def _maybe_needs_2fa_check(self, view_name, user_obj):
+ if view_name in self.DONT_CHECKOUT_VIEWS + self.EXTRA_VIEWS_TO_IGNORE:
+ return
+
+ if not user_obj:
+ return
+
+ if user_obj.check_2fa_required and view_name != self.VERIFY_2FA_VIEW:
+ raise HTTPFound(self.request.route_path(self.VERIFY_2FA_VIEW))
+
def _log_creation_exception(self, e, repo_name):
_ = self.request.translate
reason = None
@@ -676,6 +715,7 @@ class BaseReferencesView(RepoAppView):
{
"name": _render("name", ref_name, files_url, closed),
"name_raw": ref_name,
+ "closed": closed,
"date": _render("date", commit.date),
"date_raw": datetime_to_time(commit.date),
"author": _render("author", commit.author),
diff --git a/rhodecode/apps/admin/tests/test_admin_repos.py b/rhodecode/apps/admin/tests/test_admin_repos.py
--- a/rhodecode/apps/admin/tests/test_admin_repos.py
+++ b/rhodecode/apps/admin/tests/test_admin_repos.py
@@ -446,8 +446,8 @@ class TestAdminRepos(object):
csrf_token=csrf_token))
response.mustcontain(
- u"You do not have the permission to store repositories in "
- u"the root location.")
+ "You do not have the permission to store repositories in "
+ "the root location.")
@mock.patch.object(RepoModel, '_create_filesystem_repo', error_function)
def test_create_repo_when_filesystem_op_fails(
diff --git a/rhodecode/apps/admin/tests/test_admin_settings.py b/rhodecode/apps/admin/tests/test_admin_settings.py
--- a/rhodecode/apps/admin/tests/test_admin_settings.py
+++ b/rhodecode/apps/admin/tests/test_admin_settings.py
@@ -485,7 +485,7 @@ class TestAdminSystemInfo(object):
update_data = {
'versions': [
{
- 'version': '100.3.1415926535',
+ 'version': '100.0.0',
'general': 'The latest version we are ever going to ship'
},
{
@@ -502,15 +502,15 @@ class TestAdminSystemInfo(object):
update_data = {
'versions': [
{
- 'version': '0.0.0',
+ 'version': '4.0.0',
'general': 'The first version we ever shipped'
}
]
}
+ text = f"Your current version, {rhodecode.__version__}, is up-to-date as it is equal to or newer than the latest available version, 4.0.0."
with mock.patch(UPDATE_DATA_QUALNAME, return_value=update_data):
response = self.app.get(route_path('admin_settings_system_update'))
- response.mustcontain(
- 'This instance is already running the latest stable version')
+ response.mustcontain(text)
def test_system_update_bad_response(self, autologin_user):
with mock.patch(UPDATE_DATA_QUALNAME, side_effect=ValueError('foo')):
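The tests above only pin the expected messages; a sketch of the version comparison they rely on, using packaging.version (an assumption for illustration only, not the UpdateModel implementation):

from packaging.version import Version

def is_outdated(current_version: str, latest_version: str) -> bool:
    # assumed semantics: only a strictly newer advertised version should
    # trigger the "should upgrade" notice
    return Version(current_version) < Version(latest_version)

assert is_outdated("5.1.0", "4.0.0") is False     # up-to-date case exercised above
assert is_outdated("4.0.0", "100.0.0") is True    # newer version available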
diff --git a/rhodecode/apps/admin/views/permissions.py b/rhodecode/apps/admin/views/permissions.py
--- a/rhodecode/apps/admin/views/permissions.py
+++ b/rhodecode/apps/admin/views/permissions.py
@@ -28,7 +28,7 @@ from pyramid.renderers import render
from pyramid.response import Response
from rhodecode.apps._base import BaseAppView, DataGridAppView
-from rhodecode.apps.ssh_support import SshKeyFileChangeEvent
+from rhodecode.apps.ssh_support.events import SshKeyFileChangeEvent
from rhodecode import events
from rhodecode.lib import helpers as h
diff --git a/rhodecode/apps/admin/views/settings.py b/rhodecode/apps/admin/views/settings.py
--- a/rhodecode/apps/admin/views/settings.py
+++ b/rhodecode/apps/admin/views/settings.py
@@ -32,13 +32,13 @@ from pyramid.response import Response
from rhodecode.apps._base import BaseAppView
from rhodecode.apps._base.navigation import navigation_list
-from rhodecode.apps.svn_support.config_keys import generate_config
+from rhodecode.apps.svn_support import config_keys
from rhodecode.lib import helpers as h
from rhodecode.lib.auth import (
LoginRequired, HasPermissionAllDecorator, CSRFRequired)
from rhodecode.lib.celerylib import tasks, run_task
from rhodecode.lib.str_utils import safe_str
-from rhodecode.lib.utils import repo2db_mapper
+from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path
from rhodecode.lib.utils2 import str2bool, AttributeDict
from rhodecode.lib.index import searcher_from_config
@@ -113,10 +113,8 @@ class AdminSettingsView(BaseAppView):
model = VcsSettingsModel()
c.svn_branch_patterns = model.get_global_svn_branch_patterns()
c.svn_tag_patterns = model.get_global_svn_tag_patterns()
-
- settings = self.request.registry.settings
- c.svn_proxy_generate_config = settings[generate_config]
-
+ c.svn_generate_config = rhodecode.ConfigGet().get_bool(config_keys.generate_config)
+ c.svn_config_path = rhodecode.ConfigGet().get_str(config_keys.config_file_path)
defaults = self._form_defaults()
model.create_largeobjects_dirs_if_needed(defaults['paths_root_path'])
@@ -143,9 +141,8 @@ class AdminSettingsView(BaseAppView):
c.svn_branch_patterns = model.get_global_svn_branch_patterns()
c.svn_tag_patterns = model.get_global_svn_tag_patterns()
- settings = self.request.registry.settings
- c.svn_proxy_generate_config = settings[generate_config]
-
+ c.svn_generate_config = rhodecode.ConfigGet().get_bool(config_keys.generate_config)
+ c.svn_config_path = rhodecode.ConfigGet().get_str(config_keys.config_file_path)
application_form = ApplicationUiSettingsForm(self.request.translate)()
try:
@@ -167,9 +164,6 @@ class AdminSettingsView(BaseAppView):
return Response(html)
try:
- if c.visual.allow_repo_location_change:
- model.update_global_path_setting(form_result['paths_root_path'])
-
model.update_global_ssl_setting(form_result['web_push_ssl'])
model.update_global_hook_settings(form_result)
@@ -217,7 +211,7 @@ class AdminSettingsView(BaseAppView):
def settings_mapping(self):
c = self.load_default_context()
c.active = 'mapping'
- c.storage_path = VcsSettingsModel().get_repos_location()
+ c.storage_path = get_rhodecode_repo_store_path()
data = render('rhodecode:templates/admin/settings/settings.mako',
self._get_template_context(c), self.request)
html = formencode.htmlfill.render(
diff --git a/rhodecode/apps/admin/views/system_info.py b/rhodecode/apps/admin/views/system_info.py
--- a/rhodecode/apps/admin/views/system_info.py
+++ b/rhodecode/apps/admin/views/system_info.py
@@ -165,15 +165,20 @@ class AdminSystemInfoSettingsView(BaseAp
(_('Storage location'), val('storage')['path'], state('storage')),
(_('Storage info'), val('storage')['text'], state('storage')),
(_('Storage inodes'), val('storage_inodes')['text'], state('storage_inodes')),
+ ('', '', ''), # spacer
(_('Gist storage location'), val('storage_gist')['path'], state('storage_gist')),
(_('Gist storage info'), val('storage_gist')['text'], state('storage_gist')),
+ ('', '', ''), # spacer
+ (_('Archive cache storage type'), val('storage_archive')['type'], state('storage_archive')),
(_('Archive cache storage location'), val('storage_archive')['path'], state('storage_archive')),
(_('Archive cache info'), val('storage_archive')['text'], state('storage_archive')),
+ ('', '', ''), # spacer
(_('Temp storage location'), val('storage_temp')['path'], state('storage_temp')),
(_('Temp storage info'), val('storage_temp')['text'], state('storage_temp')),
+ ('', '', ''), # spacer
(_('Search info'), val('search')['text'], state('search')),
(_('Search location'), val('search')['location'], state('search')),
@@ -189,7 +194,7 @@ class AdminSystemInfoSettingsView(BaseAp
]
c.vcsserver_data_items = [
- (k, v) for k,v in (val('vcs_server_config') or {}).items()
+ (k, v) for k, v in (val('vcs_server_config') or {}).items()
]
if snapshot:
@@ -209,7 +214,8 @@ class AdminSystemInfoSettingsView(BaseAp
update_url = UpdateModel().get_update_url()
def _err(s):
-        return '<div style="color:#ff8888; padding:4px 2px">{}</div>'.format(s)
+        return f'<div style="color:#ff8888; padding:4px 2px">{s}</div>'
+
try:
data = UpdateModel().get_update_data(update_url)
except urllib.error.URLError as e:
@@ -225,12 +231,12 @@ class AdminSystemInfoSettingsView(BaseAp
c.update_url = update_url
c.latest_data = latest
- c.latest_ver = latest['version']
- c.cur_ver = rhodecode.__version__
+ c.latest_ver = (latest['version'] or '').strip()
+ c.cur_ver = self.request.GET.get('ver') or rhodecode.__version__
c.should_upgrade = False
- is_oudated = UpdateModel().is_outdated(c.cur_ver, c.latest_ver)
- if is_oudated:
+ is_outdated = UpdateModel().is_outdated(c.cur_ver, c.latest_ver)
+ if is_outdated:
c.should_upgrade = True
c.important_notices = latest['general']
UpdateModel().store_version(latest['version'])
diff --git a/rhodecode/apps/admin/views/users.py b/rhodecode/apps/admin/views/users.py
--- a/rhodecode/apps/admin/views/users.py
+++ b/rhodecode/apps/admin/views/users.py
@@ -27,7 +27,7 @@ from pyramid.response import Response
from rhodecode import events
from rhodecode.apps._base import BaseAppView, DataGridAppView, UserAppView
-from rhodecode.apps.ssh_support import SshKeyFileChangeEvent
+from rhodecode.apps.ssh_support.events import SshKeyFileChangeEvent
from rhodecode.authentication.base import get_authn_registry, RhodeCodeExternalAuthPlugin
from rhodecode.authentication.plugins import auth_rhodecode
from rhodecode.events import trigger
@@ -214,7 +214,7 @@ class AdminUsersView(BaseAppView, DataGr
html = formencode.htmlfill.render(
data,
defaults=errors.value,
- errors=errors.unpack_errors() or {},
+ errors=errors.error_dict or {},
prefix_error=False,
encoding="UTF-8",
force_defaults=False
diff --git a/rhodecode/apps/login/__init__.py b/rhodecode/apps/login/__init__.py
--- a/rhodecode/apps/login/__init__.py
+++ b/rhodecode/apps/login/__init__.py
@@ -75,3 +75,27 @@ def includeme(config):
LoginView,
attr='password_reset_confirmation',
route_name='reset_password_confirmation', request_method='GET')
+
+ config.add_route(
+ name='setup_2fa',
+ pattern=ADMIN_PREFIX + '/setup_2fa')
+ config.add_view(
+ LoginView,
+ attr='setup_2fa',
+ route_name='setup_2fa', request_method=['GET', 'POST'],
+ renderer='rhodecode:templates/configure_2fa.mako')
+
+ config.add_route(
+ name='check_2fa',
+ pattern=ADMIN_PREFIX + '/check_2fa')
+ config.add_view(
+ LoginView,
+ attr='verify_2fa',
+ route_name='check_2fa', request_method='GET',
+ renderer='rhodecode:templates/verify_2fa.mako')
+ config.add_view(
+ LoginView,
+ attr='verify_2fa',
+ route_name='check_2fa', request_method='POST',
+ renderer='rhodecode:templates/verify_2fa.mako')
+
diff --git a/rhodecode/apps/login/tests/test_2fa.py b/rhodecode/apps/login/tests/test_2fa.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/apps/login/tests/test_2fa.py
@@ -0,0 +1,67 @@
+import pytest
+import mock
+
+from rhodecode.lib.type_utils import AttributeDict
+from rhodecode.model.meta import Session
+from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.routes import route_path
+from rhodecode.model.settings import SettingsModel
+
+fixture = Fixture()
+
+
+@pytest.mark.usefixtures('app')
+class Test2FA(object):
+ @classmethod
+ def setup_class(cls):
+ cls.password = 'valid-one'
+
+ def test_redirect_to_2fa_setup_if_enabled_for_user(self, user_util):
+ user = user_util.create_user(password=self.password)
+ user.has_enabled_2fa = True
+ self.app.post(
+ route_path('login'),
+ {'username': user.username,
+ 'password': self.password})
+
+ response = self.app.get('/')
+ assert response.status_code == 302
+ assert response.location.endswith(route_path('setup_2fa'))
+
+ def test_redirect_to_2fa_check_if_2fa_configured(self, user_util):
+ user = user_util.create_user(password=self.password)
+ user.has_enabled_2fa = True
+ user.init_secret_2fa()
+ Session().add(user)
+ Session().commit()
+ self.app.post(
+ route_path('login'),
+ {'username': user.username,
+ 'password': self.password})
+ response = self.app.get('/')
+ assert response.status_code == 302
+ assert response.location.endswith(route_path('check_2fa'))
+
+ def test_2fa_recovery_codes_works_only_once(self, user_util):
+ user = user_util.create_user(password=self.password)
+ user.has_enabled_2fa = True
+ user.init_secret_2fa()
+ recovery_code_to_check = user.init_2fa_recovery_codes()[0]
+ Session().add(user)
+ Session().commit()
+ self.app.post(
+ route_path('login'),
+ {'username': user.username,
+ 'password': self.password})
+ response = self.app.post(route_path('check_2fa'), {'totp': recovery_code_to_check})
+ assert response.status_code == 302
+ response = self.app.post(route_path('check_2fa'), {'totp': recovery_code_to_check})
+ response.mustcontain('Code is invalid. Try again!')
+
+ def test_2fa_state_when_forced_by_admin(self, user_util):
+ user = user_util.create_user(password=self.password)
+ user.has_enabled_2fa = False
+ with mock.patch.object(
+ SettingsModel, 'get_setting_by_name', lambda *a, **kw: AttributeDict(app_settings_value=True)):
+
+ assert user.has_enabled_2fa
diff --git a/rhodecode/apps/login/tests/test_login.py b/rhodecode/apps/login/tests/test_login.py
--- a/rhodecode/apps/login/tests/test_login.py
+++ b/rhodecode/apps/login/tests/test_login.py
@@ -80,6 +80,18 @@ class TestLoginController(object):
assert username == 'test_regular'
response.mustcontain('logout')
+ def test_login_with_primary_email(self):
+ user_email = 'test_regular@mail.com'
+ response = self.app.post(route_path('login'),
+ {'username': user_email,
+ 'password': 'test12'}, status=302)
+ response = response.follow()
+ session = response.get_session_from_response()
+ user = session['rhodecode_user']
+ assert user['username'] == user_email.split('@')[0]
+ assert user['is_authenticated']
+ response.mustcontain('logout')
+
def test_login_regular_forbidden_when_super_admin_restriction(self):
from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin
with fixture.auth_restriction(self.app._pyramid_registry,
@@ -254,7 +266,7 @@ class TestLoginController(object):
)
assertr = response.assert_response()
- msg = u'This e-mail address is already taken'
+ msg = 'This e-mail address is already taken'
assertr.element_contains('#email+.error-message', msg)
def test_register_err_same_email_case_sensitive(self):
@@ -270,7 +282,7 @@ class TestLoginController(object):
}
)
assertr = response.assert_response()
- msg = u'This e-mail address is already taken'
+ msg = 'This e-mail address is already taken'
assertr.element_contains('#email+.error-message', msg)
def test_register_err_wrong_data(self):
@@ -423,7 +435,7 @@ class TestLoginController(object):
'If such email exists, a password reset link was sent to it.')
# BAD KEY
- confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), 'badkey')
+ confirm_url = route_path('reset_password_confirmation', params={'key': 'badkey'})
response = self.app.get(confirm_url, status=302)
assert response.location.endswith(route_path('reset_password'))
assert_session_flash(response, 'Given reset token is invalid')
diff --git a/rhodecode/apps/login/views.py b/rhodecode/apps/login/views.py
--- a/rhodecode/apps/login/views.py
+++ b/rhodecode/apps/login/views.py
@@ -17,6 +17,9 @@
# and proprietary license terms, please see https://rhodecode.com/licenses/
import time
+import json
+import pyotp
+import qrcode
import collections
import datetime
import formencode
@@ -24,10 +27,14 @@ import formencode.htmlfill
import logging
import urllib.parse
import requests
+from io import BytesIO
+from base64 import b64encode
+from pyramid.renderers import render
+from pyramid.response import Response
from pyramid.httpexceptions import HTTPFound
-
+import rhodecode
from rhodecode.apps._base import BaseAppView
from rhodecode.authentication.base import authenticate, HTTP_TYPE
from rhodecode.authentication.plugins import auth_rhodecode
@@ -35,12 +42,12 @@ from rhodecode.events import UserRegiste
from rhodecode.lib import helpers as h
from rhodecode.lib import audit_logger
from rhodecode.lib.auth import (
- AuthUser, HasPermissionAnyDecorator, CSRFRequired)
+ AuthUser, HasPermissionAnyDecorator, CSRFRequired, LoginRequired, NotAnonymous)
from rhodecode.lib.base import get_ip_addr
from rhodecode.lib.exceptions import UserCreationError
from rhodecode.lib.utils2 import safe_str
from rhodecode.model.db import User, UserApiKeys
-from rhodecode.model.forms import LoginForm, RegisterForm, PasswordResetForm
+from rhodecode.model.forms import LoginForm, RegisterForm, PasswordResetForm, TOTPForm
from rhodecode.model.meta import Session
from rhodecode.model.auth_token import AuthTokenModel
from rhodecode.model.settings import SettingsModel
@@ -54,8 +61,8 @@ CaptchaData = collections.namedtuple(
'CaptchaData', 'active, private_key, public_key')
-def store_user_in_session(session, username, remember=False):
- user = User.get_by_username(username, case_insensitive=True)
+def store_user_in_session(session, user_identifier, remember=False):
+ user = User.get_by_username_or_primary_email(user_identifier)
auth_user = AuthUser(user.user_id)
auth_user.set_authenticated()
cs = auth_user.get_cookie_store()
@@ -74,7 +81,7 @@ def store_user_in_session(session, usern
safe_cs = cs.copy()
safe_cs['password'] = '****'
log.info('user %s is now authenticated and stored in '
- 'session, session attrs %s', username, safe_cs)
+ 'session, session attrs %s', user_identifier, safe_cs)
# dumps session attrs back to cookie
session._update_cookie_out()
@@ -179,9 +186,13 @@ class LoginView(BaseAppView):
self.session.invalidate()
form_result = login_form.to_python(self.request.POST)
# form checks for username/password, now we're authenticated
+ username = form_result['username']
+ if (user := User.get_by_username_or_primary_email(username)).has_enabled_2fa:
+ user.check_2fa_required = True
+
headers = store_user_in_session(
self.session,
- username=form_result['username'],
+ user_identifier=username,
remember=form_result['remember'])
log.debug('Redirecting to "%s" after login.', c.came_from)
@@ -438,12 +449,12 @@ class LoginView(BaseAppView):
def password_reset_confirmation(self):
self.load_default_context()
- if self.request.GET and self.request.GET.get('key'):
+
+ if key := self.request.GET.get('key'):
# make this take 2s, to prevent brute forcing.
time.sleep(2)
- token = AuthTokenModel().get_auth_token(
- self.request.GET.get('key'))
+ token = AuthTokenModel().get_auth_token(key)
# verify token is the correct role
if token is None or token.role != UserApiKeys.ROLE_PASSWORD_RESET:
@@ -467,3 +478,76 @@ class LoginView(BaseAppView):
return HTTPFound(self.request.route_path('reset_password'))
return HTTPFound(self.request.route_path('login'))
+
+ @LoginRequired()
+ @NotAnonymous()
+ def setup_2fa(self):
+ _ = self.request.translate
+ c = self.load_default_context()
+ user_instance = self._rhodecode_db_user
+ form = TOTPForm(_, user_instance)()
+ render_ctx = {}
+ if self.request.method == 'POST':
+ post_items = dict(self.request.POST)
+
+ try:
+ form_details = form.to_python(post_items)
+ secret = form_details['secret_totp']
+
+ user_instance.init_2fa_recovery_codes(persist=True, force=True)
+ user_instance.secret_2fa = secret
+
+ Session().commit()
+ raise HTTPFound(self.request.route_path('my_account_configure_2fa', _query={'show-recovery-codes': 1}))
+ except formencode.Invalid as errors:
+ defaults = errors.value
+ render_ctx = {
+ 'errors': errors.error_dict,
+ 'defaults': defaults,
+ }
+
+        # NOTE: we DO NOT persist the 2FA secret here, since this is only the setup step;
+        # it should be persisted only once the setup is completed.
+ secret = user_instance.init_secret_2fa(persist=False)
+
+ instance_name = rhodecode.ConfigGet().get_str('app.base_url', 'rhodecode')
+ totp_name = f'{instance_name}:{self.request.user.username}'
+
+ qr = qrcode.QRCode(version=1, box_size=5, border=4)
+ qr.add_data(pyotp.totp.TOTP(secret).provisioning_uri(name=totp_name))
+ qr.make(fit=True)
+ img = qr.make_image(fill_color='black', back_color='white')
+ buffered = BytesIO()
+ img.save(buffered)
+ return self._get_template_context(
+ c,
+ qr=b64encode(buffered.getvalue()).decode("utf-8"),
+ key=secret,
+ totp_name=totp_name,
+ ** render_ctx
+ )
+
+ @LoginRequired()
+ @NotAnonymous()
+ def verify_2fa(self):
+ _ = self.request.translate
+ c = self.load_default_context()
+ render_ctx = {}
+ user_instance = self._rhodecode_db_user
+ totp_form = TOTPForm(_, user_instance, allow_recovery_code_use=True)()
+ if self.request.method == 'POST':
+ post_items = dict(self.request.POST)
+            # NOTE: inject the stored secret, as it was saved during the earlier setup step.
+ post_items['secret_totp'] = user_instance.secret_2fa
+ try:
+ totp_form.to_python(post_items)
+ user_instance.check_2fa_required = False
+ Session().commit()
+ raise HTTPFound(c.came_from)
+ except formencode.Invalid as errors:
+ defaults = errors.value
+ render_ctx = {
+ 'errors': errors.error_dict,
+ 'defaults': defaults,
+ }
+ return self._get_template_context(c, **render_ctx)
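A self-contained sketch of the pyotp/qrcode flow the setup view builds on, with assumed defaults; the account name is illustrative, and the TOTPForm validation itself lives in rhodecode.model.forms and is not shown here.

import pyotp
import qrcode
from io import BytesIO
from base64 import b64encode

# fresh base32 secret, analogous to init_secret_2fa(persist=False)
secret = pyotp.random_base32()
totp = pyotp.totp.TOTP(secret)

# provisioning URI embedded into the QR code shown on the setup page
uri = totp.provisioning_uri(name='rhodecode:admin')   # name format is illustrative

qr = qrcode.QRCode(version=1, box_size=5, border=4)
qr.add_data(uri)
qr.make(fit=True)
buffered = BytesIO()
qr.make_image(fill_color='black', back_color='white').save(buffered)
qr_b64 = b64encode(buffered.getvalue()).decode('utf-8')  # ready to embed as a base64 data URI

# later, verifying a 6-digit code typed by the user
assert totp.verify(totp.now())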
diff --git a/rhodecode/apps/my_account/__init__.py b/rhodecode/apps/my_account/__init__.py
--- a/rhodecode/apps/my_account/__init__.py
+++ b/rhodecode/apps/my_account/__init__.py
@@ -74,6 +74,45 @@ def includeme(config):
route_name='my_account_password_update', request_method='POST',
renderer='rhodecode:templates/admin/my_account/my_account.mako')
+ # my account 2fa
+ config.add_route(
+ name='my_account_configure_2fa',
+ pattern=ADMIN_PREFIX + '/my_account/configure_2fa')
+ config.add_view(
+ MyAccountView,
+ attr='my_account_2fa',
+ route_name='my_account_configure_2fa', request_method='GET',
+ renderer='rhodecode:templates/admin/my_account/my_account.mako')
+ # my account 2fa save
+ config.add_route(
+ name='my_account_configure_2fa_update',
+ pattern=ADMIN_PREFIX + '/my_account/configure_2fa_update')
+ config.add_view(
+ MyAccountView,
+ attr='my_account_2fa_update',
+ route_name='my_account_configure_2fa_update', request_method='POST',
+ renderer='rhodecode:templates/admin/my_account/my_account.mako')
+
+ # my account 2fa recovery code-reset
+ config.add_route(
+ name='my_account_show_2fa_recovery_codes',
+ pattern=ADMIN_PREFIX + '/my_account/recovery_codes')
+ config.add_view(
+ MyAccountView,
+ attr='my_account_2fa_show_recovery_codes',
+ route_name='my_account_show_2fa_recovery_codes', request_method='POST', xhr=True,
+ renderer='json_ext')
+
+ # my account 2fa recovery code-reset
+ config.add_route(
+ name='my_account_regenerate_2fa_recovery_codes',
+ pattern=ADMIN_PREFIX + '/my_account/regenerate_recovery_codes')
+ config.add_view(
+ MyAccountView,
+ attr='my_account_2fa_regenerate_recovery_codes',
+ route_name='my_account_regenerate_2fa_recovery_codes', request_method='POST',
+ renderer='rhodecode:templates/admin/my_account/my_account.mako')
+
# my account tokens
config.add_route(
name='my_account_auth_tokens',
diff --git a/rhodecode/apps/my_account/views/my_account.py b/rhodecode/apps/my_account/views/my_account.py
--- a/rhodecode/apps/my_account/views/my_account.py
+++ b/rhodecode/apps/my_account/views/my_account.py
@@ -16,6 +16,7 @@
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
+import time
import logging
import datetime
import string
@@ -40,9 +41,10 @@ from rhodecode.lib.utils2 import safe_in
from rhodecode.model.auth_token import AuthTokenModel
from rhodecode.model.comment import CommentsModel
from rhodecode.model.db import (
- IntegrityError, or_, in_filter_generator,
+ IntegrityError, or_, in_filter_generator, select,
Repository, UserEmailMap, UserApiKeys, UserFollowing,
PullRequest, UserBookmark, RepoGroup, ChangesetStatus)
+from rhodecode.model.forms import TOTPForm
from rhodecode.model.meta import Session
from rhodecode.model.pull_request import PullRequestModel
from rhodecode.model.user import UserModel
@@ -136,6 +138,7 @@ class MyAccountView(BaseAppView, DataGri
except forms.ValidationFailure as e:
c.form = e
return self._get_template_context(c)
+
except Exception:
log.exception("Exception updating user")
h.flash(_('Error occurred during update of user'),
@@ -203,6 +206,74 @@ class MyAccountView(BaseAppView, DataGri
@LoginRequired()
@NotAnonymous()
+ def my_account_2fa(self):
+ _ = self.request.translate
+ c = self.load_default_context()
+ c.active = '2fa'
+ user_instance = c.auth_user.get_instance()
+ locked_by_admin = user_instance.has_forced_2fa
+ c.state_of_2fa = user_instance.has_enabled_2fa
+ c.user_seen_2fa_recovery_codes = user_instance.has_seen_2fa_codes
+ c.locked_2fa = str2bool(locked_by_admin)
+ return self._get_template_context(c)
+
+ @LoginRequired()
+ @NotAnonymous()
+ @CSRFRequired()
+ def my_account_2fa_update(self):
+ _ = self.request.translate
+ c = self.load_default_context()
+ c.active = '2fa'
+ user_instance = c.auth_user.get_instance()
+
+ state = str2bool(self.request.POST.get('2fa_status'))
+ user_instance.has_enabled_2fa = state
+ user_instance.update_userdata(update_2fa=time.time())
+ Session().commit()
+ if state:
+ h.flash(_("2FA has been successfully enabled"), category='success')
+ else:
+ h.flash(_("2FA has been successfully disabled"), category='success')
+ raise HTTPFound(self.request.route_path('my_account_configure_2fa'))
+
+ @LoginRequired()
+ @NotAnonymous()
+ @CSRFRequired()
+ def my_account_2fa_show_recovery_codes(self):
+ c = self.load_default_context()
+ user_instance = c.auth_user.get_instance()
+ user_instance.has_seen_2fa_codes = True
+ Session().commit()
+ return {'recovery_codes': user_instance.get_2fa_recovery_codes()}
+
+ @LoginRequired()
+ @NotAnonymous()
+ @CSRFRequired()
+ def my_account_2fa_regenerate_recovery_codes(self):
+ _ = self.request.translate
+ c = self.load_default_context()
+ user_instance = c.auth_user.get_instance()
+
+ totp_form = TOTPForm(_, user_instance, allow_recovery_code_use=True)()
+
+ post_items = dict(self.request.POST)
+        # NOTE: inject the stored secret, as it was saved during the earlier setup step.
+ post_items['secret_totp'] = user_instance.secret_2fa
+ try:
+ totp_form.to_python(post_items)
+ user_instance.regenerate_2fa_recovery_codes()
+ Session().commit()
+ except formencode.Invalid as errors:
+ h.flash(_("Failed to generate new recovery codes: {}").format(errors), category='error')
+ raise HTTPFound(self.request.route_path('my_account_configure_2fa'))
+ except Exception as e:
+ h.flash(_("Failed to generate new recovery codes: {}").format(e), category='error')
+ raise HTTPFound(self.request.route_path('my_account_configure_2fa'))
+
+ raise HTTPFound(self.request.route_path('my_account_configure_2fa', _query={'show-recovery-codes': 1}))
+
+ @LoginRequired()
+ @NotAnonymous()
def my_account_auth_tokens(self):
_ = self.request.translate
@@ -483,8 +554,15 @@ class MyAccountView(BaseAppView, DataGri
def my_account_bookmarks(self):
c = self.load_default_context()
c.active = 'bookmarks'
- c.bookmark_items = UserBookmark.get_bookmarks_for_user(
- self._rhodecode_db_user.user_id, cache=False)
+
+ user_bookmarks = \
+ select(UserBookmark, Repository, RepoGroup) \
+ .where(UserBookmark.user_id == self._rhodecode_user.user_id) \
+ .outerjoin(Repository, Repository.repo_id == UserBookmark.bookmark_repo_id) \
+ .outerjoin(RepoGroup, RepoGroup.group_id == UserBookmark.bookmark_repo_group_id) \
+ .order_by(UserBookmark.position.asc())
+
+ c.user_bookmark_items = Session().execute(user_bookmarks).all()
return self._get_template_context(c)
def _process_bookmark_entry(self, entry, user_id):
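A small sketch of how rows from the 2.0-style select above can be consumed; the rendering itself happens in the my_account bookmarks template, and the title/redirect_url attributes are assumed UserBookmark columns used here only for illustration.

def bookmark_rows_summary(user_bookmark_items):
    # each row is a (UserBookmark, Repository, RepoGroup) tuple; the outer joins leave
    # Repository or RepoGroup as None depending on what the bookmark points at
    lines = []
    for bookmark, repo, repo_group in user_bookmark_items:
        if repo is not None:
            target = repo.repo_name
        elif repo_group is not None:
            target = repo_group.group_name
        else:
            target = bookmark.redirect_url      # assumed plain-URL bookmark column
        lines.append(f'{bookmark.position}. {bookmark.title} -> {target}')
    return lines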
diff --git a/rhodecode/apps/my_account/views/my_account_ssh_keys.py b/rhodecode/apps/my_account/views/my_account_ssh_keys.py
--- a/rhodecode/apps/my_account/views/my_account_ssh_keys.py
+++ b/rhodecode/apps/my_account/views/my_account_ssh_keys.py
@@ -21,7 +21,7 @@ import logging
from pyramid.httpexceptions import HTTPFound
from rhodecode.apps._base import BaseAppView, DataGridAppView
-from rhodecode.apps.ssh_support import SshKeyFileChangeEvent
+from rhodecode.apps.ssh_support.events import SshKeyFileChangeEvent
from rhodecode.events import trigger
from rhodecode.lib import helpers as h
from rhodecode.lib import audit_logger
diff --git a/rhodecode/apps/ops/__init__.py b/rhodecode/apps/ops/__init__.py
--- a/rhodecode/apps/ops/__init__.py
+++ b/rhodecode/apps/ops/__init__.py
@@ -41,6 +41,15 @@ def admin_routes(config):
renderer='json_ext')
config.add_route(
+ name='ops_celery_error_test',
+ pattern='/error-celery')
+ config.add_view(
+ OpsView,
+ attr='ops_celery_error_test',
+ route_name='ops_celery_error_test', request_method='GET',
+ renderer='json_ext')
+
+ config.add_route(
name='ops_redirect_test',
pattern='/redirect')
config.add_view(
diff --git a/rhodecode/apps/ops/views.py b/rhodecode/apps/ops/views.py
--- a/rhodecode/apps/ops/views.py
+++ b/rhodecode/apps/ops/views.py
@@ -66,6 +66,20 @@ class OpsView(BaseAppView):
'Client:{}. Generation time: {}.'.format(self.request.user, time.time()))
raise TestException(msg)
+ def ops_celery_error_test(self):
+ """
+ Test exception handling and emails on errors
+ """
+ from rhodecode.lib.celerylib import tasks, run_task
+
+        # sleep briefly to act as a crude rate limiter for this test endpoint
+ time.sleep(2)
+
+ msg = ('RhodeCode Enterprise test exception. '
+ 'Client:{}. Generation time: {}.'.format(self.request.user, time.time()))
+ celery_task = run_task(tasks.test_celery_exception, msg)
+ return {'task': str(celery_task)}
+
def ops_redirect_test(self):
"""
Test redirect handling
diff --git a/rhodecode/apps/repository/__init__.py b/rhodecode/apps/repository/__init__.py
--- a/rhodecode/apps/repository/__init__.py
+++ b/rhodecode/apps/repository/__init__.py
@@ -591,6 +591,15 @@ def includeme(config):
route_name='branches_home', request_method='GET',
renderer='rhodecode:templates/branches/branches.mako')
+ config.add_route(
+ name='branch_remove',
+ pattern='/{repo_name:.*?[^/]}/branches/{branch_name:.*?[^/]}/remove', repo_route=True, repo_accepted_types=['hg', 'git'])
+ config.add_view(
+ RepoBranchesView,
+ attr='remove_branch',
+ route_name='branch_remove', request_method='POST'
+ )
+
# Bookmarks
config.add_route(
name='bookmarks_home',
diff --git a/rhodecode/apps/repository/tests/test_repo_branches.py b/rhodecode/apps/repository/tests/test_repo_branches.py
--- a/rhodecode/apps/repository/tests/test_repo_branches.py
+++ b/rhodecode/apps/repository/tests/test_repo_branches.py
@@ -19,6 +19,7 @@
import pytest
from rhodecode.model.db import Repository
from rhodecode.tests.routes import route_path
+from rhodecode.tests import assert_session_flash
@pytest.mark.usefixtures('autologin_user', 'app')
@@ -33,3 +34,50 @@ class TestBranchesController(object):
for commit_id, obj_name in repo.scm_instance().branches.items():
assert commit_id in response
assert obj_name in response
+
+ def test_landing_branch_delete(self, backend, csrf_token):
+ if backend.alias == 'svn':
+ pytest.skip("Not supported yet")
+ branch_related_data_per_backend = {
+ 'git': {'name': 'master'},
+ 'hg': {'name': 'default'},
+ }
+ response = self.app.post(
+ route_path('branch_remove', repo_name=backend.repo_name,
+ branch_name=branch_related_data_per_backend[backend.alias]['name']),
+ params={'csrf_token': csrf_token}, status=302)
+ assert_session_flash(
+ response,
+ f"This branch {branch_related_data_per_backend[backend.alias]['name']} cannot be removed as it's currently set as landing branch"
+ )
+
+ def test_delete_branch_by_repo_owner(self, backend, csrf_token):
+ if backend.alias in ('svn', 'hg'):
+ pytest.skip("Skipping for hg and svn")
+ branch_to_be_removed = 'remove_me'
+ repo = Repository.get_by_repo_name(backend.repo_name)
+ repo.scm_instance()._create_branch(branch_to_be_removed, repo.scm_instance().commit_ids[1])
+ response = self.app.post(
+ route_path('branch_remove', repo_name=backend.repo_name,
+ branch_name=branch_to_be_removed),
+ params={'csrf_token': csrf_token}, status=302)
+ assert_session_flash(response, f"Branch {branch_to_be_removed} has been successfully deleted")
+
+ def test_delete_branch_by_not_repo_owner(self, backend, csrf_token):
+ username = 'test_regular'
+ pwd = 'test12'
+ branch_related_data_per_backend = {
+ 'git': {'name': 'master', 'action': 'deleted'},
+ 'hg': {'name': 'stable', 'action': 'closed'},
+ }
+ if backend.alias == 'svn':
+ pytest.skip("Not supported yet")
+ self.app.post(route_path('login'),
+ {'username': username,
+ 'password': pwd})
+ selected_branch = branch_related_data_per_backend[backend.alias]['name']
+ response = self.app.post(
+ route_path('branch_remove', repo_name=backend.repo_name,
+ branch_name=selected_branch),
+ params={'csrf_token': csrf_token, 'username': username, 'password': pwd}, status=404)
+ assert response.status_code == 404
diff --git a/rhodecode/apps/repository/tests/test_repo_summary.py b/rhodecode/apps/repository/tests/test_repo_summary.py
--- a/rhodecode/apps/repository/tests/test_repo_summary.py
+++ b/rhodecode/apps/repository/tests/test_repo_summary.py
@@ -1,4 +1,3 @@
-
# Copyright (C) 2010-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
@@ -52,13 +51,13 @@ def assert_clone_url(response, server, r
@pytest.mark.usefixtures('app')
class TestSummaryView(object):
+
def test_index(self, autologin_user, backend, http_host_only_stub):
repo_id = backend.repo.repo_id
repo_name = backend.repo_name
- with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
- return_value=False):
- response = self.app.get(
- route_path('repo_summary', repo_name=repo_name))
+
+ response = self.app.get(
+ route_path('repo_summary', repo_name=repo_name))
# repo type
response.mustcontain(
@@ -71,37 +70,43 @@ class TestSummaryView(object):
# clone url...
assert_clone_url(response, http_host_only_stub, repo_name)
- assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id))
+ assert_clone_url(response, http_host_only_stub, f'_{repo_id}')
def test_index_svn_without_proxy(
self, autologin_user, backend_svn, http_host_only_stub):
+
repo_id = backend_svn.repo.repo_id
repo_name = backend_svn.repo_name
- response = self.app.get(route_path('repo_summary', repo_name=repo_name))
- # clone url...
+
+        # SVN proxy support is enabled by default now; this checks how the clone inputs look when it is disabled
+ with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy', return_value=True):
+ response = self.app.get(
+ route_path('repo_summary', repo_name=repo_name),
+ status=200)
+
+ # clone url test...
assert_clone_url(response, http_host_only_stub, repo_name, disabled=True)
- assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id), disabled=True)
+ assert_clone_url(response, http_host_only_stub, f'_{repo_id}', disabled=True)
def test_index_with_trailing_slash(
self, autologin_user, backend, http_host_only_stub):
repo_id = backend.repo.repo_id
repo_name = backend.repo_name
- with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
- return_value=False):
- response = self.app.get(
- route_path('repo_summary', repo_name=repo_name) + '/',
- status=200)
+ trailing_slash = '/'
+ response = self.app.get(
+ route_path('repo_summary', repo_name=repo_name) + trailing_slash,
+ status=200)
# clone url...
assert_clone_url(response, http_host_only_stub, repo_name)
- assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id))
+ assert_clone_url(response, http_host_only_stub, f'_{repo_id}')
def test_index_by_id(self, autologin_user, backend):
repo_id = backend.repo.repo_id
response = self.app.get(
- route_path('repo_summary', repo_name='_%s' % (repo_id,)))
+ route_path('repo_summary', repo_name=f'_{repo_id}'))
# repo type
response.mustcontain(
diff --git a/rhodecode/apps/repository/views/repo_branches.py b/rhodecode/apps/repository/views/repo_branches.py
--- a/rhodecode/apps/repository/views/repo_branches.py
+++ b/rhodecode/apps/repository/views/repo_branches.py
@@ -18,11 +18,15 @@
import logging
+from pyramid.httpexceptions import HTTPFound
from rhodecode.apps._base import BaseReferencesView
from rhodecode.lib import ext_json
-from rhodecode.lib.auth import (LoginRequired, HasRepoPermissionAnyDecorator)
+from rhodecode.lib import helpers as h
+from rhodecode.lib.auth import (LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
from rhodecode.model.scm import ScmModel
+from rhodecode.model.meta import Session
+from rhodecode.model.db import PullRequest
log = logging.getLogger(__name__)
@@ -33,15 +37,71 @@ class RepoBranchesView(BaseReferencesVie
@HasRepoPermissionAnyDecorator(
'repository.read', 'repository.write', 'repository.admin')
def branches(self):
+ partial_render = self.request.get_partial_renderer(
+ 'rhodecode:templates/data_table/_dt_elements.mako')
+ repo_name = self.db_repo_name
c = self.load_default_context()
self._prepare_and_set_clone_url(c)
c.rhodecode_repo = self.rhodecode_vcs_repo
c.repository_forks = ScmModel().get_forks(self.db_repo)
-
ref_items = self.rhodecode_vcs_repo.branches_all.items()
data = self.load_refs_context(
ref_items=ref_items, partials_template='branches/branches_data.mako')
-
+ data_with_actions = []
+ if self.db_repo.repo_type != 'svn':
+ for branch in data:
+ branch['action'] = partial_render(
+ f"branch_actions_{self.db_repo.repo_type}", branch['name_raw'], repo_name, closed=branch['closed']
+ )
+ data_with_actions.append(branch)
+ data = data_with_actions
c.has_references = bool(data)
c.data = ext_json.str_json(data)
return self._get_template_context(c)
+
+ @LoginRequired()
+ @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
+ @CSRFRequired()
+ def remove_branch(self):
+ _ = self.request.translate
+ self.load_default_context()
+ repo = self.db_repo
+ repo_name = self.db_repo_name
+ repo_type = repo.repo_type
+ action = _('deleted') if repo_type == 'git' else _('closed')
+ redirect = HTTPFound(location=self.request.route_path('branches_home', repo_name=repo_name))
+ branch_name = self.request.matchdict.get('branch_name')
+ if repo.landing_ref_name == branch_name:
+ h.flash(
+ _("This branch {} cannot be removed as it's currently set as landing branch").format(branch_name),
+ category='error'
+ )
+ return redirect
+ if prs_related_to := Session().query(PullRequest).filter(PullRequest.target_repo_id == repo.repo_id,
+ PullRequest.status != PullRequest.STATUS_CLOSED).filter(
+ (PullRequest.source_ref.like(f'branch:{branch_name}:%')) | (
+ PullRequest.target_ref.like(f'branch:{branch_name}:%'))
+ ).all():
+ h.flash(_("Branch cannot be {} - it's used in following open Pull Request ids: {}").format(action, ','.join(
+ map(str, prs_related_to))), category='error')
+ return redirect
+
+ match repo_type:
+ case 'git':
+ self.rhodecode_vcs_repo.delete_branch(branch_name)
+ case 'hg':
+ from rhodecode.lib.vcs.backends.base import Reference
+ self.rhodecode_vcs_repo._local_close(
+ source_ref=Reference(type='branch', name=branch_name,
+ commit_id=self.rhodecode_vcs_repo.branches[branch_name]),
+ target_ref=Reference(type='branch', name='', commit_id=None),
+ user_name=self.request.user.name,
+ user_email=self.request.user.email)
+ case _:
+ raise NotImplementedError('Branch deleting functionality not yet implemented')
+ ScmModel().mark_for_invalidation(repo_name)
+ self.rhodecode_vcs_repo._invalidate_prop_cache('commit_ids')
+ self.rhodecode_vcs_repo._invalidate_prop_cache('_refs')
+ self.rhodecode_vcs_repo._invalidate_prop_cache('branches')
+ h.flash(_("Branch {} has been successfully {}").format(branch_name, action), category='success')
+ return redirect
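The LIKE guards above assume pull request refs are stored as 'branch:<name>:<commit_id>' strings; pulled out of the view, the same check could look like this (a sketch under that assumption):

from rhodecode.model.db import PullRequest
from rhodecode.model.meta import Session

def open_prs_using_branch(repo_id: int, branch_name: str):
    # any open pull request whose source or target ref points at the branch
    # blocks deletion/closing, mirroring the guard in remove_branch()
    ref_like = f'branch:{branch_name}:%'
    return Session().query(PullRequest).filter(
        PullRequest.target_repo_id == repo_id,
        PullRequest.status != PullRequest.STATUS_CLOSED,
    ).filter(
        PullRequest.source_ref.like(ref_like) | PullRequest.target_ref.like(ref_like)
    ).all()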
diff --git a/rhodecode/apps/repository/views/repo_files.py b/rhodecode/apps/repository/views/repo_files.py
--- a/rhodecode/apps/repository/views/repo_files.py
+++ b/rhodecode/apps/repository/views/repo_files.py
@@ -24,6 +24,8 @@ import urllib.request
import urllib.parse
import urllib.error
import pathlib
+import time
+import random
from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
@@ -37,7 +39,8 @@ from rhodecode.apps._base import RepoApp
from rhodecode.lib import diffs, helpers as h, rc_cache
from rhodecode.lib import audit_logger
from rhodecode.lib.hash_utils import sha1_safe
-from rhodecode.lib.rc_cache.archive_cache import get_archival_cache_store, get_archival_config, ReentrantLock
+from rhodecode.lib.archive_cache import (
+ get_archival_cache_store, get_archival_config, ArchiveCacheGenerationLock, archive_iterator)
from rhodecode.lib.str_utils import safe_bytes, convert_special_chars
from rhodecode.lib.view_utils import parse_path_ref
from rhodecode.lib.exceptions import NonRelativePathError
@@ -417,41 +420,46 @@ class RepoFilesView(RepoAppView):
# NOTE: we get the config to pass to a call to lazy-init the SAME type of cache on vcsserver
d_cache_conf = get_archival_config(config=CONFIG)
+        # This is used both as a cache key and as a lock key
reentrant_lock_key = archive_name_key + '.lock'
- with ReentrantLock(d_cache, reentrant_lock_key):
- # This is also a cache key
- use_cached_archive = False
- if archive_name_key in d_cache and not archive_cache_disable:
- reader, tag = d_cache.get(archive_name_key, read=True, tag=True, retry=True)
- use_cached_archive = True
- log.debug('Found cached archive as key=%s tag=%s, serving archive from cache reader=%s',
- archive_name_key, tag, reader.name)
- else:
- reader = None
- log.debug('Archive with key=%s is not yet cached, creating one now...', archive_name_key)
+
+ use_cached_archive = False
+ if not archive_cache_disable and archive_name_key in d_cache:
+ reader, metadata = d_cache.fetch(archive_name_key)
- # generate new archive, as previous was not found in the cache
- if not reader:
-
- try:
- commit.archive_repo(archive_name_key, archive_dir_name=archive_dir_name,
- kind=fileformat, subrepos=subrepos,
- archive_at_path=at_path, cache_config=d_cache_conf)
- except ImproperArchiveTypeError:
- return _('Unknown archive type')
-
- reader, tag = d_cache.get(archive_name_key, read=True, tag=True, retry=True)
+ use_cached_archive = True
+ log.debug('Found cached archive as key=%s tag=%s, serving archive from cache reader=%s',
+ archive_name_key, metadata, reader.name)
+ else:
+ reader = None
+ log.debug('Archive with key=%s is not yet cached, creating one now...', archive_name_key)
if not reader:
- raise ValueError('archive cache reader is empty, failed to fetch file from distributed archive cache')
+ # generate new archive, as previous was not found in the cache
+ try:
+ with d_cache.get_lock(reentrant_lock_key):
+ try:
+ commit.archive_repo(archive_name_key, archive_dir_name=archive_dir_name,
+ kind=fileformat, subrepos=subrepos,
+ archive_at_path=at_path, cache_config=d_cache_conf)
+ except ImproperArchiveTypeError:
+ return _('Unknown archive type')
- def archive_iterator(_reader, block_size: int = 4096*512):
- # 4096 * 64 = 64KB
- while 1:
- data = _reader.read(block_size)
- if not data:
- break
- yield data
+ except ArchiveCacheGenerationLock:
+ retry_after = round(random.uniform(0.3, 3.0), 1)
+ time.sleep(retry_after)
+
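+                    # another request is already generating this archive; send a 307 back to the
+                    # same URL with a Retry-After hint so the client retries once it is ready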
+ location = self.request.url
+ response = Response(
+ f"archive {archive_name_key} generation in progress, Retry-After={retry_after}, Location={location}"
+ )
+ response.headers["Retry-After"] = str(retry_after)
+ response.status_code = 307 # temporary redirect
+
+ response.location = location
+ return response
+
+ reader, metadata = d_cache.fetch(archive_name_key, retry=True, retry_attempts=30)
response = Response(app_iter=archive_iterator(reader))
response.content_disposition = f'attachment; filename={response_archive_name}'
diff --git a/rhodecode/apps/repository/views/repo_settings_vcs.py b/rhodecode/apps/repository/views/repo_settings_vcs.py
--- a/rhodecode/apps/repository/views/repo_settings_vcs.py
+++ b/rhodecode/apps/repository/views/repo_settings_vcs.py
@@ -24,7 +24,9 @@ from pyramid.httpexceptions import HTTPF
from pyramid.response import Response
from pyramid.renderers import render
+import rhodecode
from rhodecode.apps._base import RepoAppView
+from rhodecode.apps.svn_support import config_keys
from rhodecode.lib import helpers as h
from rhodecode.lib.auth import (
LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
@@ -38,8 +40,6 @@ log = logging.getLogger(__name__)
class RepoSettingsVcsView(RepoAppView):
def load_default_context(self):
c = self._get_local_tmpl_context()
-
-
return c
def _vcs_form_defaults(self, repo_name):
@@ -77,6 +77,9 @@ class RepoSettingsVcsView(RepoAppView):
c.svn_branch_patterns = model.get_repo_svn_branch_patterns()
c.svn_tag_patterns = model.get_repo_svn_tag_patterns()
+ c.svn_generate_config = rhodecode.ConfigGet().get_bool(config_keys.generate_config)
+ c.svn_config_path = rhodecode.ConfigGet().get_str(config_keys.config_file_path)
+
defaults = self._vcs_form_defaults(self.db_repo_name)
c.inherit_global_settings = defaults['inherit_global_settings']
@@ -103,6 +106,8 @@ class RepoSettingsVcsView(RepoAppView):
c.global_svn_tag_patterns = model.get_global_svn_tag_patterns()
c.svn_branch_patterns = model.get_repo_svn_branch_patterns()
c.svn_tag_patterns = model.get_repo_svn_tag_patterns()
+ c.svn_generate_config = rhodecode.ConfigGet().get_bool(config_keys.generate_config)
+ c.svn_config_path = rhodecode.ConfigGet().get_str(config_keys.config_file_path)
defaults = self._vcs_form_defaults(self.db_repo_name)
c.inherit_global_settings = defaults['inherit_global_settings']
diff --git a/rhodecode/apps/ssh_support/__init__.py b/rhodecode/apps/ssh_support/__init__.py
--- a/rhodecode/apps/ssh_support/__init__.py
+++ b/rhodecode/apps/ssh_support/__init__.py
@@ -19,8 +19,6 @@
import logging
from . import config_keys
-from .events import SshKeyFileChangeEvent
-from .subscribers import generate_ssh_authorized_keys_file_subscriber
from rhodecode.config.settings_maker import SettingsMaker
@@ -42,9 +40,9 @@ def _sanitize_settings_and_apply_default
settings_maker.make_setting(config_keys.wrapper_cmd, '')
settings_maker.make_setting(config_keys.authorized_keys_line_ssh_opts, '')
- settings_maker.make_setting(config_keys.ssh_hg_bin, '~/.rccontrol/vcsserver-1/profile/bin/hg')
- settings_maker.make_setting(config_keys.ssh_git_bin, '~/.rccontrol/vcsserver-1/profile/bin/git')
- settings_maker.make_setting(config_keys.ssh_svn_bin, '~/.rccontrol/vcsserver-1/profile/bin/svnserve')
+ settings_maker.make_setting(config_keys.ssh_hg_bin, '/usr/local/bin/rhodecode_bin/vcs_bin/hg')
+ settings_maker.make_setting(config_keys.ssh_git_bin, '/usr/local/bin/rhodecode_bin/vcs_bin/git')
+ settings_maker.make_setting(config_keys.ssh_svn_bin, '/usr/local/bin/rhodecode_bin/vcs_bin/svnserve')
settings_maker.env_expand()
@@ -55,5 +53,8 @@ def includeme(config):
# if we have enable generation of file, subscribe to event
if settings[config_keys.generate_authorized_keyfile]:
+    # lazy imports here keep package import fast (important for sshwrapper-v2 mode)
+ from .subscribers import generate_ssh_authorized_keys_file_subscriber
+ from .events import SshKeyFileChangeEvent
config.add_subscriber(
generate_ssh_authorized_keys_file_subscriber, SshKeyFileChangeEvent)
diff --git a/rhodecode/apps/ssh_support/lib/backends/__init__.py b/rhodecode/apps/ssh_support/lib/backends/__init__.py
--- a/rhodecode/apps/ssh_support/lib/backends/__init__.py
+++ b/rhodecode/apps/ssh_support/lib/backends/__init__.py
@@ -20,11 +20,10 @@ import os
import re
import logging
import datetime
-import configparser
from sqlalchemy import Table
+from rhodecode.lib.api_utils import call_service_api
from rhodecode.lib.utils2 import AttributeDict
-from rhodecode.model.scm import ScmModel
from .hg import MercurialServer
from .git import GitServer
@@ -38,7 +37,7 @@ class SshWrapper(object):
svn_cmd_pat = re.compile(r'^svnserve -t')
def __init__(self, command, connection_info, mode,
- user, user_id, key_id: int, shell, ini_path: str, env):
+ user, user_id, key_id: int, shell, ini_path: str, settings, env):
self.command = command
self.connection_info = connection_info
self.mode = mode
@@ -48,15 +47,9 @@ class SshWrapper(object):
self.shell = shell
self.ini_path = ini_path
self.env = env
-
- self.config = self.parse_config(ini_path)
+ self.settings = settings
self.server_impl = None
- def parse_config(self, config_path):
- parser = configparser.ConfigParser()
- parser.read(config_path)
- return parser
-
def update_key_access_time(self, key_id):
from rhodecode.model.meta import raw_query_executor, Base
@@ -161,6 +154,9 @@ class SshWrapper(object):
return vcs_type, repo_name, mode
def serve(self, vcs, repo, mode, user, permissions, branch_permissions):
+ # TODO: remove this once we have .ini defined access path...
+ from rhodecode.model.scm import ScmModel
+
store = ScmModel().repos_path
check_branch_perms = False
@@ -185,7 +181,7 @@ class SshWrapper(object):
server = MercurialServer(
store=store, ini_path=self.ini_path,
repo_name=repo, user=user,
- user_permissions=permissions, config=self.config, env=self.env)
+ user_permissions=permissions, settings=self.settings, env=self.env)
self.server_impl = server
return server.run(tunnel_extras=extras)
@@ -193,7 +189,7 @@ class SshWrapper(object):
server = GitServer(
store=store, ini_path=self.ini_path,
repo_name=repo, repo_mode=mode, user=user,
- user_permissions=permissions, config=self.config, env=self.env)
+ user_permissions=permissions, settings=self.settings, env=self.env)
self.server_impl = server
return server.run(tunnel_extras=extras)
@@ -201,7 +197,7 @@ class SshWrapper(object):
server = SubversionServer(
store=store, ini_path=self.ini_path,
repo_name=None, user=user,
- user_permissions=permissions, config=self.config, env=self.env)
+ user_permissions=permissions, settings=self.settings, env=self.env)
self.server_impl = server
return server.run(tunnel_extras=extras)
@@ -261,3 +257,131 @@ class SshWrapper(object):
exit_code = -1
return exit_code
+
+
+class SshWrapperStandalone(SshWrapper):
+ """
+    New version of SshWrapper designed to depend only on the service API
+ """
+ repos_path = None
+
+ @staticmethod
+ def parse_user_related_data(user_data):
+ user = AttributeDict()
+ user.user_id = user_data['user_id']
+ user.username = user_data['username']
+ user.repo_permissions = user_data['repo_permissions']
+ user.branch_permissions = user_data['branch_permissions']
+ return user
+
+ def wrap(self):
+ mode = self.mode
+ username = self.username
+ user_id = self.user_id
+ shell = self.shell
+
+ scm_detected, scm_repo, scm_mode = self.get_repo_details(mode)
+
+ log.debug(
+ 'Mode: `%s` User: `name:%s : id:%s` Shell: `%s` SSH Command: `\"%s\"` '
+ 'SCM_DETECTED: `%s` SCM Mode: `%s` SCM Repo: `%s`',
+ mode, username, user_id, shell, self.command,
+ scm_detected, scm_mode, scm_repo)
+
+ log.debug('SSH Connection info %s', self.get_connection_info())
+
+ if shell and self.command is None:
+ log.info('Dropping to shell, no command given and shell is allowed')
+ os.execl('/bin/bash', '-l')
+ exit_code = 1
+
+ elif scm_detected:
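+            # fetch user, permission and repo-store data over the service API instead of the database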
+ data = call_service_api(self.settings, {
+ "method": "service_get_data_for_ssh_wrapper",
+ "args": {"user_id": user_id, "repo_name": scm_repo, "key_id": self.key_id}
+ })
+ user = self.parse_user_related_data(data)
+ if not user:
+ log.warning('User with id %s not found', user_id)
+ exit_code = -1
+ return exit_code
+ self.repos_path = data['repos_path']
+ permissions = user.repo_permissions
+ repo_branch_permissions = user.branch_permissions
+ try:
+ exit_code, is_updated = self.serve(
+ scm_detected, scm_repo, scm_mode, user, permissions,
+ repo_branch_permissions)
+ except Exception:
+ log.exception('Error occurred during execution of SshWrapper')
+ exit_code = -1
+
+ elif self.command is None and shell is False:
+ log.error('No Command given.')
+ exit_code = -1
+
+ else:
+ log.error('Unhandled Command: "%s" Aborting.', self.command)
+ exit_code = -1
+
+ return exit_code
+
+ def maybe_translate_repo_uid(self, repo_name):
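+        # repo names starting with `_` are repo-id aliases; resolve them to the
+        # real repository name via the service API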
+ _org_name = repo_name
+ if _org_name.startswith('_'):
+ _org_name = _org_name.split('/', 1)[0]
+
+ if repo_name.startswith('_'):
+ org_repo_name = repo_name
+ log.debug('translating UID repo %s', org_repo_name)
+ by_id_match = call_service_api(self.settings, {
+ 'method': 'service_get_repo_name_by_id',
+ "args": {"repo_id": repo_name}
+ })
+ if by_id_match:
+ repo_name = by_id_match['repo_name']
+ log.debug('translation of UID repo %s got `%s`', org_repo_name, repo_name)
+
+ return repo_name, _org_name
+
+ def serve(self, vcs, repo, mode, user, permissions, branch_permissions):
+ store = self.repos_path
+
+ check_branch_perms = False
+ detect_force_push = False
+
+ if branch_permissions:
+ check_branch_perms = True
+ detect_force_push = True
+
+ log.debug(
+ 'VCS detected:`%s` mode: `%s` repo_name: %s, branch_permission_checks:%s',
+ vcs, mode, repo, check_branch_perms)
+
+ # detect if we have to check branch permissions
+ extras = {
+ 'detect_force_push': detect_force_push,
+ 'check_branch_perms': check_branch_perms,
+ 'config': self.ini_path
+ }
+
+ match vcs:
+ case 'hg':
+ server = MercurialServer(
+ store=store, ini_path=self.ini_path,
+ repo_name=repo, user=user,
+ user_permissions=permissions, settings=self.settings, env=self.env)
+ case 'git':
+ server = GitServer(
+ store=store, ini_path=self.ini_path,
+ repo_name=repo, repo_mode=mode, user=user,
+ user_permissions=permissions, settings=self.settings, env=self.env)
+ case 'svn':
+ server = SubversionServer(
+ store=store, ini_path=self.ini_path,
+ repo_name=None, user=user,
+ user_permissions=permissions, settings=self.settings, env=self.env)
+ case _:
+ raise Exception(f'Unrecognised VCS: {vcs}')
+ self.server_impl = server
+ return server.run(tunnel_extras=extras)
diff --git a/rhodecode/apps/ssh_support/lib/backends/base.py b/rhodecode/apps/ssh_support/lib/backends/base.py
--- a/rhodecode/apps/ssh_support/lib/backends/base.py
+++ b/rhodecode/apps/ssh_support/lib/backends/base.py
@@ -20,26 +20,27 @@ import os
import sys
import logging
-from rhodecode.lib.hooks_daemon import prepare_callback_daemon
+from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
from rhodecode.lib.ext_json import sjson as json
from rhodecode.lib.vcs.conf import settings as vcs_settings
-from rhodecode.model.scm import ScmModel
+from rhodecode.lib.api_utils import call_service_api
log = logging.getLogger(__name__)
-class VcsServer(object):
+class SshVcsServer(object):
repo_user_agent = None # set in child classes
_path = None # set executable path for hg/git/svn binary
backend = None # set in child classes
tunnel = None # subprocess handling tunnel
+ settings = None # parsed settings module
write_perms = ['repository.admin', 'repository.write']
read_perms = ['repository.read', 'repository.admin', 'repository.write']
- def __init__(self, user, user_permissions, config, env):
+ def __init__(self, user, user_permissions, settings, env):
self.user = user
self.user_permissions = user_permissions
- self.config = config
+ self.settings = settings
self.env = env
self.stdin = sys.stdin
@@ -47,6 +48,7 @@ class VcsServer(object):
self.repo_mode = None
self.store = ''
self.ini_path = ''
+ self.hooks_protocol = None
def _invalidate_cache(self, repo_name):
"""
@@ -54,7 +56,16 @@ class VcsServer(object):
:param repo_name: full repo name, also a cache key
"""
- ScmModel().mark_for_invalidation(repo_name)
+ # Todo: Leave only "celery" case after transition.
+ match self.hooks_protocol:
+ case 'http':
+ from rhodecode.model.scm import ScmModel
+ ScmModel().mark_for_invalidation(repo_name)
+ case 'celery':
+ call_service_api(self.settings, {
+ "method": "service_mark_for_invalidation",
+ "args": {"repo_name": repo_name}
+ })
def has_write_perm(self):
permission = self.user_permissions.get(self.repo_name)
@@ -65,30 +76,31 @@ class VcsServer(object):
def _check_permissions(self, action):
permission = self.user_permissions.get(self.repo_name)
+ user_info = f'{self.user["user_id"]}:{self.user["username"]}'
log.debug('permission for %s on %s are: %s',
- self.user, self.repo_name, permission)
+ user_info, self.repo_name, permission)
if not permission:
log.error('user `%s` permissions to repo:%s are empty. Forbidding access.',
- self.user, self.repo_name)
+ user_info, self.repo_name)
return -2
if action == 'pull':
if permission in self.read_perms:
log.info(
'READ Permissions for User "%s" detected to repo "%s"!',
- self.user, self.repo_name)
+ user_info, self.repo_name)
return 0
else:
if permission in self.write_perms:
log.info(
'WRITE, or Higher Permissions for User "%s" detected to repo "%s"!',
- self.user, self.repo_name)
+ user_info, self.repo_name)
return 0
log.error('Cannot properly fetch or verify user `%s` permissions. '
'Permissions: %s, vcs action: %s',
- self.user, permission, action)
+ user_info, permission, action)
return -2
def update_environment(self, action, extras=None):
@@ -107,7 +119,7 @@ class VcsServer(object):
'server_url': None,
'user_agent': f'{self.repo_user_agent}/ssh-user-agent',
'hooks': ['push', 'pull'],
- 'hooks_module': 'rhodecode.lib.hooks_daemon',
+ 'hooks_module': 'rhodecode.lib.hook_daemon.hook_module',
'is_shadow_repo': False,
'detect_force_push': False,
'check_branch_perms': False,
@@ -134,9 +146,10 @@ class VcsServer(object):
if exit_code:
return exit_code, False
- req = self.env['request']
- server_url = req.host_url + req.script_name
- extras['server_url'] = server_url
+ req = self.env.get('request')
+ if req:
+ server_url = req.host_url + req.script_name
+ extras['server_url'] = server_url
log.debug('Using %s binaries from path %s', self.backend, self._path)
exit_code = self.tunnel.run(extras)
@@ -144,12 +157,13 @@ class VcsServer(object):
return exit_code, action == "push"
def run(self, tunnel_extras=None):
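+        # 'http' keeps the direct database code paths, 'celery' routes through the service API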
+ self.hooks_protocol = self.settings['vcs.hooks.protocol']
tunnel_extras = tunnel_extras or {}
extras = {}
extras.update(tunnel_extras)
callback_daemon, extras = prepare_callback_daemon(
- extras, protocol=vcs_settings.HOOKS_PROTOCOL,
+ extras, protocol=self.hooks_protocol,
host=vcs_settings.HOOKS_HOST)
with callback_daemon:
diff --git a/rhodecode/apps/ssh_support/lib/backends/git.py b/rhodecode/apps/ssh_support/lib/backends/git.py
--- a/rhodecode/apps/ssh_support/lib/backends/git.py
+++ b/rhodecode/apps/ssh_support/lib/backends/git.py
@@ -21,7 +21,7 @@ import logging
import subprocess
from vcsserver import hooks
-from .base import VcsServer
+from .base import SshVcsServer
log = logging.getLogger(__name__)
@@ -70,19 +70,17 @@ class GitTunnelWrapper(object):
return result
-class GitServer(VcsServer):
+class GitServer(SshVcsServer):
backend = 'git'
repo_user_agent = 'git'
- def __init__(self, store, ini_path, repo_name, repo_mode,
- user, user_permissions, config, env):
- super().\
- __init__(user, user_permissions, config, env)
+ def __init__(self, store, ini_path, repo_name, repo_mode, user, user_permissions, settings, env):
+ super().__init__(user, user_permissions, settings, env)
self.store = store
self.ini_path = ini_path
self.repo_name = repo_name
- self._path = self.git_path = config.get('app:main', 'ssh.executable.git')
+ self._path = self.git_path = settings['ssh.executable.git']
self.repo_mode = repo_mode
self.tunnel = GitTunnelWrapper(server=self)
diff --git a/rhodecode/apps/ssh_support/lib/backends/hg.py b/rhodecode/apps/ssh_support/lib/backends/hg.py
--- a/rhodecode/apps/ssh_support/lib/backends/hg.py
+++ b/rhodecode/apps/ssh_support/lib/backends/hg.py
@@ -22,9 +22,10 @@ import logging
import tempfile
import textwrap
import collections
-from .base import VcsServer
-from rhodecode.model.db import RhodeCodeUi
-from rhodecode.model.settings import VcsSettingsModel
+
+from .base import SshVcsServer
+
+from rhodecode.lib.api_utils import call_service_api
log = logging.getLogger(__name__)
@@ -56,7 +57,7 @@ class MercurialTunnelWrapper(object):
# cleanup custom hgrc file
if os.path.isfile(hgrc_custom):
with open(hgrc_custom, 'wb') as f:
- f.write('')
+ f.write(b'')
log.debug('Cleanup custom hgrc file under %s', hgrc_custom)
# write temp
@@ -93,54 +94,64 @@ class MercurialTunnelWrapper(object):
self.remove_configs()
-class MercurialServer(VcsServer):
+class MercurialServer(SshVcsServer):
backend = 'hg'
repo_user_agent = 'mercurial'
cli_flags = ['phases', 'largefiles', 'extensions', 'experimental', 'hooks']
- def __init__(self, store, ini_path, repo_name, user, user_permissions, config, env):
- super().__init__(user, user_permissions, config, env)
+ def __init__(self, store, ini_path, repo_name, user, user_permissions, settings, env):
+ super().__init__(user, user_permissions, settings, env)
self.store = store
self.ini_path = ini_path
self.repo_name = repo_name
- self._path = self.hg_path = config.get('app:main', 'ssh.executable.hg')
+ self._path = self.hg_path = settings['ssh.executable.hg']
self.tunnel = MercurialTunnelWrapper(server=self)
def config_to_hgrc(self, repo_name):
- ui_sections = collections.defaultdict(list)
- ui = VcsSettingsModel(repo=repo_name).get_ui_settings(section=None, key=None)
+        # Todo: once the transition is done, only the service api call should remain
+ if self.hooks_protocol == 'celery':
+ data = call_service_api(self.settings, {
+ "method": "service_config_to_hgrc",
+ "args": {"cli_flags": self.cli_flags, "repo_name": repo_name}
+ })
+ return data['flags']
+ else:
+ from rhodecode.model.db import RhodeCodeUi
+ from rhodecode.model.settings import VcsSettingsModel
+ ui_sections = collections.defaultdict(list)
+ ui = VcsSettingsModel(repo=repo_name).get_ui_settings(section=None, key=None)
- # write default hooks
- default_hooks = [
- ('pretxnchangegroup.ssh_auth', 'python:vcsserver.hooks.pre_push_ssh_auth'),
- ('pretxnchangegroup.ssh', 'python:vcsserver.hooks.pre_push_ssh'),
- ('changegroup.ssh', 'python:vcsserver.hooks.post_push_ssh'),
+ # write default hooks
+ default_hooks = [
+ ('pretxnchangegroup.ssh_auth', 'python:vcsserver.hooks.pre_push_ssh_auth'),
+ ('pretxnchangegroup.ssh', 'python:vcsserver.hooks.pre_push_ssh'),
+ ('changegroup.ssh', 'python:vcsserver.hooks.post_push_ssh'),
- ('preoutgoing.ssh', 'python:vcsserver.hooks.pre_pull_ssh'),
- ('outgoing.ssh', 'python:vcsserver.hooks.post_pull_ssh'),
- ]
-
- for k, v in default_hooks:
- ui_sections['hooks'].append((k, v))
+ ('preoutgoing.ssh', 'python:vcsserver.hooks.pre_pull_ssh'),
+ ('outgoing.ssh', 'python:vcsserver.hooks.post_pull_ssh'),
+ ]
- for entry in ui:
- if not entry.active:
- continue
- sec = entry.section
- key = entry.key
+ for k, v in default_hooks:
+ ui_sections['hooks'].append((k, v))
- if sec in self.cli_flags:
- # we want only custom hooks, so we skip builtins
- if sec == 'hooks' and key in RhodeCodeUi.HOOKS_BUILTIN:
+ for entry in ui:
+ if not entry.active:
continue
+ sec = entry.section
+ key = entry.key
- ui_sections[sec].append([key, entry.value])
+ if sec in self.cli_flags:
+ # we want only custom hooks, so we skip builtins
+ if sec == 'hooks' and key in RhodeCodeUi.HOOKS_BUILTIN:
+ continue
- flags = []
- for _sec, key_val in ui_sections.items():
- flags.append(' ')
- flags.append(f'[{_sec}]')
- for key, val in key_val:
- flags.append(f'{key}= {val}')
- return flags
+ ui_sections[sec].append([key, entry.value])
+
+ flags = []
+ for _sec, key_val in ui_sections.items():
+ flags.append(' ')
+ flags.append(f'[{_sec}]')
+ for key, val in key_val:
+ flags.append(f'{key}= {val}')
+ return flags
diff --git a/rhodecode/apps/ssh_support/lib/backends/svn.py b/rhodecode/apps/ssh_support/lib/backends/svn.py
--- a/rhodecode/apps/ssh_support/lib/backends/svn.py
+++ b/rhodecode/apps/ssh_support/lib/backends/svn.py
@@ -25,7 +25,8 @@ import tempfile
from subprocess import Popen, PIPE
import urllib.parse
-from .base import VcsServer
+from rhodecode_tools.lib.utils import safe_str
+from .base import SshVcsServer
log = logging.getLogger(__name__)
@@ -81,7 +82,7 @@ class SubversionTunnelWrapper(object):
def sync(self):
while self.process.poll() is None:
- next_byte = self.stdin.read(1)
+ next_byte = self.stdin.buffer.read(1)
if not next_byte:
break
self.process.stdin.write(next_byte)
@@ -101,19 +102,27 @@ class SubversionTunnelWrapper(object):
def patch_first_client_response(self, response, **kwargs):
self.create_hooks_env()
- data = response.copy()
- data.update(kwargs)
- data['url'] = self._svn_string(data['url'])
- data['ra_client'] = self._svn_string(data['ra_client'])
- data['client'] = data['client'] or ''
- buffer_ = (
- "( {version} ( {capabilities} ) {url}{ra_client}"
- "( {client}) ) ".format(**data))
+
+ version = response['version']
+ capabilities = response['capabilities']
+ client = response['client'] or b''
+
+ url = self._svn_bytes(response['url'])
+ ra_client = self._svn_bytes(response['ra_client'])
+
+ buffer_ = b"( %b ( %b ) %b%b( %b) ) " % (
+ version,
+ capabilities,
+ url,
+ ra_client,
+ client
+ )
self.process.stdin.write(buffer_)
def fail(self, message):
- print("( failure ( ( 210005 {message} 0: 0 ) ) )".format(
- message=self._svn_string(message)))
+ fail_msg = b"( failure ( ( 210005 %b 0: 0 ) ) )" % self._svn_bytes(message)
+ sys.stdout.buffer.write(fail_msg)
+ sys.stdout.flush()
self.remove_configs()
self.process.kill()
return 1
@@ -121,27 +130,28 @@ class SubversionTunnelWrapper(object):
def interrupt(self, signum, frame):
self.fail("Exited by timeout")
- def _svn_string(self, str_):
- if not str_:
- return ''
- return f'{len(str_)}:{str_} '
+ def _svn_bytes(self, bytes_: bytes) -> bytes:
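+        # encode a value as an svn protocol string: "<length>:<data> "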
+ if not bytes_:
+ return b''
+
+ return f'{len(bytes_)}:'.encode() + bytes_ + b' '
def _read_first_client_response(self):
- buffer_ = ""
+ buffer_ = b""
brackets_stack = []
while True:
- next_byte = self.stdin.read(1)
+ next_byte = self.stdin.buffer.read(1)
buffer_ += next_byte
- if next_byte == "(":
+ if next_byte == b"(":
brackets_stack.append(next_byte)
- elif next_byte == ")":
+ elif next_byte == b")":
brackets_stack.pop()
- elif next_byte == " " and not brackets_stack:
+ elif next_byte == b" " and not brackets_stack:
break
return buffer_
- def _parse_first_client_response(self, buffer_):
+ def _parse_first_client_response(self, buffer_: bytes):
"""
According to the Subversion RA protocol, the first request
should look like:
@@ -151,16 +161,20 @@ class SubversionTunnelWrapper(object):
Please check https://svn.apache.org/repos/asf/subversion/trunk/subversion/libsvn_ra_svn/protocol
"""
-        version_re = r'(?P<version>\d+)'
-        capabilities_re = r'\(\s(?P<capabilities>[\w\d\-\ ]+)\s\)'
-        url_re = r'\d+\:(?P<url>[\W\w]+)'
-        ra_client_re = r'(\d+\:(?P<ra_client>[\W\w]+)\s)'
-        client_re = r'(\d+\:(?P<client>[\W\w]+)\s)*'
+        version_re = br'(?P<version>\d+)'
+        capabilities_re = br'\(\s(?P<capabilities>[\w\d\-\ ]+)\s\)'
+        url_re = br'\d+\:(?P<url>[\W\w]+)'
+        ra_client_re = br'(\d+\:(?P<ra_client>[\W\w]+)\s)'
+        client_re = br'(\d+\:(?P<client>[\W\w]+)\s)*'
regex = re.compile(
- r'^\(\s{version}\s{capabilities}\s{url}\s{ra_client}'
- r'\(\s{client}\)\s\)\s*$'.format(
- version=version_re, capabilities=capabilities_re,
- url=url_re, ra_client=ra_client_re, client=client_re))
+ br'^\(\s%b\s%b\s%b\s%b'
+ br'\(\s%b\)\s\)\s*$' % (
+ version_re,
+ capabilities_re,
+ url_re,
+ ra_client_re,
+ client_re)
+ )
matcher = regex.match(buffer_)
return matcher.groupdict() if matcher else None
@@ -198,11 +212,11 @@ class SubversionTunnelWrapper(object):
first_response = self.get_first_client_response()
if not first_response:
- return self.fail("Repository name cannot be extracted")
+ return self.fail(b"Repository name cannot be extracted")
url_parts = urllib.parse.urlparse(first_response['url'])
- self.server.repo_name = self._match_repo_name(url_parts.path.strip('/'))
+ self.server.repo_name = self._match_repo_name(safe_str(url_parts.path).strip('/'))
exit_code = self.server._check_permissions(action)
if exit_code:
@@ -218,20 +232,18 @@ class SubversionTunnelWrapper(object):
return self.return_code
-class SubversionServer(VcsServer):
+class SubversionServer(SshVcsServer):
backend = 'svn'
repo_user_agent = 'svn'
- def __init__(self, store, ini_path, repo_name,
- user, user_permissions, config, env):
- super()\
- .__init__(user, user_permissions, config, env)
+ def __init__(self, store, ini_path, repo_name, user, user_permissions, settings, env):
+ super().__init__(user, user_permissions, settings, env)
self.store = store
self.ini_path = ini_path
# NOTE(dan): repo_name at this point is empty,
# this is set later in .run() based from parsed input stream
self.repo_name = repo_name
- self._path = self.svn_path = config.get('app:main', 'ssh.executable.svn')
+ self._path = self.svn_path = settings['ssh.executable.svn']
self.tunnel = SubversionTunnelWrapper(server=self)
@@ -244,9 +256,10 @@ class SubversionServer(VcsServer):
# if exit_code:
# return exit_code, False
- req = self.env['request']
- server_url = req.host_url + req.script_name
- extras['server_url'] = server_url
+ req = self.env.get('request')
+ if req:
+ server_url = req.host_url + req.script_name
+ extras['server_url'] = server_url
log.debug('Using %s binaries from path %s', self.backend, self._path)
exit_code = self.tunnel.run(extras)
diff --git a/rhodecode/apps/ssh_support/lib/ssh_wrapper.py b/rhodecode/apps/ssh_support/lib/ssh_wrapper_v1.py
rename from rhodecode/apps/ssh_support/lib/ssh_wrapper.py
rename to rhodecode/apps/ssh_support/lib/ssh_wrapper_v1.py
--- a/rhodecode/apps/ssh_support/lib/ssh_wrapper.py
+++ b/rhodecode/apps/ssh_support/lib/ssh_wrapper_v1.py
@@ -23,29 +23,14 @@ import logging
import click
-from pyramid.paster import setup_logging
-
from rhodecode.lib.pyramid_utils import bootstrap
from rhodecode.lib.statsd_client import StatsdClient
from .backends import SshWrapper
+from .utils import setup_custom_logging
log = logging.getLogger(__name__)
-def setup_custom_logging(ini_path, debug):
- if debug:
- # enabled rhodecode.ini controlled logging setup
- setup_logging(ini_path)
- else:
- # configure logging in a mode that doesn't print anything.
- # in case of regularly configured logging it gets printed out back
- # to the client doing an SSH command.
- logger = logging.getLogger('')
- null = logging.NullHandler()
- # add the handler to the root logger
- logger.handlers = [null]
-
-
@click.command()
@click.argument('ini_path', type=click.Path(exists=True))
@click.option(
@@ -69,11 +54,12 @@ def main(ini_path, mode, user, user_id,
connection_info = os.environ.get('SSH_CONNECTION', '')
time_start = time.time()
with bootstrap(ini_path, env={'RC_CMD_SSH_WRAPPER': '1'}) as env:
+ settings = env['registry'].settings
statsd = StatsdClient.statsd
try:
ssh_wrapper = SshWrapper(
command, connection_info, mode,
- user, user_id, key_id, shell, ini_path, env)
+ user, user_id, key_id, shell, ini_path, settings, env)
except Exception:
log.exception('Failed to execute SshWrapper')
sys.exit(-5)
diff --git a/rhodecode/apps/ssh_support/lib/ssh_wrapper_v2.py b/rhodecode/apps/ssh_support/lib/ssh_wrapper_v2.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/apps/ssh_support/lib/ssh_wrapper_v2.py
@@ -0,0 +1,98 @@
+# Copyright (C) 2016-2023 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+"""
+WARNING: be really careful with changing ANY imports in this file
+# This script is meant to be a really fast executable; adding imports here that change the import chain
+# can affect execution times...
+# This can be easily debugged using a command such as::
+# time PYTHONPROFILEIMPORTTIME=1 rc-ssh-wrapper-v2 --debug --mode=test .dev/dev.ini
+"""
+
+import os
+import sys
+import time
+import logging
+
+import click
+
+from rhodecode.config.config_maker import sanitize_settings_and_apply_defaults
+from rhodecode.lib.request import Request
+from rhodecode.lib.utils2 import AttributeDict
+from rhodecode.lib.statsd_client import StatsdClient
+from rhodecode.lib.config_utils import get_app_config_lightweight
+
+from .utils import setup_custom_logging
+from .backends import SshWrapperStandalone
+
+log = logging.getLogger(__name__)
+
+
+@click.command()
+@click.argument('ini_path', type=click.Path(exists=True))
+@click.option(
+ '--mode', '-m', required=False, default='auto',
+ type=click.Choice(['auto', 'vcs', 'git', 'hg', 'svn', 'test']),
+ help='mode of operation')
+@click.option('--user', help='Username for which the command will be executed')
+@click.option('--user-id', help='User ID for which the command will be executed')
+@click.option('--key-id', help='ID of the key from the database')
+@click.option('--shell', '-s', is_flag=True, help='Allow Shell')
+@click.option('--debug', is_flag=True, help='Enable detailed output logging')
+def main(ini_path, mode, user, user_id, key_id, shell, debug):
+
+ time_start = time.time()
+ setup_custom_logging(ini_path, debug)
+
+ command = os.environ.get('SSH_ORIGINAL_COMMAND', '')
+ if not command and mode not in ['test']:
+ raise ValueError(
+            'Unable to fetch SSH_ORIGINAL_COMMAND from environment. '
+ 'Please make sure this is set and available during execution '
+ 'of this script.')
+
+ # initialize settings and get defaults
+ settings = get_app_config_lightweight(ini_path)
+ settings = sanitize_settings_and_apply_defaults({'__file__': ini_path}, settings)
+
+ # init and bootstrap StatsdClient
+ StatsdClient.setup(settings)
+ statsd = StatsdClient.statsd
+
+ try:
+ connection_info = os.environ.get('SSH_CONNECTION', '')
+ request = Request.blank('/', base_url=settings['app.base_url'])
+ request.user = AttributeDict({'username': user,
+ 'user_id': user_id,
+ 'ip_addr': connection_info.split(' ')[0] if connection_info else None})
+ env = {'RC_CMD_SSH_WRAPPER': '1', 'request': request}
+ ssh_wrapper = SshWrapperStandalone(
+ command, connection_info, mode,
+ user, user_id, key_id, shell, ini_path, settings, env)
+ except Exception:
+ log.exception('Failed to execute SshWrapper')
+ sys.exit(-5)
+
+ return_code = ssh_wrapper.wrap()
+ operation_took = time.time() - time_start
+ if statsd:
+ operation_took_ms = round(1000.0 * operation_took)
+ statsd.timing("rhodecode_ssh_wrapper_timing.histogram", operation_took_ms,
+ use_decimals=False)
+
+ sys.exit(return_code)
diff --git a/rhodecode/apps/ssh_support/lib/utils.py b/rhodecode/apps/ssh_support/lib/utils.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/apps/ssh_support/lib/utils.py
@@ -0,0 +1,34 @@
+# Copyright (C) 2016-2023 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+import logging
+
+
+def setup_custom_logging(ini_path, debug):
+ if debug:
+ from pyramid.paster import setup_logging # Lazy import
+ # enabled rhodecode.ini controlled logging setup
+ setup_logging(ini_path)
+ else:
+ # configure logging in a mode that doesn't print anything.
+ # in case of regularly configured logging it gets printed out back
+ # to the client doing an SSH command.
+ logger = logging.getLogger('')
+ null = logging.NullHandler()
+ # add the handler to the root logger
+ logger.handlers = [null]
diff --git a/rhodecode/apps/ssh_support/tests/conftest.py b/rhodecode/apps/ssh_support/tests/conftest.py
--- a/rhodecode/apps/ssh_support/tests/conftest.py
+++ b/rhodecode/apps/ssh_support/tests/conftest.py
@@ -20,7 +20,7 @@ import os
import pytest
import configparser
-from rhodecode.apps.ssh_support.lib.ssh_wrapper import SshWrapper
+from rhodecode.apps.ssh_support.lib.ssh_wrapper_v1 import SshWrapper
from rhodecode.lib.utils2 import AttributeDict
@@ -52,7 +52,10 @@ def dummy_env():
def plain_dummy_user():
- return AttributeDict(username='test_user')
+ return AttributeDict(
+ user_id=1,
+ username='test_user'
+ )
@pytest.fixture()
@@ -65,4 +68,4 @@ def ssh_wrapper(app, dummy_conf_file, du
conn_info = '127.0.0.1 22 10.0.0.1 443'
return SshWrapper(
'random command', conn_info, 'auto', 'admin', '1', key_id='1',
- shell=False, ini_path=dummy_conf_file, env=dummy_env)
+ shell=False, ini_path=dummy_conf_file, settings={}, env=dummy_env)
diff --git a/rhodecode/apps/ssh_support/tests/test_server_git.py b/rhodecode/apps/ssh_support/tests/test_server_git.py
--- a/rhodecode/apps/ssh_support/tests/test_server_git.py
+++ b/rhodecode/apps/ssh_support/tests/test_server_git.py
@@ -25,6 +25,7 @@ from rhodecode.apps.ssh_support.lib.back
from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
from rhodecode.lib.ext_json import json
+
class GitServerCreator(object):
root = '/tmp/repo/path/'
git_path = '/usr/local/bin/git'
@@ -39,10 +40,7 @@ class GitServerCreator(object):
user = plain_dummy_user()
def __init__(self):
- def config_get(part, key):
- return self.config_data.get(part, {}).get(key)
- self.config_mock = mock.Mock()
- self.config_mock.get = mock.Mock(side_effect=config_get)
+ pass
def create(self, **kwargs):
parameters = {
@@ -54,7 +52,7 @@ class GitServerCreator(object):
'user_permissions': {
self.repo_name: 'repository.admin'
},
- 'config': self.config_mock,
+ 'settings': self.config_data['app:main'],
'env': plain_dummy_env()
}
parameters.update(kwargs)
@@ -142,7 +140,7 @@ class TestGitServer(object):
'server_url': None,
'hooks': ['push', 'pull'],
'is_shadow_repo': False,
- 'hooks_module': 'rhodecode.lib.hooks_daemon',
+ 'hooks_module': 'rhodecode.lib.hook_daemon.hook_module',
'check_branch_perms': False,
'detect_force_push': False,
'user_agent': u'git/ssh-user-agent',
diff --git a/rhodecode/apps/ssh_support/tests/test_server_hg.py b/rhodecode/apps/ssh_support/tests/test_server_hg.py
--- a/rhodecode/apps/ssh_support/tests/test_server_hg.py
+++ b/rhodecode/apps/ssh_support/tests/test_server_hg.py
@@ -38,10 +38,7 @@ class MercurialServerCreator(object):
user = plain_dummy_user()
def __init__(self):
- def config_get(part, key):
- return self.config_data.get(part, {}).get(key)
- self.config_mock = mock.Mock()
- self.config_mock.get = mock.Mock(side_effect=config_get)
+ pass
def create(self, **kwargs):
parameters = {
@@ -52,7 +49,7 @@ class MercurialServerCreator(object):
'user_permissions': {
'test_hg': 'repository.admin'
},
- 'config': self.config_mock,
+ 'settings': self.config_data['app:main'],
'env': plain_dummy_env()
}
parameters.update(kwargs)
diff --git a/rhodecode/apps/ssh_support/tests/test_server_svn.py b/rhodecode/apps/ssh_support/tests/test_server_svn.py
--- a/rhodecode/apps/ssh_support/tests/test_server_svn.py
+++ b/rhodecode/apps/ssh_support/tests/test_server_svn.py
@@ -36,10 +36,7 @@ class SubversionServerCreator(object):
user = plain_dummy_user()
def __init__(self):
- def config_get(part, key):
- return self.config_data.get(part, {}).get(key)
- self.config_mock = mock.Mock()
- self.config_mock.get = mock.Mock(side_effect=config_get)
+ pass
def create(self, **kwargs):
parameters = {
@@ -50,7 +47,7 @@ class SubversionServerCreator(object):
'user_permissions': {
self.repo_name: 'repository.admin'
},
- 'config': self.config_mock,
+ 'settings': self.config_data['app:main'],
'env': plain_dummy_env()
}
@@ -65,6 +62,7 @@ def svn_server(app):
class TestSubversionServer(object):
+
def test_command(self, svn_server):
server = svn_server.create()
expected_command = [
diff --git a/rhodecode/apps/ssh_support/tests/test_ssh_wrapper.py b/rhodecode/apps/ssh_support/tests/test_ssh_wrapper.py
--- a/rhodecode/apps/ssh_support/tests/test_ssh_wrapper.py
+++ b/rhodecode/apps/ssh_support/tests/test_ssh_wrapper.py
@@ -28,10 +28,6 @@ class TestSSHWrapper(object):
permissions={}, branch_permissions={})
assert str(exc_info.value) == 'Unrecognised VCS: microsoft-tfs'
- def test_parse_config(self, ssh_wrapper):
- config = ssh_wrapper.parse_config(ssh_wrapper.ini_path)
- assert config
-
def test_get_connection_info(self, ssh_wrapper):
conn_info = ssh_wrapper.get_connection_info()
assert {'client_ip': '127.0.0.1',
diff --git a/rhodecode/apps/svn_support/utils.py b/rhodecode/apps/svn_support/utils.py
--- a/rhodecode/apps/svn_support/utils.py
+++ b/rhodecode/apps/svn_support/utils.py
@@ -22,7 +22,7 @@ import os
from pyramid.renderers import render
from rhodecode.events import trigger
-from rhodecode.lib.utils import get_rhodecode_realm, get_rhodecode_base_path
+from rhodecode.lib.utils import get_rhodecode_realm, get_rhodecode_repo_store_path
from rhodecode.lib.utils2 import str2bool
from rhodecode.model.db import RepoGroup
@@ -38,7 +38,7 @@ def write_mod_dav_svn_config(settings):
file_path = settings[config_keys.config_file_path]
config = _render_mod_dav_svn_config(
use_ssl=use_ssl,
- parent_path_root=get_rhodecode_base_path(),
+ parent_path_root=get_rhodecode_repo_store_path(),
list_parent_path=settings[config_keys.list_parent_path],
location_root=settings[config_keys.location_root],
repo_groups=RepoGroup.get_all_repo_groups(),
diff --git a/rhodecode/authentication/base.py b/rhodecode/authentication/base.py
--- a/rhodecode/authentication/base.py
+++ b/rhodecode/authentication/base.py
@@ -389,11 +389,7 @@ class RhodeCodeAuthPluginBase(object):
log.debug(
'Trying to fetch user `%s` from RhodeCode database', username)
if username:
- user = User.get_by_username(username)
- if not user:
- log.debug('User not found, fallback to fetch user in '
- 'case insensitive mode')
- user = User.get_by_username(username, case_insensitive=True)
+ user = User.get_by_username_or_primary_email(username)
else:
log.debug('provided username:`%s` is empty skipping...', username)
if not user:
diff --git a/rhodecode/authentication/plugins/auth_crowd.py b/rhodecode/authentication/plugins/auth_crowd.py
--- a/rhodecode/authentication/plugins/auth_crowd.py
+++ b/rhodecode/authentication/plugins/auth_crowd.py
@@ -31,7 +31,7 @@ import urllib.parse
from rhodecode.translation import _
from rhodecode.authentication.base import (
RhodeCodeExternalAuthPlugin, hybrid_property)
-from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase
+from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase, TwoFactorAuthnPluginSettingsSchemaMixin
from rhodecode.authentication.routes import AuthnPluginResourceBase
from rhodecode.lib.colander_utils import strip_whitespace
from rhodecode.lib.ext_json import json, formatted_json
@@ -53,7 +53,7 @@ class CrowdAuthnResource(AuthnPluginReso
pass
-class CrowdSettingsSchema(AuthnPluginSettingsSchemaBase):
+class CrowdSettingsSchema(TwoFactorAuthnPluginSettingsSchemaMixin, AuthnPluginSettingsSchemaBase):
host = colander.SchemaNode(
colander.String(),
default='127.0.0.1',
diff --git a/rhodecode/authentication/plugins/auth_jasig_cas.py b/rhodecode/authentication/plugins/auth_jasig_cas.py
--- a/rhodecode/authentication/plugins/auth_jasig_cas.py
+++ b/rhodecode/authentication/plugins/auth_jasig_cas.py
@@ -33,7 +33,7 @@ import urllib.error
from rhodecode.translation import _
from rhodecode.authentication.base import (
RhodeCodeExternalAuthPlugin, hybrid_property)
-from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase
+from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase, TwoFactorAuthnPluginSettingsSchemaMixin
from rhodecode.authentication.routes import AuthnPluginResourceBase
from rhodecode.lib.colander_utils import strip_whitespace
from rhodecode.model.db import User
@@ -55,7 +55,7 @@ class JasigCasAuthnResource(AuthnPluginR
pass
-class JasigCasSettingsSchema(AuthnPluginSettingsSchemaBase):
+class JasigCasSettingsSchema(TwoFactorAuthnPluginSettingsSchemaMixin, AuthnPluginSettingsSchemaBase):
service_url = colander.SchemaNode(
colander.String(),
default='https://domain.com/cas/v1/tickets',
diff --git a/rhodecode/authentication/plugins/auth_ldap.py b/rhodecode/authentication/plugins/auth_ldap.py
--- a/rhodecode/authentication/plugins/auth_ldap.py
+++ b/rhodecode/authentication/plugins/auth_ldap.py
@@ -27,7 +27,7 @@ import colander
from rhodecode.translation import _
from rhodecode.authentication.base import (
RhodeCodeExternalAuthPlugin, AuthLdapBase, hybrid_property)
-from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase
+from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase, TwoFactorAuthnPluginSettingsSchemaMixin
from rhodecode.authentication.routes import AuthnPluginResourceBase
from rhodecode.lib.colander_utils import strip_whitespace
from rhodecode.lib.exceptions import (
@@ -245,7 +245,7 @@ class AuthLdap(AuthLdapBase):
return dn, user_attrs
-class LdapSettingsSchema(AuthnPluginSettingsSchemaBase):
+class LdapSettingsSchema(TwoFactorAuthnPluginSettingsSchemaMixin, AuthnPluginSettingsSchemaBase):
tls_kind_choices = ['PLAIN', 'LDAPS', 'START_TLS']
tls_reqcert_choices = ['NEVER', 'ALLOW', 'TRY', 'DEMAND', 'HARD']
search_scope_choices = ['BASE', 'ONELEVEL', 'SUBTREE']
diff --git a/rhodecode/authentication/plugins/auth_pam.py b/rhodecode/authentication/plugins/auth_pam.py
--- a/rhodecode/authentication/plugins/auth_pam.py
+++ b/rhodecode/authentication/plugins/auth_pam.py
@@ -31,7 +31,7 @@ import socket
from rhodecode.translation import _
from rhodecode.authentication.base import (
RhodeCodeExternalAuthPlugin, hybrid_property)
-from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase
+from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase, TwoFactorAuthnPluginSettingsSchemaMixin
from rhodecode.authentication.routes import AuthnPluginResourceBase
from rhodecode.lib.colander_utils import strip_whitespace
@@ -51,7 +51,7 @@ class PamAuthnResource(AuthnPluginResour
pass
-class PamSettingsSchema(AuthnPluginSettingsSchemaBase):
+class PamSettingsSchema(TwoFactorAuthnPluginSettingsSchemaMixin, AuthnPluginSettingsSchemaBase):
service = colander.SchemaNode(
colander.String(),
default='login',
diff --git a/rhodecode/authentication/plugins/auth_rhodecode.py b/rhodecode/authentication/plugins/auth_rhodecode.py
--- a/rhodecode/authentication/plugins/auth_rhodecode.py
+++ b/rhodecode/authentication/plugins/auth_rhodecode.py
@@ -27,7 +27,7 @@ import colander
from rhodecode.translation import _
from rhodecode.lib.utils2 import safe_bytes
from rhodecode.model.db import User
-from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase
+from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase, TwoFactorAuthnPluginSettingsSchemaMixin
from rhodecode.authentication.base import (
RhodeCodeAuthPluginBase, hybrid_property, HTTP_TYPE, VCS_TYPE)
from rhodecode.authentication.routes import AuthnPluginResourceBase
@@ -169,7 +169,7 @@ class RhodeCodeAuthPlugin(RhodeCodeAuthP
extra={"action": "user_auth_ok", "auth_module": "auth_rhodecode_anon", "username": userobj.username})
return user_attrs
- elif userobj.username == username and password_match:
+ elif (userobj.username == username or userobj.email == username) and password_match:
log.info('user `%s` authenticated correctly', userobj.username,
extra={"action": "user_auth_ok", "auth_module": "auth_rhodecode", "username": userobj.username})
return user_attrs
@@ -182,8 +182,7 @@ class RhodeCodeAuthPlugin(RhodeCodeAuthP
return None
-class RhodeCodeSettingsSchema(AuthnPluginSettingsSchemaBase):
-
+class RhodeCodeSettingsSchema(TwoFactorAuthnPluginSettingsSchemaMixin, AuthnPluginSettingsSchemaBase):
auth_restriction_choices = [
(RhodeCodeAuthPlugin.AUTH_RESTRICTION_NONE, 'All users'),
(RhodeCodeAuthPlugin.AUTH_RESTRICTION_SUPER_ADMIN, 'Super admins only'),
diff --git a/rhodecode/authentication/schema.py b/rhodecode/authentication/schema.py
--- a/rhodecode/authentication/schema.py
+++ b/rhodecode/authentication/schema.py
@@ -48,3 +48,17 @@ class AuthnPluginSettingsSchemaBase(cola
validator=colander.Range(min=0, max=None),
widget='int',
)
+
+
+class TwoFactorAuthnPluginSettingsSchemaMixin(colander.MappingSchema):
+ """
+ Mixin for extending plugins with two-factor authentication option.
+ """
+ global_2fa = colander.SchemaNode(
+ colander.Bool(),
+ default=False,
+ description=_('Force all users to use two factor authentication with this plugin.'),
+ missing=False,
+ title=_('enforce 2FA for users'),
+ widget='bool',
+ )
diff --git a/rhodecode/config/config_maker.py b/rhodecode/config/config_maker.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/config/config_maker.py
@@ -0,0 +1,224 @@
+# Copyright (C) 2010-2023 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+import os
+import tempfile
+import logging
+
+from pyramid.settings import asbool
+
+from rhodecode.config.settings_maker import SettingsMaker
+from rhodecode.config import utils as config_utils
+
+log = logging.getLogger(__name__)
+
+
+def sanitize_settings_and_apply_defaults(global_config, settings):
+ """
+ Applies settings defaults and does all type conversion.
+
+ We would move all settings parsing and preparation into this place, so that
+ we have only one place left which deals with this part. The remaining parts
+ of the application would start to rely fully on well-prepared settings.
+
+ This piece would later be split up per topic to avoid a big fat monster
+ function.
+ """
+ jn = os.path.join
+
+ global_settings_maker = SettingsMaker(global_config)
+ global_settings_maker.make_setting('debug', default=False, parser='bool')
+ debug_enabled = asbool(global_config.get('debug'))
+
+ settings_maker = SettingsMaker(settings)
+
+ settings_maker.make_setting(
+ 'logging.autoconfigure',
+ default=False,
+ parser='bool')
+
+ logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini')
+ settings_maker.enable_logging(logging_conf, level='INFO' if debug_enabled else 'DEBUG')
+
+ # Default includes, possible to change as a user
+ pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
+ log.debug(
+ "Using the following pyramid.includes: %s",
+ pyramid_includes)
+
+ settings_maker.make_setting('rhodecode.edition', 'Community Edition')
+ settings_maker.make_setting('rhodecode.edition_id', 'CE')
+
+ if 'mako.default_filters' not in settings:
+ # set custom default filters if we don't have it defined
+ settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
+ settings['mako.default_filters'] = 'h_filter'
+
+ if 'mako.directories' not in settings:
+ mako_directories = settings.setdefault('mako.directories', [
+ # Base templates of the original application
+ 'rhodecode:templates',
+ ])
+ log.debug(
+ "Using the following Mako template directories: %s",
+ mako_directories)
+
+ # NOTE(marcink): fix redis requirement for schema of connection since 3.X
+ if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis':
+ raw_url = settings['beaker.session.url']
+ if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
+ settings['beaker.session.url'] = 'redis://' + raw_url
+
+ settings_maker.make_setting('__file__', global_config.get('__file__'))
+
+ # TODO: johbo: Re-think this, usually the call to config.include
+ # should allow to pass in a prefix.
+ settings_maker.make_setting('rhodecode.api.url', '/_admin/api')
+
+ # Sanitize generic settings.
+ settings_maker.make_setting('default_encoding', 'UTF-8', parser='list')
+ settings_maker.make_setting('gzip_responses', False, parser='bool')
+ settings_maker.make_setting('startup.import_repos', 'false', parser='bool')
+
+ # statsd
+ settings_maker.make_setting('statsd.enabled', False, parser='bool')
+ settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
+ settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
+ settings_maker.make_setting('statsd.statsd_prefix', '')
+ settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
+
+ settings_maker.make_setting('vcs.svn.compatible_version', '')
+ settings_maker.make_setting('vcs.svn.redis_conn', 'redis://redis:6379/0')
+ settings_maker.make_setting('vcs.svn.proxy.enabled', True, parser='bool')
+ settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090', parser='string')
+ settings_maker.make_setting('vcs.hooks.protocol', 'http')
+ settings_maker.make_setting('vcs.hooks.host', '*')
+ settings_maker.make_setting('vcs.scm_app_implementation', 'http')
+ settings_maker.make_setting('vcs.server', '')
+ settings_maker.make_setting('vcs.server.protocol', 'http')
+ settings_maker.make_setting('vcs.server.enable', 'true', parser='bool')
+ settings_maker.make_setting('vcs.hooks.direct_calls', 'false', parser='bool')
+ settings_maker.make_setting('vcs.start_server', 'false', parser='bool')
+ settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list')
+ settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int')
+
+ settings_maker.make_setting('vcs.methods.cache', True, parser='bool')
+
+ # repo_store path
+ settings_maker.make_setting('repo_store.path', '/var/opt/rhodecode_repo_store')
+    # Support legacy values of vcs.scm_app_implementation. Legacy
+    # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or
+    # 'vcsserver.scm_app' (disabled since 4.13); both are now mapped to 'http'.
+ scm_app_impl = settings['vcs.scm_app_implementation']
+ if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
+ settings['vcs.scm_app_implementation'] = 'http'
+
+ settings_maker.make_setting('appenlight', False, parser='bool')
+
+ temp_store = tempfile.gettempdir()
+ tmp_cache_dir = jn(temp_store, 'rc_cache')
+
+ # save default, cache dir, and use it for all backends later.
+ default_cache_dir = settings_maker.make_setting(
+ 'cache_dir',
+ default=tmp_cache_dir, default_when_empty=True,
+ parser='dir:ensured')
+
+ # exception store cache
+ settings_maker.make_setting(
+ 'exception_tracker.store_path',
+ default=jn(default_cache_dir, 'exc_store'), default_when_empty=True,
+ parser='dir:ensured'
+ )
+
+ settings_maker.make_setting(
+ 'celerybeat-schedule.path',
+ default=jn(default_cache_dir, 'celerybeat_schedule', 'celerybeat-schedule.db'), default_when_empty=True,
+ parser='file:ensured'
+ )
+
+ settings_maker.make_setting('exception_tracker.send_email', False, parser='bool')
+ settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True)
+
+ # sessions, ensure file since no-value is memory
+ settings_maker.make_setting('beaker.session.type', 'file')
+ settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data'))
+
+ # cache_general
+ settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace')
+ settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int')
+ settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db'))
+
+ # cache_perms
+ settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace')
+ settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int')
+ settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db'))
+
+ # cache_repo
+ settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace')
+ settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int')
+ settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db'))
+
+ # cache_license
+ settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace')
+ settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int')
+ settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db'))
+
+ # cache_repo_longterm memory, 96H
+ settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru')
+ settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int')
+ settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int')
+
+ # sql_cache_short
+ settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru')
+ settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int')
+ settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int')
+
+ # archive_cache
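+    # supported backend types: 'filesystem' (local shards) or 'objectstore' (s3-compatible store)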
+ settings_maker.make_setting('archive_cache.locking.url', 'redis://redis:6379/1')
+ settings_maker.make_setting('archive_cache.backend.type', 'filesystem')
+
+ settings_maker.make_setting('archive_cache.filesystem.store_dir', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
+ settings_maker.make_setting('archive_cache.filesystem.cache_shards', 8, parser='int')
+ settings_maker.make_setting('archive_cache.filesystem.cache_size_gb', 10, parser='float')
+ settings_maker.make_setting('archive_cache.filesystem.eviction_policy', 'least-recently-stored')
+
+ settings_maker.make_setting('archive_cache.filesystem.retry', False, parser='bool')
+ settings_maker.make_setting('archive_cache.filesystem.retry_backoff', 1, parser='int')
+ settings_maker.make_setting('archive_cache.filesystem.retry_attempts', 10, parser='int')
+
+ settings_maker.make_setting('archive_cache.objectstore.url', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
+ settings_maker.make_setting('archive_cache.objectstore.key', '')
+ settings_maker.make_setting('archive_cache.objectstore.secret', '')
+ settings_maker.make_setting('archive_cache.objectstore.region', 'eu-central-1')
+ settings_maker.make_setting('archive_cache.objectstore.bucket', 'rhodecode-archive-cache', default_when_empty=True,)
+ settings_maker.make_setting('archive_cache.objectstore.bucket_shards', 8, parser='int')
+
+ settings_maker.make_setting('archive_cache.objectstore.cache_size_gb', 10, parser='float')
+ settings_maker.make_setting('archive_cache.objectstore.eviction_policy', 'least-recently-stored')
+
+ settings_maker.make_setting('archive_cache.objectstore.retry', False, parser='bool')
+ settings_maker.make_setting('archive_cache.objectstore.retry_backoff', 1, parser='int')
+ settings_maker.make_setting('archive_cache.objectstore.retry_attempts', 10, parser='int')
+
+ settings_maker.env_expand()
+
+ # configure instance id
+ config_utils.set_instance_id(settings)
+
+ return settings
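
Note: the defaults above rely on the SettingsMaker semantics: an explicit .ini value wins, an empty value falls back to the default when default_when_empty=True, and the parser coerces the final type. A minimal, hedged illustration (the key values are made up):

    from rhodecode.config.settings_maker import SettingsMaker

    settings = {
        'archive_cache.filesystem.cache_size_gb': '40',  # set explicitly in the .ini
        'archive_cache.objectstore.bucket': '',          # left empty in the .ini
    }
    settings_maker = SettingsMaker(settings)
    settings_maker.make_setting('archive_cache.filesystem.cache_size_gb', 10, parser='float')
    settings_maker.make_setting('archive_cache.objectstore.bucket', 'rhodecode-archive-cache', default_when_empty=True)
    # expected outcome: cache_size_gb is parsed to 40.0, the empty bucket falls back to the default name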
diff --git a/rhodecode/config/environment.py b/rhodecode/config/environment.py
--- a/rhodecode/config/environment.py
+++ b/rhodecode/config/environment.py
@@ -46,8 +46,7 @@ def load_pyramid_environment(global_conf
# If this is a test run we prepare the test environment like
# creating a test database, test search index and test repositories.
# This has to be done before the database connection is initialized.
- if settings['is_test']:
- rhodecode.is_test = True
+ if rhodecode.is_test:
rhodecode.disable_error_handler = True
from rhodecode import authentication
authentication.plugin_default_auth_ttl = 0
@@ -81,7 +80,6 @@ def load_pyramid_environment(global_conf
rhodecode.PYRAMID_SETTINGS = settings_merged
rhodecode.CONFIG = settings_merged
rhodecode.CONFIG['default_user_id'] = utils.get_default_user_id()
- rhodecode.CONFIG['default_base_path'] = utils.get_default_base_path()
if vcs_server_enabled:
connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings))
diff --git a/rhodecode/config/middleware.py b/rhodecode/config/middleware.py
--- a/rhodecode/config/middleware.py
+++ b/rhodecode/config/middleware.py
@@ -19,14 +19,13 @@
import os
import sys
import collections
-import tempfile
+
import time
import logging.config
from paste.gzipper import make_gzip_middleware
import pyramid.events
from pyramid.wsgi import wsgiapp
-from pyramid.authorization import ACLAuthorizationPolicy
from pyramid.config import Configurator
from pyramid.settings import asbool, aslist
from pyramid.httpexceptions import (
@@ -35,11 +34,11 @@ from pyramid.renderers import render_to_
from rhodecode.model import meta
from rhodecode.config import patches
-from rhodecode.config import utils as config_utils
-from rhodecode.config.settings_maker import SettingsMaker
+
from rhodecode.config.environment import load_pyramid_environment
import rhodecode.events
+from rhodecode.config.config_maker import sanitize_settings_and_apply_defaults
from rhodecode.lib.middleware.vcs import VCSMiddleware
from rhodecode.lib.request import Request
from rhodecode.lib.vcs import VCSCommunicationError
@@ -327,7 +326,7 @@ def includeme(config, auth_resources=Non
config.include('pyramid_mako')
config.include('rhodecode.lib.rc_beaker')
config.include('rhodecode.lib.rc_cache')
- config.include('rhodecode.lib.rc_cache.archive_cache')
+ config.include('rhodecode.lib.archive_cache')
config.include('rhodecode.apps._base.navigation')
config.include('rhodecode.apps._base.subscribers')
@@ -465,173 +464,3 @@ def wrap_app_in_wsgi_middlewares(pyramid
log.debug('Request processing finalized: %.4fs', total)
return pyramid_app_with_cleanup
-
-
-def sanitize_settings_and_apply_defaults(global_config, settings):
- """
- Applies settings defaults and does all type conversion.
-
- We would move all settings parsing and preparation into this place, so that
- we have only one place left which deals with this part. The remaining parts
- of the application would start to rely fully on well prepared settings.
-
- This piece would later be split up per topic to avoid a big fat monster
- function.
- """
- jn = os.path.join
-
- global_settings_maker = SettingsMaker(global_config)
- global_settings_maker.make_setting('debug', default=False, parser='bool')
- debug_enabled = asbool(global_config.get('debug'))
-
- settings_maker = SettingsMaker(settings)
-
- settings_maker.make_setting(
- 'logging.autoconfigure',
- default=False,
- parser='bool')
-
- logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini')
- settings_maker.enable_logging(logging_conf, level='INFO' if debug_enabled else 'DEBUG')
-
- # Default includes, possible to change as a user
- pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
- log.debug(
- "Using the following pyramid.includes: %s",
- pyramid_includes)
-
- settings_maker.make_setting('rhodecode.edition', 'Community Edition')
- settings_maker.make_setting('rhodecode.edition_id', 'CE')
-
- if 'mako.default_filters' not in settings:
- # set custom default filters if we don't have it defined
- settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
- settings['mako.default_filters'] = 'h_filter'
-
- if 'mako.directories' not in settings:
- mako_directories = settings.setdefault('mako.directories', [
- # Base templates of the original application
- 'rhodecode:templates',
- ])
- log.debug(
- "Using the following Mako template directories: %s",
- mako_directories)
-
- # NOTE(marcink): fix redis requirement for schema of connection since 3.X
- if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis':
- raw_url = settings['beaker.session.url']
- if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
- settings['beaker.session.url'] = 'redis://' + raw_url
-
- settings_maker.make_setting('__file__', global_config.get('__file__'))
-
- # TODO: johbo: Re-think this, usually the call to config.include
- # should allow to pass in a prefix.
- settings_maker.make_setting('rhodecode.api.url', '/_admin/api')
-
- # Sanitize generic settings.
- settings_maker.make_setting('default_encoding', 'UTF-8', parser='list')
- settings_maker.make_setting('is_test', False, parser='bool')
- settings_maker.make_setting('gzip_responses', False, parser='bool')
-
- # statsd
- settings_maker.make_setting('statsd.enabled', False, parser='bool')
- settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
- settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
- settings_maker.make_setting('statsd.statsd_prefix', '')
- settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
-
- settings_maker.make_setting('vcs.svn.compatible_version', '')
- settings_maker.make_setting('vcs.hooks.protocol', 'http')
- settings_maker.make_setting('vcs.hooks.host', '*')
- settings_maker.make_setting('vcs.scm_app_implementation', 'http')
- settings_maker.make_setting('vcs.server', '')
- settings_maker.make_setting('vcs.server.protocol', 'http')
- settings_maker.make_setting('vcs.server.enable', 'true', parser='bool')
- settings_maker.make_setting('startup.import_repos', 'false', parser='bool')
- settings_maker.make_setting('vcs.hooks.direct_calls', 'false', parser='bool')
- settings_maker.make_setting('vcs.start_server', 'false', parser='bool')
- settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list')
- settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int')
-
- settings_maker.make_setting('vcs.methods.cache', True, parser='bool')
-
- # Support legacy values of vcs.scm_app_implementation. Legacy
- # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or
- # disabled since 4.13 'vcsserver.scm_app' which is now mapped to 'http'.
- scm_app_impl = settings['vcs.scm_app_implementation']
- if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
- settings['vcs.scm_app_implementation'] = 'http'
-
- settings_maker.make_setting('appenlight', False, parser='bool')
-
- temp_store = tempfile.gettempdir()
- tmp_cache_dir = jn(temp_store, 'rc_cache')
-
- # save default, cache dir, and use it for all backends later.
- default_cache_dir = settings_maker.make_setting(
- 'cache_dir',
- default=tmp_cache_dir, default_when_empty=True,
- parser='dir:ensured')
-
- # exception store cache
- settings_maker.make_setting(
- 'exception_tracker.store_path',
- default=jn(default_cache_dir, 'exc_store'), default_when_empty=True,
- parser='dir:ensured'
- )
-
- settings_maker.make_setting(
- 'celerybeat-schedule.path',
- default=jn(default_cache_dir, 'celerybeat_schedule', 'celerybeat-schedule.db'), default_when_empty=True,
- parser='file:ensured'
- )
-
- settings_maker.make_setting('exception_tracker.send_email', False, parser='bool')
- settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True)
-
- # sessions, ensure file since no-value is memory
- settings_maker.make_setting('beaker.session.type', 'file')
- settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data'))
-
- # cache_general
- settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace')
- settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int')
- settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db'))
-
- # cache_perms
- settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace')
- settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int')
- settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db'))
-
- # cache_repo
- settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace')
- settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int')
- settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db'))
-
- # cache_license
- settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace')
- settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int')
- settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db'))
-
- # cache_repo_longterm memory, 96H
- settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru')
- settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int')
- settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int')
-
- # sql_cache_short
- settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru')
- settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int')
- settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int')
-
- # archive_cache
- settings_maker.make_setting('archive_cache.store_dir', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
- settings_maker.make_setting('archive_cache.cache_size_gb', 10, parser='float')
- settings_maker.make_setting('archive_cache.cache_shards', 10, parser='int')
-
- settings_maker.env_expand()
-
- # configure instance id
- config_utils.set_instance_id(settings)
-
- return settings
diff --git a/rhodecode/config/settings_maker.py b/rhodecode/config/settings_maker.py
--- a/rhodecode/config/settings_maker.py
+++ b/rhodecode/config/settings_maker.py
@@ -23,6 +23,7 @@ import functools
import logging
import tempfile
import logging.config
+
from rhodecode.lib.type_utils import str2bool, aslist
log = logging.getLogger(__name__)
@@ -34,13 +35,16 @@ set_keys = {
}
-class SettingsMaker(object):
+class SettingsMaker:
def __init__(self, app_settings):
self.settings = app_settings
@classmethod
def _bool_func(cls, input_val):
+ if isinstance(input_val, bytes):
+ # decode to str
+ input_val = input_val.decode('utf8')
return str2bool(input_val)
@classmethod
@@ -62,11 +66,24 @@ class SettingsMaker(object):
return input_val
@classmethod
+ def _string_no_quote_func(cls, input_val, lower=True):
+ """
+        Special-case string function that detects when a value is set to an empty quoted string,
+        e.g.
+
+        core.binary_dir = ""
+        """
+
+        input_val = cls._string_func(input_val, lower=lower)
+        if input_val in ['""', "''"]:
+            return ''
+        return input_val
+
+ @classmethod
def _dir_func(cls, input_val, ensure_dir=False, mode=0o755):
# ensure we have our dir created
if not os.path.isdir(input_val) and ensure_dir:
- os.makedirs(input_val, mode=mode)
+ os.makedirs(input_val, mode=mode, exist_ok=True)
if not os.path.isdir(input_val):
raise Exception(f'Dir at {input_val} does not exist')
@@ -150,6 +167,7 @@ class SettingsMaker(object):
'list:newline': functools.partial(self._list_func, sep='/n'),
'list:spacesep': functools.partial(self._list_func, sep=' '),
'string': functools.partial(self._string_func, lower=lower),
+ 'string:noquote': functools.partial(self._string_no_quote_func, lower=lower),
'dir': self._dir_func,
'dir:ensured': functools.partial(self._dir_func, ensure_dir=True),
'file': self._file_path_func,
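
Note: the new 'string:noquote' parser is meant for settings that may be explicitly set to an empty quoted value in the .ini. A simplified stand-in (not the patched method itself) showing the intended behaviour:

    def string_no_quote(value: str) -> str:
        # a value written as core.binary_dir = "" reaches us as the literal two-character string '""'
        value = value.strip()
        if value in ['""', "''"]:
            return ''
        return value

    assert string_no_quote('""') == ''
    assert string_no_quote('/usr/local/bin') == '/usr/local/bin'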
diff --git a/rhodecode/config/utils.py b/rhodecode/config/utils.py
--- a/rhodecode/config/utils.py
+++ b/rhodecode/config/utils.py
@@ -19,7 +19,7 @@
import os
import platform
-from rhodecode.model import init_model
+DEFAULT_USER = 'default'
def configure_vcs(config):
@@ -44,6 +44,7 @@ def configure_vcs(config):
def initialize_database(config):
from rhodecode.lib.utils2 import engine_from_config, get_encryption_key
+ from rhodecode.model import init_model
engine = engine_from_config(config, 'sqlalchemy.db1.')
init_model(engine, encryption_key=get_encryption_key(config))
@@ -93,25 +94,17 @@ def set_instance_id(config):
def get_default_user_id():
- DEFAULT_USER = 'default'
from sqlalchemy import text
from rhodecode.model import meta
engine = meta.get_engine()
with meta.SA_Session(engine) as session:
- result = session.execute(text("SELECT user_id from users where username = :uname"), {'uname': DEFAULT_USER})
- user_id = result.first()[0]
+ result = session.execute(text(
+ "SELECT user_id from users where username = :uname"
+ ), {'uname': DEFAULT_USER})
+ user = result.first()
+ if not user:
+ raise ValueError('Unable to retrieve default user data from DB')
+ user_id = user[0]
return user_id
-
-
-def get_default_base_path():
- from sqlalchemy import text
- from rhodecode.model import meta
-
- engine = meta.get_engine()
- with meta.SA_Session(engine) as session:
- result = session.execute(text("SELECT ui_value from rhodecode_ui where ui_key = '/'"))
- base_path = result.first()[0]
-
- return base_path
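
Note: get_default_user_id() now raises a clear ValueError when the 'default' user row is missing (for example, an unseeded database), instead of failing with an opaque TypeError on result.first()[0]. The patch lets that error propagate; a hedged sketch of how a caller could handle it explicitly:

    from rhodecode.config import utils as config_utils

    try:
        default_user_id = config_utils.get_default_user_id()
    except ValueError:
        # database not migrated/seeded yet; handle this case explicitly
        default_user_id = None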
diff --git a/rhodecode/events/base.py b/rhodecode/events/base.py
--- a/rhodecode/events/base.py
+++ b/rhodecode/events/base.py
@@ -15,13 +15,13 @@
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
+
import logging
import datetime
-import typing
from zope.cachedescriptors.property import Lazy as LazyProperty
-from pyramid.threadlocal import get_current_request
+from rhodecode.lib.pyramid_utils import get_current_request
from rhodecode.lib.utils2 import AttributeDict
@@ -41,8 +41,9 @@ class RhodecodeEvent(object):
name = "RhodeCodeEvent"
no_url_set = ''
- def __init__(self, request=None):
+ def __init__(self, request=None, actor=None):
self._request = request
+ self._actor = actor
self.utc_timestamp = datetime.datetime.utcnow()
def __repr__(self):
@@ -72,16 +73,24 @@ class RhodecodeEvent(object):
@property
def actor(self):
+ from rhodecode.lib.auth import AuthUser
+
+ # if an explicit actor is specified, use this
+ if self._actor:
+ return self._actor
+
auth_user = self.auth_user
- if auth_user:
+ log.debug('Got integration actor: %s', auth_user)
+ if isinstance(auth_user, AuthUser):
instance = auth_user.get_instance()
+ # we can't find this DB user...
if not instance:
return AttributeDict(dict(
username=auth_user.username,
user_id=auth_user.user_id,
))
- return instance
-
+ elif auth_user:
+ return auth_user
return SYSTEM_USER
@property
@@ -129,3 +138,4 @@ class FtsBuild(RhodecodeEvent):
"""
name = 'fts-build'
display_name = 'Start FTS Build'
+
diff --git a/rhodecode/events/repo.py b/rhodecode/events/repo.py
--- a/rhodecode/events/repo.py
+++ b/rhodecode/events/repo.py
@@ -156,11 +156,11 @@ class RepoEvent(RhodeCodeIntegrationEven
Base class for events acting on a repository.
"""
- def __init__(self, repo):
+ def __init__(self, repo, actor=None):
"""
:param repo: a :class:`Repository` instance
"""
- super().__init__()
+ super().__init__(actor=actor)
self.repo = repo
def as_dict(self):
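
Note: events can now carry an explicit actor instead of resolving one from the current request, which matters in background/Celery contexts where no request is bound. A hedged usage sketch, assuming a concrete subclass such as RepoCreateEvent keeps the base RepoEvent signature:

    from rhodecode import events

    # `repo` is a Repository instance, `user` any User/AuthUser-like object (placeholders here)
    events.trigger(events.RepoCreateEvent(repo, actor=user))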
diff --git a/rhodecode/lib/_vendor/jsonlogger/__init__.py b/rhodecode/lib/_vendor/jsonlogger/__init__.py
--- a/rhodecode/lib/_vendor/jsonlogger/__init__.py
+++ b/rhodecode/lib/_vendor/jsonlogger/__init__.py
@@ -11,9 +11,9 @@ import importlib
from inspect import istraceback
from collections import OrderedDict
-from rhodecode.lib.logging_formatter import _inject_req_id, ExceptionAwareFormatter
-from rhodecode.lib.ext_json import sjson as json
+from ...logging_formatter import _inject_req_id, ExceptionAwareFormatter
+from ...ext_json import sjson as json
ZERO = timedelta(0)
HOUR = timedelta(hours=1)
@@ -78,7 +78,7 @@ class JsonEncoder(json.JSONEncoder):
return str(obj)
try:
- return super(JsonEncoder, self).default(obj)
+ return super().default(obj)
except TypeError:
try:
@@ -194,7 +194,7 @@ class JsonFormatter(ExceptionAwareFormat
def serialize_log_record(self, log_record):
"""Returns the final representation of the log record."""
- return "%s%s" % (self.prefix, self.jsonify_log_record(log_record))
+ return "{}{}".format(self.prefix, self.jsonify_log_record(log_record))
def format(self, record):
"""Formats a log record and serializes to json"""
diff --git a/rhodecode/lib/_vendor/redis_lock/__init__.py b/rhodecode/lib/_vendor/redis_lock/__init__.py
--- a/rhodecode/lib/_vendor/redis_lock/__init__.py
+++ b/rhodecode/lib/_vendor/redis_lock/__init__.py
@@ -102,7 +102,7 @@ class NotExpirable(RuntimeError):
pass
-class Lock(object):
+class Lock:
"""
A Lock context manager implemented via redis SETNX/BLPOP.
"""
@@ -111,11 +111,12 @@ class Lock(object):
extend_script = None
reset_script = None
reset_all_script = None
+ blocking = None
_lock_renewal_interval: float
_lock_renewal_thread: Union[threading.Thread, None]
- def __init__(self, redis_client, name, expire=None, id=None, auto_renewal=False, strict=True, signal_expire=1000):
+ def __init__(self, redis_client, name, expire=None, id=None, auto_renewal=False, strict=True, signal_expire=1000, blocking=True):
"""
:param redis_client:
An instance of :class:`~StrictRedis`.
@@ -143,6 +144,9 @@ class Lock(object):
If set ``True`` then the ``redis_client`` needs to be an instance of ``redis.StrictRedis``.
:param signal_expire:
Advanced option to override signal list expiration in milliseconds. Increase it for very slow clients. Default: ``1000``.
+ :param blocking:
+ Boolean value specifying whether lock should be blocking or not.
+ Used in `__enter__` method.
"""
if strict and not isinstance(redis_client, StrictRedis):
raise ValueError("redis_client must be instance of StrictRedis. "
@@ -179,6 +183,8 @@ class Lock(object):
else None)
self._lock_renewal_thread = None
+ self.blocking = blocking
+
self.register_scripts(redis_client)
@classmethod
@@ -342,9 +348,11 @@ class Lock(object):
loggers["refresh.exit"].debug("Renewal thread for Lock(%r) exited.", self._name)
def __enter__(self):
- acquired = self.acquire(blocking=True)
+ acquired = self.acquire(blocking=self.blocking)
if not acquired:
- raise AssertionError(f"Lock({self._name}) wasn't acquired, but blocking=True was used!")
+ if self.blocking:
+ raise AssertionError(f"Lock({self._name}) wasn't acquired, but blocking=True was used!")
+ raise NotAcquired(f"Lock({self._name}) is not acquired or it already expired.")
return self
def __exit__(self, exc_type=None, exc_value=None, traceback=None):
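
Note: the vendored Lock now supports a non-blocking context-manager mode; instead of tripping the assertion, it raises NotAcquired so callers can skip work another process already holds the lock for. A hedged sketch:

    import redis
    from rhodecode.lib._vendor import redis_lock

    client = redis.StrictRedis.from_url('redis://redis:6379/1')  # illustrative URL
    try:
        with redis_lock.Lock(client, name='some-task-lock', expire=60, blocking=False):
            pass  # guarded work goes here
    except redis_lock.NotAcquired:
        pass  # the lock is held elsewhere; skip instead of waiting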
diff --git a/rhodecode/lib/_vendor/statsd/__init__.py b/rhodecode/lib/_vendor/statsd/__init__.py
--- a/rhodecode/lib/_vendor/statsd/__init__.py
+++ b/rhodecode/lib/_vendor/statsd/__init__.py
@@ -1,5 +1,3 @@
-
-
import logging
from .stream import TCPStatsClient, UnixSocketStatsClient # noqa
@@ -26,9 +24,10 @@ def client_from_config(configuration, pr
from pyramid.settings import asbool
_config = statsd_config(configuration, prefix)
+ statsd_flag = _config.get('enabled')
statsd_enabled = asbool(_config.pop('enabled', False))
if not statsd_enabled:
- log.debug('statsd client not enabled by statsd.enabled = flag, skipping...')
+ log.debug('statsd client not enabled by statsd.enabled = %s flag, skipping...', statsd_flag)
return
host = _config.pop('statsd_host', HOST)
diff --git a/rhodecode/lib/_vendor/statsd/base.py b/rhodecode/lib/_vendor/statsd/base.py
--- a/rhodecode/lib/_vendor/statsd/base.py
+++ b/rhodecode/lib/_vendor/statsd/base.py
@@ -1,5 +1,3 @@
-
-
import re
import random
from collections import deque
@@ -31,7 +29,7 @@ def normalize_tags(tag_list):
return _normalize_tags_with_cache(tuple(tag_list))
-class StatsClientBase(object):
+class StatsClientBase:
"""A Base class for various statsd clients."""
def close(self):
@@ -73,7 +71,7 @@ class StatsClientBase(object):
def incr(self, stat, count=1, rate=1, tags=None):
"""Increment a stat by `count`."""
- self._send_stat(stat, '%s|c' % count, rate, tags)
+ self._send_stat(stat, f'{count}|c', rate, tags)
def decr(self, stat, count=1, rate=1, tags=None):
"""Decrement a stat by `count`."""
@@ -87,18 +85,18 @@ class StatsClientBase(object):
return
with self.pipeline() as pipe:
pipe._send_stat(stat, '0|g', 1)
- pipe._send_stat(stat, '%s|g' % value, 1)
+ pipe._send_stat(stat, f'{value}|g', 1)
else:
prefix = '+' if delta and value >= 0 else ''
- self._send_stat(stat, '%s%s|g' % (prefix, value), rate, tags)
+ self._send_stat(stat, f'{prefix}{value}|g', rate, tags)
def set(self, stat, value, rate=1):
"""Set a set value."""
- self._send_stat(stat, '%s|s' % value, rate)
+ self._send_stat(stat, f'{value}|s', rate)
def histogram(self, stat, value, rate=1, tags=None):
"""Set a histogram"""
- self._send_stat(stat, '%s|h' % value, rate, tags)
+ self._send_stat(stat, f'{value}|h', rate, tags)
def _send_stat(self, stat, value, rate, tags=None):
self._after(self._prepare(stat, value, rate, tags))
@@ -110,10 +108,10 @@ class StatsClientBase(object):
if rate < 1:
if random.random() > rate:
return
- value = '%s|@%s' % (value, rate)
+ value = f'{value}|@{rate}'
if self._prefix:
- stat = '%s.%s' % (self._prefix, stat)
+ stat = f'{self._prefix}.{stat}'
res = '%s:%s%s' % (
stat,
diff --git a/rhodecode/lib/_vendor/statsd/stream.py b/rhodecode/lib/_vendor/statsd/stream.py
--- a/rhodecode/lib/_vendor/statsd/stream.py
+++ b/rhodecode/lib/_vendor/statsd/stream.py
@@ -1,5 +1,3 @@
-
-
import socket
from .base import StatsClientBase, PipelineBase
diff --git a/rhodecode/lib/_vendor/statsd/timer.py b/rhodecode/lib/_vendor/statsd/timer.py
--- a/rhodecode/lib/_vendor/statsd/timer.py
+++ b/rhodecode/lib/_vendor/statsd/timer.py
@@ -1,5 +1,3 @@
-
-
import functools
from time import perf_counter as time_now
@@ -11,7 +9,7 @@ def safe_wraps(wrapper, *args, **kwargs)
return functools.wraps(wrapper, *args, **kwargs)
-class Timer(object):
+class Timer:
"""A context manager/decorator for statsd.timing()."""
def __init__(self, client, stat, rate=1, tags=None, use_decimals=True, auto_send=True):
diff --git a/rhodecode/lib/_vendor/statsd/udp.py b/rhodecode/lib/_vendor/statsd/udp.py
--- a/rhodecode/lib/_vendor/statsd/udp.py
+++ b/rhodecode/lib/_vendor/statsd/udp.py
@@ -1,5 +1,3 @@
-
-
import socket
from .base import StatsClientBase, PipelineBase
@@ -8,7 +6,7 @@ from .base import StatsClientBase, Pipel
class Pipeline(PipelineBase):
def __init__(self, client):
- super(Pipeline, self).__init__(client)
+ super().__init__(client)
self._maxudpsize = client._maxudpsize
def _send(self):
diff --git a/rhodecode/lib/action_parser.py b/rhodecode/lib/action_parser.py
--- a/rhodecode/lib/action_parser.py
+++ b/rhodecode/lib/action_parser.py
@@ -258,8 +258,7 @@ class ActionParser(object):
commit = repo.get_commit(commit_id=commit_id)
commits.append(commit)
except CommitDoesNotExistError:
- log.error(
- 'cannot find commit id %s in this repository',
+ log.error('cannot find commit id %s in this repository',
commit_id)
commits.append(commit_id)
continue
diff --git a/rhodecode/lib/api_utils.py b/rhodecode/lib/api_utils.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/api_utils.py
@@ -0,0 +1,47 @@
+# Copyright (C) 2010-2023 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+import urllib.parse
+
+from rhodecode.lib.vcs import CurlSession
+from rhodecode.lib.ext_json import json
+from rhodecode.lib.vcs.exceptions import ImproperlyConfiguredError
+
+
+def call_service_api(settings, payload):
+ try:
+ api_host = settings['app.service_api.host']
+ api_token = settings['app.service_api.token']
+ api_url = settings['rhodecode.api.url']
+ except KeyError as exc:
+ raise ImproperlyConfiguredError(
+ f"{str(exc)} is missing. "
+ "Please ensure that app.service_api.host, app.service_api.token and rhodecode.api.url are "
+ "defined inside of .ini configuration file."
+ )
+ payload.update({
+ 'id': 'service',
+ 'auth_token': api_token
+ })
+ service_api_url = urllib.parse.urljoin(api_host, api_url)
+ response = CurlSession().post(service_api_url, json.dumps(payload))
+
+ if response.status_code != 200:
+ raise Exception(f"Service API at {service_api_url} responded with error: {response.status_code}")
+
+ return json.loads(response.content)['result']
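
Note: call_service_api() wraps a JSON-RPC style call to the internal service API, injecting the configured auth token before posting. A hedged usage sketch; the method name and setting values are illustrative only:

    from rhodecode.lib.api_utils import call_service_api

    settings = {
        'app.service_api.host': 'http://127.0.0.1:10020',
        'app.service_api.token': 'secret-token',
        'rhodecode.api.url': '/_admin/api',
    }
    # hypothetical method/args; real payloads depend on the methods the service API exposes
    result = call_service_api(settings, {'method': 'service_ping', 'args': {}})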
diff --git a/rhodecode/lib/archive_cache/__init__.py b/rhodecode/lib/archive_cache/__init__.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/archive_cache/__init__.py
@@ -0,0 +1,78 @@
+# Copyright (C) 2015-2024 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+import logging
+
+from .backends.fanout_cache import FileSystemFanoutCache
+from .backends.objectstore_cache import ObjectStoreCache
+
+from .utils import archive_iterator # noqa
+from .lock import ArchiveCacheGenerationLock # noqa
+
+log = logging.getLogger(__name__)
+
+
+cache_meta = None
+
+
+def includeme(config):
+ # init our cache at start
+ settings = config.get_settings()
+ get_archival_cache_store(settings)
+
+
+def get_archival_config(config):
+
+    final_config = {}
+
+ for k, v in config.items():
+ if k.startswith('archive_cache'):
+ final_config[k] = v
+
+ return final_config
+
+
+def get_archival_cache_store(config, always_init=False):
+
+ global cache_meta
+ if cache_meta is not None and not always_init:
+ return cache_meta
+
+ config = get_archival_config(config)
+ backend = config['archive_cache.backend.type']
+
+ archive_cache_locking_url = config['archive_cache.locking.url']
+
+ match backend:
+ case 'filesystem':
+ d_cache = FileSystemFanoutCache(
+ locking_url=archive_cache_locking_url,
+ **config
+ )
+ case 'objectstore':
+ d_cache = ObjectStoreCache(
+ locking_url=archive_cache_locking_url,
+ **config
+ )
+ case _:
+            raise ValueError(f'archive_cache.backend.type only supports "filesystem" or "objectstore", got: {backend}')
+
+ cache_meta = d_cache
+ return cache_meta
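
Note: backend selection is driven entirely by the archive_cache.* settings defaulted in config_maker. A hedged sketch of resolving the store by hand; the values are illustrative, normally the parsed .ini settings are passed in by includeme():

    from rhodecode.lib.archive_cache import get_archival_cache_store

    settings = {
        'archive_cache.backend.type': 'filesystem',
        'archive_cache.locking.url': 'redis://redis:6379/1',
        'archive_cache.filesystem.store_dir': '/tmp/rc_cache/archive_cache',
        'archive_cache.filesystem.cache_shards': '8',
        'archive_cache.filesystem.cache_size_gb': '10',
        'archive_cache.filesystem.eviction_policy': 'least-recently-stored',
        'archive_cache.filesystem.retry': 'false',
        'archive_cache.filesystem.retry_attempts': '10',
        'archive_cache.filesystem.retry_backoff': '1',
    }
    d_cache = get_archival_cache_store(settings)  # returns a FileSystemFanoutCache instance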
diff --git a/rhodecode/lib/archive_cache/backends/__init__.py b/rhodecode/lib/archive_cache/backends/__init__.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/archive_cache/backends/__init__.py
@@ -0,0 +1,17 @@
+# Copyright (C) 2015-2024 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
diff --git a/rhodecode/lib/archive_cache/backends/base.py b/rhodecode/lib/archive_cache/backends/base.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/archive_cache/backends/base.py
@@ -0,0 +1,372 @@
+# Copyright (C) 2015-2024 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+import os
+import functools
+import logging
+import typing
+import time
+import zlib
+
+from ...ext_json import json
+from ..utils import StatsDB, NOT_GIVEN, ShardFileReader, EVICTION_POLICY, format_size
+from ..lock import GenerationLock
+
+log = logging.getLogger(__name__)
+
+
+class BaseShard:
+ storage_type: str = ''
+ fs = None
+
+ @classmethod
+ def hash(cls, key):
+ """Compute portable hash for `key`.
+
+ :param key: key to hash
+ :return: hash value
+
+ """
+ mask = 0xFFFFFFFF
+ return zlib.adler32(key.encode('utf-8')) & mask # noqa
+
+ def _write_file(self, full_path, read_iterator, mode):
+ raise NotImplementedError
+
+ def _get_keyfile(self, key):
+ raise NotImplementedError
+
+ def random_filename(self):
+ raise NotImplementedError
+
+ def store(self, *args, **kwargs):
+ raise NotImplementedError
+
+ def _store(self, key, value_reader, metadata, mode):
+ (filename, # hash-name
+ full_path # full-path/hash-name
+ ) = self.random_filename()
+
+ key_file, key_file_path = self._get_keyfile(key)
+
+ # STORE METADATA
+ _metadata = {
+ "version": "v1",
+
+ "key_file": key_file, # this is the .key.json file storing meta
+ "key_file_path": key_file_path, # full path to key_file
+ "archive_key": key, # original name we stored archive under, e.g my-archive.zip
+ "archive_filename": filename, # the actual filename we stored that file under
+ "archive_full_path": full_path,
+
+ "store_time": time.time(),
+ "access_count": 0,
+ "access_time": 0,
+
+ "size": 0
+ }
+ if metadata:
+ _metadata.update(metadata)
+
+ read_iterator = iter(functools.partial(value_reader.read, 2**22), b'')
+ size, sha256 = self._write_file(full_path, read_iterator, mode)
+ _metadata['size'] = size
+ _metadata['sha256'] = sha256
+
+        # after the archive is written, create the key file recording that the binary file exists
+ with self.fs.open(key_file_path, 'wb') as f:
+ f.write(json.dumps(_metadata))
+
+ return key, filename, size, _metadata
+
+ def fetch(self, *args, **kwargs):
+ raise NotImplementedError
+
+ def _fetch(self, key, retry, retry_attempts, retry_backoff,
+ presigned_url_expires: int = 0) -> tuple[ShardFileReader, dict]:
+ if retry is NOT_GIVEN:
+ retry = False
+ if retry_attempts is NOT_GIVEN:
+ retry_attempts = 0
+
+ if retry and retry_attempts > 0:
+ for attempt in range(1, retry_attempts + 1):
+ if key in self:
+ break
+ # we didn't find the key, wait retry_backoff N seconds, and re-check
+ time.sleep(retry_backoff)
+
+ if key not in self:
+ log.exception(f'requested key={key} not found in {self} retry={retry}, attempts={retry_attempts}')
+ raise KeyError(key)
+
+ key_file, key_file_path = self._get_keyfile(key)
+ with self.fs.open(key_file_path, 'rb') as f:
+ metadata = json.loads(f.read())
+
+ archive_path = metadata['archive_full_path']
+ if presigned_url_expires and presigned_url_expires > 0:
+ metadata['url'] = self.fs.url(archive_path, expires=presigned_url_expires)
+
+ try:
+ return ShardFileReader(self.fs.open(archive_path, 'rb')), metadata
+ finally:
+ # update usage stats, count and accessed
+ metadata["access_count"] = metadata.get("access_count", 0) + 1
+ metadata["access_time"] = time.time()
+ log.debug('Updated %s with access snapshot, access_count=%s access_time=%s',
+ key_file, metadata['access_count'], metadata['access_time'])
+ with self.fs.open(key_file_path, 'wb') as f:
+ f.write(json.dumps(metadata))
+
+ def remove(self, *args, **kwargs):
+ raise NotImplementedError
+
+ def _remove(self, key):
+ if key not in self:
+ log.exception(f'requested key={key} not found in {self}')
+ raise KeyError(key)
+
+ key_file, key_file_path = self._get_keyfile(key)
+ with self.fs.open(key_file_path, 'rb') as f:
+ metadata = json.loads(f.read())
+
+ archive_path = metadata['archive_full_path']
+ self.fs.rm(archive_path)
+ self.fs.rm(key_file_path)
+ return 1
+
+ @property
+ def storage_medium(self):
+ return getattr(self, self.storage_type)
+
+ @property
+ def key_suffix(self):
+ return 'key.json'
+
+ def __contains__(self, key):
+ """Return `True` if `key` matching item is found in cache.
+
+ :param key: key matching item
+ :return: True if key matching item
+
+ """
+ key_file, key_file_path = self._get_keyfile(key)
+ return self.fs.exists(key_file_path)
+
+
+class BaseCache:
+ _locking_url: str = ''
+ _storage_path: str = ''
+ _config: dict = {}
+ retry = False
+ retry_attempts: int = 0
+ retry_backoff: int | float = 1
+ _shards = tuple()
+ shard_cls = BaseShard
+ # define the presigned url expiration, 0 == disabled
+ presigned_url_expires: int = 0
+
+ def __contains__(self, key):
+ """Return `True` if `key` matching item is found in cache.
+
+ :param key: key matching item
+ :return: True if key matching item
+
+ """
+ return self.has_key(key)
+
+ def __repr__(self):
+ return f'<{self.__class__.__name__}(storage={self._storage_path})>'
+
+ @classmethod
+ def gb_to_bytes(cls, gb):
+ return gb * (1024 ** 3)
+
+ @property
+ def storage_path(self):
+ return self._storage_path
+
+ @classmethod
+ def get_stats_db(cls):
+ return StatsDB()
+
+ def get_conf(self, key, pop=False):
+ if key not in self._config:
+ raise ValueError(f"No configuration key '{key}', please make sure it exists in archive_cache config")
+ val = self._config[key]
+ if pop:
+ del self._config[key]
+ return val
+
+ def _get_shard(self, key) -> shard_cls:
+ index = self._hash(key) % self._shard_count
+ shard = self._shards[index]
+ return shard
+
+ def _get_size(self, shard, archive_path):
+ raise NotImplementedError
+
+ def store(self, key, value_reader, metadata=None):
+ shard = self._get_shard(key)
+ return shard.store(key, value_reader, metadata)
+
+ def fetch(self, key, retry=NOT_GIVEN, retry_attempts=NOT_GIVEN) -> tuple[typing.BinaryIO, dict]:
+ """
+ Return file handle corresponding to `key` from specific shard cache.
+ """
+ if retry is NOT_GIVEN:
+ retry = self.retry
+ if retry_attempts is NOT_GIVEN:
+ retry_attempts = self.retry_attempts
+ retry_backoff = self.retry_backoff
+ presigned_url_expires = self.presigned_url_expires
+
+ shard = self._get_shard(key)
+ return shard.fetch(key, retry=retry,
+ retry_attempts=retry_attempts,
+ retry_backoff=retry_backoff,
+ presigned_url_expires=presigned_url_expires)
+
+ def remove(self, key):
+ shard = self._get_shard(key)
+ return shard.remove(key)
+
+ def has_key(self, archive_key):
+ """Return `True` if `key` matching item is found in cache.
+
+        :param archive_key: key for the item; a unique archive name we store data under, e.g. my-archive-svn.zip
+ :return: True if key is found
+
+ """
+ shard = self._get_shard(archive_key)
+ return archive_key in shard
+
+ def iter_keys(self):
+ for shard in self._shards:
+ if shard.fs.exists(shard.storage_medium):
+ for path, _dirs, _files in shard.fs.walk(shard.storage_medium):
+ for key_file_path in _files:
+ if key_file_path.endswith(shard.key_suffix):
+ yield shard, key_file_path
+
+ def get_lock(self, lock_key):
+ return GenerationLock(lock_key, self._locking_url)
+
+ def evict(self, policy=None, size_limit=None) -> dict:
+ """
+        Remove old items based on the eviction policy and the configured size limit.
+
+        How this works:
+        iterate over each shard, and for each shard iterate over its .key files,
+        reading the metadata they store. This gives us the full list of keys and cached archives,
+        with their sizes, store times, access times, and access counts.
+
+        Store all of that in an in-memory DB so different sorting strategies are easy to run,
+        and summing the sizes is a single SQL query.
+
+        Then apply the sorting strategy for the selected eviction policy and iterate over the
+        sorted keys, removing entries until the total size drops below the limit.
+ """
+ removal_info = {
+ "removed_items": 0,
+ "removed_size": 0
+ }
+ policy = policy or self._eviction_policy
+ size_limit = size_limit or self._cache_size_limit
+
+ select_policy = EVICTION_POLICY[policy]['evict']
+
+ log.debug('Running eviction policy \'%s\', and checking for size limit: %s',
+ policy, format_size(size_limit))
+
+ if select_policy is None:
+ return removal_info
+
+ db = self.get_stats_db()
+
+ data = []
+ cnt = 1
+
+ for shard, key_file in self.iter_keys():
+ with shard.fs.open(os.path.join(shard.storage_medium, key_file), 'rb') as f:
+ metadata = json.loads(f.read())
+
+ key_file_path = os.path.join(shard.storage_medium, key_file)
+
+ archive_key = metadata['archive_key']
+ archive_path = metadata['archive_full_path']
+
+ size = metadata.get('size')
+ if not size:
+ # in case we don't have size re-calc it...
+ size = self._get_size(shard, archive_path)
+
+ data.append([
+ cnt,
+ key_file,
+ key_file_path,
+ archive_key,
+ archive_path,
+ metadata.get('store_time', 0),
+ metadata.get('access_time', 0),
+ metadata.get('access_count', 0),
+ size,
+ ])
+ cnt += 1
+
+ # Insert bulk data using executemany
+ db.bulk_insert(data)
+
+ total_size = db.get_total_size()
+ log.debug('Analyzed %s keys, occupying: %s, running eviction to match %s',
+ len(data), format_size(total_size), format_size(size_limit))
+
+ removed_items = 0
+ removed_size = 0
+ for key_file, archive_key, size in db.get_sorted_keys(select_policy):
+ # simulate removal impact BEFORE removal
+ total_size -= size
+
+ if total_size <= size_limit:
+ # we obtained what we wanted...
+ break
+
+ self.remove(archive_key)
+ removed_items += 1
+ removed_size += size
+ removal_info['removed_items'] = removed_items
+ removal_info['removed_size'] = removed_size
+ log.debug('Removed %s cache archives, and reduced size by: %s',
+ removed_items, format_size(removed_size))
+ return removal_info
+
+ def get_statistics(self):
+ total_files = 0
+ total_size = 0
+ meta = {}
+
+ for shard, key_file in self.iter_keys():
+ json_key = f"{shard.storage_medium}/{key_file}"
+ with shard.fs.open(json_key, 'rb') as f:
+ total_files += 1
+ metadata = json.loads(f.read())
+ total_size += metadata['size']
+
+ return total_files, total_size, meta
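
Note: BaseCache defines the public surface (store/fetch/remove/has_key/evict) while the shards do the per-key I/O, with keys routed by hash(key) % shard_count. A hedged usage sketch building on the d_cache from the previous example (the metadata keys are made up):

    import io
    from rhodecode.lib.archive_cache import archive_iterator

    key = 'my-archive-svn.zip'
    if key not in d_cache:
        d_cache.store(key, io.BytesIO(b'archive-bytes'), metadata={'commit_id': 'deadbeef'})

    reader, metadata = d_cache.fetch(key)
    payload = b''.join(archive_iterator(reader))

    # trim the cache down to the configured size limit using the configured eviction policy
    removal_info = d_cache.evict()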
diff --git a/rhodecode/lib/rc_cache/archive_cache.py b/rhodecode/lib/archive_cache/backends/fanout_cache.py
rename from rhodecode/lib/rc_cache/archive_cache.py
rename to rhodecode/lib/archive_cache/backends/fanout_cache.py
--- a/rhodecode/lib/rc_cache/archive_cache.py
+++ b/rhodecode/lib/archive_cache/backends/fanout_cache.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2015-2023 RhodeCode GmbH
+# Copyright (C) 2015-2024 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
@@ -16,73 +16,162 @@
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
+import codecs
+import hashlib
import logging
import os
-import diskcache
-from diskcache import RLock
+import typing
+
+import fsspec
+
+from .base import BaseCache, BaseShard
+from ..utils import ShardFileReader, NOT_GIVEN
+from ...type_utils import str2bool
log = logging.getLogger(__name__)
-cache_meta = None
+
+class FileSystemShard(BaseShard):
+
+ def __init__(self, index, directory, directory_folder, fs, **settings):
+ self._index: int = index
+ self._directory: str = directory
+ self._directory_folder: str = directory_folder
+ self.storage_type: str = 'directory'
+ self.fs = fs
+
+ @property
+ def directory(self) -> str:
+ """Cache directory final path."""
+ return os.path.join(self._directory, self._directory_folder)
+
+ def _get_keyfile(self, archive_key) -> tuple[str, str]:
+ key_file: str = f'{archive_key}.{self.key_suffix}'
+ return key_file, os.path.join(self.directory, key_file)
-class ReentrantLock(RLock):
- def __enter__(self):
- reentrant_lock_key = self._key
+ def _get_writer(self, path, mode):
+ for count in range(1, 11):
+ try:
+ # Another cache may have deleted the directory before
+ # the file could be opened.
+ return self.fs.open(path, mode)
+ except OSError:
+ if count == 10:
+ # Give up after 10 tries to open the file.
+ raise
+ continue
- log.debug('Acquire ReentrantLock(key=%s) for archive cache generation...', reentrant_lock_key)
- #self.acquire()
- log.debug('Lock for key=%s acquired', reentrant_lock_key)
+ def _write_file(self, full_path, iterator, mode):
+
+ # ensure dir exists
+ destination, _ = os.path.split(full_path)
+ if not self.fs.exists(destination):
+ self.fs.makedirs(destination)
+
+ writer = self._get_writer(full_path, mode)
- def __exit__(self, *exc_info):
- #self.release()
- pass
+ digest = hashlib.sha256()
+ with writer:
+ size = 0
+ for chunk in iterator:
+ size += len(chunk)
+ digest.update(chunk)
+ writer.write(chunk)
+ writer.flush()
+ # Get the file descriptor
+ fd = writer.fileno()
+ # Sync the file descriptor to disk, helps with NFS cases...
+ os.fsync(fd)
+ sha256 = digest.hexdigest()
+ log.debug('written new archive cache under %s, sha256: %s', full_path, sha256)
+ return size, sha256
-def get_archival_config(config):
+ def store(self, key, value_reader, metadata: dict | None = None):
+ return self._store(key, value_reader, metadata, mode='xb')
- final_config = {
- 'archive_cache.eviction_policy': 'least-frequently-used'
- }
+ def fetch(self, key, retry=NOT_GIVEN,
+ retry_attempts=NOT_GIVEN, retry_backoff=1, **kwargs) -> tuple[ShardFileReader, dict]:
+ return self._fetch(key, retry, retry_attempts, retry_backoff)
+
+ def remove(self, key):
+ return self._remove(key)
+
+ def random_filename(self):
+ """Return filename and full-path tuple for file storage.
- for k, v in config.items():
- if k.startswith('archive_cache'):
- final_config[k] = v
+ Filename will be a randomly generated 28 character hexadecimal string
+ with ".archive_cache" suffixed. Two levels of sub-directories will be used to
+ reduce the size of directories. On older filesystems, lookups in
+ directories with many files may be slow.
+ """
+
+ hex_name = codecs.encode(os.urandom(16), 'hex').decode('utf-8')
- return final_config
+ archive_name = hex_name[4:] + '.archive_cache'
+ filename = f"{hex_name[:2]}/{hex_name[2:4]}/{archive_name}"
+
+ full_path = os.path.join(self.directory, filename)
+ return archive_name, full_path
+
+ def __repr__(self):
+ return f'{self.__class__.__name__}(index={self._index}, dir={self.directory})'
-def get_archival_cache_store(config):
+class FileSystemFanoutCache(BaseCache):
+ shard_name: str = 'shard_{:03d}'
+ shard_cls = FileSystemShard
- global cache_meta
- if cache_meta is not None:
- return cache_meta
+ def __init__(self, locking_url, **settings):
+ """
+ Initialize file system cache instance.
+
+ :param str locking_url: redis url for a lock
+ :param settings: settings dict
- config = get_archival_config(config)
+ """
+ self._locking_url = locking_url
+ self._config = settings
+ cache_dir = self.get_conf('archive_cache.filesystem.store_dir')
+ directory = str(cache_dir)
+ directory = os.path.expanduser(directory)
+ directory = os.path.expandvars(directory)
+ self._directory = directory
+ self._storage_path = directory # common path for all from BaseCache
- archive_cache_dir = config['archive_cache.store_dir']
- archive_cache_size_gb = config['archive_cache.cache_size_gb']
- archive_cache_shards = config['archive_cache.cache_shards']
- archive_cache_eviction_policy = config['archive_cache.eviction_policy']
+ self._shard_count = int(self.get_conf('archive_cache.filesystem.cache_shards', pop=True))
+ if self._shard_count < 1:
+ raise ValueError('cache_shards must be 1 or more')
- log.debug('Initializing archival cache instance under %s', archive_cache_dir)
+ self._eviction_policy = self.get_conf('archive_cache.filesystem.eviction_policy', pop=True)
+ self._cache_size_limit = self.gb_to_bytes(int(self.get_conf('archive_cache.filesystem.cache_size_gb')))
- # check if it's ok to write, and re-create the archive cache
- if not os.path.isdir(archive_cache_dir):
- os.makedirs(archive_cache_dir, exist_ok=True)
+ self.retry = str2bool(self.get_conf('archive_cache.filesystem.retry', pop=True))
+ self.retry_attempts = int(self.get_conf('archive_cache.filesystem.retry_attempts', pop=True))
+ self.retry_backoff = int(self.get_conf('archive_cache.filesystem.retry_backoff', pop=True))
+
+ log.debug('Initializing %s archival cache instance', self)
+ fs = fsspec.filesystem('file')
+ # check if it's ok to write, and re-create the archive cache main dir
+        # the store dir is the root container; each shard keeps its files in its own sub-directory below it
+ if not fs.exists(self._directory):
+ fs.makedirs(self._directory, exist_ok=True)
- d_cache = diskcache.FanoutCache(
- archive_cache_dir, shards=archive_cache_shards,
- cull_limit=0, # manual eviction required
- size_limit=archive_cache_size_gb * 1024 * 1024 * 1024,
- eviction_policy=archive_cache_eviction_policy,
- timeout=30
- )
- cache_meta = d_cache
- return cache_meta
+ self._shards = tuple(
+ self.shard_cls(
+ index=num,
+ directory=directory,
+ directory_folder=self.shard_name.format(num),
+ fs=fs,
+ **settings,
+ )
+ for num in range(self._shard_count)
+ )
+ self._hash = self._shards[0].hash
-
-def includeme(config):
- # init our cache at start
- settings = config.get_settings()
- get_archival_cache_store(settings)
+ def _get_size(self, shard, archive_path):
+ return os.stat(archive_path).st_size
diff --git a/rhodecode/lib/archive_cache/backends/objectstore_cache.py b/rhodecode/lib/archive_cache/backends/objectstore_cache.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/archive_cache/backends/objectstore_cache.py
@@ -0,0 +1,173 @@
+# Copyright (C) 2015-2024 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+import codecs
+import hashlib
+import logging
+import os
+import typing
+
+import fsspec
+
+from .base import BaseCache, BaseShard
+from ..utils import ShardFileReader, NOT_GIVEN
+from ...type_utils import str2bool
+
+log = logging.getLogger(__name__)
+
+
+class S3Shard(BaseShard):
+
+ def __init__(self, index, bucket, bucket_folder, fs, **settings):
+ self._index: int = index
+ self._bucket_folder: str = bucket_folder
+ self.storage_type: str = 'bucket'
+ self._bucket_main: str = bucket
+
+ self.fs = fs
+
+ @property
+ def bucket(self) -> str:
+ """Cache bucket final path."""
+ return os.path.join(self._bucket_main, self._bucket_folder)
+
+ def _get_keyfile(self, archive_key) -> tuple[str, str]:
+ key_file: str = f'{archive_key}-{self.key_suffix}'
+ return key_file, os.path.join(self.bucket, key_file)
+
+ def _get_writer(self, path, mode):
+ return self.fs.open(path, 'wb')
+
+ def _write_file(self, full_path, iterator, mode):
+
+ # ensure folder in bucket exists
+ destination = self.bucket
+ if not self.fs.exists(destination):
+ self.fs.mkdir(destination, s3_additional_kwargs={})
+
+ writer = self._get_writer(full_path, mode)
+
+ digest = hashlib.sha256()
+ with writer:
+ size = 0
+ for chunk in iterator:
+ size += len(chunk)
+ digest.update(chunk)
+ writer.write(chunk)
+
+ sha256 = digest.hexdigest()
+ log.debug('written new archive cache under %s, sha256: %s', full_path, sha256)
+ return size, sha256
+
+ def store(self, key, value_reader, metadata: dict | None = None):
+ return self._store(key, value_reader, metadata, mode='wb')
+
+ def fetch(self, key, retry=NOT_GIVEN,
+ retry_attempts=NOT_GIVEN, retry_backoff=1,
+ presigned_url_expires: int = 0) -> tuple[ShardFileReader, dict]:
+ return self._fetch(key, retry, retry_attempts, retry_backoff, presigned_url_expires=presigned_url_expires)
+
+ def remove(self, key):
+ return self._remove(key)
+
+ def random_filename(self):
+ """Return filename and full-path tuple for file storage.
+
+ Filename will be a randomly generated 28 character hexadecimal string
+ with ".archive_cache" suffixed. Two levels of sub-directories will be used to
+ reduce the size of directories. On older filesystems, lookups in
+ directories with many files may be slow.
+ """
+
+ hex_name = codecs.encode(os.urandom(16), 'hex').decode('utf-8')
+
+ archive_name = hex_name[4:] + '.archive_cache'
+ filename = f"{hex_name[:2]}-{hex_name[2:4]}-{archive_name}"
+
+ full_path = os.path.join(self.bucket, filename)
+ return archive_name, full_path
+
+ def __repr__(self):
+ return f'{self.__class__.__name__}(index={self._index}, bucket={self.bucket})'
+
+
+class ObjectStoreCache(BaseCache):
+ shard_name: str = 'shard-{:03d}'
+ shard_cls = S3Shard
+
+ def __init__(self, locking_url, **settings):
+ """
+ Initialize objectstore cache instance.
+
+ :param str locking_url: redis url for a lock
+ :param settings: settings dict
+
+ """
+ self._locking_url = locking_url
+ self._config = settings
+
+ objectstore_url = self.get_conf('archive_cache.objectstore.url')
+ self._storage_path = objectstore_url # common path for all from BaseCache
+
+ self._shard_count = int(self.get_conf('archive_cache.objectstore.bucket_shards', pop=True))
+ if self._shard_count < 1:
+ raise ValueError('cache_shards must be 1 or more')
+
+ self._bucket = settings.pop('archive_cache.objectstore.bucket')
+ if not self._bucket:
+ raise ValueError('archive_cache.objectstore.bucket needs to have a value')
+
+ self._eviction_policy = self.get_conf('archive_cache.objectstore.eviction_policy', pop=True)
+ self._cache_size_limit = self.gb_to_bytes(int(self.get_conf('archive_cache.objectstore.cache_size_gb')))
+
+ self.retry = str2bool(self.get_conf('archive_cache.objectstore.retry', pop=True))
+ self.retry_attempts = int(self.get_conf('archive_cache.objectstore.retry_attempts', pop=True))
+ self.retry_backoff = int(self.get_conf('archive_cache.objectstore.retry_backoff', pop=True))
+
+ endpoint_url = settings.pop('archive_cache.objectstore.url')
+ key = settings.pop('archive_cache.objectstore.key')
+ secret = settings.pop('archive_cache.objectstore.secret')
+ region = settings.pop('archive_cache.objectstore.region')
+
+ log.debug('Initializing %s archival cache instance', self)
+
+ fs = fsspec.filesystem(
+ 's3', anon=False, endpoint_url=endpoint_url, key=key, secret=secret, client_kwargs={'region_name': region}
+ )
+
+ # init main bucket
+ if not fs.exists(self._bucket):
+ fs.mkdir(self._bucket)
+
+ self._shards = tuple(
+ self.shard_cls(
+ index=num,
+ bucket=self._bucket,
+ bucket_folder=self.shard_name.format(num),
+ fs=fs,
+ **settings,
+ )
+ for num in range(self._shard_count)
+ )
+ self._hash = self._shards[0].hash
+
+ def _get_size(self, shard, archive_path):
+ return shard.fs.info(archive_path)['size']
+
+ def set_presigned_url_expiry(self, val: int) -> None:
+ self.presigned_url_expires = val
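
Note: the objectstore backend can hand out pre-signed URLs so clients download archives straight from the S3-compatible store instead of streaming them through the application. A hedged sketch, assuming d_cache was built with archive_cache.backend.type = objectstore:

    d_cache.set_presigned_url_expiry(3600)              # URL valid for one hour
    reader, metadata = d_cache.fetch('my-archive-svn.zip')
    download_url = metadata.get('url')                  # only populated when the expiry is > 0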
diff --git a/rhodecode/lib/archive_cache/lock.py b/rhodecode/lib/archive_cache/lock.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/archive_cache/lock.py
@@ -0,0 +1,62 @@
+# Copyright (C) 2015-2024 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+import redis
+from .._vendor import redis_lock
+
+
+class ArchiveCacheGenerationLock(Exception):
+ pass
+
+
+class GenerationLock:
+ """
+ Locking mechanism that detects if a lock is acquired
+
+ with GenerationLock(lock_key):
+ compute_archive()
+ """
+ lock_timeout = 7200
+
+ def __init__(self, lock_key, url):
+ self.lock_key = lock_key
+ self._create_client(url)
+ self.lock = self.get_lock()
+
+ def _create_client(self, url):
+ connection_pool = redis.ConnectionPool.from_url(url)
+ self.writer_client = redis.StrictRedis(
+ connection_pool=connection_pool
+ )
+ self.reader_client = self.writer_client
+
+ def get_lock(self):
+ return redis_lock.Lock(
+ redis_client=self.writer_client,
+ name=self.lock_key,
+ expire=self.lock_timeout,
+ strict=True
+ )
+
+ def __enter__(self):
+ acquired = self.lock.acquire(blocking=False)
+ if not acquired:
+ raise ArchiveCacheGenerationLock('Failed to create a lock')
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.lock.release()
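+
+
+# Illustrative usage sketch (not part of this patch): guard archive generation and
+# back off when another worker already holds the lock; `compute_archive`, `lock_key`
+# and `redis_url` are hypothetical placeholders.
+#
+#   try:
+#       with GenerationLock(lock_key, redis_url):
+#           compute_archive()
+#   except ArchiveCacheGenerationLock:
+#       pass  # another worker is generating this archive; skip or retry later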
diff --git a/rhodecode/lib/archive_cache/utils.py b/rhodecode/lib/archive_cache/utils.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/archive_cache/utils.py
@@ -0,0 +1,134 @@
+# Copyright (C) 2015-2024 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+import sqlite3
+import s3fs.core
+
+NOT_GIVEN = -917
+
+
+EVICTION_POLICY = {
+ 'none': {
+ 'evict': None,
+ },
+ 'least-recently-stored': {
+ 'evict': 'SELECT {fields} FROM archive_cache ORDER BY store_time',
+ },
+ 'least-recently-used': {
+ 'evict': 'SELECT {fields} FROM archive_cache ORDER BY access_time',
+ },
+ 'least-frequently-used': {
+ 'evict': 'SELECT {fields} FROM archive_cache ORDER BY access_count',
+ },
+}
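+
+# Illustrative note (not part of this patch): each non-'none' policy is a SELECT
+# template whose {fields} placeholder is filled by the caller, as in
+# StatsDB.get_sorted_keys below:
+#
+#   qry = EVICTION_POLICY['least-recently-stored']['evict'].format(
+#       fields='key_file, archive_key, size')
+#   # -> 'SELECT key_file, archive_key, size FROM archive_cache ORDER BY store_time'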
+
+
+def archive_iterator(_reader, block_size: int = 4096 * 512):
+    # 4096 * 512 = 2MB
+ while 1:
+ data = _reader.read(block_size)
+ if not data:
+ break
+ yield data
+
+
+def format_size(size):
+ # Convert size in bytes to a human-readable format (e.g., KB, MB, GB)
+ for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
+ if size < 1024:
+ return f"{size:.2f} {unit}"
+ size /= 1024
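+
+# Illustrative examples of the helper above (not part of this patch):
+#   format_size(512)        # -> '512.00 B'
+#   format_size(2_500_000)  # -> '2.38 MB'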
+
+
+class StatsDB:
+
+ def __init__(self):
+ self.connection = sqlite3.connect(':memory:')
+ self._init_db()
+
+ def _init_db(self):
+ qry = '''
+ CREATE TABLE IF NOT EXISTS archive_cache (
+ rowid INTEGER PRIMARY KEY,
+ key_file TEXT,
+ key_file_path TEXT,
+ archive_key TEXT,
+ archive_path TEXT,
+ store_time REAL,
+ access_time REAL,
+ access_count INTEGER DEFAULT 0,
+ size INTEGER DEFAULT 0
+ )
+ '''
+
+ self.sql(qry)
+ self.connection.commit()
+
+ @property
+ def sql(self):
+ return self.connection.execute
+
+ def bulk_insert(self, rows):
+ qry = '''
+ INSERT INTO archive_cache (
+ rowid,
+ key_file,
+ key_file_path,
+ archive_key,
+ archive_path,
+ store_time,
+ access_time,
+ access_count,
+ size
+ )
+ VALUES (
+ ?, ?, ?, ?, ?, ?, ?, ?, ?
+ )
+ '''
+ cursor = self.connection.cursor()
+ cursor.executemany(qry, rows)
+ self.connection.commit()
+
+ def get_total_size(self):
+ qry = 'SELECT COALESCE(SUM(size), 0) FROM archive_cache'
+ ((total_size,),) = self.sql(qry).fetchall()
+ return total_size
+
+ def get_sorted_keys(self, select_policy):
+ select_policy_qry = select_policy.format(fields='key_file, archive_key, size')
+ return self.sql(select_policy_qry).fetchall()
+
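+# Illustrative usage sketch (not part of this patch): populate the in-memory stats DB
+# with rows gathered from the shards and pick eviction candidates; the row values are
+# made-up placeholders.
+#
+#   db = StatsDB()
+#   db.bulk_insert([
+#       (1, 'key.1', '/b/key.1', 'arch-1', '/b/arch-1', 1000.0, 1001.0, 3, 2048),
+#       (2, 'key.2', '/b/key.2', 'arch-2', '/b/arch-2', 1002.0, 1003.0, 1, 4096),
+#   ])
+#   db.get_total_size()  # -> 6144
+#   db.get_sorted_keys(EVICTION_POLICY['least-recently-stored']['evict'])  # oldest first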
+
+class ShardFileReader:
+
+ def __init__(self, file_like_reader):
+ self._file_like_reader = file_like_reader
+
+ def __getattr__(self, item):
+ if isinstance(self._file_like_reader, s3fs.core.S3File):
+ match item:
+ case 'name':
+                    # s3fs S3File doesn't expose a 'name' attribute, but callers rely on it
+ return self._file_like_reader.full_name
+ case _:
+ return getattr(self._file_like_reader, item)
+ else:
+ return getattr(self._file_like_reader, item)
+
+ def __repr__(self):
+ return f'<{self.__class__.__name__}={self._file_like_reader}>'
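+
+
+# Illustrative usage sketch (not part of this patch), assuming an fsspec/s3fs filesystem
+# handle as used by the objectstore backend; bucket and key names are made-up placeholders:
+#
+#   reader = ShardFileReader(fs.open('archive-bucket/shard_0/some-archive.tgz', 'rb'))
+#   reader.name    # proxied to S3File.full_name for s3fs file objects
+#   reader.read()  # any other attribute falls through to the wrapped reader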
diff --git a/rhodecode/lib/auth.py b/rhodecode/lib/auth.py
--- a/rhodecode/lib/auth.py
+++ b/rhodecode/lib/auth.py
@@ -1688,7 +1688,7 @@ def get_csrf_token(session, force_new=Fa
def get_request(perm_class_instance):
- from pyramid.threadlocal import get_current_request
+ from rhodecode.lib.pyramid_utils import get_current_request
pyramid_request = get_current_request()
return pyramid_request
diff --git a/rhodecode/lib/base.py b/rhodecode/lib/base.py
--- a/rhodecode/lib/base.py
+++ b/rhodecode/lib/base.py
@@ -347,8 +347,6 @@ def attach_context_attributes(context, r
context.ssh_key_generator_enabled = str2bool(
config.get('ssh.enable_ui_key_generator', 'true'))
- context.visual.allow_repo_location_change = str2bool(
- config.get('allow_repo_location_change', True))
context.visual.allow_custom_hooks_settings = str2bool(
config.get('allow_custom_hooks_settings', True))
context.debug_style = str2bool(config.get('debug_style', False))
@@ -567,7 +565,7 @@ def add_events_routes(config):
def bootstrap_config(request, registry_name='RcTestRegistry'):
- from rhodecode.config.middleware import sanitize_settings_and_apply_defaults
+ from rhodecode.config.config_maker import sanitize_settings_and_apply_defaults
import pyramid.testing
registry = pyramid.testing.Registry(registry_name)
@@ -580,7 +578,7 @@ def bootstrap_config(request, registry_n
config.include('pyramid_mako')
config.include('rhodecode.lib.rc_beaker')
config.include('rhodecode.lib.rc_cache')
- config.include('rhodecode.lib.rc_cache.archive_cache')
+ config.include('rhodecode.lib.archive_cache')
add_events_routes(config)
return config
diff --git a/rhodecode/lib/celerylib/tasks.py b/rhodecode/lib/celerylib/tasks.py
--- a/rhodecode/lib/celerylib/tasks.py
+++ b/rhodecode/lib/celerylib/tasks.py
@@ -193,6 +193,7 @@ def create_repo(form_data, cur_user):
enable_downloads=enable_downloads,
state=state
)
+
Session().commit()
# now create this repo on Filesystem
@@ -402,6 +403,11 @@ def sync_last_update_for_objects(*args,
@async_task(ignore_result=True, base=RequestContextTask)
+def test_celery_exception(msg):
+ raise Exception(f'Test exception: {msg}')
+
+
+@async_task(ignore_result=True, base=RequestContextTask)
def sync_last_update(*args, **kwargs):
sync_last_update_for_objects(*args, **kwargs)
diff --git a/rhodecode/lib/config_utils.py b/rhodecode/lib/config_utils.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/config_utils.py
@@ -0,0 +1,40 @@
+# Copyright (C) 2010-2023 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+import os
+
+
+def get_config(ini_path, **kwargs):
+ import configparser
+ parser = configparser.ConfigParser(**kwargs)
+ parser.read(ini_path)
+ return parser
+
+
+def get_app_config_lightweight(ini_path):
+ parser = get_config(ini_path)
+ parser.set('app:main', 'here', os.getcwd())
+ parser.set('app:main', '__file__', ini_path)
+ return dict(parser.items('app:main'))
+
+
+def get_app_config(ini_path):
+ """
+    This loads the app context and performs a full (heavyweight) initialization of the config
+ """
+ from paste.deploy.loadwsgi import appconfig
+ return appconfig(f'config:{ini_path}', relative_to=os.getcwd())
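+
+
+# Illustrative usage sketch (not part of this patch); the .ini path is a made-up placeholder:
+#
+#   settings = get_app_config_lightweight('/etc/rhodecode/rhodecode.ini')
+#   broker_url = settings.get('celery.broker_url')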
diff --git a/rhodecode/lib/db_manage.py b/rhodecode/lib/db_manage.py
--- a/rhodecode/lib/db_manage.py
+++ b/rhodecode/lib/db_manage.py
@@ -201,7 +201,7 @@ class DbManage(object):
f'version {curr_version} to version {__dbversion__}')
# CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
- _step = None
+ final_step = 'latest'
for step in upgrade_steps:
notify(f'performing upgrade step {step}')
time.sleep(0.5)
@@ -210,10 +210,10 @@ class DbManage(object):
self.sa.rollback()
notify(f'schema upgrade for step {step} completed')
- _step = step
+ final_step = step
self.run_post_migration_tasks()
- notify(f'upgrade to version {step} successful')
+ notify(f'upgrade to version {final_step} successful')
def fix_repo_paths(self):
"""
diff --git a/rhodecode/lib/dbmigrate/versions/115_version_5_1_0.py b/rhodecode/lib/dbmigrate/versions/115_version_5_1_0.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/dbmigrate/versions/115_version_5_1_0.py
@@ -0,0 +1,50 @@
+
+
+import logging
+from sqlalchemy import *
+from sqlalchemy.engine import reflection
+
+from alembic.migration import MigrationContext
+from alembic.operations import Operations
+
+from rhodecode.lib.dbmigrate.versions import _reset_base
+from rhodecode.model import meta, init_model_encryption
+
+
+log = logging.getLogger(__name__)
+
+
+def _get_indexes_list(migrate_engine, table_name):
+ inspector = reflection.Inspector.from_engine(migrate_engine)
+ return inspector.get_indexes(table_name)
+
+
+def upgrade(migrate_engine):
+ """
+ Upgrade operations go here.
+ Don't create your own engine; bind migrate_engine to your metadata
+ """
+ from rhodecode.model import db as db_5_1_0_0
+
+ # issue fixups
+ fixups(db_5_1_0_0, meta.Session)
+
+
+def downgrade(migrate_engine):
+ pass
+
+
+def fixups(models, _SESSION):
+ for db_repo in _SESSION.query(models.Repository).all():
+
+ config = db_repo._config
+ config.set('extensions', 'largefiles', '')
+
+ try:
+ scm = db_repo.scm_instance(cache=False, config=config)
+ if scm:
+ print(f'installing hook for repo: {db_repo}')
+ scm.install_hooks(force=True)
+ except Exception as e:
+ print(e)
+ print('continue...')
diff --git a/rhodecode/lib/enc_utils.py b/rhodecode/lib/enc_utils.py
--- a/rhodecode/lib/enc_utils.py
+++ b/rhodecode/lib/enc_utils.py
@@ -1,3 +1,21 @@
+# Copyright (C) 2011-2023 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
from rhodecode.lib.str_utils import safe_bytes
from rhodecode.lib.encrypt import encrypt_data, validate_and_decrypt_data
from rhodecode.lib.encrypt2 import Encryptor
@@ -9,6 +27,10 @@ def get_default_algo():
import rhodecode
return rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
+def get_strict_mode():
+ import rhodecode
+ return rhodecode.ConfigGet().get_bool('rhodecode.encrypted_values.strict') or False
+
def encrypt_value(value: bytes, enc_key: bytes, algo: str = ''):
if not algo:
@@ -29,16 +51,21 @@ def encrypt_value(value: bytes, enc_key:
return value
-def decrypt_value(value: bytes, enc_key: bytes, algo: str = '', strict_mode: bool = False):
+def decrypt_value(value: bytes, enc_key: bytes, algo: str = '', strict_mode: bool | None = None):
+
+ if strict_mode is None:
+        # we use the config value rather than an explicit True/False
+ strict_mode = get_strict_mode()
+
+ enc_key = safe_bytes(enc_key)
+ value = safe_bytes(value)
if not algo:
# not explicit algo, just use what's set by config
- algo = get_default_algo()
+ algo = Encryptor.detect_enc_algo(value) or get_default_algo()
if algo not in ALLOWED_ALGOS:
ValueError(f'Bad encryption algorithm, should be {ALLOWED_ALGOS}, got: {algo}')
- enc_key = safe_bytes(enc_key)
- value = safe_bytes(value)
safe = not strict_mode
if algo == 'aes':
diff --git a/rhodecode/lib/encrypt.py b/rhodecode/lib/encrypt.py
--- a/rhodecode/lib/encrypt.py
+++ b/rhodecode/lib/encrypt.py
@@ -43,6 +43,7 @@ class InvalidDecryptedValue(str):
content = f'<{cls.__name__}({content[:16]}...)>'
return str.__new__(cls, content)
+
KEY_FORMAT = b'enc$aes_hmac${1}'
diff --git a/rhodecode/lib/encrypt2.py b/rhodecode/lib/encrypt2.py
--- a/rhodecode/lib/encrypt2.py
+++ b/rhodecode/lib/encrypt2.py
@@ -23,8 +23,25 @@ class InvalidDecryptedValue(str):
class Encryptor(object):
key_format = b'enc2$salt:{1}$data:{2}'
+
pref_len = 5 # salt:, data:
+ @classmethod
+ def detect_enc_algo(cls, enc_data: bytes):
+ parts = enc_data.split(b'$', 3)
+
+ if b'enc$aes_hmac$' in enc_data:
+            # we expect this data to be encrypted, so validate the header
+ if len(parts) != 3:
+ raise ValueError(f'Encrypted Data has invalid format, expected {cls.key_format}, got `{parts}`')
+ return 'aes'
+ elif b'enc2$salt' in enc_data:
+            # we expect this data to be encrypted, so validate the header
+ if len(parts) != 3:
+ raise ValueError(f'Encrypted Data has invalid format, expected {cls.key_format}, got `{parts}`')
+ return 'fernet'
+ return None
+
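+    # Illustrative examples of the header detection above (not part of this patch);
+    # the payloads are made-up placeholders, only the prefixes matter:
+    #
+    #   Encryptor.detect_enc_algo(b'enc$aes_hmac$deadbeef')   # -> 'aes'
+    #   Encryptor.detect_enc_algo(b'enc2$salt:abc$data:xyz')  # -> 'fernet'
+    #   Encryptor.detect_enc_algo(b'plain-text-value')        # -> None
+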
def __init__(self, enc_key: bytes):
self.enc_key = enc_key
@@ -52,7 +69,7 @@ class Encryptor(object):
def _get_parts(self, enc_data):
parts = enc_data.split(b'$', 3)
if len(parts) != 3:
- raise ValueError(f'Encrypted Data has invalid format, expected {self.key_format}, got {parts}')
+ raise ValueError(f'Encrypted Data has invalid format, expected {self.key_format}, got `{parts}`')
prefix, salt, enc_data = parts
try:
diff --git a/rhodecode/lib/exceptions.py b/rhodecode/lib/exceptions.py
--- a/rhodecode/lib/exceptions.py
+++ b/rhodecode/lib/exceptions.py
@@ -144,6 +144,10 @@ class NotAllowedToCreateUserError(Except
pass
+class DuplicateUpdateUserError(Exception):
+ pass
+
+
class RepositoryCreationError(Exception):
pass
diff --git a/rhodecode/lib/helpers.py b/rhodecode/lib/helpers.py
--- a/rhodecode/lib/helpers.py
+++ b/rhodecode/lib/helpers.py
@@ -74,6 +74,7 @@ from webhelpers2.html.tags import (
from webhelpers2.number import format_byte_size
# python3.11 backport fixes for webhelpers2
+from rhodecode import ConfigGet
from rhodecode.lib._vendor.webhelpers_backports import raw_select
from rhodecode.lib.action_parser import action_parser
@@ -916,9 +917,7 @@ def get_repo_type_by_name(repo_name):
def is_svn_without_proxy(repository):
if is_svn(repository):
- from rhodecode.model.settings import VcsSettingsModel
- conf = VcsSettingsModel().get_ui_settings_as_config_obj()
- return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
+ return not ConfigGet().get_bool('vcs.svn.proxy.enabled')
return False
@@ -2197,3 +2196,35 @@ class IssuesRegistry(object):
@property
def issues_unique_count(self):
return len(set(i['id'] for i in self.issues))
+
+
+def get_directory_statistics(start_path):
+ """
+ total_files, total_size, directory_stats = get_directory_statistics(start_path)
+
+ print(f"Directory statistics for: {start_path}\n")
+ print(f"Total files: {total_files}")
+ print(f"Total size: {format_size(total_size)}\n")
+
+ :param start_path:
+ :return:
+ """
+
+ total_files = 0
+ total_size = 0
+ directory_stats = {}
+
+ for dir_path, dir_names, file_names in os.walk(start_path):
+ dir_size = 0
+ file_count = len(file_names)
+
+ for fname in file_names:
+ filepath = os.path.join(dir_path, fname)
+ file_size = os.path.getsize(filepath)
+ dir_size += file_size
+
+ directory_stats[dir_path] = {'file_count': file_count, 'size': dir_size}
+ total_files += file_count
+ total_size += dir_size
+
+ return total_files, total_size, directory_stats
diff --git a/rhodecode/lib/hook_daemon/__init__.py b/rhodecode/lib/hook_daemon/__init__.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/hook_daemon/__init__.py
@@ -0,0 +1,17 @@
+# Copyright (C) 2010-2023 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
diff --git a/rhodecode/lib/hook_daemon/base.py b/rhodecode/lib/hook_daemon/base.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/hook_daemon/base.py
@@ -0,0 +1,89 @@
+# Copyright (C) 2010-2023 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+import os
+import time
+import logging
+
+from rhodecode.lib.config_utils import get_config
+
+from rhodecode.lib.svn_txn_utils import get_txn_id_from_store
+
+log = logging.getLogger(__name__)
+
+
+class BaseHooksCallbackDaemon:
+ """
+ Basic context manager for actions that don't require some extra
+    Basic context manager for actions that don't require any extra setup
+ def __init__(self):
+ pass
+
+ def __enter__(self):
+ log.debug('Running `%s` callback daemon', self.__class__.__name__)
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
+
+
+class HooksModuleCallbackDaemon(BaseHooksCallbackDaemon):
+
+ def __init__(self, module):
+ super().__init__()
+ self.hooks_module = module
+
+ def __repr__(self):
+ return f'HooksModuleCallbackDaemon(hooks_module={self.hooks_module})'
+
+
+def prepare_callback_daemon(extras, protocol, host, txn_id=None):
+
+ match protocol:
+ case 'http':
+ from rhodecode.lib.hook_daemon.http_hooks_deamon import HttpHooksCallbackDaemon
+ port = 0
+ if txn_id:
+ # read txn-id to re-use the PORT for callback daemon
+ repo_path = os.path.join(extras['repo_store'], extras['repository'])
+ txn_details = get_txn_id_from_store(repo_path, txn_id)
+ port = txn_details.get('port', 0)
+
+ callback_daemon = HttpHooksCallbackDaemon(
+ txn_id=txn_id, host=host, port=port)
+ case 'celery':
+ from rhodecode.lib.hook_daemon.celery_hooks_deamon import CeleryHooksCallbackDaemon
+ callback_daemon = CeleryHooksCallbackDaemon(get_config(extras['config']))
+ case 'local':
+ from rhodecode.lib.hook_daemon.hook_module import Hooks
+ callback_daemon = HooksModuleCallbackDaemon(Hooks.__module__)
+ case _:
+ log.error('Unsupported callback daemon protocol "%s"', protocol)
+ raise Exception('Unsupported callback daemon protocol.')
+
+ extras['hooks_uri'] = getattr(callback_daemon, 'hooks_uri', '')
+ extras['task_queue'] = getattr(callback_daemon, 'task_queue', '')
+ extras['task_backend'] = getattr(callback_daemon, 'task_backend', '')
+ extras['hooks_protocol'] = protocol
+ extras['time'] = time.time()
+
+ # register txn_id
+ extras['txn_id'] = txn_id
+ log.debug('Prepared a callback daemon: %s',
+ callback_daemon.__class__.__name__)
+ return callback_daemon, extras
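+
+
+# Illustrative usage sketch (not part of this patch): wiring the daemon into a vcs
+# operation; only the extras keys read above are shown and all values are made-up
+# placeholders.
+#
+#   extras = {'config': '/etc/rhodecode/rhodecode.ini',
+#             'repo_store': '/var/opt/rhodecode_repo_store',
+#             'repository': 'some-repo'}
+#   callback_daemon, extras = prepare_callback_daemon(
+#       extras, protocol='celery', host='', txn_id=None)
+#   with callback_daemon:
+#       pass  # run the vcs operation while the callback daemon context is active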
diff --git a/rhodecode/lib/hook_daemon/celery_hooks_deamon.py b/rhodecode/lib/hook_daemon/celery_hooks_deamon.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/hook_daemon/celery_hooks_deamon.py
@@ -0,0 +1,33 @@
+# Copyright (C) 2010-2023 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+from rhodecode.lib.hook_daemon.base import BaseHooksCallbackDaemon
+
+
+class CeleryHooksCallbackDaemon(BaseHooksCallbackDaemon):
+ """
+    Context manager for achieving compatibility with the celery backend
+ """
+
+ def __init__(self, config):
+ # TODO: replace this with settings bootstrapped...
+ self.task_queue = config.get('app:main', 'celery.broker_url')
+ self.task_backend = config.get('app:main', 'celery.result_backend')
+
+ def __repr__(self):
+ return f'CeleryHooksCallbackDaemon(task_queue={self.task_queue}, task_backend={self.task_backend})'
diff --git a/rhodecode/lib/hook_daemon/hook_module.py b/rhodecode/lib/hook_daemon/hook_module.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/hook_daemon/hook_module.py
@@ -0,0 +1,104 @@
+# Copyright (C) 2010-2023 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+import logging
+import traceback
+
+from rhodecode.model import meta
+
+from rhodecode.lib import hooks_base
+from rhodecode.lib.exceptions import HTTPLockedRC, HTTPBranchProtected
+from rhodecode.lib.utils2 import AttributeDict
+
+log = logging.getLogger(__name__)
+
+
+class Hooks(object):
+ """
+ Exposes the hooks for remote callbacks
+ """
+ def __init__(self, request=None, log_prefix=''):
+ self.log_prefix = log_prefix
+ self.request = request
+
+ def repo_size(self, extras):
+ log.debug("%sCalled repo_size of %s object", self.log_prefix, self)
+ return self._call_hook(hooks_base.repo_size, extras)
+
+ def pre_pull(self, extras):
+ log.debug("%sCalled pre_pull of %s object", self.log_prefix, self)
+ return self._call_hook(hooks_base.pre_pull, extras)
+
+ def post_pull(self, extras):
+ log.debug("%sCalled post_pull of %s object", self.log_prefix, self)
+ return self._call_hook(hooks_base.post_pull, extras)
+
+ def pre_push(self, extras):
+ log.debug("%sCalled pre_push of %s object", self.log_prefix, self)
+ return self._call_hook(hooks_base.pre_push, extras)
+
+ def post_push(self, extras):
+ log.debug("%sCalled post_push of %s object", self.log_prefix, self)
+ return self._call_hook(hooks_base.post_push, extras)
+
+ def _call_hook(self, hook, extras):
+ extras = AttributeDict(extras)
+ _server_url = extras['server_url']
+
+ extras.request = self.request
+
+ try:
+ result = hook(extras)
+ if result is None:
+ raise Exception(f'Failed to obtain hook result from func: {hook}')
+ except HTTPBranchProtected as handled_error:
+ # Those special cases don't need error reporting. It's a case of
+ # locked repo or protected branch
+ result = AttributeDict({
+ 'status': handled_error.code,
+ 'output': handled_error.explanation
+ })
+ except (HTTPLockedRC, Exception) as error:
+ # locked needs different handling since we need to also
+ # handle PULL operations
+ exc_tb = ''
+ if not isinstance(error, HTTPLockedRC):
+ exc_tb = traceback.format_exc()
+ log.exception('%sException when handling hook %s', self.log_prefix, hook)
+ error_args = error.args
+ return {
+ 'status': 128,
+ 'output': '',
+ 'exception': type(error).__name__,
+ 'exception_traceback': exc_tb,
+ 'exception_args': error_args,
+ }
+ finally:
+ meta.Session.remove()
+
+ log.debug('%sGot hook call response %s', self.log_prefix, result)
+ return {
+ 'status': result.status,
+ 'output': result.output,
+ }
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ pass
diff --git a/rhodecode/lib/hook_daemon/http_hooks_deamon.py b/rhodecode/lib/hook_daemon/http_hooks_deamon.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/hook_daemon/http_hooks_deamon.py
@@ -0,0 +1,287 @@
+# Copyright (C) 2010-2023 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+# along with this program. If not, see .
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+import os
+import logging
+import traceback
+import threading
+import socket
+import msgpack
+import gevent
+
+from http.server import BaseHTTPRequestHandler
+from socketserver import TCPServer
+
+from rhodecode.model import meta
+from rhodecode.lib.ext_json import json
+from rhodecode.lib import rc_cache
+from rhodecode.lib.svn_txn_utils import get_txn_id_data_key
+from rhodecode.lib.hook_daemon.hook_module import Hooks
+
+log = logging.getLogger(__name__)
+
+
+class HooksHttpHandler(BaseHTTPRequestHandler):
+
+ JSON_HOOKS_PROTO = 'json.v1'
+ MSGPACK_HOOKS_PROTO = 'msgpack.v1'
+    # starting with RhodeCode 5.0.0 MsgPack is the default; prior versions used json
+ DEFAULT_HOOKS_PROTO = MSGPACK_HOOKS_PROTO
+
+ @classmethod
+ def serialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO):
+ if proto == cls.MSGPACK_HOOKS_PROTO:
+ return msgpack.packb(data)
+ return json.dumps(data)
+
+ @classmethod
+ def deserialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO):
+ if proto == cls.MSGPACK_HOOKS_PROTO:
+ return msgpack.unpackb(data)
+ return json.loads(data)
+
+ def do_POST(self):
+ hooks_proto, method, extras = self._read_request()
+ log.debug('Handling HooksHttpHandler %s with %s proto', method, hooks_proto)
+
+ txn_id = getattr(self.server, 'txn_id', None)
+ if txn_id:
+ log.debug('Computing TXN_ID based on `%s`:`%s`',
+ extras['repository'], extras['txn_id'])
+ computed_txn_id = rc_cache.utils.compute_key_from_params(
+ extras['repository'], extras['txn_id'])
+ if txn_id != computed_txn_id:
+ raise Exception(
+ 'TXN ID fail: expected {} got {} instead'.format(
+ txn_id, computed_txn_id))
+
+ request = getattr(self.server, 'request', None)
+ try:
+ hooks = Hooks(request=request, log_prefix='HOOKS: {} '.format(self.server.server_address))
+ result = self._call_hook_method(hooks, method, extras)
+
+ except Exception as e:
+ exc_tb = traceback.format_exc()
+ result = {
+ 'exception': e.__class__.__name__,
+ 'exception_traceback': exc_tb,
+ 'exception_args': e.args
+ }
+ self._write_response(hooks_proto, result)
+
+ def _read_request(self):
+ length = int(self.headers['Content-Length'])
+        # respect sent headers, fall back to the old proto for compatibility
+ hooks_proto = self.headers.get('rc-hooks-protocol') or self.JSON_HOOKS_PROTO
+ if hooks_proto == self.MSGPACK_HOOKS_PROTO:
+ # support for new vcsserver msgpack based protocol hooks
+ body = self.rfile.read(length)
+ data = self.deserialize_data(body)
+ else:
+ body = self.rfile.read(length)
+            data = self.deserialize_data(body, proto=hooks_proto)
+
+ return hooks_proto, data['method'], data['extras']
+
+ def _write_response(self, hooks_proto, result):
+ self.send_response(200)
+ if hooks_proto == self.MSGPACK_HOOKS_PROTO:
+ self.send_header("Content-type", "application/msgpack")
+ self.end_headers()
+ data = self.serialize_data(result)
+ self.wfile.write(data)
+ else:
+ self.send_header("Content-type", "text/json")
+ self.end_headers()
+ data = self.serialize_data(result)
+ self.wfile.write(data)
+
+ def _call_hook_method(self, hooks, method, extras):
+ try:
+ result = getattr(hooks, method)(extras)
+ finally:
+ meta.Session.remove()
+ return result
+
+ def log_message(self, format, *args):
+ """
+ This is an overridden method of BaseHTTPRequestHandler which logs using
+ a logging library instead of writing directly to stderr.
+ """
+
+ message = format % args
+
+ log.debug(
+ "HOOKS: client=%s - - [%s] %s", self.client_address,
+ self.log_date_time_string(), message)
+
+
+class ThreadedHookCallbackDaemon(object):
+
+ _callback_thread = None
+ _daemon = None
+ _done = False
+ use_gevent = False
+
+ def __init__(self, txn_id=None, host=None, port=None):
+ self._prepare(txn_id=txn_id, host=host, port=port)
+ if self.use_gevent:
+ self._run_func = self._run_gevent
+ self._stop_func = self._stop_gevent
+ else:
+ self._run_func = self._run
+ self._stop_func = self._stop
+
+ def __enter__(self):
+ log.debug('Running `%s` callback daemon', self.__class__.__name__)
+ self._run_func()
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
+ self._stop_func()
+
+ def _prepare(self, txn_id=None, host=None, port=None):
+ raise NotImplementedError()
+
+ def _run(self):
+ raise NotImplementedError()
+
+ def _stop(self):
+ raise NotImplementedError()
+
+ def _run_gevent(self):
+ raise NotImplementedError()
+
+ def _stop_gevent(self):
+ raise NotImplementedError()
+
+
+class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
+ """
+ Context manager which will run a callback daemon in a background thread.
+ """
+
+ hooks_uri = None
+
+ # From Python docs: Polling reduces our responsiveness to a shutdown
+ # request and wastes cpu at all other times.
+ POLL_INTERVAL = 0.01
+
+ use_gevent = False
+
+ def __repr__(self):
+ return f'HttpHooksCallbackDaemon(hooks_uri={self.hooks_uri})'
+
+ @property
+ def _hook_prefix(self):
+ return f'HOOKS: {self.hooks_uri} '
+
+ def get_hostname(self):
+ return socket.gethostname() or '127.0.0.1'
+
+ def get_available_port(self, min_port=20000, max_port=65535):
+ from rhodecode.lib.utils2 import get_available_port as _get_port
+ return _get_port(min_port, max_port)
+
+ def _prepare(self, txn_id=None, host=None, port=None):
+ from pyramid.threadlocal import get_current_request
+
+ if not host or host == "*":
+ host = self.get_hostname()
+ if not port:
+ port = self.get_available_port()
+
+ server_address = (host, port)
+ self.hooks_uri = f'{host}:{port}'
+ self.txn_id = txn_id
+ self._done = False
+
+ log.debug(
+ "%s Preparing HTTP callback daemon registering hook object: %s",
+ self._hook_prefix, HooksHttpHandler)
+
+ self._daemon = TCPServer(server_address, HooksHttpHandler)
+ # inject transaction_id for later verification
+ self._daemon.txn_id = self.txn_id
+
+ # pass the WEB app request into daemon
+ self._daemon.request = get_current_request()
+
+ def _run(self):
+ log.debug("Running thread-based loop of callback daemon in background")
+ callback_thread = threading.Thread(
+ target=self._daemon.serve_forever,
+ kwargs={'poll_interval': self.POLL_INTERVAL})
+ callback_thread.daemon = True
+ callback_thread.start()
+ self._callback_thread = callback_thread
+
+ def _run_gevent(self):
+ log.debug("Running gevent-based loop of callback daemon in background")
+ # create a new greenlet for the daemon's serve_forever method
+ callback_greenlet = gevent.spawn(
+ self._daemon.serve_forever,
+ poll_interval=self.POLL_INTERVAL)
+
+ # store reference to greenlet
+ self._callback_greenlet = callback_greenlet
+
+ # switch to this greenlet
+ gevent.sleep(0.01)
+
+ def _stop(self):
+ log.debug("Waiting for background thread to finish.")
+ self._daemon.shutdown()
+ self._callback_thread.join()
+ self._daemon = None
+ self._callback_thread = None
+ if self.txn_id:
+ #TODO: figure out the repo_path...
+ repo_path = ''
+ txn_id_file = get_txn_id_data_key(repo_path, self.txn_id)
+ log.debug('Cleaning up TXN ID %s', txn_id_file)
+ if os.path.isfile(txn_id_file):
+ os.remove(txn_id_file)
+
+ log.debug("Background thread done.")
+
+ def _stop_gevent(self):
+ log.debug("Waiting for background greenlet to finish.")
+
+ # if greenlet exists and is running
+ if self._callback_greenlet and not self._callback_greenlet.dead:
+ # shutdown daemon if it exists
+ if self._daemon:
+ self._daemon.shutdown()
+
+ # kill the greenlet
+ self._callback_greenlet.kill()
+
+ self._daemon = None
+ self._callback_greenlet = None
+
+ if self.txn_id:
+ #TODO: figure out the repo_path...
+ repo_path = ''
+ txn_id_file = get_txn_id_data_key(repo_path, self.txn_id)
+ log.debug('Cleaning up TXN ID %s', txn_id_file)
+ if os.path.isfile(txn_id_file):
+ os.remove(txn_id_file)
+
+ log.debug("Background greenlet done.")
diff --git a/rhodecode/lib/hooks_daemon.py b/rhodecode/lib/hooks_daemon.py
deleted file mode 100644
--- a/rhodecode/lib/hooks_daemon.py
+++ /dev/null
@@ -1,451 +0,0 @@
-# Copyright (C) 2010-2023 RhodeCode GmbH
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License, version 3
-# (only), as published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-#
-# This program is dual-licensed. If you wish to learn more about the
-# RhodeCode Enterprise Edition, including its added features, Support services,
-# and proprietary license terms, please see https://rhodecode.com/licenses/
-
-import os
-import time
-import logging
-import tempfile
-import traceback
-import threading
-import socket
-import msgpack
-import gevent
-
-from http.server import BaseHTTPRequestHandler
-from socketserver import TCPServer
-
-import rhodecode
-from rhodecode.lib.exceptions import HTTPLockedRC, HTTPBranchProtected
-from rhodecode.model import meta
-from rhodecode.lib import hooks_base
-from rhodecode.lib.utils2 import AttributeDict
-from rhodecode.lib.pyramid_utils import get_config
-from rhodecode.lib.ext_json import json
-from rhodecode.lib import rc_cache
-
-log = logging.getLogger(__name__)
-
-
-class HooksHttpHandler(BaseHTTPRequestHandler):
-
- JSON_HOOKS_PROTO = 'json.v1'
- MSGPACK_HOOKS_PROTO = 'msgpack.v1'
- # starting with RhodeCode 5.0.0 MsgPack is the default, prior it used json
- DEFAULT_HOOKS_PROTO = MSGPACK_HOOKS_PROTO
-
- @classmethod
- def serialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO):
- if proto == cls.MSGPACK_HOOKS_PROTO:
- return msgpack.packb(data)
- return json.dumps(data)
-
- @classmethod
- def deserialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO):
- if proto == cls.MSGPACK_HOOKS_PROTO:
- return msgpack.unpackb(data)
- return json.loads(data)
-
- def do_POST(self):
- hooks_proto, method, extras = self._read_request()
- log.debug('Handling HooksHttpHandler %s with %s proto', method, hooks_proto)
-
- txn_id = getattr(self.server, 'txn_id', None)
- if txn_id:
- log.debug('Computing TXN_ID based on `%s`:`%s`',
- extras['repository'], extras['txn_id'])
- computed_txn_id = rc_cache.utils.compute_key_from_params(
- extras['repository'], extras['txn_id'])
- if txn_id != computed_txn_id:
- raise Exception(
- 'TXN ID fail: expected {} got {} instead'.format(
- txn_id, computed_txn_id))
-
- request = getattr(self.server, 'request', None)
- try:
- hooks = Hooks(request=request, log_prefix='HOOKS: {} '.format(self.server.server_address))
- result = self._call_hook_method(hooks, method, extras)
-
- except Exception as e:
- exc_tb = traceback.format_exc()
- result = {
- 'exception': e.__class__.__name__,
- 'exception_traceback': exc_tb,
- 'exception_args': e.args
- }
- self._write_response(hooks_proto, result)
-
- def _read_request(self):
- length = int(self.headers['Content-Length'])
- # respect sent headers, fallback to OLD proto for compatability
- hooks_proto = self.headers.get('rc-hooks-protocol') or self.JSON_HOOKS_PROTO
- if hooks_proto == self.MSGPACK_HOOKS_PROTO:
- # support for new vcsserver msgpack based protocol hooks
- body = self.rfile.read(length)
- data = self.deserialize_data(body)
- else:
- body = self.rfile.read(length)
- data = self.deserialize_data(body)
-
- return hooks_proto, data['method'], data['extras']
-
- def _write_response(self, hooks_proto, result):
- self.send_response(200)
- if hooks_proto == self.MSGPACK_HOOKS_PROTO:
- self.send_header("Content-type", "application/msgpack")
- self.end_headers()
- data = self.serialize_data(result)
- self.wfile.write(data)
- else:
- self.send_header("Content-type", "text/json")
- self.end_headers()
- data = self.serialize_data(result)
- self.wfile.write(data)
-
- def _call_hook_method(self, hooks, method, extras):
- try:
- result = getattr(hooks, method)(extras)
- finally:
- meta.Session.remove()
- return result
-
- def log_message(self, format, *args):
- """
- This is an overridden method of BaseHTTPRequestHandler which logs using
- logging library instead of writing directly to stderr.
- """
-
- message = format % args
-
- log.debug(
- "HOOKS: client=%s - - [%s] %s", self.client_address,
- self.log_date_time_string(), message)
-
-
-class BaseHooksCallbackDaemon:
- """
- Basic context manager for actions that don't require some extra
- """
- def __init__(self):
- self.hooks_module = Hooks.__module__
-
- def __enter__(self):
- log.debug('Running `%s` callback daemon', self.__class__.__name__)
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
-
-
-class CeleryHooksCallbackDaemon(BaseHooksCallbackDaemon):
- """
- Context manger for achieving a compatibility with celery backend
- """
-
- def __init__(self, config):
- self.task_queue = config.get('app:main', 'celery.broker_url')
- self.task_backend = config.get('app:main', 'celery.result_backend')
-
-
-class ThreadedHookCallbackDaemon(object):
-
- _callback_thread = None
- _daemon = None
- _done = False
- use_gevent = False
-
- def __init__(self, txn_id=None, host=None, port=None):
- self._prepare(txn_id=txn_id, host=host, port=port)
- if self.use_gevent:
- self._run_func = self._run_gevent
- self._stop_func = self._stop_gevent
- else:
- self._run_func = self._run
- self._stop_func = self._stop
-
- def __enter__(self):
- log.debug('Running `%s` callback daemon', self.__class__.__name__)
- self._run_func()
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
- self._stop_func()
-
- def _prepare(self, txn_id=None, host=None, port=None):
- raise NotImplementedError()
-
- def _run(self):
- raise NotImplementedError()
-
- def _stop(self):
- raise NotImplementedError()
-
- def _run_gevent(self):
- raise NotImplementedError()
-
- def _stop_gevent(self):
- raise NotImplementedError()
-
-
-class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
- """
- Context manager which will run a callback daemon in a background thread.
- """
-
- hooks_uri = None
-
- # From Python docs: Polling reduces our responsiveness to a shutdown
- # request and wastes cpu at all other times.
- POLL_INTERVAL = 0.01
-
- use_gevent = False
-
- @property
- def _hook_prefix(self):
- return 'HOOKS: {} '.format(self.hooks_uri)
-
- def get_hostname(self):
- return socket.gethostname() or '127.0.0.1'
-
- def get_available_port(self, min_port=20000, max_port=65535):
- from rhodecode.lib.utils2 import get_available_port as _get_port
- return _get_port(min_port, max_port)
-
- def _prepare(self, txn_id=None, host=None, port=None):
- from pyramid.threadlocal import get_current_request
-
- if not host or host == "*":
- host = self.get_hostname()
- if not port:
- port = self.get_available_port()
-
- server_address = (host, port)
- self.hooks_uri = '{}:{}'.format(host, port)
- self.txn_id = txn_id
- self._done = False
-
- log.debug(
- "%s Preparing HTTP callback daemon registering hook object: %s",
- self._hook_prefix, HooksHttpHandler)
-
- self._daemon = TCPServer(server_address, HooksHttpHandler)
- # inject transaction_id for later verification
- self._daemon.txn_id = self.txn_id
-
- # pass the WEB app request into daemon
- self._daemon.request = get_current_request()
-
- def _run(self):
- log.debug("Running thread-based loop of callback daemon in background")
- callback_thread = threading.Thread(
- target=self._daemon.serve_forever,
- kwargs={'poll_interval': self.POLL_INTERVAL})
- callback_thread.daemon = True
- callback_thread.start()
- self._callback_thread = callback_thread
-
- def _run_gevent(self):
- log.debug("Running gevent-based loop of callback daemon in background")
- # create a new greenlet for the daemon's serve_forever method
- callback_greenlet = gevent.spawn(
- self._daemon.serve_forever,
- poll_interval=self.POLL_INTERVAL)
-
- # store reference to greenlet
- self._callback_greenlet = callback_greenlet
-
- # switch to this greenlet
- gevent.sleep(0.01)
-
- def _stop(self):
- log.debug("Waiting for background thread to finish.")
- self._daemon.shutdown()
- self._callback_thread.join()
- self._daemon = None
- self._callback_thread = None
- if self.txn_id:
- txn_id_file = get_txn_id_data_path(self.txn_id)
- log.debug('Cleaning up TXN ID %s', txn_id_file)
- if os.path.isfile(txn_id_file):
- os.remove(txn_id_file)
-
- log.debug("Background thread done.")
-
- def _stop_gevent(self):
- log.debug("Waiting for background greenlet to finish.")
-
- # if greenlet exists and is running
- if self._callback_greenlet and not self._callback_greenlet.dead:
- # shutdown daemon if it exists
- if self._daemon:
- self._daemon.shutdown()
-
- # kill the greenlet
- self._callback_greenlet.kill()
-
- self._daemon = None
- self._callback_greenlet = None
-
- if self.txn_id:
- txn_id_file = get_txn_id_data_path(self.txn_id)
- log.debug('Cleaning up TXN ID %s', txn_id_file)
- if os.path.isfile(txn_id_file):
- os.remove(txn_id_file)
-
- log.debug("Background greenlet done.")
-
-
-def get_txn_id_data_path(txn_id):
- import rhodecode
-
- root = rhodecode.CONFIG.get('cache_dir') or tempfile.gettempdir()
- final_dir = os.path.join(root, 'svn_txn_id')
-
- if not os.path.isdir(final_dir):
- os.makedirs(final_dir)
- return os.path.join(final_dir, 'rc_txn_id_{}'.format(txn_id))
-
-
-def store_txn_id_data(txn_id, data_dict):
- if not txn_id:
- log.warning('Cannot store txn_id because it is empty')
- return
-
- path = get_txn_id_data_path(txn_id)
- try:
- with open(path, 'wb') as f:
- f.write(json.dumps(data_dict))
- except Exception:
- log.exception('Failed to write txn_id metadata')
-
-
-def get_txn_id_from_store(txn_id):
- """
- Reads txn_id from store and if present returns the data for callback manager
- """
- path = get_txn_id_data_path(txn_id)
- try:
- with open(path, 'rb') as f:
- return json.loads(f.read())
- except Exception:
- return {}
-
-
-def prepare_callback_daemon(extras, protocol, host, txn_id=None):
- txn_details = get_txn_id_from_store(txn_id)
- port = txn_details.get('port', 0)
- match protocol:
- case 'http':
- callback_daemon = HttpHooksCallbackDaemon(
- txn_id=txn_id, host=host, port=port)
- case 'celery':
- callback_daemon = CeleryHooksCallbackDaemon(get_config(extras['config']))
- case 'local':
- callback_daemon = BaseHooksCallbackDaemon()
- case _:
- log.error('Unsupported callback daemon protocol "%s"', protocol)
- raise Exception('Unsupported callback daemon protocol.')
-
- extras['hooks_uri'] = getattr(callback_daemon, 'hooks_uri', '')
- extras['task_queue'] = getattr(callback_daemon, 'task_queue', '')
- extras['task_backend'] = getattr(callback_daemon, 'task_backend', '')
- extras['hooks_protocol'] = protocol
- extras['time'] = time.time()
-
- # register txn_id
- extras['txn_id'] = txn_id
- log.debug('Prepared a callback daemon: %s',
- callback_daemon.__class__.__name__)
- return callback_daemon, extras
-
-
-class Hooks(object):
- """
- Exposes the hooks for remote call backs
- """
- def __init__(self, request=None, log_prefix=''):
- self.log_prefix = log_prefix
- self.request = request
-
- def repo_size(self, extras):
- log.debug("%sCalled repo_size of %s object", self.log_prefix, self)
- return self._call_hook(hooks_base.repo_size, extras)
-
- def pre_pull(self, extras):
- log.debug("%sCalled pre_pull of %s object", self.log_prefix, self)
- return self._call_hook(hooks_base.pre_pull, extras)
-
- def post_pull(self, extras):
- log.debug("%sCalled post_pull of %s object", self.log_prefix, self)
- return self._call_hook(hooks_base.post_pull, extras)
-
- def pre_push(self, extras):
- log.debug("%sCalled pre_push of %s object", self.log_prefix, self)
- return self._call_hook(hooks_base.pre_push, extras)
-
- def post_push(self, extras):
- log.debug("%sCalled post_push of %s object", self.log_prefix, self)
- return self._call_hook(hooks_base.post_push, extras)
-
- def _call_hook(self, hook, extras):
- extras = AttributeDict(extras)
- server_url = extras['server_url']
-
- extras.request = self.request
-
- try:
- result = hook(extras)
- if result is None:
- raise Exception(
- 'Failed to obtain hook result from func: {}'.format(hook))
- except HTTPBranchProtected as handled_error:
- # Those special cases doesn't need error reporting. It's a case of
- # locked repo or protected branch
- result = AttributeDict({
- 'status': handled_error.code,
- 'output': handled_error.explanation
- })
- except (HTTPLockedRC, Exception) as error:
- # locked needs different handling since we need to also
- # handle PULL operations
- exc_tb = ''
- if not isinstance(error, HTTPLockedRC):
- exc_tb = traceback.format_exc()
- log.exception('%sException when handling hook %s', self.log_prefix, hook)
- error_args = error.args
- return {
- 'status': 128,
- 'output': '',
- 'exception': type(error).__name__,
- 'exception_traceback': exc_tb,
- 'exception_args': error_args,
- }
- finally:
- meta.Session.remove()
-
- log.debug('%sGot hook call response %s', self.log_prefix, result)
- return {
- 'status': result.status,
- 'output': result.output,
- }
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- pass
diff --git a/rhodecode/lib/hooks_utils.py b/rhodecode/lib/hooks_utils.py
--- a/rhodecode/lib/hooks_utils.py
+++ b/rhodecode/lib/hooks_utils.py
@@ -17,7 +17,6 @@
# and proprietary license terms, please see https://rhodecode.com/licenses/
import webob
-from pyramid.threadlocal import get_current_request
from rhodecode import events
from rhodecode.lib import hooks_base
@@ -33,6 +32,7 @@ def _supports_repo_type(repo_type):
def _get_vcs_operation_context(username, repo_name, repo_type, action):
# NOTE(dan): import loop
from rhodecode.lib.base import vcs_operation_context
+ from rhodecode.lib.pyramid_utils import get_current_request
check_locking = action in ('pull', 'push')
diff --git a/rhodecode/lib/logging_formatter.py b/rhodecode/lib/logging_formatter.py
--- a/rhodecode/lib/logging_formatter.py
+++ b/rhodecode/lib/logging_formatter.py
@@ -141,7 +141,7 @@ class ColorFormatter(ExceptionAwareForma
"""
Changes record's levelname to use with COLORS enum
"""
- def_record = super(ColorFormatter, self).format(record)
+ def_record = super().format(record)
levelname = record.levelname
start = COLOR_SEQ % (COLORS[levelname])
diff --git a/rhodecode/lib/middleware/simplesvn.py b/rhodecode/lib/middleware/simplesvn.py
--- a/rhodecode/lib/middleware/simplesvn.py
+++ b/rhodecode/lib/middleware/simplesvn.py
@@ -17,7 +17,8 @@
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
-import base64
+import re
+import os
import logging
import urllib.request
import urllib.parse
@@ -27,15 +28,11 @@ import urllib.parse
import requests
from pyramid.httpexceptions import HTTPNotAcceptable
-from rhodecode.lib import rc_cache
+from rhodecode import ConfigGet
from rhodecode.lib.middleware import simplevcs
from rhodecode.lib.middleware.utils import get_path_info
from rhodecode.lib.utils import is_valid_repo
-from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes
-from rhodecode.lib.type_utils import str2bool
-from rhodecode.lib.ext_json import json
-from rhodecode.lib.hooks_daemon import store_txn_id_data
-
+from rhodecode.lib.str_utils import safe_str
log = logging.getLogger(__name__)
@@ -54,37 +51,20 @@ class SimpleSvnApp(object):
request_headers = self._get_request_headers(environ)
data_io = environ['wsgi.input']
req_method: str = environ['REQUEST_METHOD']
- has_content_length = 'CONTENT_LENGTH' in environ
+ has_content_length: bool = 'CONTENT_LENGTH' in environ
path_info = self._get_url(
self.config.get('subversion_http_server_url', ''), get_path_info(environ))
transfer_encoding = environ.get('HTTP_TRANSFER_ENCODING', '')
- log.debug('Handling: %s method via `%s`', req_method, path_info)
+ log.debug('Handling: %s method via `%s` has_content_length:%s', req_method, path_info, has_content_length)
# stream control flag, based on request and content type...
stream = False
-
if req_method in ['MKCOL'] or has_content_length:
- data_processed = False
- # read chunk to check if we have txn-with-props
- initial_data: bytes = data_io.read(1024)
- if initial_data.startswith(b'(create-txn-with-props'):
- data_io = initial_data + data_io.read()
- # store on-the-fly our rc_extra using svn revision properties
- # those can be read later on in hooks executed so we have a way
- # to pass in the data into svn hooks
- rc_data = base64.urlsafe_b64encode(json.dumps(self.rc_extras))
- rc_data_len = str(len(rc_data))
- # header defines data length, and serialized data
- skel = b' rc-scm-extras %b %b' % (safe_bytes(rc_data_len), safe_bytes(rc_data))
- data_io = data_io[:-2] + skel + b'))'
- data_processed = True
-
- if not data_processed:
- # NOTE(johbo): Avoid that we end up with sending the request in chunked
- # transfer encoding (mainly on Gunicorn). If we know the content
- # length, then we should transfer the payload in one request.
- data_io = initial_data + data_io.read()
+ # NOTE(johbo): Avoid that we end up with sending the request in chunked
+ # transfer encoding (mainly on Gunicorn). If we know the content
+ # length, then we should transfer the payload in one request.
+ data_io = data_io.read()
if req_method in ['GET', 'PUT'] or transfer_encoding == 'chunked':
# NOTE(marcink): when getting/uploading files, we want to STREAM content
@@ -101,6 +81,7 @@ class SimpleSvnApp(object):
stream=stream
)
if req_method in ['HEAD', 'DELETE']:
+ # NOTE(marcink): HEAD might be deprecated for SVN 1.14+ protocol
del call_kwargs['data']
try:
@@ -120,14 +101,6 @@ class SimpleSvnApp(object):
log.debug('got response code: %s', response.status_code)
response_headers = self._get_response_headers(response.headers)
-
- if response.headers.get('SVN-Txn-name'):
- svn_tx_id = response.headers.get('SVN-Txn-name')
- txn_id = rc_cache.utils.compute_key_from_params(
- self.config['repository'], svn_tx_id)
- port = safe_int(self.rc_extras['hooks_uri'].split(':')[-1])
- store_txn_id_data(txn_id, {'port': port})
-
start_response(f'{response.status_code} {response.reason}', response_headers)
return response.iter_content(chunk_size=1024)
@@ -137,6 +110,20 @@ class SimpleSvnApp(object):
url_path = urllib.parse.quote(url_path, safe="/:=~+!$,;'")
return url_path
+ def _get_txn_id(self, environ):
+ url = environ['RAW_URI']
+
+ # Define the regex pattern
+ pattern = r'/txr/([^/]+)/'
+
+ # Search for the pattern in the URL
+ match = re.search(pattern, url)
+
+ # Check if a match is found and extract the captured group
+ if match:
+ txn_id = match.group(1)
+ return txn_id
+
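+    # Illustrative example of the extraction above (not part of this patch); the URI is a
+    # made-up placeholder shaped like a mod_dav_svn transaction resource:
+    #
+    #   environ = {'RAW_URI': '/some-repo/!svn/txr/120-1a/trunk/file.txt'}
+    #   self._get_txn_id(environ)  # -> '120-1a'
+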
def _get_request_headers(self, environ):
headers = {}
whitelist = {
@@ -182,10 +169,39 @@ class DisabledSimpleSvnApp(object):
class SimpleSvn(simplevcs.SimpleVCS):
+ """
+ details: https://svn.apache.org/repos/asf/subversion/trunk/notes/http-and-webdav/webdav-protocol
+
+ Read Commands : (OPTIONS, PROPFIND, GET, REPORT)
+
+ GET: fetch info about resources
+ PROPFIND: Used to retrieve properties of resources.
+    REPORT: Used for specialized queries to the repository, e.g. history.
+    OPTIONS: when sent to an SVN server, the server responds with information about the available HTTP
+             methods and other server capabilities.
+
+ Write Commands : (MKACTIVITY, PROPPATCH, PUT, CHECKOUT, MKCOL, MOVE,
+ -------------- COPY, DELETE, LOCK, UNLOCK, MERGE)
+
+ With the exception of LOCK/UNLOCK, every write command performs some
+ sort of DeltaV commit operation. In DeltaV, a commit always starts
+ by creating a transaction (MKACTIVITY), applies a log message
+ (PROPPATCH), does some other write methods, and then ends by
+ committing the transaction (MERGE). If the MERGE fails, the client
+ may try to remove the transaction with a DELETE.
+
+ PROPPATCH: Used to set and/or remove properties on resources.
+ MKCOL: Creates a new collection (directory).
+ DELETE: Removes a resource.
+ COPY and MOVE: Used for copying and moving resources.
+ MERGE: Used to merge changes from different branches.
+ CHECKOUT, CHECKIN, UNCHECKOUT: DeltaV methods for managing working resources and versions.
+ """
SCM = 'svn'
READ_ONLY_COMMANDS = ('OPTIONS', 'PROPFIND', 'GET', 'REPORT')
- DEFAULT_HTTP_SERVER = 'http://localhost:8090'
+ WRITE_COMMANDS = ('MERGE', 'POST', 'PUT', 'COPY', 'MOVE', 'DELETE', 'MKCOL')
+ DEFAULT_HTTP_SERVER = 'http://svn:8090'
def _get_repository_name(self, environ):
"""
@@ -218,10 +234,10 @@ class SimpleSvn(simplevcs.SimpleVCS):
else 'push')
def _should_use_callback_daemon(self, extras, environ, action):
- # only MERGE command triggers hooks, so we don't want to start
+        # only PUT & MERGE commands trigger hooks, so we don't want to start
# hooks server too many times. POST however starts the svn transaction
# so we also need to run the init of callback daemon of POST
- if environ['REQUEST_METHOD'] in ['MERGE', 'POST']:
+ if environ['REQUEST_METHOD'] not in self.READ_ONLY_COMMANDS:
return True
return False
@@ -232,12 +248,10 @@ class SimpleSvn(simplevcs.SimpleVCS):
return DisabledSimpleSvnApp(config)
def _is_svn_enabled(self):
- conf = self.repo_vcs_config
- return str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
+ return ConfigGet().get_bool('vcs.svn.proxy.enabled')
def _create_config(self, extras, repo_name, scheme='http'):
- conf = self.repo_vcs_config
- server_url = conf.get('vcs_svn_proxy', 'http_server_url')
+ server_url = ConfigGet().get_str('vcs.svn.proxy.host')
server_url = server_url or self.DEFAULT_HTTP_SERVER
extras['subversion_http_server_url'] = server_url
diff --git a/rhodecode/lib/middleware/simplevcs.py b/rhodecode/lib/middleware/simplevcs.py
--- a/rhodecode/lib/middleware/simplevcs.py
+++ b/rhodecode/lib/middleware/simplevcs.py
@@ -25,11 +25,9 @@ It's implemented with basic auth functio
import os
import re
-import io
import logging
import importlib
from functools import wraps
-from lxml import etree
import time
from paste.httpheaders import REMOTE_USER, AUTH_TYPE
@@ -41,14 +39,15 @@ from zope.cachedescriptors.property impo
import rhodecode
from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin
from rhodecode.lib import rc_cache
+from rhodecode.lib.svn_txn_utils import store_txn_id_data
from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
from rhodecode.lib.base import (
BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
-from rhodecode.lib.hooks_daemon import prepare_callback_daemon
+from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
from rhodecode.lib.middleware import appenlight
from rhodecode.lib.middleware.utils import scm_app_http
-from rhodecode.lib.str_utils import safe_bytes
+from rhodecode.lib.str_utils import safe_bytes, safe_int
from rhodecode.lib.utils import is_valid_repo, SLUG_RE
from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool
from rhodecode.lib.vcs.conf import settings as vcs_settings
@@ -63,29 +62,6 @@ from rhodecode.model.settings import Set
log = logging.getLogger(__name__)
-def extract_svn_txn_id(acl_repo_name, data: bytes):
- """
- Helper method for extraction of svn txn_id from submitted XML data during
- POST operations
- """
-
- try:
- root = etree.fromstring(data)
- pat = re.compile(r'/txn/(?P<txn_id>.*)')
- for el in root:
- if el.tag == '{DAV:}source':
- for sub_el in el:
- if sub_el.tag == '{DAV:}href':
- match = pat.search(sub_el.text)
- if match:
- svn_tx_id = match.groupdict()['txn_id']
- txn_id = rc_cache.utils.compute_key_from_params(
- acl_repo_name, svn_tx_id)
- return txn_id
- except Exception:
- log.exception('Failed to extract txn_id')
-
-
def initialize_generator(factory):
"""
Initializes the returned generator by draining its first element.
@@ -156,17 +132,10 @@ class SimpleVCS(object):
@property
def base_path(self):
- settings_path = self.repo_vcs_config.get(*VcsSettingsModel.PATH_SETTING)
-
- if not settings_path:
- settings_path = self.global_vcs_config.get(*VcsSettingsModel.PATH_SETTING)
+ settings_path = self.config.get('repo_store.path')
if not settings_path:
- # try, maybe we passed in explicitly as config option
- settings_path = self.config.get('base_path')
-
- if not settings_path:
- raise ValueError('FATAL: base_path is empty')
+ raise ValueError('FATAL: repo_store.path is empty')
return settings_path
def set_repo_names(self, environ):
@@ -475,7 +444,6 @@ class SimpleVCS(object):
log.debug('Not enough credentials to access repo: `%s` '
'repository as anonymous user', self.acl_repo_name)
-
username = None
# ==============================================================
# DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
@@ -589,6 +557,24 @@ class SimpleVCS(object):
return self._generate_vcs_response(
environ, start_response, repo_path, extras, action)
+ def _get_txn_id(self, environ):
+
+ for k in ['RAW_URI', 'HTTP_DESTINATION']:
+ url = environ.get(k)
+ if not url:
+ continue
+
+ # regex to search for svn-txn-id
+ pattern = r'/!svn/txr/([^/]+)/'
+
+ # Search for the pattern in the URL
+ match = re.search(pattern, url)
+
+ # Check if a match is found and extract the captured group
+ if match:
+ txn_id = match.group(1)
+ return txn_id
+
@initialize_generator
def _generate_vcs_response(
self, environ, start_response, repo_path, extras, action):
@@ -600,28 +586,23 @@ class SimpleVCS(object):
also handles the locking exceptions which will be triggered when
the first chunk is produced by the underlying WSGI application.
"""
-
- txn_id = ''
- if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
- # case for SVN, we want to re-use the callback daemon port
- # so we use the txn_id, for this we peek the body, and still save
- # it as wsgi.input
-
- stream = environ['wsgi.input']
-
- if isinstance(stream, io.BytesIO):
- data: bytes = stream.getvalue()
- elif hasattr(stream, 'buf'): # most likely gunicorn.http.body.Body
- data: bytes = stream.buf.getvalue()
- else:
- # fallback to the crudest way, copy the iterator
- data = safe_bytes(stream.read())
- environ['wsgi.input'] = io.BytesIO(data)
-
- txn_id = extract_svn_txn_id(self.acl_repo_name, data)
+ svn_txn_id = ''
+ if action == 'push':
+ svn_txn_id = self._get_txn_id(environ)
callback_daemon, extras = self._prepare_callback_daemon(
- extras, environ, action, txn_id=txn_id)
+ extras, environ, action, txn_id=svn_txn_id)
+
+ if svn_txn_id:
+
+ port = safe_int(extras['hooks_uri'].split(':')[-1])
+ txn_id_data = extras.copy()
+ txn_id_data.update({'port': port})
+ txn_id_data.update({'req_method': environ['REQUEST_METHOD']})
+
+ full_repo_path = repo_path
+ store_txn_id_data(full_repo_path, svn_txn_id, txn_id_data)
+
log.debug('HOOKS extras is %s', extras)
http_scheme = self._get_http_scheme(environ)
@@ -684,6 +665,7 @@ class SimpleVCS(object):
def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
protocol = vcs_settings.HOOKS_PROTOCOL
+
if not self._should_use_callback_daemon(extras, environ, action):
# disable callback daemon for actions that don't require it
protocol = 'local'
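
A small self-contained sketch of the txn id extraction added in `_get_txn_id` above; the URL below is a hypothetical SVN HTTP-v2 commit URL shaped only to exercise the `/!svn/txr/<id>/` pattern:

    import re

    def extract_txn_id(url: str):
        # same regex as SimpleVCS._get_txn_id above
        match = re.search(r'/!svn/txr/([^/]+)/', url)
        if match:
            return match.group(1)

    # hypothetical URL, used only to exercise the pattern
    assert extract_txn_id('/myrepo/!svn/txr/120-1a/trunk/README') == '120-1a'
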
diff --git a/rhodecode/lib/middleware/vcs.py b/rhodecode/lib/middleware/vcs.py
--- a/rhodecode/lib/middleware/vcs.py
+++ b/rhodecode/lib/middleware/vcs.py
@@ -26,6 +26,7 @@ import urllib.parse
from webob.exc import HTTPNotFound
import rhodecode
+from rhodecode.apps._base import ADMIN_PREFIX
from rhodecode.lib.middleware.utils import get_path_info
from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
from rhodecode.lib.middleware.simplegit import SimpleGit, GIT_PROTO_PAT
@@ -164,14 +165,18 @@ def detect_vcs_request(environ, backends
# login
"_admin/login",
+ # 2fa
+ f"{ADMIN_PREFIX}/check_2fa",
+ f"{ADMIN_PREFIX}/setup_2fa",
+
# _admin/api is safe too
- '_admin/api',
+ f'{ADMIN_PREFIX}/api',
# _admin/gist is safe too
- '_admin/gists++',
+ f'{ADMIN_PREFIX}/gists++',
# _admin/my_account is safe too
- '_admin/my_account++',
+ f'{ADMIN_PREFIX}/my_account++',
# static files no detection
'_static++',
@@ -180,11 +185,11 @@ def detect_vcs_request(environ, backends
'_debug_toolbar++',
# skip ops ping, status
- '_admin/ops/ping',
- '_admin/ops/status',
+ f'{ADMIN_PREFIX}/ops/ping',
+ f'{ADMIN_PREFIX}/ops/status',
# full channelstream connect should be VCS skipped
- '_admin/channelstream/connect',
+ f'{ADMIN_PREFIX}/channelstream/connect',
'++/repo_creating_check'
]
diff --git a/rhodecode/lib/paster_commands/__init__.py b/rhodecode/lib/paster_commands/__init__.py
deleted file mode 100644
--- a/rhodecode/lib/paster_commands/__init__.py
+++ /dev/null
@@ -1,89 +0,0 @@
-
-# Copyright (C) 2010-2023 RhodeCode GmbH
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License, version 3
-# (only), as published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-#
-# This program is dual-licensed. If you wish to learn more about the
-# RhodeCode Enterprise Edition, including its added features, Support services,
-# and proprietary license terms, please see https://rhodecode.com/licenses/
-import os
-import logging
-
-from paste.script.command import Command, BadCommand
-
-
-class BasePasterCommand(Command):
- """
- Abstract Base Class for paster commands.
-
- The celery commands are somewhat aggressive about loading
- celery.conf, and since our module sets the `CELERY_LOADER`
- environment variable to our loader, we have to bootstrap a bit and
- make sure we've had a chance to load the pylons config off of the
- command line, otherwise everything fails.
- """
- min_args = 1
- min_args_error = "Please provide a paster config file as an argument."
- takes_config_file = 1
- requires_config_file = True
-
- def notify_msg(self, msg, log=False):
- """Make a notification to user, additionally if logger is passed
- it logs this action using given logger
-
- :param msg: message that will be printed to user
- :param log: logging instance, to use to additionally log this message
-
- """
- if log and isinstance(log, logging):
- log(msg)
-
- def run(self, args):
- """
- Overrides Command.run
-
- Checks for a config file argument and loads it.
- """
- if len(args) < self.min_args:
- raise BadCommand(
- self.min_args_error % {'min_args': self.min_args,
- 'actual_args': len(args)})
-
- # Decrement because we're going to lob off the first argument.
- # @@ This is hacky
- self.min_args -= 1
- self.bootstrap_config(args[0])
- self.update_parser()
- return super(BasePasterCommand, self).run(args[1:])
-
- def update_parser(self):
- """
- Abstract method. Allows for the class' parser to be updated
- before the superclass' `run` method is called. Necessary to
- allow options/arguments to be passed through to the underlying
- celery command.
- """
- raise NotImplementedError("Abstract Method.")
-
- def bootstrap_config(self, conf):
- """
- Loads the pylons configuration.
- """
- self.path_to_ini_file = os.path.realpath(conf)
-
- def _init_session(self):
- """
- Inits SqlAlchemy Session
- """
- logging.config.fileConfig(self.path_to_ini_file)
-
diff --git a/rhodecode/lib/paster_commands/deprecated/__init__.py b/rhodecode/lib/paster_commands/deprecated/__init__.py
deleted file mode 100644
diff --git a/rhodecode/lib/paster_commands/deprecated/celeryd.py b/rhodecode/lib/paster_commands/deprecated/celeryd.py
deleted file mode 100644
--- a/rhodecode/lib/paster_commands/deprecated/celeryd.py
+++ /dev/null
@@ -1,44 +0,0 @@
-
-
-# Copyright (C) 2013-2023 RhodeCode GmbH
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License, version 3
-# (only), as published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-#
-# This program is dual-licensed. If you wish to learn more about the
-# RhodeCode Enterprise Edition, including its added features, Support services,
-# and proprietary license terms, please see https://rhodecode.com/licenses/
-
-
-from rhodecode.lib.paster_commands import BasePasterCommand
-
-
-class Command(BasePasterCommand):
- """
- Start the celery worker
-
- Starts the celery worker that uses a paste.deploy configuration
- file.
- """
- usage = 'CONFIG_FILE'
- summary = __doc__.splitlines()[0]
- description = "".join(__doc__.splitlines()[2:])
-
- parser = BasePasterCommand.standard_parser(quiet=True)
-
- def update_parser(self):
- pass
-
- def command(self):
- cmd = 'celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --loglevel DEBUG --ini=%s' % self.path_to_ini_file
- raise Exception('This Command is deprecated please run: %s' % cmd)
-
diff --git a/rhodecode/lib/paster_commands/deprecated/setup_rhodecode.py b/rhodecode/lib/paster_commands/deprecated/setup_rhodecode.py
deleted file mode 100644
--- a/rhodecode/lib/paster_commands/deprecated/setup_rhodecode.py
+++ /dev/null
@@ -1,42 +0,0 @@
-
-# Copyright (C) 2010-2023 RhodeCode GmbH
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License, version 3
-# (only), as published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-#
-# This program is dual-licensed. If you wish to learn more about the
-# RhodeCode Enterprise Edition, including its added features, Support services,
-# and proprietary license terms, please see https://rhodecode.com/licenses/
-
-
-from rhodecode.lib.paster_commands import BasePasterCommand
-
-
-class Command(BasePasterCommand):
- """
- Start the celery worker
-
- Starts the celery worker that uses a paste.deploy configuration
- file.
- """
- usage = 'CONFIG_FILE [celeryd options...]'
- summary = __doc__.splitlines()[0]
- description = "".join(__doc__.splitlines()[2:])
-
- parser = BasePasterCommand.standard_parser(quiet=True)
-
- def update_parser(self):
- pass
-
- def command(self):
- cmd = 'rc-setup-app %s' % self.path_to_ini_file
- raise Exception('This Command is deprecated please run: %s' % cmd)
diff --git a/rhodecode/lib/paster_commands/ishell.py b/rhodecode/lib/paster_commands/ishell.py
deleted file mode 100644
--- a/rhodecode/lib/paster_commands/ishell.py
+++ /dev/null
@@ -1,80 +0,0 @@
-
-
-# Copyright (C) 2013-2023 RhodeCode GmbH
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License, version 3
-# (only), as published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-#
-# This program is dual-licensed. If you wish to learn more about the
-# RhodeCode Enterprise Edition, including its added features, Support services,
-# and proprietary license terms, please see https://rhodecode.com/licenses/
-
-"""
-interactive shell paster command for RhodeCode
-"""
-
-import os
-import sys
-import logging
-
-from rhodecode.lib.paster_commands import BasePasterCommand
-
-# fix rhodecode import
-from os.path import dirname as dn
-rc_path = dn(dn(dn(os.path.realpath(__file__))))
-sys.path.append(rc_path)
-
-log = logging.getLogger(__name__)
-
-welcome_banner = """Welcome to RhodeCode iShell.
-Type `exit` to exit the shell.
-iShell is interactive shell to interact directly with the
-internal RhodeCode APIs. You can rescue your lost password,
-or reset some user/system settings.
-"""
-
-
-class Command(BasePasterCommand):
-
- max_args = 1
- min_args = 1
-
- usage = "CONFIG_FILE"
- group_name = "RhodeCode"
- takes_config_file = -1
- parser = BasePasterCommand.standard_parser(verbose=True)
- summary = "Interactive shell"
-
- def command(self):
- #get SqlAlchemy session
- self._init_session()
-
- # imports, used in ipython shell
- import os
- import sys
- import time
- import shutil
- import datetime
- from rhodecode.model.db import *
-
- try:
- from IPython import embed
- from traitlets.config import Config
- cfg = Config()
- cfg.InteractiveShellEmbed.confirm_exit = False
- embed(config=cfg, banner1=welcome_banner)
- except ImportError:
- print('ipython installation required for ishell')
- sys.exit(-1)
-
- def update_parser(self):
- pass
diff --git a/rhodecode/lib/paster_commands/upgrade_db.py b/rhodecode/lib/paster_commands/upgrade_db.py
deleted file mode 100644
--- a/rhodecode/lib/paster_commands/upgrade_db.py
+++ /dev/null
@@ -1,63 +0,0 @@
-
-# Copyright (C) 2010-2023 RhodeCode GmbH
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License, version 3
-# (only), as published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-#
-# This program is dual-licensed. If you wish to learn more about the
-# RhodeCode Enterprise Edition, including its added features, Support services,
-# and proprietary license terms, please see https://rhodecode.com/licenses/
-
-import logging
-
-from rhodecode.lib.paster_commands import BasePasterCommand, Command
-
-log = logging.getLogger(__name__)
-
-
-class UpgradeDb(BasePasterCommand):
- """
- Command used for paster to upgrade our database to newer version
- """
-
- max_args = 1
- min_args = 1
-
- usage = "CONFIG_FILE"
- summary = "Upgrades current db to newer version"
- group_name = "RhodeCode"
-
- parser = Command.standard_parser(verbose=True)
-
- def command(self):
- from rhodecode.lib.rc_commands import upgrade_db
- upgrade_db.command(
- self.path_to_ini_file, self.options.__dict__.get('force_ask'), None)
-
- def update_parser(self):
- self.parser.add_option('--sql',
- action='store_true',
- dest='just_sql',
- help="Prints upgrade sql for further investigation",
- default=False)
-
- self.parser.add_option('--force-yes',
- action='store_true',
- dest='force_ask',
- default=None,
- help='Force yes to every question')
- self.parser.add_option('--force-no',
- action='store_false',
- dest='force_ask',
- default=None,
- help='Force no to every question')
-
diff --git a/rhodecode/lib/pyramid_shell/__init__.py b/rhodecode/lib/pyramid_shell/__init__.py
--- a/rhodecode/lib/pyramid_shell/__init__.py
+++ b/rhodecode/lib/pyramid_shell/__init__.py
@@ -41,6 +41,12 @@ or reset some user/system settings.
"""
+def import_all_from_module(module_name):
+ import importlib
+ module = importlib.import_module(module_name)
+ globals().update({k: v for k, v in module.__dict__.items() if not k.startswith('_')})
+
+
def ipython_shell_runner(env, help):
# imports, used in ipython shell
@@ -50,7 +56,7 @@ def ipython_shell_runner(env, help):
import shutil
import datetime
from rhodecode.model import user, user_group, repo, repo_group
- from rhodecode.model.db import *
+ import_all_from_module('rhodecode.model.db')
try:
import IPython
diff --git a/rhodecode/lib/pyramid_utils.py b/rhodecode/lib/pyramid_utils.py
--- a/rhodecode/lib/pyramid_utils.py
+++ b/rhodecode/lib/pyramid_utils.py
@@ -19,40 +19,35 @@
# and proprietary license terms, please see https://rhodecode.com/licenses/
import os
-import configparser
+
from pyramid.paster import bootstrap as pyramid_bootstrap, setup_logging # pragma: no cover
-
-from rhodecode.lib.request import Request
-
-
-def get_config(ini_path, **kwargs):
- parser = configparser.ConfigParser(**kwargs)
- parser.read(ini_path)
- return parser
-
-
-def get_app_config(ini_path):
- from paste.deploy.loadwsgi import appconfig
- return appconfig(f'config:{ini_path}', relative_to=os.getcwd())
+from pyramid.threadlocal import get_current_request as pyramid_current_request
def bootstrap(config_uri, options=None, env=None):
+ from rhodecode.config.utils import DEFAULT_USER
+ from rhodecode.lib.config_utils import get_app_config_lightweight
from rhodecode.lib.utils2 import AttributeDict
+ from rhodecode.lib.request import Request
if env:
os.environ.update(env)
- config = get_config(config_uri)
- base_url = 'http://rhodecode.local'
- try:
- base_url = config.get('app:main', 'app.base_url')
- except (configparser.NoSectionError, configparser.NoOptionError):
- pass
+ config = get_app_config_lightweight(config_uri)
+ base_url = config['app.base_url']
request = Request.blank('/', base_url=base_url)
# fake inject a running user for bootstrap request !
- request.user = AttributeDict({'username': 'bootstrap-user',
+ request.user = AttributeDict({'username': DEFAULT_USER,
'user_id': 1,
'ip_addr': '127.0.0.1'})
return pyramid_bootstrap(config_uri, request=request, options=options)
+
+def get_current_request():
+ pyramid_req = pyramid_current_request()
+ if not pyramid_req:
+ # maybe we're in celery context and need to get the PYRAMID_REQUEST
+ from rhodecode.lib.celerylib.loader import celery_app
+ pyramid_req = celery_app.conf['PYRAMID_REQUEST']
+ return pyramid_req
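
A hedged usage sketch of the refactored bootstrap() helper; the .ini path is made up, and the username printed is just the fake bootstrap user injected above:

    env = bootstrap('/path/to/rhodecode.ini')   # hypothetical config path
    try:
        request = env['request']                # standard pyramid bootstrap env
        print(request.user.username)            # the injected DEFAULT_USER
    finally:
        env['closer']()                         # release bootstrap resources
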
diff --git a/rhodecode/lib/rc_commands/setup_rc.py b/rhodecode/lib/rc_commands/setup_rc.py
--- a/rhodecode/lib/rc_commands/setup_rc.py
+++ b/rhodecode/lib/rc_commands/setup_rc.py
@@ -20,7 +20,8 @@ import logging
import click
import pyramid.paster
-from rhodecode.lib.pyramid_utils import bootstrap, get_app_config
+from rhodecode.lib.pyramid_utils import bootstrap
+from rhodecode.lib.config_utils import get_app_config
from rhodecode.lib.db_manage import DbManage
from rhodecode.lib.utils2 import get_encryption_key
from rhodecode.model.db import Session
diff --git a/rhodecode/lib/statsd_client.py b/rhodecode/lib/statsd_client.py
--- a/rhodecode/lib/statsd_client.py
+++ b/rhodecode/lib/statsd_client.py
@@ -25,6 +25,9 @@ class StatsdClientClass(Singleton):
statsd_client = None
statsd = None
+ def __repr__(self):
+ return f"{self.__class__}(statsd={self.statsd})"
+
def __getattribute__(self, name):
if name.startswith("statsd"):
diff --git a/rhodecode/lib/str_utils.py b/rhodecode/lib/str_utils.py
--- a/rhodecode/lib/str_utils.py
+++ b/rhodecode/lib/str_utils.py
@@ -167,3 +167,17 @@ def convert_special_chars(str_) -> str:
value = safe_str(str_)
converted_value = unidecode(value)
return converted_value
+
+
+def splitnewlines(text: bytes):
+ """
+ like splitlines, but only split on newlines.
+ """
+
+ lines = [_l + b'\n' for _l in text.split(b'\n')]
+ if lines:
+ if lines[-1] == b'\n':
+ lines.pop()
+ else:
+ lines[-1] = lines[-1][:-1]
+ return lines
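
Unlike bytes.splitlines(), the splitnewlines() helper above splits only on b'\n' and keeps the trailing newline on every complete line; a quick doctest-style illustration:

    >>> splitnewlines(b'a\nb\n')
    [b'a\n', b'b\n']
    >>> splitnewlines(b'a\nb')    # unterminated last line is kept without a newline
    [b'a\n', b'b']
    >>> splitnewlines(b'')
    []
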
diff --git a/rhodecode/lib/svn_txn_utils.py b/rhodecode/lib/svn_txn_utils.py
new file mode 100644
--- /dev/null
+++ b/rhodecode/lib/svn_txn_utils.py
@@ -0,0 +1,132 @@
+# Copyright (C) 2010-2023 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+import logging
+import redis
+
+from ..lib import rc_cache
+from ..lib.ext_json import json
+
+
+log = logging.getLogger(__name__)
+
+redis_client = None
+
+
+class RedisTxnClient:
+
+ def __init__(self, url):
+ self.url = url
+ self._create_client(url)
+
+ def _create_client(self, url):
+ connection_pool = redis.ConnectionPool.from_url(url)
+ self.writer_client = redis.StrictRedis(
+ connection_pool=connection_pool
+ )
+ self.reader_client = self.writer_client
+
+ def set(self, key, value, expire=24 * 60000):
+ self.writer_client.set(key, value, ex=expire)
+
+ def get(self, key):
+ return self.reader_client.get(key)
+
+ def delete(self, key):
+ self.writer_client.delete(key)
+
+
+def get_redis_client(url=''):
+
+ global redis_client
+ if redis_client is not None:
+ return redis_client
+ if not url:
+ from rhodecode import CONFIG
+ url = CONFIG['vcs.svn.redis_conn']
+ redis_client = RedisTxnClient(url)
+ return redis_client
+
+
+def extract_svn_txn_id(data: bytes):
+ """
+ Helper method for extraction of svn txn_id from submitted XML data during
+ POST operations
+ """
+ import re
+ from lxml import etree
+
+ try:
+ root = etree.fromstring(data)
+ pat = re.compile(r'/txn/(?P<txn_id>.*)')
+ for el in root:
+ if el.tag == '{DAV:}source':
+ for sub_el in el:
+ if sub_el.tag == '{DAV:}href':
+ match = pat.search(sub_el.text)
+ if match:
+ svn_tx_id = match.groupdict()['txn_id']
+ return svn_tx_id
+ except Exception:
+ log.exception('Failed to extract txn_id')
+
+
+def get_txn_id_data_key(repo_path, svn_txn_id):
+ log.debug('svn-txn-id: %s, obtaining data path', svn_txn_id)
+ repo_key = rc_cache.utils.compute_key_from_params(repo_path)
+ final_key = f'{repo_key}.{svn_txn_id}.svn_txn_id'
+ log.debug('computed final key: %s', final_key)
+
+ return final_key
+
+
+def store_txn_id_data(repo_path, svn_txn_id, data_dict):
+ log.debug('svn-txn-id: %s, storing data', svn_txn_id)
+
+ if not svn_txn_id:
+ log.warning('Cannot store txn_id because it is empty')
+ return
+
+ redis_conn = get_redis_client()
+
+ store_key = get_txn_id_data_key(repo_path, svn_txn_id)
+ store_data = json.dumps(data_dict)
+ redis_conn.set(store_key, store_data)
+
+
+def get_txn_id_from_store(repo_path, svn_txn_id, rm_on_read=False):
+ """
+ Reads txn_id from store and if present returns the data for callback manager
+ """
+ log.debug('svn-txn-id: %s, retrieving data', svn_txn_id)
+ redis_conn = get_redis_client()
+
+ store_key = get_txn_id_data_key(repo_path, svn_txn_id)
+ data = {}
+ try:
+ raw_data = redis_conn.get(store_key)
+ data = json.loads(raw_data)
+ except Exception:
+ log.exception('Failed to get txn_id metadata')
+
+ if rm_on_read:
+ log.debug('Cleaning up txn_id at %s', store_key)
+ redis_conn.delete(store_key)
+
+ return data
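
A rough sketch of how the helpers in this new module are meant to fit together during an SVN push; the repo path, txn id and Redis URL below are invented values:

    client = get_redis_client('redis://localhost:6379/0')   # hypothetical Redis URL

    repo_path = '/repos/project-x'    # made-up repo store path
    txn_id = '120-1a'                 # made-up svn transaction id

    store_txn_id_data(repo_path, txn_id, {'port': 10020, 'req_method': 'POST'})
    data = get_txn_id_from_store(repo_path, txn_id, rm_on_read=True)
    # data -> {'port': 10020, 'req_method': 'POST'}; the key is deleted after the read
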
diff --git a/rhodecode/lib/system_info.py b/rhodecode/lib/system_info.py
--- a/rhodecode/lib/system_info.py
+++ b/rhodecode/lib/system_info.py
@@ -100,7 +100,7 @@ def get_cert_path(ini_path):
default = '/etc/ssl/certs/ca-certificates.crt'
control_ca_bundle = os.path.join(
os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
- '.rccontrol-profile/etc/ca-bundle.crt')
+ '/etc/ssl/certs/ca-certificates.crt')
if os.path.isfile(control_ca_bundle):
default = control_ca_bundle
@@ -323,7 +323,7 @@ def cpu():
value['cpu_count'] = psutil.cpu_count()
human_value = value.copy()
- human_value['text'] = '{} cores at {} %'.format(value['cpu_count'], value['cpu'])
+ human_value['text'] = f'{value["cpu_count"]} cores at {value["cpu"]} %'
return SysInfoRes(value=value, state=state, human_value=human_value)
@@ -331,8 +331,8 @@ def cpu():
@register_sysinfo
def storage():
from rhodecode.lib.helpers import format_byte_size_binary
- from rhodecode.model.settings import VcsSettingsModel
- path = VcsSettingsModel().get_repos_location()
+ from rhodecode.lib.utils import get_rhodecode_repo_store_path
+ path = get_rhodecode_repo_store_path()
value = dict(percent=0, used=0, total=0, path=path, text='')
state = STATE_OK_DEFAULT
@@ -364,8 +364,8 @@ def storage():
@register_sysinfo
def storage_inodes():
- from rhodecode.model.settings import VcsSettingsModel
- path = VcsSettingsModel().get_repos_location()
+ from rhodecode.lib.utils import get_rhodecode_repo_store_path
+ path = get_rhodecode_repo_store_path()
value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='')
state = STATE_OK_DEFAULT
@@ -398,32 +398,24 @@ def storage_inodes():
@register_sysinfo
def storage_archives():
import rhodecode
- from rhodecode.lib.utils import safe_str
from rhodecode.lib.helpers import format_byte_size_binary
+ from rhodecode.lib.archive_cache import get_archival_cache_store
- msg = 'Archive cache storage is controlled by ' \
- 'archive_cache.store_dir=/path/to/cache option in the .ini file'
- path = safe_str(rhodecode.CONFIG.get('archive_cache.store_dir', msg))
+ storage_type = rhodecode.ConfigGet().get_str('archive_cache.backend.type')
- value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
+ value = dict(percent=0, used=0, total=0, items=0, path='', text='', type=storage_type)
state = STATE_OK_DEFAULT
try:
- items_count = 0
- used = 0
- for root, dirs, files in os.walk(path):
- if root == path:
- items_count = len(dirs)
+ d_cache = get_archival_cache_store(config=rhodecode.CONFIG)
- for f in files:
- try:
- used += os.path.getsize(os.path.join(root, f))
- except OSError:
- pass
+ total_files, total_size, _directory_stats = d_cache.get_statistics()
+
value.update({
'percent': 100,
- 'used': used,
- 'total': used,
- 'items': items_count
+ 'used': total_size,
+ 'total': total_size,
+ 'items': total_files,
+ 'path': d_cache.storage_path
})
except Exception as e:
@@ -442,33 +434,23 @@ def storage_archives():
@register_sysinfo
def storage_gist():
from rhodecode.model.gist import GIST_STORE_LOC
- from rhodecode.model.settings import VcsSettingsModel
- from rhodecode.lib.utils import safe_str
- from rhodecode.lib.helpers import format_byte_size_binary
+ from rhodecode.lib.utils import safe_str, get_rhodecode_repo_store_path
+ from rhodecode.lib.helpers import format_byte_size_binary, get_directory_statistics
+
path = safe_str(os.path.join(
- VcsSettingsModel().get_repos_location(), GIST_STORE_LOC))
+ get_rhodecode_repo_store_path(), GIST_STORE_LOC))
# gist storage
value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
state = STATE_OK_DEFAULT
try:
- items_count = 0
- used = 0
- for root, dirs, files in os.walk(path):
- if root == path:
- items_count = len(dirs)
-
- for f in files:
- try:
- used += os.path.getsize(os.path.join(root, f))
- except OSError:
- pass
+ total_files, total_size, _directory_stats = get_directory_statistics(path)
value.update({
'percent': 100,
- 'used': used,
- 'total': used,
- 'items': items_count
+ 'used': total_size,
+ 'total': total_size,
+ 'items': total_files
})
except Exception as e:
log.exception('failed to fetch gist storage items')
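
The inline os.walk loops above are replaced by get_directory_statistics(); purely to illustrate the (total_files, total_size, stats) shape the callers consume, a rough stand-in could look like this (not the actual helper):

    import os

    def directory_statistics_sketch(path):
        # illustrative approximation only
        total_files, total_size, per_file = 0, 0, []
        for root, _dirs, files in os.walk(path):
            for name in files:
                full_path = os.path.join(root, name)
                try:
                    size = os.path.getsize(full_path)
                except OSError:
                    continue
                total_files += 1
                total_size += size
                per_file.append((full_path, size))
        return total_files, total_size, per_file
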
diff --git a/rhodecode/lib/utils.py b/rhodecode/lib/utils.py
--- a/rhodecode/lib/utils.py
+++ b/rhodecode/lib/utils.py
@@ -21,6 +21,7 @@ Utilities library for RhodeCode
"""
import datetime
+
import decorator
import logging
import os
@@ -31,7 +32,7 @@ import socket
import tempfile
import traceback
import tarfile
-import warnings
+
from functools import wraps
from os.path import join as jn
@@ -471,14 +472,14 @@ def get_rhodecode_realm():
return safe_str(realm.app_settings_value)
-def get_rhodecode_base_path():
+def get_rhodecode_repo_store_path():
"""
Returns the base path. The base path is the filesystem path which points
to the repository store.
"""
import rhodecode
- return rhodecode.CONFIG['default_base_path']
+ return rhodecode.CONFIG['repo_store.path']
def map_groups(path):
diff --git a/rhodecode/lib/vcs/__init__.py b/rhodecode/lib/vcs/__init__.py
--- a/rhodecode/lib/vcs/__init__.py
+++ b/rhodecode/lib/vcs/__init__.py
@@ -33,7 +33,8 @@ from rhodecode.lib.vcs.exceptions import
__all__ = [
'get_vcs_instance', 'get_backend',
- 'VCSError', 'RepositoryError', 'CommitError', 'VCSCommunicationError'
+ 'VCSError', 'RepositoryError', 'CommitError', 'VCSCommunicationError',
+ 'CurlSession', 'CurlResponse'
]
log = logging.getLogger(__name__)
@@ -135,7 +136,12 @@ class CurlSession(object):
curl.setopt(curl.FOLLOWLOCATION, allow_redirects)
curl.setopt(curl.WRITEDATA, response_buffer)
curl.setopt(curl.HTTPHEADER, headers_list)
- curl.perform()
+
+ try:
+ curl.perform()
+ except pycurl.error as exc:
+ log.error('Failed to call endpoint url: {} using pycurl'.format(url))
+ raise
status_code = curl.getinfo(pycurl.HTTP_CODE)
content_type = curl.getinfo(pycurl.CONTENT_TYPE)
diff --git a/rhodecode/lib/vcs/backends/git/repository.py b/rhodecode/lib/vcs/backends/git/repository.py
--- a/rhodecode/lib/vcs/backends/git/repository.py
+++ b/rhodecode/lib/vcs/backends/git/repository.py
@@ -326,6 +326,9 @@ class GitRepository(BaseRepository):
def _get_branches(self):
return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
+ def delete_branch(self, branch_name):
+ return self._remote.delete_branch(branch_name)
+
@CachedProperty
def branches(self):
return self._get_branches()
@@ -1037,6 +1040,8 @@ class GitRepository(BaseRepository):
pr_branch, self.path, target_ref.name, enable_hooks=True,
rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
merge_succeeded = True
+ if close_branch and source_ref.name != target_ref.name and not dry_run and source_ref.type == 'branch':
+ self.delete_branch(source_ref.name)
except RepositoryError:
log.exception(
'Failure when doing local push from the shadow '
diff --git a/rhodecode/lib/vcs/backends/hg/repository.py b/rhodecode/lib/vcs/backends/hg/repository.py
--- a/rhodecode/lib/vcs/backends/hg/repository.py
+++ b/rhodecode/lib/vcs/backends/hg/repository.py
@@ -35,6 +35,7 @@ from rhodecode.lib.datelib import (
from rhodecode.lib.str_utils import safe_str
from rhodecode.lib.utils2 import CachedProperty
from rhodecode.lib.vcs import connection, exceptions
+from rhodecode.lib.vcs.conf import settings as vcs_settings
from rhodecode.lib.vcs.backends.base import (
BaseRepository, CollectionGenerator, Config, MergeResponse,
MergeFailureReason, Reference, BasePathPermissionChecker)
@@ -722,7 +723,12 @@ class MercurialRepository(BaseRepository
commit needs to be pushed.
"""
self._update(source_ref.commit_id)
- message = close_message or f"Closing branch: `{source_ref.name}`"
+ message = (close_message or vcs_settings.HG_CLOSE_BRANCH_MESSAGE_TMPL).format(
+ user_name=user_name,
+ user_email=user_email,
+ target_ref_name=target_ref.name,
+ source_ref_name=source_ref.name
+ )
try:
self._remote.commit(
message=safe_str(message),
diff --git a/rhodecode/lib/vcs/conf/settings.py b/rhodecode/lib/vcs/conf/settings.py
--- a/rhodecode/lib/vcs/conf/settings.py
+++ b/rhodecode/lib/vcs/conf/settings.py
@@ -58,6 +58,9 @@ MERGE_MESSAGE_TMPL = (
MERGE_DRY_RUN_MESSAGE = 'dry_run_merge_message_from_rhodecode'
MERGE_DRY_RUN_USER = 'Dry-Run User'
MERGE_DRY_RUN_EMAIL = 'dry-run-merge@rhodecode.com'
+HG_CLOSE_BRANCH_MESSAGE_TMPL = (
+ 'Closing branch: `{source_ref_name}`'
+)
def available_aliases():
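
As used in the Mercurial close-branch change above, the template is rendered with str.format(), which silently ignores unused keyword arguments, so a customized template may also reference user_name, user_email or target_ref_name even though the default only uses {source_ref_name}; the values below are hypothetical:

    msg = HG_CLOSE_BRANCH_MESSAGE_TMPL.format(
        user_name='admin', user_email='admin@example.com',
        target_ref_name='default', source_ref_name='feature-1')
    assert msg == 'Closing branch: `feature-1`'
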
diff --git a/rhodecode/lib/vcs/exceptions.py b/rhodecode/lib/vcs/exceptions.py
--- a/rhodecode/lib/vcs/exceptions.py
+++ b/rhodecode/lib/vcs/exceptions.py
@@ -146,6 +146,10 @@ class CommandError(VCSError):
pass
+class ImproperlyConfiguredError(Exception):
+ pass
+
+
class UnhandledException(VCSError):
"""
Signals that something unexpected went wrong.
diff --git a/rhodecode/model/__init__.py b/rhodecode/model/__init__.py
--- a/rhodecode/model/__init__.py
+++ b/rhodecode/model/__init__.py
@@ -21,6 +21,7 @@ import logging
import rhodecode
from rhodecode.model import meta, db
+from rhodecode.lib.utils import get_rhodecode_repo_store_path
from rhodecode.lib.utils2 import obfuscate_url_pw, get_encryption_key
log = logging.getLogger(__name__)
@@ -138,3 +139,11 @@ class BaseModel(object):
Returns all instances of what is defined in `cls` class variable
"""
return cls.cls.getAll()
+
+ @property
+ def repos_path(self):
+ """
+ Gets the repositories root path from *ini file
+ """
+
+ return get_rhodecode_repo_store_path()
diff --git a/rhodecode/model/db.py b/rhodecode/model/db.py
--- a/rhodecode/model/db.py
+++ b/rhodecode/model/db.py
@@ -33,9 +33,10 @@ import functools
import traceback
import collections
+import pyotp
from sqlalchemy import (
or_, and_, not_, func, cast, TypeDecorator, event, select,
- true, false, null,
+ true, false, null, union_all,
Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
Text, Float, PickleType, BigInteger)
@@ -51,6 +52,7 @@ from zope.cachedescriptors.property impo
from pyramid.threadlocal import get_current_request
from webhelpers2.text import remove_formatting
+from rhodecode import ConfigGet
from rhodecode.lib.str_utils import safe_bytes
from rhodecode.translation import _
from rhodecode.lib.vcs import get_vcs_instance, VCSError
@@ -126,6 +128,11 @@ def _hash_key(k):
return sha1_safe(k)
+def description_escaper(desc):
+ from rhodecode.lib import helpers as h
+ return h.escape(desc)
+
+
def in_filter_generator(qry, items, limit=500):
"""
Splits IN() into multiple with OR
@@ -197,9 +204,7 @@ class EncryptedTextValue(TypeDecorator):
if not value:
return value
- enc_strict_mode = rhodecode.ConfigGet().get_bool('rhodecode.encrypted_values.strict', missing=True)
-
- bytes_val = enc_utils.decrypt_value(value, enc_key=ENCRYPTION_KEY, strict_mode=enc_strict_mode)
+ bytes_val = enc_utils.decrypt_value(value, enc_key=ENCRYPTION_KEY)
return safe_str(bytes_val)
@@ -586,6 +591,7 @@ class User(Base, BaseModel):
DEFAULT_USER = 'default'
DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
+ RECOVERY_CODES_COUNT = 10
user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
username = Column("username", String(255), nullable=True, unique=None, default=None)
@@ -662,16 +668,14 @@ class User(Base, BaseModel):
@hybrid_property
def first_name(self):
- from rhodecode.lib import helpers as h
if self.name:
- return h.escape(self.name)
+ return description_escaper(self.name)
return self.name
@hybrid_property
def last_name(self):
- from rhodecode.lib import helpers as h
if self.lastname:
- return h.escape(self.lastname)
+ return description_escaper(self.lastname)
return self.lastname
@hybrid_property
@@ -793,16 +797,148 @@ class User(Base, BaseModel):
Session.commit()
return artifact_token.api_key
- @classmethod
- def get(cls, user_id, cache=False):
- if not user_id:
- return
-
- user = cls.query()
- if cache:
- user = user.options(
- FromCache("sql_cache_short", f"get_users_{user_id}"))
- return user.get(user_id)
+ def is_totp_valid(self, received_code, secret):
+ totp = pyotp.TOTP(secret)
+ return totp.verify(received_code)
+
+ def is_2fa_recovery_code_valid(self, received_code, secret):
+ encrypted_recovery_codes = self.user_data.get('recovery_codes_2fa', [])
+ recovery_codes = self.get_2fa_recovery_codes()
+ if received_code in recovery_codes:
+ encrypted_recovery_codes.pop(recovery_codes.index(received_code))
+ self.update_userdata(recovery_codes_2fa=encrypted_recovery_codes)
+ return True
+ return False
+
+ @hybrid_property
+ def has_forced_2fa(self):
+ """
+ Checks if 2fa was forced for current user
+ """
+ from rhodecode.model.settings import SettingsModel
+ if value := SettingsModel().get_setting_by_name(f'auth_{self.extern_type}_global_2fa'):
+ return value.app_settings_value
+ return False
+
+ @hybrid_property
+ def has_enabled_2fa(self):
+ """
+ Checks if user enabled 2fa
+ """
+ if value := self.has_forced_2fa:
+ return value
+ return self.user_data.get('enabled_2fa', False)
+
+ @has_enabled_2fa.setter
+ def has_enabled_2fa(self, val):
+ val = str2bool(val)
+ self.update_userdata(enabled_2fa=val)
+ if not val:
+ # NOTE: when set to False we clear the user_data so that no 2fa artifacts are stored
+ self.update_userdata(secret_2fa=None, recovery_codes_2fa=[], check_2fa=False)
+ Session().commit()
+
+ @hybrid_property
+ def check_2fa_required(self):
+ """
+ Check if the check_2fa flag is set for this user
+ """
+ value = self.user_data.get('check_2fa', False)
+ return value
+
+ @check_2fa_required.setter
+ def check_2fa_required(self, val):
+ val = str2bool(val)
+ self.update_userdata(check_2fa=val)
+ Session().commit()
+
+ @hybrid_property
+ def has_seen_2fa_codes(self):
+ """
+ get the flag indicating whether the user has already seen the 2fa recovery codes
+ """
+ value = self.user_data.get('recovery_codes_2fa_seen', False)
+ return value
+
+ @has_seen_2fa_codes.setter
+ def has_seen_2fa_codes(self, val):
+ val = str2bool(val)
+ self.update_userdata(recovery_codes_2fa_seen=val)
+ Session().commit()
+
+ @hybrid_property
+ def needs_2fa_configure(self):
+ """
+ Determines if this user still needs to complete the 2fa setup, i.e. whether data required for 2fa is missing.
+
+ Currently this means 2fa is enabled but no secret has been stored yet.
+ """
+ if self.has_enabled_2fa:
+ return not self.user_data.get('secret_2fa')
+ return False
+
+ def init_2fa_recovery_codes(self, persist=True, force=False):
+ """
+ Creates 2fa recovery codes
+ """
+ recovery_codes = self.user_data.get('recovery_codes_2fa', [])
+ encrypted_codes = []
+ if not recovery_codes or force:
+ for _ in range(self.RECOVERY_CODES_COUNT):
+ recovery_code = pyotp.random_base32()
+ recovery_codes.append(recovery_code)
+ encrypted_code = enc_utils.encrypt_value(safe_bytes(recovery_code), enc_key=ENCRYPTION_KEY)
+ encrypted_codes.append(safe_str(encrypted_code))
+ if persist:
+ self.update_userdata(recovery_codes_2fa=encrypted_codes, recovery_codes_2fa_seen=False)
+ return recovery_codes
+ # the user should not be shown the same recovery codes more than once
+ return []
+
+ def get_2fa_recovery_codes(self):
+ encrypted_recovery_codes = self.user_data.get('recovery_codes_2fa', [])
+
+ recovery_codes = list(map(
+ lambda val: safe_str(
+ enc_utils.decrypt_value(
+ val,
+ enc_key=ENCRYPTION_KEY
+ )),
+ encrypted_recovery_codes))
+ return recovery_codes
+
+ def init_secret_2fa(self, persist=True, force=False):
+ secret_2fa = self.user_data.get('secret_2fa')
+ if not secret_2fa or force:
+ secret = pyotp.random_base32()
+ if persist:
+ self.update_userdata(secret_2fa=safe_str(enc_utils.encrypt_value(safe_bytes(secret), enc_key=ENCRYPTION_KEY)))
+ return secret
+ return ''
+
+ @hybrid_property
+ def secret_2fa(self) -> str:
+ """
+ get stored secret for 2fa
+ """
+ secret_2fa = self.user_data.get('secret_2fa')
+ if secret_2fa:
+ return safe_str(
+ enc_utils.decrypt_value(secret_2fa, enc_key=ENCRYPTION_KEY))
+ return ''
+
+ @secret_2fa.setter
+ def secret_2fa(self, value: str) -> None:
+ encrypted_value = enc_utils.encrypt_value(safe_bytes(value), enc_key=ENCRYPTION_KEY)
+ self.update_userdata(secret_2fa=safe_str(encrypted_value))
+
+ def regenerate_2fa_recovery_codes(self):
+ """
+ Regenerates 2fa recovery codes upon request
+ """
+ new_recovery_codes = self.init_2fa_recovery_codes(force=True)
+ Session().commit()
+ return new_recovery_codes
@classmethod
def extra_valid_auth_tokens(cls, user, role=None):
@@ -930,13 +1066,24 @@ class User(Base, BaseModel):
@user_data.setter
def user_data(self, val):
if not isinstance(val, dict):
- raise Exception('user_data must be dict, got %s' % type(val))
+ raise Exception(f'user_data must be dict, got {type(val)}')
try:
self._user_data = safe_bytes(json.dumps(val))
except Exception:
log.error(traceback.format_exc())
@classmethod
+ def get(cls, user_id, cache=False):
+ if not user_id:
+ return
+
+ user = cls.query()
+ if cache:
+ user = user.options(
+ FromCache("sql_cache_short", f"get_users_{user_id}"))
+ return user.get(user_id)
+
+ @classmethod
def get_by_username(cls, username, case_insensitive=False,
cache=False):
@@ -954,6 +1101,12 @@ class User(Base, BaseModel):
return cls.execute(q).scalar_one_or_none()
@classmethod
+ def get_by_username_or_primary_email(cls, user_identifier):
+ qs = union_all(cls.select().where(func.lower(cls.username) == func.lower(user_identifier)),
+ cls.select().where(func.lower(cls.email) == func.lower(user_identifier)))
+ return cls.execute(cls.select(User).from_statement(qs)).scalar_one_or_none()
+
+ @classmethod
def get_by_auth_token(cls, auth_token, cache=False):
q = cls.select(User)\
@@ -1218,8 +1371,7 @@ class UserApiKeys(Base, BaseModel):
@hybrid_property
def description_safe(self):
- from rhodecode.lib import helpers as h
- return h.escape(self.description)
+ return description_escaper(self.description)
@property
def expired(self):
@@ -1322,8 +1474,7 @@ class UserIpMap(Base, BaseModel):
@hybrid_property
def description_safe(self):
- from rhodecode.lib import helpers as h
- return h.escape(self.description)
+ return description_escaper(self.description)
@classmethod
def _get_ip_range(cls, ip_addr):
@@ -1461,8 +1612,7 @@ class UserGroup(Base, BaseModel):
@hybrid_property
def description_safe(self):
- from rhodecode.lib import helpers as h
- return h.escape(self.user_group_description)
+ return description_escaper(self.user_group_description)
@hybrid_property
def group_data(self):
@@ -1514,7 +1664,7 @@ class UserGroup(Base, BaseModel):
user_group = cls.query()
if cache:
user_group = user_group.options(
- FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
+ FromCache("sql_cache_short", f"get_users_group_{user_group_id}"))
return user_group.get(user_group_id)
def permissions(self, with_admins=True, with_owner=True,
@@ -1806,8 +1956,7 @@ class Repository(Base, BaseModel):
@hybrid_property
def description_safe(self):
- from rhodecode.lib import helpers as h
- return h.escape(self.description)
+ return description_escaper(self.description)
@hybrid_property
def landing_rev(self):
@@ -1908,7 +2057,7 @@ class Repository(Base, BaseModel):
if val:
return val
else:
- cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
+ cache_key = f"get_repo_by_name_{_hash_key(repo_name)}"
q = q.options(
FromCache("sql_cache_short", cache_key))
@@ -1942,8 +2091,8 @@ class Repository(Base, BaseModel):
:param cls:
"""
- from rhodecode.lib.utils import get_rhodecode_base_path
- return get_rhodecode_base_path()
+ from rhodecode.lib.utils import get_rhodecode_repo_store_path
+ return get_rhodecode_repo_store_path()
@classmethod
def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
@@ -2009,16 +2158,13 @@ class Repository(Base, BaseModel):
def groups_and_repo(self):
return self.groups_with_parents, self
- @LazyProperty
+ @property
def repo_path(self):
"""
Returns base full path for that repository means where it actually
exists on a filesystem
"""
- q = Session().query(RhodeCodeUi).filter(
- RhodeCodeUi.ui_key == self.NAME_SEP)
- q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
- return q.one().ui_value
+ return self.base_path()
@property
def repo_full_path(self):
@@ -2768,8 +2914,7 @@ class RepoGroup(Base, BaseModel):
@hybrid_property
def description_safe(self):
- from rhodecode.lib import helpers as h
- return h.escape(self.group_description)
+ return description_escaper(self.group_description)
@classmethod
def hash_repo_group_name(cls, repo_group_name):
@@ -4271,8 +4416,7 @@ class _PullRequestBase(BaseModel):
@hybrid_property
def description_safe(self):
- from rhodecode.lib import helpers as h
- return h.escape(self.description)
+ return description_escaper(self.description)
@hybrid_property
def revisions(self):
@@ -4438,6 +4582,12 @@ class PullRequest(Base, _PullRequestBase
else:
return f''
+ def __str__(self):
+ if self.pull_request_id:
+ return f'#{self.pull_request_id}'
+ else:
+ return f'#{id(self)!r}'
+
reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan", back_populates='pull_request')
statuses = relationship('ChangesetStatus', cascade="all, delete-orphan", back_populates='pull_request')
comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='pull_request')
@@ -4874,8 +5024,7 @@ class Gist(Base, BaseModel):
@hybrid_property
def description_safe(self):
- from rhodecode.lib import helpers as h
- return h.escape(self.gist_description)
+ return description_escaper(self.gist_description)
@classmethod
def get_or_404(cls, id_):
@@ -4903,10 +5052,9 @@ class Gist(Base, BaseModel):
:param cls:
"""
from rhodecode.model.gist import GIST_STORE_LOC
- q = Session().query(RhodeCodeUi)\
- .filter(RhodeCodeUi.ui_key == URL_SEP)
- q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
- return os.path.join(q.one().ui_value, GIST_STORE_LOC)
+ from rhodecode.lib.utils import get_rhodecode_repo_store_path
+ repo_store_path = get_rhodecode_repo_store_path()
+ return os.path.join(repo_store_path, GIST_STORE_LOC)
def get_api_data(self):
"""
@@ -4928,8 +5076,7 @@ class Gist(Base, BaseModel):
return data
def __json__(self):
- data = dict(
- )
+ data = dict()
data.update(self.get_api_data())
return data
# SCM functions
@@ -5334,8 +5481,7 @@ class ScheduleEntry(Base, BaseModel):
@schedule_type.setter
def schedule_type(self, val):
if val not in self.schedule_types:
- raise ValueError('Value must be on of `{}` and got `{}`'.format(
- val, self.schedule_type))
+ raise ValueError(f'Value must be one of `{self.schedule_types}` and got `{val}`')
self._schedule_type = val
@@ -5343,21 +5489,25 @@ class ScheduleEntry(Base, BaseModel):
def get_uid(cls, obj):
args = obj.task_args
kwargs = obj.task_kwargs
+
if isinstance(args, JsonRaw):
try:
- args = json.loads(args)
+ args = json.loads(str(args))
except ValueError:
+ log.exception('json.loads of args failed...')
args = tuple()
if isinstance(kwargs, JsonRaw):
try:
- kwargs = json.loads(kwargs)
+ kwargs = json.loads(str(kwargs))
except ValueError:
+ log.exception('json.loads of kwargs failed...')
kwargs = dict()
dot_notation = obj.task_dot_notation
- val = '.'.join(map(safe_str, [
- sorted(dot_notation), args, sorted(kwargs.items())]))
+ val = '.'.join(map(safe_str, [dot_notation, args, sorted(kwargs.items())]))
+ log.debug('calculating task uid using id:`%s`', val)
+
return sha1(safe_bytes(val))
@classmethod
@@ -5368,6 +5518,10 @@ class ScheduleEntry(Base, BaseModel):
def get_by_schedule_id(cls, schedule_id):
return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
+ @classmethod
+ def get_by_task_uid(cls, task_uid):
+ return cls.query().filter(cls.task_uid == task_uid).scalar()
+
@property
def task(self):
return self.task_dot_notation
@@ -5549,18 +5703,23 @@ class UserBookmark(Base, BaseModel):
@classmethod
def get_bookmarks_for_user(cls, user_id, cache=True):
- bookmarks = cls.query() \
- .filter(UserBookmark.user_id == user_id) \
- .options(joinedload(UserBookmark.repository)) \
- .options(joinedload(UserBookmark.repository_group)) \
+ bookmarks = select(
+ UserBookmark.title,
+ UserBookmark.position,
+ ) \
+ .add_columns(Repository.repo_id, Repository.repo_type, Repository.repo_name) \
+ .add_columns(RepoGroup.group_id, RepoGroup.group_name) \
+ .where(UserBookmark.user_id == user_id) \
+ .outerjoin(Repository, Repository.repo_id == UserBookmark.bookmark_repo_id) \
+ .outerjoin(RepoGroup, RepoGroup.group_id == UserBookmark.bookmark_repo_group_id) \
.order_by(UserBookmark.position.asc())
if cache:
bookmarks = bookmarks.options(
- FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
+ FromCache("sql_cache_short", f"get_user_{user_id}_bookmarks")
)
- return bookmarks.all()
+ return Session().execute(bookmarks).all()
def __repr__(self):
return f''
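
A small pyotp sketch of the flow behind is_totp_valid() and init_secret_2fa() above; the secret and code are generated on the fly, not taken from RhodeCode data:

    import pyotp

    secret = pyotp.random_base32()   # the kind of value init_secret_2fa() stores (encrypted)
    totp = pyotp.TOTP(secret)
    code = totp.now()                # 6-digit code an authenticator app would display
    assert totp.verify(code)         # the check performed by is_totp_valid()
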
diff --git a/rhodecode/model/forms.py b/rhodecode/model/forms.py
--- a/rhodecode/model/forms.py
+++ b/rhodecode/model/forms.py
@@ -104,6 +104,31 @@ def LoginForm(localizer):
return _LoginForm
+def TOTPForm(localizer, user, allow_recovery_code_use=False):
+ _ = localizer
+
+ class _TOTPForm(formencode.Schema):
+ allow_extra_fields = True
+ filter_extra_fields = False
+ totp = v.Regex(r'^(?:\d{6}|[A-Z0-9]{32})$')
+ secret_totp = v.String()
+
+ def to_python(self, value, state=None):
+ validation_checks = [user.is_totp_valid]
+ if allow_recovery_code_use:
+ validation_checks.append(user.is_2fa_recovery_code_valid)
+ form_data = super().to_python(value, state)
+ received_code = form_data['totp']
+ secret = form_data.get('secret_totp')
+
+ if not any(map(lambda func: func(received_code, secret), validation_checks)):
+ error_msg = _('Code is invalid. Try again!')
+ raise formencode.Invalid(error_msg, v, state, error_dict={'totp': error_msg})
+ return form_data
+
+ return _TOTPForm
+
+
def UserForm(localizer, edit=False, available_languages=None, old_data=None):
old_data = old_data or {}
available_languages = available_languages or []
@@ -421,10 +446,6 @@ class _BaseVcsSettingsForm(formencode.Sc
rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
- # svn
- vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
- vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
-
# cache
rhodecode_diff_cache = v.StringBoolean(if_missing=False)
@@ -434,10 +455,6 @@ def ApplicationUiSettingsForm(localizer)
class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
web_push_ssl = v.StringBoolean(if_missing=False)
- paths_root_path = All(
- v.ValidPath(localizer),
- v.UnicodeString(strip=True, min=1, not_empty=True)
- )
largefiles_usercache = All(
v.ValidPath(localizer),
v.UnicodeString(strip=True, min=2, not_empty=True))
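
The totp field above accepts either a 6-digit TOTP code or a 32-character recovery code; a quick sanity check of the regex on hypothetical inputs:

    import re

    pattern = r'^(?:\d{6}|[A-Z0-9]{32})$'
    assert re.match(pattern, '123456')                            # TOTP code
    assert re.match(pattern, 'JBSWY3DPEHPK3PXPJBSWY3DPEHPK3PXP')  # 32-char recovery code
    assert not re.match(pattern, '12345')                         # too short
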
diff --git a/rhodecode/model/pull_request.py b/rhodecode/model/pull_request.py
--- a/rhodecode/model/pull_request.py
+++ b/rhodecode/model/pull_request.py
@@ -38,7 +38,7 @@ from rhodecode.translation import lazy_u
from rhodecode.lib import helpers as h, hooks_utils, diffs
from rhodecode.lib import audit_logger
from collections import OrderedDict
-from rhodecode.lib.hooks_daemon import prepare_callback_daemon
+from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
from rhodecode.lib.ext_json import sjson as json
from rhodecode.lib.markup_renderer import (
DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
diff --git a/rhodecode/model/repo.py b/rhodecode/model/repo.py
--- a/rhodecode/model/repo.py
+++ b/rhodecode/model/repo.py
@@ -83,14 +83,6 @@ class RepoModel(BaseModel):
return repo_to_perm
- @LazyProperty
- def repos_path(self):
- """
- Gets the repositories root path from database
- """
- settings_model = VcsSettingsModel(sa=self.sa)
- return settings_model.get_repos_location()
-
def get(self, repo_id):
repo = self.sa.query(Repository) \
.filter(Repository.repo_id == repo_id)
@@ -608,7 +600,7 @@ class RepoModel(BaseModel):
# we need to flush here, in order to check if database won't
# throw any exceptions, create filesystem dirs at the very end
self.sa.flush()
- events.trigger(events.RepoCreateEvent(new_repo))
+ events.trigger(events.RepoCreateEvent(new_repo, actor=owner))
return new_repo
except Exception:
diff --git a/rhodecode/model/repo_group.py b/rhodecode/model/repo_group.py
--- a/rhodecode/model/repo_group.py
+++ b/rhodecode/model/repo_group.py
@@ -62,15 +62,6 @@ class RepoGroupModel(BaseModel):
def get_repo_group(self, repo_group):
return self._get_repo_group(repo_group)
- @LazyProperty
- def repos_path(self):
- """
- Gets the repositories root path from database
- """
-
- settings_model = VcsSettingsModel(sa=self.sa)
- return settings_model.get_repos_location()
-
def get_by_group_name(self, repo_group_name, cache=None):
repo = self.sa.query(RepoGroup) \
.filter(RepoGroup.group_name == repo_group_name)
diff --git a/rhodecode/model/scm.py b/rhodecode/model/scm.py
--- a/rhodecode/model/scm.py
+++ b/rhodecode/model/scm.py
@@ -189,15 +189,6 @@ class ScmModel(BaseModel):
Generic Scm Model
"""
- @LazyProperty
- def repos_path(self):
- """
- Gets the repositories root path from database
- """
-
- settings_model = VcsSettingsModel(sa=self.sa)
- return settings_model.get_repos_location()
-
def repo_scan(self, repos_path=None):
"""
Listing of repositories in given path. This path should not be a
diff --git a/rhodecode/model/settings.py b/rhodecode/model/settings.py
--- a/rhodecode/model/settings.py
+++ b/rhodecode/model/settings.py
@@ -499,11 +499,6 @@ class VcsSettingsModel(object):
('vcs_git_lfs', 'store_location')
)
- GLOBAL_SVN_SETTINGS = (
- ('vcs_svn_proxy', 'http_requests_enabled'),
- ('vcs_svn_proxy', 'http_server_url')
- )
-
SVN_BRANCH_SECTION = 'vcs_svn_branch'
SVN_TAG_SECTION = 'vcs_svn_tag'
SSL_SETTING = ('web', 'push_ssl')
@@ -718,25 +713,10 @@ class VcsSettingsModel(object):
# branch/tags patterns
self._create_svn_settings(self.global_settings, data)
- http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS
- http_requests_enabled_key, http_server_url_key = self._get_settings_keys(
- self.GLOBAL_SVN_SETTINGS, data)
-
- self._create_or_update_ui(
- self.global_settings, *http_requests_enabled,
- value=safe_str(data[http_requests_enabled_key]))
- self._create_or_update_ui(
- self.global_settings, *http_server_url,
- value=data[http_server_url_key])
-
def update_global_ssl_setting(self, value):
self._create_or_update_ui(
self.global_settings, *self.SSL_SETTING, value=value)
- def update_global_path_setting(self, value):
- self._create_or_update_ui(
- self.global_settings, *self.PATH_SETTING, value=value)
-
@assert_repo_settings
def delete_repo_svn_pattern(self, id_):
ui = self.repo_settings.UiDbModel.get(id_)
@@ -811,9 +791,6 @@ class VcsSettingsModel(object):
else:
return self.get_repo_general_settings()
- def get_repos_location(self):
- return self.global_settings.get_ui_by_key('/').ui_value
-
def _filter_ui_settings(self, settings):
filtered_settings = [
s for s in settings if self._should_keep_setting(s)]
diff --git a/rhodecode/model/user.py b/rhodecode/model/user.py
--- a/rhodecode/model/user.py
+++ b/rhodecode/model/user.py
@@ -37,7 +37,7 @@ from rhodecode.lib.str_utils import safe
from rhodecode.lib.exceptions import (
DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
UserOwnsUserGroupsException, NotAllowedToCreateUserError,
- UserOwnsPullRequestsException, UserOwnsArtifactsException)
+ UserOwnsPullRequestsException, UserOwnsArtifactsException, DuplicateUpdateUserError)
from rhodecode.lib.caching_query import FromCache
from rhodecode.model import BaseModel
from rhodecode.model.db import (
@@ -114,6 +114,7 @@ class UserModel(BaseModel):
else:
user = self.sa.query(User)\
.filter(User.username == username)
+
if cache:
name_key = _hash_key(username)
user = user.options(
@@ -308,6 +309,10 @@ class UserModel(BaseModel):
log.debug('Checking for existing account in RhodeCode '
'database with user_id `%s` ', updating_user_id)
user = User.get(updating_user_id)
+ # also validate that the username does not already belong to another user
+ maybe_other_user = User.get_by_username(username, case_insensitive=True)
+ if maybe_other_user and maybe_other_user.user_id != updating_user_id:
+ raise DuplicateUpdateUserError(f'a different user already exists with the username {username}')
else:
log.debug('Checking for existing account in RhodeCode '
'database with username `%s` ', username)
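Editor's note: the duplicate-username guard added above is small but easy to get subtly wrong; a self-contained sketch of the same check, with the User model and exception replaced by stand-ins.

    # Stand-ins for the real User model and exception; the check mirrors the one
    # added above: a case-insensitive lookup that ignores the user being updated.
    class DuplicateUpdateUserError(Exception):
        pass

    def assert_username_not_taken(updating_user_id, username, get_by_username):
        # get_by_username is expected to search case-insensitively, as in
        # User.get_by_username(username, case_insensitive=True)
        maybe_other_user = get_by_username(username)
        if maybe_other_user and maybe_other_user.user_id != updating_user_id:
            raise DuplicateUpdateUserError(
                f'a different user already exists with the username {username}')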
@@ -761,25 +766,29 @@ class UserModel(BaseModel):
'AuthUser: fill data execution based on: '
'user_id:%s api_key:%s username:%s', user_id, api_key, username)
try:
+ found_with = ''
dbuser = None
if user_id:
dbuser = self.get(user_id)
+ found_with = 'user_id'
elif api_key:
dbuser = self.get_by_auth_token(api_key)
+ found_with = 'auth_token'
elif username:
dbuser = self.get_by_username(username)
+ found_with = 'username'
if not dbuser:
log.warning(
- 'Unable to lookup user by id:%s api_key:%s username:%s',
- user_id, token_obfuscate(api_key), username)
+ 'Unable to lookup user by id:%s api_key:%s username:%s, found with: %s',
+ user_id, token_obfuscate(api_key), username, found_with)
return False
if not dbuser.active:
log.debug('User `%s:%s` is inactive, skipping fill data',
username, user_id)
return False
- log.debug('AuthUser: filling found user:%s data', dbuser)
+ log.debug('AuthUser: filling found user:%s data, found with: %s', dbuser, found_with)
attrs = {
'user_id': dbuser.user_id,
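Editor's note: a compact sketch of the lookup order that the found_with bookkeeping above instruments; the getter callables are stand-ins for the UserModel methods.

    import logging

    log = logging.getLogger(__name__)

    def lookup_user(get, get_by_auth_token, get_by_username,
                    user_id=None, api_key=None, username=None):
        # record which credential the lookup was attempted with, so both the
        # warning and the debug message can say how the user was (not) found
        found_with, dbuser = '', None
        if user_id:
            dbuser, found_with = get(user_id), 'user_id'
        elif api_key:
            dbuser, found_with = get_by_auth_token(api_key), 'auth_token'
        elif username:
            dbuser, found_with = get_by_username(username), 'username'
        if not dbuser:
            log.warning('Unable to lookup user, tried with: %s',
                        found_with or 'no credential')
        return dbuser, found_with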
diff --git a/rhodecode/model/validation_schema/schemas/user_schema.py b/rhodecode/model/validation_schema/schemas/user_schema.py
--- a/rhodecode/model/validation_schema/schemas/user_schema.py
+++ b/rhodecode/model/validation_schema/schemas/user_schema.py
@@ -64,6 +64,7 @@ class ChangePasswordSchema(colander.Sche
@colander.deferred
def deferred_username_validator(node, kw):
+ old_username = kw.get('username')
def name_validator(node, value):
msg = _(
@@ -74,6 +75,11 @@ def deferred_username_validator(node, kw
if not re.match(r'^[\w]{1}[\w\-\.]{0,254}$', value):
raise colander.Invalid(node, msg)
+ if value != old_username:
+ existing_user = User.get_by_username(value, case_insensitive=True)
+ if existing_user:
+ raise colander.Invalid(node, 'Username is already taken')
+
return name_validator
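Editor's note: the deferred validator above only sees old_username if the schema is bound with it; a hedged colander usage sketch (field names and the lookup function are illustrative).

    import colander

    def username_taken(value):
        # stand-in for User.get_by_username(value, case_insensitive=True)
        return value.lower() in {'admin', 'taken_name'}

    @colander.deferred
    def deferred_username_validator(node, kw):
        old_username = kw.get('username')

        def name_validator(node, value):
            # only complain when the name actually changes and is already in use
            if value != old_username and username_taken(value):
                raise colander.Invalid(node, 'Username is already taken')
        return name_validator

    class UserSchema(colander.MappingSchema):
        username = colander.SchemaNode(
            colander.String(), validator=deferred_username_validator)

    # binding supplies the kw dict seen by the deferred validator
    schema = UserSchema().bind(username='current_name')
    schema.deserialize({'username': 'current_name'})  # unchanged name passes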
diff --git a/rhodecode/model/validators.py b/rhodecode/model/validators.py
--- a/rhodecode/model/validators.py
+++ b/rhodecode/model/validators.py
@@ -432,7 +432,7 @@ def ValidAuth(localizer):
if not authenticate(username, password, '', HTTP_TYPE,
skip_missing=True):
- user = User.get_by_username(username)
+ user = User.get_by_username_or_primary_email(username)
if user and not user.active:
log.warning('user %s is disabled', username)
msg = M(self, 'disabled_account', state)
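Editor's note: ValidAuth now resolves the account via username or primary email when reporting a disabled account; a minimal stand-in of that lookup, using a plain list instead of the ORM.

    def get_by_username_or_primary_email(users, identifier):
        # stand-in for User.get_by_username_or_primary_email: case-insensitive
        # match on either the username or the primary email address
        ident = identifier.lower()
        for user in users:
            if user['username'].lower() == ident or user['email'].lower() == ident:
                return user
        return None

    users = [{'username': 'alice', 'email': 'alice@example.com', 'active': False}]
    assert get_by_username_or_primary_email(users, 'ALICE@example.com') is not None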
diff --git a/rhodecode/public/js/rhodecode/routes.js b/rhodecode/public/js/rhodecode/routes.js
--- a/rhodecode/public/js/rhodecode/routes.js
+++ b/rhodecode/public/js/rhodecode/routes.js
@@ -91,10 +91,12 @@ function registerRCRoutes() {
pyroutes.register('atom_feed_home_old', '/%(repo_name)s/feed/atom', ['repo_name']);
pyroutes.register('auth_home', '/_admin/auth*traverse', []);
pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']);
+ pyroutes.register('branch_remove', '/%(repo_name)s/branches/%(branch_name)s/remove', ['repo_name', 'branch_name']);
pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']);
pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []);
pyroutes.register('channelstream_proxy', '/_channelstream', []);
pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []);
+ pyroutes.register('check_2fa', '/_admin/check_2fa', []);
pyroutes.register('commit_draft_comments_submit', '/%(repo_name)s/changeset/%(commit_id)s/draft_comments_submit', ['repo_name', 'commit_id']);
pyroutes.register('debug_style_email', '/_admin/debug_style/email/%(email_id)s', ['email_id']);
pyroutes.register('debug_style_email_plain_rendered', '/_admin/debug_style/email-rendered/%(email_id)s', ['email_id']);
@@ -214,6 +216,8 @@ function registerRCRoutes() {
pyroutes.register('my_account_auth_tokens_view', '/_admin/my_account/auth_tokens/view', []);
pyroutes.register('my_account_bookmarks', '/_admin/my_account/bookmarks', []);
pyroutes.register('my_account_bookmarks_update', '/_admin/my_account/bookmarks/update', []);
+ pyroutes.register('my_account_configure_2fa', '/_admin/my_account/configure_2fa', []);
+ pyroutes.register('my_account_configure_2fa_update', '/_admin/my_account/configure_2fa_update', []);
pyroutes.register('my_account_edit', '/_admin/my_account/edit', []);
pyroutes.register('my_account_emails', '/_admin/my_account/emails', []);
pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []);
@@ -230,7 +234,9 @@ function registerRCRoutes() {
pyroutes.register('my_account_profile', '/_admin/my_account/profile', []);
pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []);
pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []);
+ pyroutes.register('my_account_regenerate_2fa_recovery_codes', '/_admin/my_account/regenerate_recovery_codes', []);
pyroutes.register('my_account_repos', '/_admin/my_account/repos', []);
+ pyroutes.register('my_account_show_2fa_recovery_codes', '/_admin/my_account/recovery_codes', []);
pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []);
pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []);
pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []);
@@ -243,6 +249,7 @@ function registerRCRoutes() {
pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']);
pyroutes.register('notifications_show_all', '/_admin/notifications', []);
pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']);
+ pyroutes.register('ops_celery_error_test', '/_admin/ops/error-celery', []);
pyroutes.register('ops_error_test', '/_admin/ops/error', []);
pyroutes.register('ops_healthcheck', '/_admin/ops/status', []);
pyroutes.register('ops_ping', '/_admin/ops/ping', []);
@@ -379,6 +386,7 @@ function registerRCRoutes() {
pyroutes.register('search_repo', '/%(repo_name)s/_search', ['repo_name']);
pyroutes.register('search_repo_alt', '/%(repo_name)s/search', ['repo_name']);
pyroutes.register('search_repo_group', '/%(repo_group_name)s/_search', ['repo_group_name']);
+ pyroutes.register('setup_2fa', '/_admin/setup_2fa', []);
pyroutes.register('store_user_session_value', '/_store_session_attr', []);
pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']);
pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']);
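Editor's note: the new client-side registrations above imply matching server-side routes; a hedged Pyramid-style sketch of what that registration might look like (route names are taken from the diff, the view wiring and URL placeholder style are assumptions, not shown here).

    from pyramid.config import Configurator

    def includeme(config: Configurator):
        # assumed server-side counterparts of the pyroutes entries added above
        config.add_route('check_2fa', '/_admin/check_2fa')
        config.add_route('setup_2fa', '/_admin/setup_2fa')
        config.add_route('my_account_configure_2fa', '/_admin/my_account/configure_2fa')
        config.add_route('my_account_configure_2fa_update', '/_admin/my_account/configure_2fa_update')
        config.add_route('my_account_show_2fa_recovery_codes', '/_admin/my_account/recovery_codes')
        config.add_route('my_account_regenerate_2fa_recovery_codes', '/_admin/my_account/regenerate_recovery_codes')
        config.add_route('ops_celery_error_test', '/_admin/ops/error-celery')
        config.add_route('branch_remove', '/{repo_name}/branches/{branch_name}/remove')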
diff --git a/rhodecode/subscribers.py b/rhodecode/subscribers.py
--- a/rhodecode/subscribers.py
+++ b/rhodecode/subscribers.py
@@ -116,8 +116,9 @@ def scan_repositories_if_enabled(event):
import_on_startup = settings['startup.import_repos']
if vcs_server_enabled and import_on_startup:
from rhodecode.model.scm import ScmModel
- from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_base_path
- repositories = ScmModel().repo_scan(get_rhodecode_base_path())
+ from rhodecode.lib.utils import repo2db_mapper
+ scm = ScmModel()
+ repositories = scm.repo_scan(scm.repos_path)
repo2db_mapper(repositories, remove_obsolete=False)
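Editor's note: a compact sketch of the startup wiring after this change, with the flags passed in rather than read from settings: the scan root now comes from the model itself instead of the removed get_rhodecode_base_path() helper.

    def scan_repositories_if_enabled_sketch(vcs_server_enabled, import_on_startup,
                                            scm_model, repo2db_mapper):
        # scm_model plays the role of ScmModel(); repo2db_mapper is the real mapper
        if vcs_server_enabled and import_on_startup:
            repositories = scm_model.repo_scan(scm_model.repos_path)
            repo2db_mapper(repositories, remove_obsolete=False)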
diff --git a/rhodecode/templates/admin/auth/plugin_settings.mako b/rhodecode/templates/admin/auth/plugin_settings.mako
--- a/rhodecode/templates/admin/auth/plugin_settings.mako
+++ b/rhodecode/templates/admin/auth/plugin_settings.mako
@@ -63,7 +63,12 @@
%elif node.widget == "password":
${h.password(node.name, defaults.get(node.name), class_="large")}
%elif node.widget == "bool":
-
+ %if node.name == "global_2fa" and c.rhodecode_edition_id != "EE":
+
+ <%node.description = _('This feature is available in RhodeCode EE edition only. Contact {sales_email} to obtain a trial license.').format(sales_email='sales@rhodecode.com')%>
+ %else:
+
+ ${_('You have not seen your 2FA recovery codes yet.')}
+ ${_('Please save them in a safe place, or you will lose access to your account if you lose access to your authenticator app.')}
+
## generate always 10 entries
- % for item in (c.bookmark_items + [None for i in range(10)])[:10]:
+ % for item in (c.user_bookmark_items + [None for i in range(10)])[:10]:
% if item is None:
## empty placeholder
${form_item()}
% else:
## actual entry
- ${form_item(position=item.position, title=item.title, redirect_url=item.redirect_url, repo=item.repository, repo_group=item.repository_group)}
+ ${form_item(position=item[0].position, title=item[0].title, redirect_url=item[0].redirect_url, repo=item[1], repo_group=item[2])}
% endif
% endfor
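Editor's note: the loop above switches from attribute access to tuple indexing because each bookmark entry is now a (bookmark, repository, repository_group) tuple rather than a bare ORM object; a small illustrative sketch of padding and unpacking that shape (names are stand-ins for the template context).

    from collections import namedtuple

    Bookmark = namedtuple('Bookmark', 'position title redirect_url')

    def iter_bookmark_rows(user_bookmark_items, slots=10):
        # always yield exactly `slots` rows, padding with empty placeholders
        padded = (list(user_bookmark_items) + [None] * slots)[:slots]
        for item in padded:
            if item is None:
                yield None  # empty placeholder entry
            else:
                bookmark, repo, repo_group = item  # item[0], item[1], item[2]
                yield (bookmark.position, bookmark.title, bookmark.redirect_url,
                       repo, repo_group)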
diff --git a/rhodecode/templates/admin/settings/settings_system.mako b/rhodecode/templates/admin/settings/settings_system.mako
--- a/rhodecode/templates/admin/settings/settings_system.mako
+++ b/rhodecode/templates/admin/settings/settings_system.mako
@@ -84,6 +84,6 @@
diff --git a/rhodecode/templates/admin/settings/settings_system_update.mako b/rhodecode/templates/admin/settings/settings_system_update.mako
--- a/rhodecode/templates/admin/settings/settings_system_update.mako
+++ b/rhodecode/templates/admin/settings/settings_system_update.mako
@@ -1,25 +1,30 @@
## upgrade block rendered after the on-click check
-
+
%if c.should_upgrade:
- A new version is available:
+ A new version is available!
+
+
+
%if c.latest_data.get('title'):
- ${h.literal(c.latest_data['title'])}
+ RhodeCode ${c.latest_ver} - ${h.literal(c.latest_data['title'])}
%else:
- ${c.latest_ver}
+ RhodeCode ${c.latest_ver}
%endif
%else:
- This instance is already running the latest stable version ${c.latest_ver}.
+ Your current version, ${c.cur_ver}, is up to date: it is equal to or newer than the latest available version, ${c.latest_ver}.
%endif
-
+
% if c.should_upgrade and c.important_notices:
-
-
- %else:
- ${_('Repository location change is disabled. You can enable this by changing the `allow_repo_location_change` inside .ini file.')}
- ## form still requires this but we cannot internally change it anyway
- ${h.hidden('paths_root_path',size=30,readonly="readonly", class_="disabled")}
- %endif
-
-
-
- ${_('Filesystem location where repositories should be stored. After changing this value a restart and rescan of the repository folder are required.')}
-
-
-
- % endif
-
% if display_globals or repo_type in ['git', 'hg']:
Edit
diff --git a/rhodecode/templates/ejs_templates/templates.html b/rhodecode/templates/ejs_templates/templates.html
--- a/rhodecode/templates/ejs_templates/templates.html
+++ b/rhodecode/templates/ejs_templates/templates.html
@@ -269,6 +269,18 @@ They are permanent until deleted, or con