##// END OF EJS Templates
fix(permission-flush): use delete method for permission cache invalidation as it's multi-process safe....
super-admin -
r5266:a1331d35 default
parent child Browse files
Show More
@@ -1,191 +1,191 b''
1 # required for pushd to work..
1 # required for pushd to work..
2 SHELL = /bin/bash
2 SHELL = /bin/bash
3
3
4
4
5 # set by: PATH_TO_OUTDATED_PACKAGES=/some/path/outdated_packages.py
5 # set by: PATH_TO_OUTDATED_PACKAGES=/some/path/outdated_packages.py
6 OUTDATED_PACKAGES = ${PATH_TO_OUTDATED_PACKAGES}
6 OUTDATED_PACKAGES = ${PATH_TO_OUTDATED_PACKAGES}
7
7
8 .PHONY: clean
8 .PHONY: clean
9 ## Cleanup compiled and cache py files
9 ## Cleanup compiled and cache py files
10 clean:
10 clean:
11 make test-clean
11 make test-clean
12 find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' -o -iname '*.orig' \) -exec rm '{}' ';'
12 find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' -o -iname '*.orig' \) -exec rm '{}' ';'
13 find . -type d -name "build" -prune -exec rm -rf '{}' ';'
13 find . -type d -name "build" -prune -exec rm -rf '{}' ';'
14
14
15
15
16 .PHONY: test
16 .PHONY: test
17 ## run test-clean and tests
17 ## run test-clean and tests
18 test:
18 test:
19 make test-clean
19 make test-clean
20 make test-only
20 make test-only
21
21
22
22
23 .PHONY: test-clean
23 .PHONY: test-clean
24 ## run test-clean and tests
24 ## run test-clean and tests
25 test-clean:
25 test-clean:
26 rm -rf coverage.xml htmlcov junit.xml pylint.log result
26 rm -rf coverage.xml htmlcov junit.xml pylint.log result
27 find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
27 find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
28 find . -type f \( -iname '.coverage.*' \) -exec rm '{}' ';'
28 find . -type f \( -iname '.coverage.*' \) -exec rm '{}' ';'
29
29
30
30
31 .PHONY: test-only
31 .PHONY: test-only
32 ## Run tests only without cleanup
32 ## Run tests only without cleanup
33 test-only:
33 test-only:
34 PYTHONHASHSEED=random \
34 PYTHONHASHSEED=random \
35 py.test -x -vv -r xw -p no:sugar \
35 py.test -x -vv -r xw -p no:sugar \
36 --cov-report=term-missing --cov-report=html \
36 --cov-report=term-missing --cov-report=html \
37 --cov=rhodecode rhodecode
37 --cov=rhodecode rhodecode
38
38
39
39
40 .PHONY: test-only-mysql
40 .PHONY: test-only-mysql
41 ## run tests against mysql
41 ## run tests against mysql
42 test-only-mysql:
42 test-only-mysql:
43 PYTHONHASHSEED=random \
43 PYTHONHASHSEED=random \
44 py.test -x -vv -r xw -p no:sugar \
44 py.test -x -vv -r xw -p no:sugar \
45 --cov-report=term-missing --cov-report=html \
45 --cov-report=term-missing --cov-report=html \
46 --ini-config-override='{"app:main": {"sqlalchemy.db1.url": "mysql://root:qweqwe@localhost/rhodecode_test?charset=utf8"}}' \
46 --ini-config-override='{"app:main": {"sqlalchemy.db1.url": "mysql://root:qweqwe@localhost/rhodecode_test?charset=utf8"}}' \
47 --cov=rhodecode rhodecode
47 --cov=rhodecode rhodecode
48
48
49
49
50 .PHONY: test-only-postgres
50 .PHONY: test-only-postgres
51 ## run tests against postgres
51 ## run tests against postgres
52 test-only-postgres:
52 test-only-postgres:
53 PYTHONHASHSEED=random \
53 PYTHONHASHSEED=random \
54 py.test -x -vv -r xw -p no:sugar \
54 py.test -x -vv -r xw -p no:sugar \
55 --cov-report=term-missing --cov-report=html \
55 --cov-report=term-missing --cov-report=html \
56 --ini-config-override='{"app:main": {"sqlalchemy.db1.url": "postgresql://postgres:qweqwe@localhost/rhodecode_test"}}' \
56 --ini-config-override='{"app:main": {"sqlalchemy.db1.url": "postgresql://postgres:qweqwe@localhost/rhodecode_test"}}' \
57 --cov=rhodecode rhodecode
57 --cov=rhodecode rhodecode
58
58
59 .PHONY: ruff-check
59 .PHONY: ruff-check
60 ## run a ruff analysis
60 ## run a ruff analysis
61 ruff-check:
61 ruff-check:
62 ruff check --ignore F401 --ignore I001 --ignore E402 --ignore E501 --ignore F841 --exclude rhodecode/lib/dbmigrate --exclude .eggs --exclude .dev .
62 ruff check --ignore F401 --ignore I001 --ignore E402 --ignore E501 --ignore F841 --exclude rhodecode/lib/dbmigrate --exclude .eggs --exclude .dev .
63
63
64
64
65 .PHONY: docs
65 .PHONY: docs
66 ## build docs
66 ## build docs
67 docs:
67 docs:
68 (cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make clean html)
68 (cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make clean html)
69
69
70
70
71 .PHONY: docs-clean
71 .PHONY: docs-clean
72 ## Cleanup docs
72 ## Cleanup docs
73 docs-clean:
73 docs-clean:
74 (cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make clean)
74 (cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make clean)
75
75
76
76
77 .PHONY: docs-cleanup
77 .PHONY: docs-cleanup
78 ## Cleanup docs
78 ## Cleanup docs
79 docs-cleanup:
79 docs-cleanup:
80 (cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make cleanup)
80 (cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make cleanup)
81
81
82
82
83 .PHONY: web-build
83 .PHONY: web-build
84 ## Build JS packages static/js
84 ## Build JS packages static/js
85 web-build:
85 web-build:
86 docker run -it --rm -v $(PWD):/project --workdir=/project rhodecode/static-files-build:16 -c "npm install && /project/node_modules/.bin/grunt"
86 docker run -it --rm -v $(PWD):/project --workdir=/project rhodecode/static-files-build:16 -c "npm install && /project/node_modules/.bin/grunt"
87 # run static file check
87 # run static file check
88 ./rhodecode/tests/scripts/static-file-check.sh rhodecode/public/
88 ./rhodecode/tests/scripts/static-file-check.sh rhodecode/public/
89 rm -rf node_modules
89 rm -rf node_modules
90
90
91
91
92 .PHONY: pip-packages
92 .PHONY: pip-packages
93 ## Show outdated packages
93 ## Show outdated packages
94 pip-packages:
94 pip-packages:
95 python ${OUTDATED_PACKAGES}
95 python ${OUTDATED_PACKAGES}
96
96
97
97
98 .PHONY: build
98 .PHONY: build
99 ## Build sdist/egg
99 ## Build sdist/egg
100 build:
100 build:
101 python -m build
101 python -m build
102
102
103
103
104 .PHONY: dev-sh
104 .PHONY: dev-sh
105 ## make dev-sh
105 ## make dev-sh
106 dev-sh:
106 dev-sh:
107 sudo echo "deb [trusted=yes] https://apt.fury.io/rsteube/ /" | sudo tee -a "/etc/apt/sources.list.d/fury.list"
107 sudo echo "deb [trusted=yes] https://apt.fury.io/rsteube/ /" | sudo tee -a "/etc/apt/sources.list.d/fury.list"
108 sudo apt-get update
108 sudo apt-get update
109 sudo apt-get install -y zsh carapace-bin
109 sudo apt-get install -y zsh carapace-bin
110 rm -rf /home/rhodecode/.oh-my-zsh
110 rm -rf /home/rhodecode/.oh-my-zsh
111 curl https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh | sh
111 curl https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh | sh
112 echo "source <(carapace _carapace)" > /home/rhodecode/.zsrc
112 echo "source <(carapace _carapace)" > /home/rhodecode/.zsrc
113 PROMPT='%(?.%F{green}√.%F{red}?%?)%f %B%F{240}%1~%f%b %# ' zsh
113 PROMPT='%(?.%F{green}√.%F{red}?%?)%f %B%F{240}%1~%f%b %# ' zsh
114
114
115
115
116 .PHONY: dev-cleanup
116 .PHONY: dev-cleanup
117 ## Cleanup: pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y
117 ## Cleanup: pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y
118 dev-cleanup:
118 dev-cleanup:
119 pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y
119 pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y
120 rm -rf /tmp/*
120 rm -rf /tmp/*
121
121
122
122
123 .PHONY: dev-env
123 .PHONY: dev-env
124 ## make dev-env based on the requirements files and install develop of packages
124 ## make dev-env based on the requirements files and install develop of packages
125 dev-env:
125 dev-env:
126 pip install build virtualenv
126 pip install build virtualenv
127 pushd ../rhodecode-vcsserver/ && make dev-env && popd
127 pushd ../rhodecode-vcsserver/ && make dev-env && popd
128 pip wheel --wheel-dir=/home/rhodecode/.cache/pip/wheels -r requirements.txt -r requirements_rc_tools.txt -r requirements_test.txt -r requirements_debug.txt
128 pip wheel --wheel-dir=/home/rhodecode/.cache/pip/wheels -r requirements.txt -r requirements_rc_tools.txt -r requirements_test.txt -r requirements_debug.txt
129 pip install --no-index --find-links=/home/rhodecode/.cache/pip/wheels -r requirements.txt -r requirements_rc_tools.txt -r requirements_test.txt -r requirements_debug.txt
129 pip install --no-index --find-links=/home/rhodecode/.cache/pip/wheels -r requirements.txt -r requirements_rc_tools.txt -r requirements_test.txt -r requirements_debug.txt
130 pip install -e .
130 pip install -e .
131
131
132
132
133 .PHONY: sh
133 .PHONY: sh
134 ## shortcut for make dev-sh dev-env
134 ## shortcut for make dev-sh dev-env
135 sh:
135 sh:
136 (make dev-env; make dev-sh)
136 (make dev-env; make dev-sh)
137
137
138
138
139 .PHONY: dev-srv
139 .PHONY: dev-srv
140 ## run develop server instance, docker exec -it $(docker ps -q --filter 'name=dev-enterprise-ce') /bin/bash
140 ## run develop server instance, docker exec -it $(docker ps -q --filter 'name=dev-enterprise-ce') /bin/bash
141 dev-srv:
141 dev-srv:
142 pserve --reload .dev/dev.ini
142 pserve --reload .dev/dev.ini
143
143
144
144
145 .PHONY: dev-srv-g
145 .PHONY: dev-srv-g
146 ## run gunicorn multi process workers
146 ## run gunicorn multi process workers
147 dev-srv-g:
147 dev-srv-g:
148 gunicorn --workers=2 --paste .dev/dev.ini --bind=0.0.0.0:10020 --config=.dev/gunicorn_config.py
148 gunicorn --paste .dev/dev.ini --bind=0.0.0.0:10020 --config=.dev/gunicorn_config.py --timeout=120
149
149
150
150
151 # Default command on calling make
151 # Default command on calling make
152 .DEFAULT_GOAL := show-help
152 .DEFAULT_GOAL := show-help
153
153
154 .PHONY: show-help
154 .PHONY: show-help
155 show-help:
155 show-help:
156 @echo "$$(tput bold)Available rules:$$(tput sgr0)"
156 @echo "$$(tput bold)Available rules:$$(tput sgr0)"
157 @echo
157 @echo
158 @sed -n -e "/^## / { \
158 @sed -n -e "/^## / { \
159 h; \
159 h; \
160 s/.*//; \
160 s/.*//; \
161 :doc" \
161 :doc" \
162 -e "H; \
162 -e "H; \
163 n; \
163 n; \
164 s/^## //; \
164 s/^## //; \
165 t doc" \
165 t doc" \
166 -e "s/:.*//; \
166 -e "s/:.*//; \
167 G; \
167 G; \
168 s/\\n## /---/; \
168 s/\\n## /---/; \
169 s/\\n/ /g; \
169 s/\\n/ /g; \
170 p; \
170 p; \
171 }" ${MAKEFILE_LIST} \
171 }" ${MAKEFILE_LIST} \
172 | LC_ALL='C' sort --ignore-case \
172 | LC_ALL='C' sort --ignore-case \
173 | awk -F '---' \
173 | awk -F '---' \
174 -v ncol=$$(tput cols) \
174 -v ncol=$$(tput cols) \
175 -v indent=19 \
175 -v indent=19 \
176 -v col_on="$$(tput setaf 6)" \
176 -v col_on="$$(tput setaf 6)" \
177 -v col_off="$$(tput sgr0)" \
177 -v col_off="$$(tput sgr0)" \
178 '{ \
178 '{ \
179 printf "%s%*s%s ", col_on, -indent, $$1, col_off; \
179 printf "%s%*s%s ", col_on, -indent, $$1, col_off; \
180 n = split($$2, words, " "); \
180 n = split($$2, words, " "); \
181 line_length = ncol - indent; \
181 line_length = ncol - indent; \
182 for (i = 1; i <= n; i++) { \
182 for (i = 1; i <= n; i++) { \
183 line_length -= length(words[i]) + 1; \
183 line_length -= length(words[i]) + 1; \
184 if (line_length <= 0) { \
184 if (line_length <= 0) { \
185 line_length = ncol - indent - length(words[i]) - 1; \
185 line_length = ncol - indent - length(words[i]) - 1; \
186 printf "\n%*s ", -indent, " "; \
186 printf "\n%*s ", -indent, " "; \
187 } \
187 } \
188 printf "%s ", words[i]; \
188 printf "%s ", words[i]; \
189 } \
189 } \
190 printf "\n"; \
190 printf "\n"; \
191 }'
191 }'
@@ -1,53 +1,53 b''
1 # Copyright (C) 2016-2023 RhodeCode GmbH
1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import logging
19 import logging
20
20
21 from rhodecode import events
21 from rhodecode import events
22 from rhodecode.lib import rc_cache
22 from rhodecode.lib import rc_cache
23
23
24 log = logging.getLogger(__name__)
24 log = logging.getLogger(__name__)
25
25
26 # names of namespaces used for different permission related cached
26 # names of namespaces used for different permission related cached
27 # during flush operation we need to take care of all those
27 # during flush operation we need to take care of all those
28 cache_namespaces = [
28 cache_namespaces = [
29 f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{{}}',
29 f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{{}}',
30 f'cache_user_repo_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{{}}',
30 f'cache_user_repo_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{{}}',
31 f'cache_user_user_group_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{{}}',
31 f'cache_user_user_group_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{{}}',
32 f'cache_user_repo_group_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{{}}'
32 f'cache_user_repo_group_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{{}}'
33 ]
33 ]
34
34
35
35
36 def trigger_user_permission_flush(event):
36 def trigger_user_permission_flush(event):
37 """
37 """
38 Subscriber to the `UserPermissionsChange`. This triggers the
38 Subscriber to the `UserPermissionsChange`. This triggers the
39 automatic flush of permission caches, so the users affected receive new permissions
39 automatic flush of permission caches, so the users affected receive new permissions
40 Right Away
40 Right Away
41 """
41 """
42
42
43 affected_user_ids = set(event.user_ids)
43 affected_user_ids = set(event.user_ids)
44 for user_id in affected_user_ids:
44 for user_id in affected_user_ids:
45 for cache_namespace_uid_tmpl in cache_namespaces:
45 for cache_namespace_uid_tmpl in cache_namespaces:
46 cache_namespace_uid = cache_namespace_uid_tmpl.format(user_id)
46 cache_namespace_uid = cache_namespace_uid_tmpl.format(user_id)
47 del_keys = rc_cache.clear_cache_namespace('cache_perms', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
47 del_keys = rc_cache.clear_cache_namespace('cache_perms', cache_namespace_uid, method=rc_cache.CLEAR_DELETE)
48 log.debug('Invalidated %s cache keys for user_id: %s and namespace %s',
48 log.debug('Invalidated %s cache keys for user_id: %s and namespace %s',
49 del_keys, user_id, cache_namespace_uid)
49 del_keys, user_id, cache_namespace_uid)
50
50
51
51
52 def includeme(config):
52 def includeme(config):
53 config.add_subscriber(trigger_user_permission_flush, events.UserPermissionsChange)
53 config.add_subscriber(trigger_user_permission_flush, events.UserPermissionsChange)
@@ -1,1321 +1,1321 b''
1 # Copyright (C) 2016-2023 RhodeCode GmbH
1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import logging
19 import logging
20 import datetime
20 import datetime
21 import formencode
21 import formencode
22 import formencode.htmlfill
22 import formencode.htmlfill
23
23
24 from pyramid.httpexceptions import HTTPFound
24 from pyramid.httpexceptions import HTTPFound
25 from pyramid.renderers import render
25 from pyramid.renderers import render
26 from pyramid.response import Response
26 from pyramid.response import Response
27
27
28 from rhodecode import events
28 from rhodecode import events
29 from rhodecode.apps._base import BaseAppView, DataGridAppView, UserAppView
29 from rhodecode.apps._base import BaseAppView, DataGridAppView, UserAppView
30 from rhodecode.apps.ssh_support import SshKeyFileChangeEvent
30 from rhodecode.apps.ssh_support import SshKeyFileChangeEvent
31 from rhodecode.authentication.base import get_authn_registry, RhodeCodeExternalAuthPlugin
31 from rhodecode.authentication.base import get_authn_registry, RhodeCodeExternalAuthPlugin
32 from rhodecode.authentication.plugins import auth_rhodecode
32 from rhodecode.authentication.plugins import auth_rhodecode
33 from rhodecode.events import trigger
33 from rhodecode.events import trigger
34 from rhodecode.model.db import true, UserNotice
34 from rhodecode.model.db import true, UserNotice
35
35
36 from rhodecode.lib import audit_logger, rc_cache, auth
36 from rhodecode.lib import audit_logger, rc_cache, auth
37 from rhodecode.lib.exceptions import (
37 from rhodecode.lib.exceptions import (
38 UserCreationError, UserOwnsReposException, UserOwnsRepoGroupsException,
38 UserCreationError, UserOwnsReposException, UserOwnsRepoGroupsException,
39 UserOwnsUserGroupsException, UserOwnsPullRequestsException,
39 UserOwnsUserGroupsException, UserOwnsPullRequestsException,
40 UserOwnsArtifactsException, DefaultUserException)
40 UserOwnsArtifactsException, DefaultUserException)
41 from rhodecode.lib import ext_json
41 from rhodecode.lib import ext_json
42 from rhodecode.lib.auth import (
42 from rhodecode.lib.auth import (
43 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
43 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
44 from rhodecode.lib import helpers as h
44 from rhodecode.lib import helpers as h
45 from rhodecode.lib.helpers import SqlPage
45 from rhodecode.lib.helpers import SqlPage
46 from rhodecode.lib.utils2 import safe_int, safe_str, AttributeDict
46 from rhodecode.lib.utils2 import safe_int, safe_str, AttributeDict
47 from rhodecode.model.auth_token import AuthTokenModel
47 from rhodecode.model.auth_token import AuthTokenModel
48 from rhodecode.model.forms import (
48 from rhodecode.model.forms import (
49 UserForm, UserIndividualPermissionsForm, UserPermissionsForm,
49 UserForm, UserIndividualPermissionsForm, UserPermissionsForm,
50 UserExtraEmailForm, UserExtraIpForm)
50 UserExtraEmailForm, UserExtraIpForm)
51 from rhodecode.model.permission import PermissionModel
51 from rhodecode.model.permission import PermissionModel
52 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.repo_group import RepoGroupModel
53 from rhodecode.model.ssh_key import SshKeyModel
53 from rhodecode.model.ssh_key import SshKeyModel
54 from rhodecode.model.user import UserModel
54 from rhodecode.model.user import UserModel
55 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.user_group import UserGroupModel
56 from rhodecode.model.db import (
56 from rhodecode.model.db import (
57 or_, coalesce,IntegrityError, User, UserGroup, UserIpMap, UserEmailMap,
57 or_, coalesce,IntegrityError, User, UserGroup, UserIpMap, UserEmailMap,
58 UserApiKeys, UserSshKeys, RepoGroup)
58 UserApiKeys, UserSshKeys, RepoGroup)
59 from rhodecode.model.meta import Session
59 from rhodecode.model.meta import Session
60
60
61 log = logging.getLogger(__name__)
61 log = logging.getLogger(__name__)
62
62
63
63
64 class AdminUsersView(BaseAppView, DataGridAppView):
64 class AdminUsersView(BaseAppView, DataGridAppView):
65
65
66 def load_default_context(self):
66 def load_default_context(self):
67 c = self._get_local_tmpl_context()
67 c = self._get_local_tmpl_context()
68 return c
68 return c
69
69
70 @LoginRequired()
70 @LoginRequired()
71 @HasPermissionAllDecorator('hg.admin')
71 @HasPermissionAllDecorator('hg.admin')
72 def users_list(self):
72 def users_list(self):
73 c = self.load_default_context()
73 c = self.load_default_context()
74 return self._get_template_context(c)
74 return self._get_template_context(c)
75
75
76 @LoginRequired()
76 @LoginRequired()
77 @HasPermissionAllDecorator('hg.admin')
77 @HasPermissionAllDecorator('hg.admin')
78 def users_list_data(self):
78 def users_list_data(self):
79 self.load_default_context()
79 self.load_default_context()
80 column_map = {
80 column_map = {
81 'first_name': 'name',
81 'first_name': 'name',
82 'last_name': 'lastname',
82 'last_name': 'lastname',
83 }
83 }
84 draw, start, limit = self._extract_chunk(self.request)
84 draw, start, limit = self._extract_chunk(self.request)
85 search_q, order_by, order_dir = self._extract_ordering(
85 search_q, order_by, order_dir = self._extract_ordering(
86 self.request, column_map=column_map)
86 self.request, column_map=column_map)
87 _render = self.request.get_partial_renderer(
87 _render = self.request.get_partial_renderer(
88 'rhodecode:templates/data_table/_dt_elements.mako')
88 'rhodecode:templates/data_table/_dt_elements.mako')
89
89
90 def user_actions(user_id, username):
90 def user_actions(user_id, username):
91 return _render("user_actions", user_id, username)
91 return _render("user_actions", user_id, username)
92
92
93 users_data_total_count = User.query()\
93 users_data_total_count = User.query()\
94 .filter(User.username != User.DEFAULT_USER) \
94 .filter(User.username != User.DEFAULT_USER) \
95 .count()
95 .count()
96
96
97 users_data_total_inactive_count = User.query()\
97 users_data_total_inactive_count = User.query()\
98 .filter(User.username != User.DEFAULT_USER) \
98 .filter(User.username != User.DEFAULT_USER) \
99 .filter(User.active != true())\
99 .filter(User.active != true())\
100 .count()
100 .count()
101
101
102 # json generate
102 # json generate
103 base_q = User.query().filter(User.username != User.DEFAULT_USER)
103 base_q = User.query().filter(User.username != User.DEFAULT_USER)
104 base_inactive_q = base_q.filter(User.active != true())
104 base_inactive_q = base_q.filter(User.active != true())
105
105
106 if search_q:
106 if search_q:
107 like_expression = '%{}%'.format(safe_str(search_q))
107 like_expression = '%{}%'.format(safe_str(search_q))
108 base_q = base_q.filter(or_(
108 base_q = base_q.filter(or_(
109 User.username.ilike(like_expression),
109 User.username.ilike(like_expression),
110 User._email.ilike(like_expression),
110 User._email.ilike(like_expression),
111 User.name.ilike(like_expression),
111 User.name.ilike(like_expression),
112 User.lastname.ilike(like_expression),
112 User.lastname.ilike(like_expression),
113 ))
113 ))
114 base_inactive_q = base_q.filter(User.active != true())
114 base_inactive_q = base_q.filter(User.active != true())
115
115
116 users_data_total_filtered_count = base_q.count()
116 users_data_total_filtered_count = base_q.count()
117 users_data_total_filtered_inactive_count = base_inactive_q.count()
117 users_data_total_filtered_inactive_count = base_inactive_q.count()
118
118
119 sort_col = getattr(User, order_by, None)
119 sort_col = getattr(User, order_by, None)
120 if sort_col:
120 if sort_col:
121 if order_dir == 'asc':
121 if order_dir == 'asc':
122 # handle null values properly to order by NULL last
122 # handle null values properly to order by NULL last
123 if order_by in ['last_activity']:
123 if order_by in ['last_activity']:
124 sort_col = coalesce(sort_col, datetime.date.max)
124 sort_col = coalesce(sort_col, datetime.date.max)
125 sort_col = sort_col.asc()
125 sort_col = sort_col.asc()
126 else:
126 else:
127 # handle null values properly to order by NULL last
127 # handle null values properly to order by NULL last
128 if order_by in ['last_activity']:
128 if order_by in ['last_activity']:
129 sort_col = coalesce(sort_col, datetime.date.min)
129 sort_col = coalesce(sort_col, datetime.date.min)
130 sort_col = sort_col.desc()
130 sort_col = sort_col.desc()
131
131
132 base_q = base_q.order_by(sort_col)
132 base_q = base_q.order_by(sort_col)
133 base_q = base_q.offset(start).limit(limit)
133 base_q = base_q.offset(start).limit(limit)
134
134
135 users_list = base_q.all()
135 users_list = base_q.all()
136
136
137 users_data = []
137 users_data = []
138 for user in users_list:
138 for user in users_list:
139 users_data.append({
139 users_data.append({
140 "username": h.gravatar_with_user(self.request, user.username),
140 "username": h.gravatar_with_user(self.request, user.username),
141 "email": user.email,
141 "email": user.email,
142 "first_name": user.first_name,
142 "first_name": user.first_name,
143 "last_name": user.last_name,
143 "last_name": user.last_name,
144 "last_login": h.format_date(user.last_login),
144 "last_login": h.format_date(user.last_login),
145 "last_activity": h.format_date(user.last_activity),
145 "last_activity": h.format_date(user.last_activity),
146 "active": h.bool2icon(user.active),
146 "active": h.bool2icon(user.active),
147 "active_raw": user.active,
147 "active_raw": user.active,
148 "admin": h.bool2icon(user.admin),
148 "admin": h.bool2icon(user.admin),
149 "extern_type": user.extern_type,
149 "extern_type": user.extern_type,
150 "extern_name": user.extern_name,
150 "extern_name": user.extern_name,
151 "action": user_actions(user.user_id, user.username),
151 "action": user_actions(user.user_id, user.username),
152 })
152 })
153 data = ({
153 data = ({
154 'draw': draw,
154 'draw': draw,
155 'data': users_data,
155 'data': users_data,
156 'recordsTotal': users_data_total_count,
156 'recordsTotal': users_data_total_count,
157 'recordsFiltered': users_data_total_filtered_count,
157 'recordsFiltered': users_data_total_filtered_count,
158 'recordsTotalInactive': users_data_total_inactive_count,
158 'recordsTotalInactive': users_data_total_inactive_count,
159 'recordsFilteredInactive': users_data_total_filtered_inactive_count
159 'recordsFilteredInactive': users_data_total_filtered_inactive_count
160 })
160 })
161
161
162 return data
162 return data
163
163
164 def _set_personal_repo_group_template_vars(self, c_obj):
164 def _set_personal_repo_group_template_vars(self, c_obj):
165 DummyUser = AttributeDict({
165 DummyUser = AttributeDict({
166 'username': '${username}',
166 'username': '${username}',
167 'user_id': '${user_id}',
167 'user_id': '${user_id}',
168 })
168 })
169 c_obj.default_create_repo_group = RepoGroupModel() \
169 c_obj.default_create_repo_group = RepoGroupModel() \
170 .get_default_create_personal_repo_group()
170 .get_default_create_personal_repo_group()
171 c_obj.personal_repo_group_name = RepoGroupModel() \
171 c_obj.personal_repo_group_name = RepoGroupModel() \
172 .get_personal_group_name(DummyUser)
172 .get_personal_group_name(DummyUser)
173
173
174 @LoginRequired()
174 @LoginRequired()
175 @HasPermissionAllDecorator('hg.admin')
175 @HasPermissionAllDecorator('hg.admin')
176 def users_new(self):
176 def users_new(self):
177 _ = self.request.translate
177 _ = self.request.translate
178 c = self.load_default_context()
178 c = self.load_default_context()
179 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.uid
179 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.uid
180 self._set_personal_repo_group_template_vars(c)
180 self._set_personal_repo_group_template_vars(c)
181 return self._get_template_context(c)
181 return self._get_template_context(c)
182
182
183 @LoginRequired()
183 @LoginRequired()
184 @HasPermissionAllDecorator('hg.admin')
184 @HasPermissionAllDecorator('hg.admin')
185 @CSRFRequired()
185 @CSRFRequired()
186 def users_create(self):
186 def users_create(self):
187 _ = self.request.translate
187 _ = self.request.translate
188 c = self.load_default_context()
188 c = self.load_default_context()
189 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.uid
189 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.uid
190 user_model = UserModel()
190 user_model = UserModel()
191 user_form = UserForm(self.request.translate)()
191 user_form = UserForm(self.request.translate)()
192 try:
192 try:
193 form_result = user_form.to_python(dict(self.request.POST))
193 form_result = user_form.to_python(dict(self.request.POST))
194 user = user_model.create(form_result)
194 user = user_model.create(form_result)
195 Session().flush()
195 Session().flush()
196 creation_data = user.get_api_data()
196 creation_data = user.get_api_data()
197 username = form_result['username']
197 username = form_result['username']
198
198
199 audit_logger.store_web(
199 audit_logger.store_web(
200 'user.create', action_data={'data': creation_data},
200 'user.create', action_data={'data': creation_data},
201 user=c.rhodecode_user)
201 user=c.rhodecode_user)
202
202
203 user_link = h.link_to(
203 user_link = h.link_to(
204 h.escape(username),
204 h.escape(username),
205 h.route_path('user_edit', user_id=user.user_id))
205 h.route_path('user_edit', user_id=user.user_id))
206 h.flash(h.literal(_('Created user %(user_link)s')
206 h.flash(h.literal(_('Created user %(user_link)s')
207 % {'user_link': user_link}), category='success')
207 % {'user_link': user_link}), category='success')
208 Session().commit()
208 Session().commit()
209 except formencode.Invalid as errors:
209 except formencode.Invalid as errors:
210 self._set_personal_repo_group_template_vars(c)
210 self._set_personal_repo_group_template_vars(c)
211 data = render(
211 data = render(
212 'rhodecode:templates/admin/users/user_add.mako',
212 'rhodecode:templates/admin/users/user_add.mako',
213 self._get_template_context(c), self.request)
213 self._get_template_context(c), self.request)
214 html = formencode.htmlfill.render(
214 html = formencode.htmlfill.render(
215 data,
215 data,
216 defaults=errors.value,
216 defaults=errors.value,
217 errors=errors.unpack_errors() or {},
217 errors=errors.unpack_errors() or {},
218 prefix_error=False,
218 prefix_error=False,
219 encoding="UTF-8",
219 encoding="UTF-8",
220 force_defaults=False
220 force_defaults=False
221 )
221 )
222 return Response(html)
222 return Response(html)
223 except UserCreationError as e:
223 except UserCreationError as e:
224 h.flash(safe_str(e), 'error')
224 h.flash(safe_str(e), 'error')
225 except Exception:
225 except Exception:
226 log.exception("Exception creation of user")
226 log.exception("Exception creation of user")
227 h.flash(_('Error occurred during creation of user %s')
227 h.flash(_('Error occurred during creation of user %s')
228 % self.request.POST.get('username'), category='error')
228 % self.request.POST.get('username'), category='error')
229 raise HTTPFound(h.route_path('users'))
229 raise HTTPFound(h.route_path('users'))
230
230
231
231
class UsersView(UserAppView):
    """
    This view has alternative version inside EE, if modified please take a look
    in there as well.
    """
    # NOTE: the docstring above was previously placed *after* this attribute,
    # which made it a plain no-op string statement instead of the class
    # docstring; moved first so it is actually exposed as __doc__.

    # scoped auth tokens are an EE feature; disabled in this CE view
    ALLOW_SCOPED_TOKENS = False
238
238
239 def get_auth_plugins(self):
239 def get_auth_plugins(self):
240 valid_plugins = []
240 valid_plugins = []
241 authn_registry = get_authn_registry(self.request.registry)
241 authn_registry = get_authn_registry(self.request.registry)
242 for plugin in authn_registry.get_plugins_for_authentication():
242 for plugin in authn_registry.get_plugins_for_authentication():
243 if isinstance(plugin, RhodeCodeExternalAuthPlugin):
243 if isinstance(plugin, RhodeCodeExternalAuthPlugin):
244 valid_plugins.append(plugin)
244 valid_plugins.append(plugin)
245 elif plugin.name == 'rhodecode':
245 elif plugin.name == 'rhodecode':
246 valid_plugins.append(plugin)
246 valid_plugins.append(plugin)
247
247
248 # extend our choices if user has set a bound plugin which isn't enabled at the
248 # extend our choices if user has set a bound plugin which isn't enabled at the
249 # moment
249 # moment
250 extern_type = self.db_user.extern_type
250 extern_type = self.db_user.extern_type
251 if extern_type not in [x.uid for x in valid_plugins]:
251 if extern_type not in [x.uid for x in valid_plugins]:
252 try:
252 try:
253 plugin = authn_registry.get_plugin_by_uid(extern_type)
253 plugin = authn_registry.get_plugin_by_uid(extern_type)
254 if plugin:
254 if plugin:
255 valid_plugins.append(plugin)
255 valid_plugins.append(plugin)
256
256
257 except Exception:
257 except Exception:
258 log.exception(
258 log.exception(
259 f'Could not extend user plugins with `{extern_type}`')
259 f'Could not extend user plugins with `{extern_type}`')
260 return valid_plugins
260 return valid_plugins
261
261
262 def load_default_context(self):
262 def load_default_context(self):
263 req = self.request
263 req = self.request
264
264
265 c = self._get_local_tmpl_context()
265 c = self._get_local_tmpl_context()
266 c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS
266 c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS
267 c.allowed_languages = [
267 c.allowed_languages = [
268 ('en', 'English (en)'),
268 ('en', 'English (en)'),
269 ('de', 'German (de)'),
269 ('de', 'German (de)'),
270 ('fr', 'French (fr)'),
270 ('fr', 'French (fr)'),
271 ('it', 'Italian (it)'),
271 ('it', 'Italian (it)'),
272 ('ja', 'Japanese (ja)'),
272 ('ja', 'Japanese (ja)'),
273 ('pl', 'Polish (pl)'),
273 ('pl', 'Polish (pl)'),
274 ('pt', 'Portuguese (pt)'),
274 ('pt', 'Portuguese (pt)'),
275 ('ru', 'Russian (ru)'),
275 ('ru', 'Russian (ru)'),
276 ('zh', 'Chinese (zh)'),
276 ('zh', 'Chinese (zh)'),
277 ]
277 ]
278
278
279 c.allowed_extern_types = [
279 c.allowed_extern_types = [
280 (x.uid, x.get_display_name()) for x in self.get_auth_plugins()
280 (x.uid, x.get_display_name()) for x in self.get_auth_plugins()
281 ]
281 ]
282 perms = req.registry.settings.get('available_permissions')
282 perms = req.registry.settings.get('available_permissions')
283 if not perms:
283 if not perms:
284 # inject info about available permissions
284 # inject info about available permissions
285 auth.set_available_permissions(req.registry.settings)
285 auth.set_available_permissions(req.registry.settings)
286
286
287 c.available_permissions = req.registry.settings['available_permissions']
287 c.available_permissions = req.registry.settings['available_permissions']
288 PermissionModel().set_global_permission_choices(
288 PermissionModel().set_global_permission_choices(
289 c, gettext_translator=req.translate)
289 c, gettext_translator=req.translate)
290
290
291 return c
291 return c
292
292
293 @LoginRequired()
293 @LoginRequired()
294 @HasPermissionAllDecorator('hg.admin')
294 @HasPermissionAllDecorator('hg.admin')
295 @CSRFRequired()
295 @CSRFRequired()
296 def user_update(self):
296 def user_update(self):
297 _ = self.request.translate
297 _ = self.request.translate
298 c = self.load_default_context()
298 c = self.load_default_context()
299
299
300 user_id = self.db_user_id
300 user_id = self.db_user_id
301 c.user = self.db_user
301 c.user = self.db_user
302
302
303 c.active = 'profile'
303 c.active = 'profile'
304 c.extern_type = c.user.extern_type
304 c.extern_type = c.user.extern_type
305 c.extern_name = c.user.extern_name
305 c.extern_name = c.user.extern_name
306 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
306 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
307 available_languages = [x[0] for x in c.allowed_languages]
307 available_languages = [x[0] for x in c.allowed_languages]
308 _form = UserForm(self.request.translate, edit=True,
308 _form = UserForm(self.request.translate, edit=True,
309 available_languages=available_languages,
309 available_languages=available_languages,
310 old_data={'user_id': user_id,
310 old_data={'user_id': user_id,
311 'email': c.user.email})()
311 'email': c.user.email})()
312
312
313 c.edit_mode = self.request.POST.get('edit') == '1'
313 c.edit_mode = self.request.POST.get('edit') == '1'
314 form_result = {}
314 form_result = {}
315 old_values = c.user.get_api_data()
315 old_values = c.user.get_api_data()
316 try:
316 try:
317 form_result = _form.to_python(dict(self.request.POST))
317 form_result = _form.to_python(dict(self.request.POST))
318 skip_attrs = ['extern_name']
318 skip_attrs = ['extern_name']
319 # TODO: plugin should define if username can be updated
319 # TODO: plugin should define if username can be updated
320
320
321 if c.extern_type != "rhodecode" and not c.edit_mode:
321 if c.extern_type != "rhodecode" and not c.edit_mode:
322 # forbid updating username for external accounts
322 # forbid updating username for external accounts
323 skip_attrs.append('username')
323 skip_attrs.append('username')
324
324
325 UserModel().update_user(
325 UserModel().update_user(
326 user_id, skip_attrs=skip_attrs, **form_result)
326 user_id, skip_attrs=skip_attrs, **form_result)
327
327
328 audit_logger.store_web(
328 audit_logger.store_web(
329 'user.edit', action_data={'old_data': old_values},
329 'user.edit', action_data={'old_data': old_values},
330 user=c.rhodecode_user)
330 user=c.rhodecode_user)
331
331
332 Session().commit()
332 Session().commit()
333 h.flash(_('User updated successfully'), category='success')
333 h.flash(_('User updated successfully'), category='success')
334 except formencode.Invalid as errors:
334 except formencode.Invalid as errors:
335 data = render(
335 data = render(
336 'rhodecode:templates/admin/users/user_edit.mako',
336 'rhodecode:templates/admin/users/user_edit.mako',
337 self._get_template_context(c), self.request)
337 self._get_template_context(c), self.request)
338 html = formencode.htmlfill.render(
338 html = formencode.htmlfill.render(
339 data,
339 data,
340 defaults=errors.value,
340 defaults=errors.value,
341 errors=errors.unpack_errors() or {},
341 errors=errors.unpack_errors() or {},
342 prefix_error=False,
342 prefix_error=False,
343 encoding="UTF-8",
343 encoding="UTF-8",
344 force_defaults=False
344 force_defaults=False
345 )
345 )
346 return Response(html)
346 return Response(html)
347 except UserCreationError as e:
347 except UserCreationError as e:
348 h.flash(safe_str(e), 'error')
348 h.flash(safe_str(e), 'error')
349 except Exception:
349 except Exception:
350 log.exception("Exception updating user")
350 log.exception("Exception updating user")
351 h.flash(_('Error occurred during update of user %s')
351 h.flash(_('Error occurred during update of user %s')
352 % form_result.get('username'), category='error')
352 % form_result.get('username'), category='error')
353 raise HTTPFound(h.route_path('user_edit', user_id=user_id))
353 raise HTTPFound(h.route_path('user_edit', user_id=user_id))
354
354
355 @LoginRequired()
355 @LoginRequired()
356 @HasPermissionAllDecorator('hg.admin')
356 @HasPermissionAllDecorator('hg.admin')
357 @CSRFRequired()
357 @CSRFRequired()
358 def user_delete(self):
358 def user_delete(self):
359 _ = self.request.translate
359 _ = self.request.translate
360 c = self.load_default_context()
360 c = self.load_default_context()
361 c.user = self.db_user
361 c.user = self.db_user
362
362
363 _repos = len(c.user.repositories)
363 _repos = len(c.user.repositories)
364 _repo_groups = len(c.user.repository_groups)
364 _repo_groups = len(c.user.repository_groups)
365 _user_groups = len(c.user.user_groups)
365 _user_groups = len(c.user.user_groups)
366 _pull_requests = len(c.user.user_pull_requests)
366 _pull_requests = len(c.user.user_pull_requests)
367 _artifacts = len(c.user.artifacts)
367 _artifacts = len(c.user.artifacts)
368
368
369 handle_repos = None
369 handle_repos = None
370 handle_repo_groups = None
370 handle_repo_groups = None
371 handle_user_groups = None
371 handle_user_groups = None
372 handle_pull_requests = None
372 handle_pull_requests = None
373 handle_artifacts = None
373 handle_artifacts = None
374
374
375 # calls for flash of handle based on handle case detach or delete
375 # calls for flash of handle based on handle case detach or delete
376 def set_handle_flash_repos():
376 def set_handle_flash_repos():
377 handle = handle_repos
377 handle = handle_repos
378 if handle == 'detach':
378 if handle == 'detach':
379 h.flash(_('Detached %s repositories') % _repos,
379 h.flash(_('Detached %s repositories') % _repos,
380 category='success')
380 category='success')
381 elif handle == 'delete':
381 elif handle == 'delete':
382 h.flash(_('Deleted %s repositories') % _repos,
382 h.flash(_('Deleted %s repositories') % _repos,
383 category='success')
383 category='success')
384
384
385 def set_handle_flash_repo_groups():
385 def set_handle_flash_repo_groups():
386 handle = handle_repo_groups
386 handle = handle_repo_groups
387 if handle == 'detach':
387 if handle == 'detach':
388 h.flash(_('Detached %s repository groups') % _repo_groups,
388 h.flash(_('Detached %s repository groups') % _repo_groups,
389 category='success')
389 category='success')
390 elif handle == 'delete':
390 elif handle == 'delete':
391 h.flash(_('Deleted %s repository groups') % _repo_groups,
391 h.flash(_('Deleted %s repository groups') % _repo_groups,
392 category='success')
392 category='success')
393
393
394 def set_handle_flash_user_groups():
394 def set_handle_flash_user_groups():
395 handle = handle_user_groups
395 handle = handle_user_groups
396 if handle == 'detach':
396 if handle == 'detach':
397 h.flash(_('Detached %s user groups') % _user_groups,
397 h.flash(_('Detached %s user groups') % _user_groups,
398 category='success')
398 category='success')
399 elif handle == 'delete':
399 elif handle == 'delete':
400 h.flash(_('Deleted %s user groups') % _user_groups,
400 h.flash(_('Deleted %s user groups') % _user_groups,
401 category='success')
401 category='success')
402
402
403 def set_handle_flash_pull_requests():
403 def set_handle_flash_pull_requests():
404 handle = handle_pull_requests
404 handle = handle_pull_requests
405 if handle == 'detach':
405 if handle == 'detach':
406 h.flash(_('Detached %s pull requests') % _pull_requests,
406 h.flash(_('Detached %s pull requests') % _pull_requests,
407 category='success')
407 category='success')
408 elif handle == 'delete':
408 elif handle == 'delete':
409 h.flash(_('Deleted %s pull requests') % _pull_requests,
409 h.flash(_('Deleted %s pull requests') % _pull_requests,
410 category='success')
410 category='success')
411
411
412 def set_handle_flash_artifacts():
412 def set_handle_flash_artifacts():
413 handle = handle_artifacts
413 handle = handle_artifacts
414 if handle == 'detach':
414 if handle == 'detach':
415 h.flash(_('Detached %s artifacts') % _artifacts,
415 h.flash(_('Detached %s artifacts') % _artifacts,
416 category='success')
416 category='success')
417 elif handle == 'delete':
417 elif handle == 'delete':
418 h.flash(_('Deleted %s artifacts') % _artifacts,
418 h.flash(_('Deleted %s artifacts') % _artifacts,
419 category='success')
419 category='success')
420
420
421 handle_user = User.get_first_super_admin()
421 handle_user = User.get_first_super_admin()
422 handle_user_id = safe_int(self.request.POST.get('detach_user_id'))
422 handle_user_id = safe_int(self.request.POST.get('detach_user_id'))
423 if handle_user_id:
423 if handle_user_id:
424 # NOTE(marcink): we get new owner for objects...
424 # NOTE(marcink): we get new owner for objects...
425 handle_user = User.get_or_404(handle_user_id)
425 handle_user = User.get_or_404(handle_user_id)
426
426
427 if _repos and self.request.POST.get('user_repos'):
427 if _repos and self.request.POST.get('user_repos'):
428 handle_repos = self.request.POST['user_repos']
428 handle_repos = self.request.POST['user_repos']
429
429
430 if _repo_groups and self.request.POST.get('user_repo_groups'):
430 if _repo_groups and self.request.POST.get('user_repo_groups'):
431 handle_repo_groups = self.request.POST['user_repo_groups']
431 handle_repo_groups = self.request.POST['user_repo_groups']
432
432
433 if _user_groups and self.request.POST.get('user_user_groups'):
433 if _user_groups and self.request.POST.get('user_user_groups'):
434 handle_user_groups = self.request.POST['user_user_groups']
434 handle_user_groups = self.request.POST['user_user_groups']
435
435
436 if _pull_requests and self.request.POST.get('user_pull_requests'):
436 if _pull_requests and self.request.POST.get('user_pull_requests'):
437 handle_pull_requests = self.request.POST['user_pull_requests']
437 handle_pull_requests = self.request.POST['user_pull_requests']
438
438
439 if _artifacts and self.request.POST.get('user_artifacts'):
439 if _artifacts and self.request.POST.get('user_artifacts'):
440 handle_artifacts = self.request.POST['user_artifacts']
440 handle_artifacts = self.request.POST['user_artifacts']
441
441
442 old_values = c.user.get_api_data()
442 old_values = c.user.get_api_data()
443
443
444 try:
444 try:
445
445
446 UserModel().delete(
446 UserModel().delete(
447 c.user,
447 c.user,
448 handle_repos=handle_repos,
448 handle_repos=handle_repos,
449 handle_repo_groups=handle_repo_groups,
449 handle_repo_groups=handle_repo_groups,
450 handle_user_groups=handle_user_groups,
450 handle_user_groups=handle_user_groups,
451 handle_pull_requests=handle_pull_requests,
451 handle_pull_requests=handle_pull_requests,
452 handle_artifacts=handle_artifacts,
452 handle_artifacts=handle_artifacts,
453 handle_new_owner=handle_user
453 handle_new_owner=handle_user
454 )
454 )
455
455
456 audit_logger.store_web(
456 audit_logger.store_web(
457 'user.delete', action_data={'old_data': old_values},
457 'user.delete', action_data={'old_data': old_values},
458 user=c.rhodecode_user)
458 user=c.rhodecode_user)
459
459
460 Session().commit()
460 Session().commit()
461 set_handle_flash_repos()
461 set_handle_flash_repos()
462 set_handle_flash_repo_groups()
462 set_handle_flash_repo_groups()
463 set_handle_flash_user_groups()
463 set_handle_flash_user_groups()
464 set_handle_flash_pull_requests()
464 set_handle_flash_pull_requests()
465 set_handle_flash_artifacts()
465 set_handle_flash_artifacts()
466 username = h.escape(old_values['username'])
466 username = h.escape(old_values['username'])
467 h.flash(_('Successfully deleted user `{}`').format(username), category='success')
467 h.flash(_('Successfully deleted user `{}`').format(username), category='success')
468 except (UserOwnsReposException, UserOwnsRepoGroupsException,
468 except (UserOwnsReposException, UserOwnsRepoGroupsException,
469 UserOwnsUserGroupsException, UserOwnsPullRequestsException,
469 UserOwnsUserGroupsException, UserOwnsPullRequestsException,
470 UserOwnsArtifactsException, DefaultUserException) as e:
470 UserOwnsArtifactsException, DefaultUserException) as e:
471
471
472 h.flash(safe_str(e), category='warning')
472 h.flash(safe_str(e), category='warning')
473 except Exception:
473 except Exception:
474 log.exception("Exception during deletion of user")
474 log.exception("Exception during deletion of user")
475 h.flash(_('An error occurred during deletion of user'),
475 h.flash(_('An error occurred during deletion of user'),
476 category='error')
476 category='error')
477 raise HTTPFound(h.route_path('users'))
477 raise HTTPFound(h.route_path('users'))
478
478
479 @LoginRequired()
479 @LoginRequired()
480 @HasPermissionAllDecorator('hg.admin')
480 @HasPermissionAllDecorator('hg.admin')
481 def user_edit(self):
481 def user_edit(self):
482 _ = self.request.translate
482 _ = self.request.translate
483 c = self.load_default_context()
483 c = self.load_default_context()
484 c.user = self.db_user
484 c.user = self.db_user
485
485
486 c.active = 'profile'
486 c.active = 'profile'
487 c.extern_type = c.user.extern_type
487 c.extern_type = c.user.extern_type
488 c.extern_name = c.user.extern_name
488 c.extern_name = c.user.extern_name
489 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
489 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
490 c.edit_mode = self.request.GET.get('edit') == '1'
490 c.edit_mode = self.request.GET.get('edit') == '1'
491
491
492 defaults = c.user.get_dict()
492 defaults = c.user.get_dict()
493 defaults.update({'language': c.user.user_data.get('language')})
493 defaults.update({'language': c.user.user_data.get('language')})
494
494
495 data = render(
495 data = render(
496 'rhodecode:templates/admin/users/user_edit.mako',
496 'rhodecode:templates/admin/users/user_edit.mako',
497 self._get_template_context(c), self.request)
497 self._get_template_context(c), self.request)
498 html = formencode.htmlfill.render(
498 html = formencode.htmlfill.render(
499 data,
499 data,
500 defaults=defaults,
500 defaults=defaults,
501 encoding="UTF-8",
501 encoding="UTF-8",
502 force_defaults=False
502 force_defaults=False
503 )
503 )
504 return Response(html)
504 return Response(html)
505
505
506 @LoginRequired()
506 @LoginRequired()
507 @HasPermissionAllDecorator('hg.admin')
507 @HasPermissionAllDecorator('hg.admin')
508 def user_edit_advanced(self):
508 def user_edit_advanced(self):
509 _ = self.request.translate
509 _ = self.request.translate
510 c = self.load_default_context()
510 c = self.load_default_context()
511
511
512 user_id = self.db_user_id
512 user_id = self.db_user_id
513 c.user = self.db_user
513 c.user = self.db_user
514
514
515 c.detach_user = User.get_first_super_admin()
515 c.detach_user = User.get_first_super_admin()
516 detach_user_id = safe_int(self.request.GET.get('detach_user_id'))
516 detach_user_id = safe_int(self.request.GET.get('detach_user_id'))
517 if detach_user_id:
517 if detach_user_id:
518 c.detach_user = User.get_or_404(detach_user_id)
518 c.detach_user = User.get_or_404(detach_user_id)
519
519
520 c.active = 'advanced'
520 c.active = 'advanced'
521 c.personal_repo_group = RepoGroup.get_user_personal_repo_group(user_id)
521 c.personal_repo_group = RepoGroup.get_user_personal_repo_group(user_id)
522 c.personal_repo_group_name = RepoGroupModel()\
522 c.personal_repo_group_name = RepoGroupModel()\
523 .get_personal_group_name(c.user)
523 .get_personal_group_name(c.user)
524
524
525 c.user_to_review_rules = sorted(
525 c.user_to_review_rules = sorted(
526 (x.user for x in c.user.user_review_rules),
526 (x.user for x in c.user.user_review_rules),
527 key=lambda u: u.username.lower())
527 key=lambda u: u.username.lower())
528
528
529 defaults = c.user.get_dict()
529 defaults = c.user.get_dict()
530
530
531 # Interim workaround if the user participated on any pull requests as a
531 # Interim workaround if the user participated on any pull requests as a
532 # reviewer.
532 # reviewer.
533 has_review = len(c.user.reviewer_pull_requests)
533 has_review = len(c.user.reviewer_pull_requests)
534 c.can_delete_user = not has_review
534 c.can_delete_user = not has_review
535 c.can_delete_user_message = ''
535 c.can_delete_user_message = ''
536 inactive_link = h.link_to(
536 inactive_link = h.link_to(
537 'inactive', h.route_path('user_edit', user_id=user_id, _anchor='active'))
537 'inactive', h.route_path('user_edit', user_id=user_id, _anchor='active'))
538 if has_review == 1:
538 if has_review == 1:
539 c.can_delete_user_message = h.literal(_(
539 c.can_delete_user_message = h.literal(_(
540 'The user participates as reviewer in {} pull request and '
540 'The user participates as reviewer in {} pull request and '
541 'cannot be deleted. \nYou can set the user to '
541 'cannot be deleted. \nYou can set the user to '
542 '"{}" instead of deleting it.').format(
542 '"{}" instead of deleting it.').format(
543 has_review, inactive_link))
543 has_review, inactive_link))
544 elif has_review:
544 elif has_review:
545 c.can_delete_user_message = h.literal(_(
545 c.can_delete_user_message = h.literal(_(
546 'The user participates as reviewer in {} pull requests and '
546 'The user participates as reviewer in {} pull requests and '
547 'cannot be deleted. \nYou can set the user to '
547 'cannot be deleted. \nYou can set the user to '
548 '"{}" instead of deleting it.').format(
548 '"{}" instead of deleting it.').format(
549 has_review, inactive_link))
549 has_review, inactive_link))
550
550
551 data = render(
551 data = render(
552 'rhodecode:templates/admin/users/user_edit.mako',
552 'rhodecode:templates/admin/users/user_edit.mako',
553 self._get_template_context(c), self.request)
553 self._get_template_context(c), self.request)
554 html = formencode.htmlfill.render(
554 html = formencode.htmlfill.render(
555 data,
555 data,
556 defaults=defaults,
556 defaults=defaults,
557 encoding="UTF-8",
557 encoding="UTF-8",
558 force_defaults=False
558 force_defaults=False
559 )
559 )
560 return Response(html)
560 return Response(html)
561
561
562 @LoginRequired()
562 @LoginRequired()
563 @HasPermissionAllDecorator('hg.admin')
563 @HasPermissionAllDecorator('hg.admin')
564 def user_edit_global_perms(self):
564 def user_edit_global_perms(self):
565 _ = self.request.translate
565 _ = self.request.translate
566 c = self.load_default_context()
566 c = self.load_default_context()
567 c.user = self.db_user
567 c.user = self.db_user
568
568
569 c.active = 'global_perms'
569 c.active = 'global_perms'
570
570
571 c.default_user = User.get_default_user()
571 c.default_user = User.get_default_user()
572 defaults = c.user.get_dict()
572 defaults = c.user.get_dict()
573 defaults.update(c.default_user.get_default_perms(suffix='_inherited'))
573 defaults.update(c.default_user.get_default_perms(suffix='_inherited'))
574 defaults.update(c.default_user.get_default_perms())
574 defaults.update(c.default_user.get_default_perms())
575 defaults.update(c.user.get_default_perms())
575 defaults.update(c.user.get_default_perms())
576
576
577 data = render(
577 data = render(
578 'rhodecode:templates/admin/users/user_edit.mako',
578 'rhodecode:templates/admin/users/user_edit.mako',
579 self._get_template_context(c), self.request)
579 self._get_template_context(c), self.request)
580 html = formencode.htmlfill.render(
580 html = formencode.htmlfill.render(
581 data,
581 data,
582 defaults=defaults,
582 defaults=defaults,
583 encoding="UTF-8",
583 encoding="UTF-8",
584 force_defaults=False
584 force_defaults=False
585 )
585 )
586 return Response(html)
586 return Response(html)
587
587
588 @LoginRequired()
588 @LoginRequired()
589 @HasPermissionAllDecorator('hg.admin')
589 @HasPermissionAllDecorator('hg.admin')
590 @CSRFRequired()
590 @CSRFRequired()
591 def user_edit_global_perms_update(self):
591 def user_edit_global_perms_update(self):
592 _ = self.request.translate
592 _ = self.request.translate
593 c = self.load_default_context()
593 c = self.load_default_context()
594
594
595 user_id = self.db_user_id
595 user_id = self.db_user_id
596 c.user = self.db_user
596 c.user = self.db_user
597
597
598 c.active = 'global_perms'
598 c.active = 'global_perms'
599 try:
599 try:
600 # first stage that verifies the checkbox
600 # first stage that verifies the checkbox
601 _form = UserIndividualPermissionsForm(self.request.translate)
601 _form = UserIndividualPermissionsForm(self.request.translate)
602 form_result = _form.to_python(dict(self.request.POST))
602 form_result = _form.to_python(dict(self.request.POST))
603 inherit_perms = form_result['inherit_default_permissions']
603 inherit_perms = form_result['inherit_default_permissions']
604 c.user.inherit_default_permissions = inherit_perms
604 c.user.inherit_default_permissions = inherit_perms
605 Session().add(c.user)
605 Session().add(c.user)
606
606
607 if not inherit_perms:
607 if not inherit_perms:
608 # only update the individual ones if we un check the flag
608 # only update the individual ones if we un check the flag
609 _form = UserPermissionsForm(
609 _form = UserPermissionsForm(
610 self.request.translate,
610 self.request.translate,
611 [x[0] for x in c.repo_create_choices],
611 [x[0] for x in c.repo_create_choices],
612 [x[0] for x in c.repo_create_on_write_choices],
612 [x[0] for x in c.repo_create_on_write_choices],
613 [x[0] for x in c.repo_group_create_choices],
613 [x[0] for x in c.repo_group_create_choices],
614 [x[0] for x in c.user_group_create_choices],
614 [x[0] for x in c.user_group_create_choices],
615 [x[0] for x in c.fork_choices],
615 [x[0] for x in c.fork_choices],
616 [x[0] for x in c.inherit_default_permission_choices])()
616 [x[0] for x in c.inherit_default_permission_choices])()
617
617
618 form_result = _form.to_python(dict(self.request.POST))
618 form_result = _form.to_python(dict(self.request.POST))
619 form_result.update({'perm_user_id': c.user.user_id})
619 form_result.update({'perm_user_id': c.user.user_id})
620
620
621 PermissionModel().update_user_permissions(form_result)
621 PermissionModel().update_user_permissions(form_result)
622
622
623 # TODO(marcink): implement global permissions
623 # TODO(marcink): implement global permissions
624 # audit_log.store_web('user.edit.permissions')
624 # audit_log.store_web('user.edit.permissions')
625
625
626 Session().commit()
626 Session().commit()
627
627
628 h.flash(_('User global permissions updated successfully'),
628 h.flash(_('User global permissions updated successfully'),
629 category='success')
629 category='success')
630
630
631 except formencode.Invalid as errors:
631 except formencode.Invalid as errors:
632 data = render(
632 data = render(
633 'rhodecode:templates/admin/users/user_edit.mako',
633 'rhodecode:templates/admin/users/user_edit.mako',
634 self._get_template_context(c), self.request)
634 self._get_template_context(c), self.request)
635 html = formencode.htmlfill.render(
635 html = formencode.htmlfill.render(
636 data,
636 data,
637 defaults=errors.value,
637 defaults=errors.value,
638 errors=errors.unpack_errors() or {},
638 errors=errors.unpack_errors() or {},
639 prefix_error=False,
639 prefix_error=False,
640 encoding="UTF-8",
640 encoding="UTF-8",
641 force_defaults=False
641 force_defaults=False
642 )
642 )
643 return Response(html)
643 return Response(html)
644 except Exception:
644 except Exception:
645 log.exception("Exception during permissions saving")
645 log.exception("Exception during permissions saving")
646 h.flash(_('An error occurred during permissions saving'),
646 h.flash(_('An error occurred during permissions saving'),
647 category='error')
647 category='error')
648
648
649 affected_user_ids = [user_id]
649 affected_user_ids = [user_id]
650 PermissionModel().trigger_permission_flush(affected_user_ids)
650 PermissionModel().trigger_permission_flush(affected_user_ids)
651 raise HTTPFound(h.route_path('user_edit_global_perms', user_id=user_id))
651 raise HTTPFound(h.route_path('user_edit_global_perms', user_id=user_id))
652
652
653 @LoginRequired()
653 @LoginRequired()
654 @HasPermissionAllDecorator('hg.admin')
654 @HasPermissionAllDecorator('hg.admin')
655 @CSRFRequired()
655 @CSRFRequired()
656 def user_enable_force_password_reset(self):
656 def user_enable_force_password_reset(self):
657 _ = self.request.translate
657 _ = self.request.translate
658 c = self.load_default_context()
658 c = self.load_default_context()
659
659
660 user_id = self.db_user_id
660 user_id = self.db_user_id
661 c.user = self.db_user
661 c.user = self.db_user
662
662
663 try:
663 try:
664 c.user.update_userdata(force_password_change=True)
664 c.user.update_userdata(force_password_change=True)
665
665
666 msg = _('Force password change enabled for user')
666 msg = _('Force password change enabled for user')
667 audit_logger.store_web('user.edit.password_reset.enabled',
667 audit_logger.store_web('user.edit.password_reset.enabled',
668 user=c.rhodecode_user)
668 user=c.rhodecode_user)
669
669
670 Session().commit()
670 Session().commit()
671 h.flash(msg, category='success')
671 h.flash(msg, category='success')
672 except Exception:
672 except Exception:
673 log.exception("Exception during password reset for user")
673 log.exception("Exception during password reset for user")
674 h.flash(_('An error occurred during password reset for user'),
674 h.flash(_('An error occurred during password reset for user'),
675 category='error')
675 category='error')
676
676
677 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
677 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
678
678
679 @LoginRequired()
679 @LoginRequired()
680 @HasPermissionAllDecorator('hg.admin')
680 @HasPermissionAllDecorator('hg.admin')
681 @CSRFRequired()
681 @CSRFRequired()
682 def user_disable_force_password_reset(self):
682 def user_disable_force_password_reset(self):
683 _ = self.request.translate
683 _ = self.request.translate
684 c = self.load_default_context()
684 c = self.load_default_context()
685
685
686 user_id = self.db_user_id
686 user_id = self.db_user_id
687 c.user = self.db_user
687 c.user = self.db_user
688
688
689 try:
689 try:
690 c.user.update_userdata(force_password_change=False)
690 c.user.update_userdata(force_password_change=False)
691
691
692 msg = _('Force password change disabled for user')
692 msg = _('Force password change disabled for user')
693 audit_logger.store_web(
693 audit_logger.store_web(
694 'user.edit.password_reset.disabled',
694 'user.edit.password_reset.disabled',
695 user=c.rhodecode_user)
695 user=c.rhodecode_user)
696
696
697 Session().commit()
697 Session().commit()
698 h.flash(msg, category='success')
698 h.flash(msg, category='success')
699 except Exception:
699 except Exception:
700 log.exception("Exception during password reset for user")
700 log.exception("Exception during password reset for user")
701 h.flash(_('An error occurred during password reset for user'),
701 h.flash(_('An error occurred during password reset for user'),
702 category='error')
702 category='error')
703
703
704 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
704 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
705
705
706 @LoginRequired()
706 @LoginRequired()
707 @HasPermissionAllDecorator('hg.admin')
707 @HasPermissionAllDecorator('hg.admin')
708 @CSRFRequired()
708 @CSRFRequired()
709 def user_notice_dismiss(self):
709 def user_notice_dismiss(self):
710 _ = self.request.translate
710 _ = self.request.translate
711 c = self.load_default_context()
711 c = self.load_default_context()
712
712
713 user_id = self.db_user_id
713 user_id = self.db_user_id
714 c.user = self.db_user
714 c.user = self.db_user
715 user_notice_id = safe_int(self.request.POST.get('notice_id'))
715 user_notice_id = safe_int(self.request.POST.get('notice_id'))
716 notice = UserNotice().query()\
716 notice = UserNotice().query()\
717 .filter(UserNotice.user_id == user_id)\
717 .filter(UserNotice.user_id == user_id)\
718 .filter(UserNotice.user_notice_id == user_notice_id)\
718 .filter(UserNotice.user_notice_id == user_notice_id)\
719 .scalar()
719 .scalar()
720 read = False
720 read = False
721 if notice:
721 if notice:
722 notice.notice_read = True
722 notice.notice_read = True
723 Session().add(notice)
723 Session().add(notice)
724 Session().commit()
724 Session().commit()
725 read = True
725 read = True
726
726
727 return {'notice': user_notice_id, 'read': read}
727 return {'notice': user_notice_id, 'read': read}
728
728
729 @LoginRequired()
729 @LoginRequired()
730 @HasPermissionAllDecorator('hg.admin')
730 @HasPermissionAllDecorator('hg.admin')
731 @CSRFRequired()
731 @CSRFRequired()
732 def user_create_personal_repo_group(self):
732 def user_create_personal_repo_group(self):
733 """
733 """
734 Create personal repository group for this user
734 Create personal repository group for this user
735 """
735 """
736 from rhodecode.model.repo_group import RepoGroupModel
736 from rhodecode.model.repo_group import RepoGroupModel
737
737
738 _ = self.request.translate
738 _ = self.request.translate
739 c = self.load_default_context()
739 c = self.load_default_context()
740
740
741 user_id = self.db_user_id
741 user_id = self.db_user_id
742 c.user = self.db_user
742 c.user = self.db_user
743
743
744 personal_repo_group = RepoGroup.get_user_personal_repo_group(
744 personal_repo_group = RepoGroup.get_user_personal_repo_group(
745 c.user.user_id)
745 c.user.user_id)
746 if personal_repo_group:
746 if personal_repo_group:
747 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
747 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
748
748
749 personal_repo_group_name = RepoGroupModel().get_personal_group_name(c.user)
749 personal_repo_group_name = RepoGroupModel().get_personal_group_name(c.user)
750 named_personal_group = RepoGroup.get_by_group_name(
750 named_personal_group = RepoGroup.get_by_group_name(
751 personal_repo_group_name)
751 personal_repo_group_name)
752 try:
752 try:
753
753
754 if named_personal_group and named_personal_group.user_id == c.user.user_id:
754 if named_personal_group and named_personal_group.user_id == c.user.user_id:
755 # migrate the same named group, and mark it as personal
755 # migrate the same named group, and mark it as personal
756 named_personal_group.personal = True
756 named_personal_group.personal = True
757 Session().add(named_personal_group)
757 Session().add(named_personal_group)
758 Session().commit()
758 Session().commit()
759 msg = _('Linked repository group `{}` as personal'.format(
759 msg = _('Linked repository group `{}` as personal'.format(
760 personal_repo_group_name))
760 personal_repo_group_name))
761 h.flash(msg, category='success')
761 h.flash(msg, category='success')
762 elif not named_personal_group:
762 elif not named_personal_group:
763 RepoGroupModel().create_personal_repo_group(c.user)
763 RepoGroupModel().create_personal_repo_group(c.user)
764
764
765 msg = _('Created repository group `{}`'.format(
765 msg = _('Created repository group `{}`'.format(
766 personal_repo_group_name))
766 personal_repo_group_name))
767 h.flash(msg, category='success')
767 h.flash(msg, category='success')
768 else:
768 else:
769 msg = _('Repository group `{}` is already taken'.format(
769 msg = _('Repository group `{}` is already taken'.format(
770 personal_repo_group_name))
770 personal_repo_group_name))
771 h.flash(msg, category='warning')
771 h.flash(msg, category='warning')
772 except Exception:
772 except Exception:
773 log.exception("Exception during repository group creation")
773 log.exception("Exception during repository group creation")
774 msg = _(
774 msg = _(
775 'An error occurred during repository group creation for user')
775 'An error occurred during repository group creation for user')
776 h.flash(msg, category='error')
776 h.flash(msg, category='error')
777 Session().rollback()
777 Session().rollback()
778
778
779 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
779 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
780
780
781 @LoginRequired()
781 @LoginRequired()
782 @HasPermissionAllDecorator('hg.admin')
782 @HasPermissionAllDecorator('hg.admin')
783 def auth_tokens(self):
783 def auth_tokens(self):
784 _ = self.request.translate
784 _ = self.request.translate
785 c = self.load_default_context()
785 c = self.load_default_context()
786 c.user = self.db_user
786 c.user = self.db_user
787
787
788 c.active = 'auth_tokens'
788 c.active = 'auth_tokens'
789
789
790 c.lifetime_values = AuthTokenModel.get_lifetime_values(translator=_)
790 c.lifetime_values = AuthTokenModel.get_lifetime_values(translator=_)
791 c.role_values = [
791 c.role_values = [
792 (x, AuthTokenModel.cls._get_role_name(x))
792 (x, AuthTokenModel.cls._get_role_name(x))
793 for x in AuthTokenModel.cls.ROLES]
793 for x in AuthTokenModel.cls.ROLES]
794 c.role_options = [(c.role_values, _("Role"))]
794 c.role_options = [(c.role_values, _("Role"))]
795 c.user_auth_tokens = AuthTokenModel().get_auth_tokens(
795 c.user_auth_tokens = AuthTokenModel().get_auth_tokens(
796 c.user.user_id, show_expired=True)
796 c.user.user_id, show_expired=True)
797 c.role_vcs = AuthTokenModel.cls.ROLE_VCS
797 c.role_vcs = AuthTokenModel.cls.ROLE_VCS
798 return self._get_template_context(c)
798 return self._get_template_context(c)
799
799
800 @LoginRequired()
800 @LoginRequired()
801 @HasPermissionAllDecorator('hg.admin')
801 @HasPermissionAllDecorator('hg.admin')
802 def auth_tokens_view(self):
802 def auth_tokens_view(self):
803 _ = self.request.translate
803 _ = self.request.translate
804 c = self.load_default_context()
804 c = self.load_default_context()
805 c.user = self.db_user
805 c.user = self.db_user
806
806
807 auth_token_id = self.request.POST.get('auth_token_id')
807 auth_token_id = self.request.POST.get('auth_token_id')
808
808
809 if auth_token_id:
809 if auth_token_id:
810 token = UserApiKeys.get_or_404(auth_token_id)
810 token = UserApiKeys.get_or_404(auth_token_id)
811
811
812 return {
812 return {
813 'auth_token': token.api_key
813 'auth_token': token.api_key
814 }
814 }
815
815
816 def maybe_attach_token_scope(self, token):
816 def maybe_attach_token_scope(self, token):
817 # implemented in EE edition
817 # implemented in EE edition
818 pass
818 pass
819
819
820 @LoginRequired()
820 @LoginRequired()
821 @HasPermissionAllDecorator('hg.admin')
821 @HasPermissionAllDecorator('hg.admin')
822 @CSRFRequired()
822 @CSRFRequired()
823 def auth_tokens_add(self):
823 def auth_tokens_add(self):
824 _ = self.request.translate
824 _ = self.request.translate
825 c = self.load_default_context()
825 c = self.load_default_context()
826
826
827 user_id = self.db_user_id
827 user_id = self.db_user_id
828 c.user = self.db_user
828 c.user = self.db_user
829
829
830 user_data = c.user.get_api_data()
830 user_data = c.user.get_api_data()
831 lifetime = safe_int(self.request.POST.get('lifetime'), -1)
831 lifetime = safe_int(self.request.POST.get('lifetime'), -1)
832 description = self.request.POST.get('description')
832 description = self.request.POST.get('description')
833 role = self.request.POST.get('role')
833 role = self.request.POST.get('role')
834
834
835 token = UserModel().add_auth_token(
835 token = UserModel().add_auth_token(
836 user=c.user.user_id,
836 user=c.user.user_id,
837 lifetime_minutes=lifetime, role=role, description=description,
837 lifetime_minutes=lifetime, role=role, description=description,
838 scope_callback=self.maybe_attach_token_scope)
838 scope_callback=self.maybe_attach_token_scope)
839 token_data = token.get_api_data()
839 token_data = token.get_api_data()
840
840
841 audit_logger.store_web(
841 audit_logger.store_web(
842 'user.edit.token.add', action_data={
842 'user.edit.token.add', action_data={
843 'data': {'token': token_data, 'user': user_data}},
843 'data': {'token': token_data, 'user': user_data}},
844 user=self._rhodecode_user, )
844 user=self._rhodecode_user, )
845 Session().commit()
845 Session().commit()
846
846
847 h.flash(_("Auth token successfully created"), category='success')
847 h.flash(_("Auth token successfully created"), category='success')
848 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
848 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
849
849
850 @LoginRequired()
850 @LoginRequired()
851 @HasPermissionAllDecorator('hg.admin')
851 @HasPermissionAllDecorator('hg.admin')
852 @CSRFRequired()
852 @CSRFRequired()
853 def auth_tokens_delete(self):
853 def auth_tokens_delete(self):
854 _ = self.request.translate
854 _ = self.request.translate
855 c = self.load_default_context()
855 c = self.load_default_context()
856
856
857 user_id = self.db_user_id
857 user_id = self.db_user_id
858 c.user = self.db_user
858 c.user = self.db_user
859
859
860 user_data = c.user.get_api_data()
860 user_data = c.user.get_api_data()
861
861
862 del_auth_token = self.request.POST.get('del_auth_token')
862 del_auth_token = self.request.POST.get('del_auth_token')
863
863
864 if del_auth_token:
864 if del_auth_token:
865 token = UserApiKeys.get_or_404(del_auth_token)
865 token = UserApiKeys.get_or_404(del_auth_token)
866 token_data = token.get_api_data()
866 token_data = token.get_api_data()
867
867
868 AuthTokenModel().delete(del_auth_token, c.user.user_id)
868 AuthTokenModel().delete(del_auth_token, c.user.user_id)
869 audit_logger.store_web(
869 audit_logger.store_web(
870 'user.edit.token.delete', action_data={
870 'user.edit.token.delete', action_data={
871 'data': {'token': token_data, 'user': user_data}},
871 'data': {'token': token_data, 'user': user_data}},
872 user=self._rhodecode_user,)
872 user=self._rhodecode_user,)
873 Session().commit()
873 Session().commit()
874 h.flash(_("Auth token successfully deleted"), category='success')
874 h.flash(_("Auth token successfully deleted"), category='success')
875
875
876 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
876 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
877
877
878 @LoginRequired()
878 @LoginRequired()
879 @HasPermissionAllDecorator('hg.admin')
879 @HasPermissionAllDecorator('hg.admin')
880 def ssh_keys(self):
880 def ssh_keys(self):
881 _ = self.request.translate
881 _ = self.request.translate
882 c = self.load_default_context()
882 c = self.load_default_context()
883 c.user = self.db_user
883 c.user = self.db_user
884
884
885 c.active = 'ssh_keys'
885 c.active = 'ssh_keys'
886 c.default_key = self.request.GET.get('default_key')
886 c.default_key = self.request.GET.get('default_key')
887 c.user_ssh_keys = SshKeyModel().get_ssh_keys(c.user.user_id)
887 c.user_ssh_keys = SshKeyModel().get_ssh_keys(c.user.user_id)
888 return self._get_template_context(c)
888 return self._get_template_context(c)
889
889
890 @LoginRequired()
890 @LoginRequired()
891 @HasPermissionAllDecorator('hg.admin')
891 @HasPermissionAllDecorator('hg.admin')
892 def ssh_keys_generate_keypair(self):
892 def ssh_keys_generate_keypair(self):
893 _ = self.request.translate
893 _ = self.request.translate
894 c = self.load_default_context()
894 c = self.load_default_context()
895
895
896 c.user = self.db_user
896 c.user = self.db_user
897
897
898 c.active = 'ssh_keys_generate'
898 c.active = 'ssh_keys_generate'
899 comment = 'RhodeCode-SSH {}'.format(c.user.email or '')
899 comment = 'RhodeCode-SSH {}'.format(c.user.email or '')
900 private_format = self.request.GET.get('private_format') \
900 private_format = self.request.GET.get('private_format') \
901 or SshKeyModel.DEFAULT_PRIVATE_KEY_FORMAT
901 or SshKeyModel.DEFAULT_PRIVATE_KEY_FORMAT
902 c.private, c.public = SshKeyModel().generate_keypair(
902 c.private, c.public = SshKeyModel().generate_keypair(
903 comment=comment, private_format=private_format)
903 comment=comment, private_format=private_format)
904
904
905 return self._get_template_context(c)
905 return self._get_template_context(c)
906
906
907 @LoginRequired()
907 @LoginRequired()
908 @HasPermissionAllDecorator('hg.admin')
908 @HasPermissionAllDecorator('hg.admin')
909 @CSRFRequired()
909 @CSRFRequired()
910 def ssh_keys_add(self):
910 def ssh_keys_add(self):
911 _ = self.request.translate
911 _ = self.request.translate
912 c = self.load_default_context()
912 c = self.load_default_context()
913
913
914 user_id = self.db_user_id
914 user_id = self.db_user_id
915 c.user = self.db_user
915 c.user = self.db_user
916
916
917 user_data = c.user.get_api_data()
917 user_data = c.user.get_api_data()
918 key_data = self.request.POST.get('key_data')
918 key_data = self.request.POST.get('key_data')
919 description = self.request.POST.get('description')
919 description = self.request.POST.get('description')
920
920
921 fingerprint = 'unknown'
921 fingerprint = 'unknown'
922 try:
922 try:
923 if not key_data:
923 if not key_data:
924 raise ValueError('Please add a valid public key')
924 raise ValueError('Please add a valid public key')
925
925
926 key = SshKeyModel().parse_key(key_data.strip())
926 key = SshKeyModel().parse_key(key_data.strip())
927 fingerprint = key.hash_md5()
927 fingerprint = key.hash_md5()
928
928
929 ssh_key = SshKeyModel().create(
929 ssh_key = SshKeyModel().create(
930 c.user.user_id, fingerprint, key.keydata, description)
930 c.user.user_id, fingerprint, key.keydata, description)
931 ssh_key_data = ssh_key.get_api_data()
931 ssh_key_data = ssh_key.get_api_data()
932
932
933 audit_logger.store_web(
933 audit_logger.store_web(
934 'user.edit.ssh_key.add', action_data={
934 'user.edit.ssh_key.add', action_data={
935 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
935 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
936 user=self._rhodecode_user, )
936 user=self._rhodecode_user, )
937 Session().commit()
937 Session().commit()
938
938
939 # Trigger an event on change of keys.
939 # Trigger an event on change of keys.
940 trigger(SshKeyFileChangeEvent(), self.request.registry)
940 trigger(SshKeyFileChangeEvent(), self.request.registry)
941
941
942 h.flash(_("Ssh Key successfully created"), category='success')
942 h.flash(_("Ssh Key successfully created"), category='success')
943
943
944 except IntegrityError:
944 except IntegrityError:
945 log.exception("Exception during ssh key saving")
945 log.exception("Exception during ssh key saving")
946 err = 'Such key with fingerprint `{}` already exists, ' \
946 err = 'Such key with fingerprint `{}` already exists, ' \
947 'please use a different one'.format(fingerprint)
947 'please use a different one'.format(fingerprint)
948 h.flash(_('An error occurred during ssh key saving: {}').format(err),
948 h.flash(_('An error occurred during ssh key saving: {}').format(err),
949 category='error')
949 category='error')
950 except Exception as e:
950 except Exception as e:
951 log.exception("Exception during ssh key saving")
951 log.exception("Exception during ssh key saving")
952 h.flash(_('An error occurred during ssh key saving: {}').format(e),
952 h.flash(_('An error occurred during ssh key saving: {}').format(e),
953 category='error')
953 category='error')
954
954
955 return HTTPFound(
955 return HTTPFound(
956 h.route_path('edit_user_ssh_keys', user_id=user_id))
956 h.route_path('edit_user_ssh_keys', user_id=user_id))
957
957
958 @LoginRequired()
958 @LoginRequired()
959 @HasPermissionAllDecorator('hg.admin')
959 @HasPermissionAllDecorator('hg.admin')
960 @CSRFRequired()
960 @CSRFRequired()
961 def ssh_keys_delete(self):
961 def ssh_keys_delete(self):
962 _ = self.request.translate
962 _ = self.request.translate
963 c = self.load_default_context()
963 c = self.load_default_context()
964
964
965 user_id = self.db_user_id
965 user_id = self.db_user_id
966 c.user = self.db_user
966 c.user = self.db_user
967
967
968 user_data = c.user.get_api_data()
968 user_data = c.user.get_api_data()
969
969
970 del_ssh_key = self.request.POST.get('del_ssh_key')
970 del_ssh_key = self.request.POST.get('del_ssh_key')
971
971
972 if del_ssh_key:
972 if del_ssh_key:
973 ssh_key = UserSshKeys.get_or_404(del_ssh_key)
973 ssh_key = UserSshKeys.get_or_404(del_ssh_key)
974 ssh_key_data = ssh_key.get_api_data()
974 ssh_key_data = ssh_key.get_api_data()
975
975
976 SshKeyModel().delete(del_ssh_key, c.user.user_id)
976 SshKeyModel().delete(del_ssh_key, c.user.user_id)
977 audit_logger.store_web(
977 audit_logger.store_web(
978 'user.edit.ssh_key.delete', action_data={
978 'user.edit.ssh_key.delete', action_data={
979 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
979 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
980 user=self._rhodecode_user,)
980 user=self._rhodecode_user,)
981 Session().commit()
981 Session().commit()
982 # Trigger an event on change of keys.
982 # Trigger an event on change of keys.
983 trigger(SshKeyFileChangeEvent(), self.request.registry)
983 trigger(SshKeyFileChangeEvent(), self.request.registry)
984 h.flash(_("Ssh key successfully deleted"), category='success')
984 h.flash(_("Ssh key successfully deleted"), category='success')
985
985
986 return HTTPFound(h.route_path('edit_user_ssh_keys', user_id=user_id))
986 return HTTPFound(h.route_path('edit_user_ssh_keys', user_id=user_id))
987
987
988 @LoginRequired()
988 @LoginRequired()
989 @HasPermissionAllDecorator('hg.admin')
989 @HasPermissionAllDecorator('hg.admin')
990 def emails(self):
990 def emails(self):
991 _ = self.request.translate
991 _ = self.request.translate
992 c = self.load_default_context()
992 c = self.load_default_context()
993 c.user = self.db_user
993 c.user = self.db_user
994
994
995 c.active = 'emails'
995 c.active = 'emails'
996 c.user_email_map = UserEmailMap.query() \
996 c.user_email_map = UserEmailMap.query() \
997 .filter(UserEmailMap.user == c.user).all()
997 .filter(UserEmailMap.user == c.user).all()
998
998
999 return self._get_template_context(c)
999 return self._get_template_context(c)
1000
1000
1001 @LoginRequired()
1001 @LoginRequired()
1002 @HasPermissionAllDecorator('hg.admin')
1002 @HasPermissionAllDecorator('hg.admin')
1003 @CSRFRequired()
1003 @CSRFRequired()
1004 def emails_add(self):
1004 def emails_add(self):
1005 _ = self.request.translate
1005 _ = self.request.translate
1006 c = self.load_default_context()
1006 c = self.load_default_context()
1007
1007
1008 user_id = self.db_user_id
1008 user_id = self.db_user_id
1009 c.user = self.db_user
1009 c.user = self.db_user
1010
1010
1011 email = self.request.POST.get('new_email')
1011 email = self.request.POST.get('new_email')
1012 user_data = c.user.get_api_data()
1012 user_data = c.user.get_api_data()
1013 try:
1013 try:
1014
1014
1015 form = UserExtraEmailForm(self.request.translate)()
1015 form = UserExtraEmailForm(self.request.translate)()
1016 data = form.to_python({'email': email})
1016 data = form.to_python({'email': email})
1017 email = data['email']
1017 email = data['email']
1018
1018
1019 UserModel().add_extra_email(c.user.user_id, email)
1019 UserModel().add_extra_email(c.user.user_id, email)
1020 audit_logger.store_web(
1020 audit_logger.store_web(
1021 'user.edit.email.add',
1021 'user.edit.email.add',
1022 action_data={'email': email, 'user': user_data},
1022 action_data={'email': email, 'user': user_data},
1023 user=self._rhodecode_user)
1023 user=self._rhodecode_user)
1024 Session().commit()
1024 Session().commit()
1025 h.flash(_("Added new email address `%s` for user account") % email,
1025 h.flash(_("Added new email address `%s` for user account") % email,
1026 category='success')
1026 category='success')
1027 except formencode.Invalid as error:
1027 except formencode.Invalid as error:
1028 msg = error.unpack_errors()['email']
1028 msg = error.unpack_errors()['email']
1029 h.flash(h.escape(msg), category='error')
1029 h.flash(h.escape(msg), category='error')
1030 except IntegrityError:
1030 except IntegrityError:
1031 log.warning("Email %s already exists", email)
1031 log.warning("Email %s already exists", email)
1032 h.flash(_('Email `{}` is already registered for another user.').format(email),
1032 h.flash(_('Email `{}` is already registered for another user.').format(email),
1033 category='error')
1033 category='error')
1034 except Exception:
1034 except Exception:
1035 log.exception("Exception during email saving")
1035 log.exception("Exception during email saving")
1036 h.flash(_('An error occurred during email saving'),
1036 h.flash(_('An error occurred during email saving'),
1037 category='error')
1037 category='error')
1038 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
1038 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
1039
1039
1040 @LoginRequired()
1040 @LoginRequired()
1041 @HasPermissionAllDecorator('hg.admin')
1041 @HasPermissionAllDecorator('hg.admin')
1042 @CSRFRequired()
1042 @CSRFRequired()
1043 def emails_delete(self):
1043 def emails_delete(self):
1044 _ = self.request.translate
1044 _ = self.request.translate
1045 c = self.load_default_context()
1045 c = self.load_default_context()
1046
1046
1047 user_id = self.db_user_id
1047 user_id = self.db_user_id
1048 c.user = self.db_user
1048 c.user = self.db_user
1049
1049
1050 email_id = self.request.POST.get('del_email_id')
1050 email_id = self.request.POST.get('del_email_id')
1051 user_model = UserModel()
1051 user_model = UserModel()
1052
1052
1053 email = UserEmailMap.query().get(email_id).email
1053 email = UserEmailMap.query().get(email_id).email
1054 user_data = c.user.get_api_data()
1054 user_data = c.user.get_api_data()
1055 user_model.delete_extra_email(c.user.user_id, email_id)
1055 user_model.delete_extra_email(c.user.user_id, email_id)
1056 audit_logger.store_web(
1056 audit_logger.store_web(
1057 'user.edit.email.delete',
1057 'user.edit.email.delete',
1058 action_data={'email': email, 'user': user_data},
1058 action_data={'email': email, 'user': user_data},
1059 user=self._rhodecode_user)
1059 user=self._rhodecode_user)
1060 Session().commit()
1060 Session().commit()
1061 h.flash(_("Removed email address from user account"),
1061 h.flash(_("Removed email address from user account"),
1062 category='success')
1062 category='success')
1063 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
1063 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
1064
1064
1065 @LoginRequired()
1065 @LoginRequired()
1066 @HasPermissionAllDecorator('hg.admin')
1066 @HasPermissionAllDecorator('hg.admin')
1067 def ips(self):
1067 def ips(self):
1068 _ = self.request.translate
1068 _ = self.request.translate
1069 c = self.load_default_context()
1069 c = self.load_default_context()
1070 c.user = self.db_user
1070 c.user = self.db_user
1071
1071
1072 c.active = 'ips'
1072 c.active = 'ips'
1073 c.user_ip_map = UserIpMap.query() \
1073 c.user_ip_map = UserIpMap.query() \
1074 .filter(UserIpMap.user == c.user).all()
1074 .filter(UserIpMap.user == c.user).all()
1075
1075
1076 c.inherit_default_ips = c.user.inherit_default_permissions
1076 c.inherit_default_ips = c.user.inherit_default_permissions
1077 c.default_user_ip_map = UserIpMap.query() \
1077 c.default_user_ip_map = UserIpMap.query() \
1078 .filter(UserIpMap.user == User.get_default_user()).all()
1078 .filter(UserIpMap.user == User.get_default_user()).all()
1079
1079
1080 return self._get_template_context(c)
1080 return self._get_template_context(c)
1081
1081
1082 @LoginRequired()
1082 @LoginRequired()
1083 @HasPermissionAllDecorator('hg.admin')
1083 @HasPermissionAllDecorator('hg.admin')
1084 @CSRFRequired()
1084 @CSRFRequired()
1085 # NOTE(marcink): this view is allowed for default users, as we can
1085 # NOTE(marcink): this view is allowed for default users, as we can
1086 # edit their IP white list
1086 # edit their IP white list
1087 def ips_add(self):
1087 def ips_add(self):
1088 _ = self.request.translate
1088 _ = self.request.translate
1089 c = self.load_default_context()
1089 c = self.load_default_context()
1090
1090
1091 user_id = self.db_user_id
1091 user_id = self.db_user_id
1092 c.user = self.db_user
1092 c.user = self.db_user
1093
1093
1094 user_model = UserModel()
1094 user_model = UserModel()
1095 desc = self.request.POST.get('description')
1095 desc = self.request.POST.get('description')
1096 try:
1096 try:
1097 ip_list = user_model.parse_ip_range(
1097 ip_list = user_model.parse_ip_range(
1098 self.request.POST.get('new_ip'))
1098 self.request.POST.get('new_ip'))
1099 except Exception as e:
1099 except Exception as e:
1100 ip_list = []
1100 ip_list = []
1101 log.exception("Exception during ip saving")
1101 log.exception("Exception during ip saving")
1102 h.flash(_('An error occurred during ip saving:%s' % (e,)),
1102 h.flash(_('An error occurred during ip saving:%s' % (e,)),
1103 category='error')
1103 category='error')
1104 added = []
1104 added = []
1105 user_data = c.user.get_api_data()
1105 user_data = c.user.get_api_data()
1106 for ip in ip_list:
1106 for ip in ip_list:
1107 try:
1107 try:
1108 form = UserExtraIpForm(self.request.translate)()
1108 form = UserExtraIpForm(self.request.translate)()
1109 data = form.to_python({'ip': ip})
1109 data = form.to_python({'ip': ip})
1110 ip = data['ip']
1110 ip = data['ip']
1111
1111
1112 user_model.add_extra_ip(c.user.user_id, ip, desc)
1112 user_model.add_extra_ip(c.user.user_id, ip, desc)
1113 audit_logger.store_web(
1113 audit_logger.store_web(
1114 'user.edit.ip.add',
1114 'user.edit.ip.add',
1115 action_data={'ip': ip, 'user': user_data},
1115 action_data={'ip': ip, 'user': user_data},
1116 user=self._rhodecode_user)
1116 user=self._rhodecode_user)
1117 Session().commit()
1117 Session().commit()
1118 added.append(ip)
1118 added.append(ip)
1119 except formencode.Invalid as error:
1119 except formencode.Invalid as error:
1120 msg = error.unpack_errors()['ip']
1120 msg = error.unpack_errors()['ip']
1121 h.flash(msg, category='error')
1121 h.flash(msg, category='error')
1122 except Exception:
1122 except Exception:
1123 log.exception("Exception during ip saving")
1123 log.exception("Exception during ip saving")
1124 h.flash(_('An error occurred during ip saving'),
1124 h.flash(_('An error occurred during ip saving'),
1125 category='error')
1125 category='error')
1126 if added:
1126 if added:
1127 h.flash(
1127 h.flash(
1128 _("Added ips %s to user whitelist") % (', '.join(ip_list), ),
1128 _("Added ips %s to user whitelist") % (', '.join(ip_list), ),
1129 category='success')
1129 category='success')
1130 if 'default_user' in self.request.POST:
1130 if 'default_user' in self.request.POST:
1131 # case for editing global IP list we do it for 'DEFAULT' user
1131 # case for editing global IP list we do it for 'DEFAULT' user
1132 raise HTTPFound(h.route_path('admin_permissions_ips'))
1132 raise HTTPFound(h.route_path('admin_permissions_ips'))
1133 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1133 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1134
1134
1135 @LoginRequired()
1135 @LoginRequired()
1136 @HasPermissionAllDecorator('hg.admin')
1136 @HasPermissionAllDecorator('hg.admin')
1137 @CSRFRequired()
1137 @CSRFRequired()
1138 # NOTE(marcink): this view is allowed for default users, as we can
1138 # NOTE(marcink): this view is allowed for default users, as we can
1139 # edit their IP white list
1139 # edit their IP white list
1140 def ips_delete(self):
1140 def ips_delete(self):
1141 _ = self.request.translate
1141 _ = self.request.translate
1142 c = self.load_default_context()
1142 c = self.load_default_context()
1143
1143
1144 user_id = self.db_user_id
1144 user_id = self.db_user_id
1145 c.user = self.db_user
1145 c.user = self.db_user
1146
1146
1147 ip_id = self.request.POST.get('del_ip_id')
1147 ip_id = self.request.POST.get('del_ip_id')
1148 user_model = UserModel()
1148 user_model = UserModel()
1149 user_data = c.user.get_api_data()
1149 user_data = c.user.get_api_data()
1150 ip = UserIpMap.query().get(ip_id).ip_addr
1150 ip = UserIpMap.query().get(ip_id).ip_addr
1151 user_model.delete_extra_ip(c.user.user_id, ip_id)
1151 user_model.delete_extra_ip(c.user.user_id, ip_id)
1152 audit_logger.store_web(
1152 audit_logger.store_web(
1153 'user.edit.ip.delete', action_data={'ip': ip, 'user': user_data},
1153 'user.edit.ip.delete', action_data={'ip': ip, 'user': user_data},
1154 user=self._rhodecode_user)
1154 user=self._rhodecode_user)
1155 Session().commit()
1155 Session().commit()
1156 h.flash(_("Removed ip address from user whitelist"), category='success')
1156 h.flash(_("Removed ip address from user whitelist"), category='success')
1157
1157
1158 if 'default_user' in self.request.POST:
1158 if 'default_user' in self.request.POST:
1159 # case for editing global IP list we do it for 'DEFAULT' user
1159 # case for editing global IP list we do it for 'DEFAULT' user
1160 raise HTTPFound(h.route_path('admin_permissions_ips'))
1160 raise HTTPFound(h.route_path('admin_permissions_ips'))
1161 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1161 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1162
1162
1163 @LoginRequired()
1163 @LoginRequired()
1164 @HasPermissionAllDecorator('hg.admin')
1164 @HasPermissionAllDecorator('hg.admin')
1165 def groups_management(self):
1165 def groups_management(self):
1166 c = self.load_default_context()
1166 c = self.load_default_context()
1167 c.user = self.db_user
1167 c.user = self.db_user
1168 c.data = c.user.group_member
1168 c.data = c.user.group_member
1169
1169
1170 groups = [UserGroupModel.get_user_groups_as_dict(group.users_group)
1170 groups = [UserGroupModel.get_user_groups_as_dict(group.users_group)
1171 for group in c.user.group_member]
1171 for group in c.user.group_member]
1172 c.groups = ext_json.str_json(groups)
1172 c.groups = ext_json.str_json(groups)
1173 c.active = 'groups'
1173 c.active = 'groups'
1174
1174
1175 return self._get_template_context(c)
1175 return self._get_template_context(c)
1176
1176
1177 @LoginRequired()
1177 @LoginRequired()
1178 @HasPermissionAllDecorator('hg.admin')
1178 @HasPermissionAllDecorator('hg.admin')
1179 @CSRFRequired()
1179 @CSRFRequired()
1180 def groups_management_updates(self):
1180 def groups_management_updates(self):
1181 _ = self.request.translate
1181 _ = self.request.translate
1182 c = self.load_default_context()
1182 c = self.load_default_context()
1183
1183
1184 user_id = self.db_user_id
1184 user_id = self.db_user_id
1185 c.user = self.db_user
1185 c.user = self.db_user
1186
1186
1187 user_groups = set(self.request.POST.getall('users_group_id'))
1187 user_groups = set(self.request.POST.getall('users_group_id'))
1188 user_groups_objects = []
1188 user_groups_objects = []
1189
1189
1190 for ugid in user_groups:
1190 for ugid in user_groups:
1191 user_groups_objects.append(
1191 user_groups_objects.append(
1192 UserGroupModel().get_group(safe_int(ugid)))
1192 UserGroupModel().get_group(safe_int(ugid)))
1193 user_group_model = UserGroupModel()
1193 user_group_model = UserGroupModel()
1194 added_to_groups, removed_from_groups = \
1194 added_to_groups, removed_from_groups = \
1195 user_group_model.change_groups(c.user, user_groups_objects)
1195 user_group_model.change_groups(c.user, user_groups_objects)
1196
1196
1197 user_data = c.user.get_api_data()
1197 user_data = c.user.get_api_data()
1198 for user_group_id in added_to_groups:
1198 for user_group_id in added_to_groups:
1199 user_group = UserGroup.get(user_group_id)
1199 user_group = UserGroup.get(user_group_id)
1200 old_values = user_group.get_api_data()
1200 old_values = user_group.get_api_data()
1201 audit_logger.store_web(
1201 audit_logger.store_web(
1202 'user_group.edit.member.add',
1202 'user_group.edit.member.add',
1203 action_data={'user': user_data, 'old_data': old_values},
1203 action_data={'user': user_data, 'old_data': old_values},
1204 user=self._rhodecode_user)
1204 user=self._rhodecode_user)
1205
1205
1206 for user_group_id in removed_from_groups:
1206 for user_group_id in removed_from_groups:
1207 user_group = UserGroup.get(user_group_id)
1207 user_group = UserGroup.get(user_group_id)
1208 old_values = user_group.get_api_data()
1208 old_values = user_group.get_api_data()
1209 audit_logger.store_web(
1209 audit_logger.store_web(
1210 'user_group.edit.member.delete',
1210 'user_group.edit.member.delete',
1211 action_data={'user': user_data, 'old_data': old_values},
1211 action_data={'user': user_data, 'old_data': old_values},
1212 user=self._rhodecode_user)
1212 user=self._rhodecode_user)
1213
1213
1214 Session().commit()
1214 Session().commit()
1215 c.active = 'user_groups_management'
1215 c.active = 'user_groups_management'
1216 h.flash(_("Groups successfully changed"), category='success')
1216 h.flash(_("Groups successfully changed"), category='success')
1217
1217
1218 return HTTPFound(h.route_path(
1218 return HTTPFound(h.route_path(
1219 'edit_user_groups_management', user_id=user_id))
1219 'edit_user_groups_management', user_id=user_id))
1220
1220
1221 @LoginRequired()
1221 @LoginRequired()
1222 @HasPermissionAllDecorator('hg.admin')
1222 @HasPermissionAllDecorator('hg.admin')
1223 def user_audit_logs(self):
1223 def user_audit_logs(self):
1224 _ = self.request.translate
1224 _ = self.request.translate
1225 c = self.load_default_context()
1225 c = self.load_default_context()
1226 c.user = self.db_user
1226 c.user = self.db_user
1227
1227
1228 c.active = 'audit'
1228 c.active = 'audit'
1229
1229
1230 p = safe_int(self.request.GET.get('page', 1), 1)
1230 p = safe_int(self.request.GET.get('page', 1), 1)
1231
1231
1232 filter_term = self.request.GET.get('filter')
1232 filter_term = self.request.GET.get('filter')
1233 user_log = UserModel().get_user_log(c.user, filter_term)
1233 user_log = UserModel().get_user_log(c.user, filter_term)
1234
1234
1235 def url_generator(page_num):
1235 def url_generator(page_num):
1236 query_params = {
1236 query_params = {
1237 'page': page_num
1237 'page': page_num
1238 }
1238 }
1239 if filter_term:
1239 if filter_term:
1240 query_params['filter'] = filter_term
1240 query_params['filter'] = filter_term
1241 return self.request.current_route_path(_query=query_params)
1241 return self.request.current_route_path(_query=query_params)
1242
1242
1243 c.audit_logs = SqlPage(
1243 c.audit_logs = SqlPage(
1244 user_log, page=p, items_per_page=10, url_maker=url_generator)
1244 user_log, page=p, items_per_page=10, url_maker=url_generator)
1245 c.filter_term = filter_term
1245 c.filter_term = filter_term
1246 return self._get_template_context(c)
1246 return self._get_template_context(c)
1247
1247
1248 @LoginRequired()
1248 @LoginRequired()
1249 @HasPermissionAllDecorator('hg.admin')
1249 @HasPermissionAllDecorator('hg.admin')
1250 def user_audit_logs_download(self):
1250 def user_audit_logs_download(self):
1251 _ = self.request.translate
1251 _ = self.request.translate
1252 c = self.load_default_context()
1252 c = self.load_default_context()
1253 c.user = self.db_user
1253 c.user = self.db_user
1254
1254
1255 user_log = UserModel().get_user_log(c.user, filter_term=None)
1255 user_log = UserModel().get_user_log(c.user, filter_term=None)
1256
1256
1257 audit_log_data = {}
1257 audit_log_data = {}
1258 for entry in user_log:
1258 for entry in user_log:
1259 audit_log_data[entry.user_log_id] = entry.get_dict()
1259 audit_log_data[entry.user_log_id] = entry.get_dict()
1260
1260
1261 response = Response(ext_json.formatted_str_json(audit_log_data))
1261 response = Response(ext_json.formatted_str_json(audit_log_data))
1262 response.content_disposition = f'attachment; filename=user_{c.user.user_id}_audit_logs.json'
1262 response.content_disposition = f'attachment; filename=user_{c.user.user_id}_audit_logs.json'
1263 response.content_type = 'application/json'
1263 response.content_type = 'application/json'
1264
1264
1265 return response
1265 return response
1266
1266
1267 @LoginRequired()
1267 @LoginRequired()
1268 @HasPermissionAllDecorator('hg.admin')
1268 @HasPermissionAllDecorator('hg.admin')
1269 def user_perms_summary(self):
1269 def user_perms_summary(self):
1270 _ = self.request.translate
1270 _ = self.request.translate
1271 c = self.load_default_context()
1271 c = self.load_default_context()
1272 c.user = self.db_user
1272 c.user = self.db_user
1273
1273
1274 c.active = 'perms_summary'
1274 c.active = 'perms_summary'
1275 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1275 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1276
1276
1277 return self._get_template_context(c)
1277 return self._get_template_context(c)
1278
1278
1279 @LoginRequired()
1279 @LoginRequired()
1280 @HasPermissionAllDecorator('hg.admin')
1280 @HasPermissionAllDecorator('hg.admin')
1281 def user_perms_summary_json(self):
1281 def user_perms_summary_json(self):
1282 self.load_default_context()
1282 self.load_default_context()
1283 perm_user = self.db_user.AuthUser(ip_addr=self.request.remote_addr)
1283 perm_user = self.db_user.AuthUser(ip_addr=self.request.remote_addr)
1284
1284
1285 return perm_user.permissions
1285 return perm_user.permissions
1286
1286
1287 @LoginRequired()
1287 @LoginRequired()
1288 @HasPermissionAllDecorator('hg.admin')
1288 @HasPermissionAllDecorator('hg.admin')
1289 def user_caches(self):
1289 def user_caches(self):
1290 _ = self.request.translate
1290 _ = self.request.translate
1291 c = self.load_default_context()
1291 c = self.load_default_context()
1292 c.user = self.db_user
1292 c.user = self.db_user
1293
1293
1294 c.active = 'caches'
1294 c.active = 'caches'
1295 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1295 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1296
1296
1297 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{self.db_user.user_id}'
1297 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{self.db_user.user_id}'
1298 c.region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1298 c.region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1299 c.backend = c.region.backend
1299 c.backend = c.region.backend
1300 c.user_keys = sorted(c.region.backend.list_keys(prefix=cache_namespace_uid))
1300 c.user_keys = sorted(c.region.backend.list_keys(prefix=cache_namespace_uid))
1301
1301
1302 return self._get_template_context(c)
1302 return self._get_template_context(c)
1303
1303
1304 @LoginRequired()
1304 @LoginRequired()
1305 @HasPermissionAllDecorator('hg.admin')
1305 @HasPermissionAllDecorator('hg.admin')
1306 @CSRFRequired()
1306 @CSRFRequired()
1307 def user_caches_update(self):
1307 def user_caches_update(self):
1308 _ = self.request.translate
1308 _ = self.request.translate
1309 c = self.load_default_context()
1309 c = self.load_default_context()
1310 c.user = self.db_user
1310 c.user = self.db_user
1311
1311
1312 c.active = 'caches'
1312 c.active = 'caches'
1313 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1313 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1314
1314
1315 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{self.db_user.user_id}'
1315 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{self.db_user.user_id}'
1316 del_keys = rc_cache.clear_cache_namespace('cache_perms', cache_namespace_uid)
1316 del_keys = rc_cache.clear_cache_namespace('cache_perms', cache_namespace_uid, method=rc_cache.CLEAR_DELETE)
1317
1317
1318 h.flash(_("Deleted {} cache keys").format(del_keys), category='success')
1318 h.flash(_("Deleted {} cache keys").format(del_keys), category='success')
1319
1319
1320 return HTTPFound(h.route_path(
1320 return HTTPFound(h.route_path(
1321 'edit_user_caches', user_id=c.user.user_id))
1321 'edit_user_caches', user_id=c.user.user_id))
@@ -1,299 +1,335 b''
1 # Copyright (C) 2015-2023 RhodeCode GmbH
1 # Copyright (C) 2015-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import errno
19 import errno
20 import fcntl
20 import fcntl
21 import functools
21 import functools
22 import logging
22 import logging
23 import os
23 import os
24 import pickle
24 import pickle
25 import time
25 import time
26
26
27 import gevent
27 import gevent
28 import msgpack
28 import msgpack
29 import redis
29 import redis
30
30
31 flock_org = fcntl.flock
31 flock_org = fcntl.flock
32 from typing import Union
32 from typing import Union
33
33
34 from dogpile.cache.api import Deserializer, Serializer
34 from dogpile.cache.api import Deserializer, Serializer
35 from dogpile.cache.backends import file as file_backend
35 from dogpile.cache.backends import file as file_backend
36 from dogpile.cache.backends import memory as memory_backend
36 from dogpile.cache.backends import memory as memory_backend
37 from dogpile.cache.backends import redis as redis_backend
37 from dogpile.cache.backends import redis as redis_backend
38 from dogpile.cache.backends.file import FileLock
38 from dogpile.cache.backends.file import FileLock
39 from dogpile.cache.util import memoized_property
39 from dogpile.cache.util import memoized_property
40
40
41 from rhodecode.lib.memory_lru_dict import LRUDict, LRUDictDebug
41 from rhodecode.lib.memory_lru_dict import LRUDict, LRUDictDebug
42 from rhodecode.lib.str_utils import safe_bytes, safe_str
42 from rhodecode.lib.str_utils import safe_bytes, safe_str
43 from rhodecode.lib.type_utils import str2bool
43 from rhodecode.lib.type_utils import str2bool
44
44
45 _default_max_size = 1024
45 _default_max_size = 1024
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 class LRUMemoryBackend(memory_backend.MemoryBackend):
50 class LRUMemoryBackend(memory_backend.MemoryBackend):
51 key_prefix = 'lru_mem_backend'
51 key_prefix = 'lru_mem_backend'
52 pickle_values = False
52 pickle_values = False
53
53
54 def __init__(self, arguments):
54 def __init__(self, arguments):
55 self.max_size = arguments.pop('max_size', _default_max_size)
55 self.max_size = arguments.pop('max_size', _default_max_size)
56
56
57 LRUDictClass = LRUDict
57 LRUDictClass = LRUDict
58 if arguments.pop('log_key_count', None):
58 if arguments.pop('log_key_count', None):
59 LRUDictClass = LRUDictDebug
59 LRUDictClass = LRUDictDebug
60
60
61 arguments['cache_dict'] = LRUDictClass(self.max_size)
61 arguments['cache_dict'] = LRUDictClass(self.max_size)
62 super().__init__(arguments)
62 super().__init__(arguments)
63
63
64 def __repr__(self):
64 def __repr__(self):
65 return f'{self.__class__}(maxsize=`{self.max_size}`)'
65 return f'{self.__class__}(maxsize=`{self.max_size}`)'
66
66
67 def __str__(self):
67 def __str__(self):
68 return self.__repr__()
68 return self.__repr__()
69
69
70 def delete(self, key):
70 def delete(self, key):
71 try:
71 try:
72 del self._cache[key]
72 del self._cache[key]
73 except KeyError:
73 except KeyError:
74 # we don't care if key isn't there at deletion
74 # we don't care if key isn't there at deletion
75 pass
75 pass
76
76
77 def list_keys(self, prefix):
78 return list(self._cache.keys())
79
77 def delete_multi(self, keys):
80 def delete_multi(self, keys):
78 for key in keys:
81 for key in keys:
79 self.delete(key)
82 self.delete(key)
80
83
84 def delete_multi_by_prefix(self, prefix):
85 cache_keys = self.list_keys(prefix=prefix)
86 num_affected_keys = len(cache_keys)
87 if num_affected_keys:
88 self.delete_multi(cache_keys)
89 return num_affected_keys
90
81
91
82 class PickleSerializer:
92 class PickleSerializer:
83 serializer: None | Serializer = staticmethod( # type: ignore
93 serializer: None | Serializer = staticmethod( # type: ignore
84 functools.partial(pickle.dumps, protocol=pickle.HIGHEST_PROTOCOL)
94 functools.partial(pickle.dumps, protocol=pickle.HIGHEST_PROTOCOL)
85 )
95 )
86 deserializer: None | Deserializer = staticmethod( # type: ignore
96 deserializer: None | Deserializer = staticmethod( # type: ignore
87 functools.partial(pickle.loads)
97 functools.partial(pickle.loads)
88 )
98 )
89
99
90
100
91 class MsgPackSerializer(object):
101 class MsgPackSerializer(object):
92 serializer: None | Serializer = staticmethod( # type: ignore
102 serializer: None | Serializer = staticmethod( # type: ignore
93 msgpack.packb
103 msgpack.packb
94 )
104 )
95 deserializer: None | Deserializer = staticmethod( # type: ignore
105 deserializer: None | Deserializer = staticmethod( # type: ignore
96 functools.partial(msgpack.unpackb, use_list=False)
106 functools.partial(msgpack.unpackb, use_list=False)
97 )
107 )
98
108
99
109
100 class CustomLockFactory(FileLock):
110 class CustomLockFactory(FileLock):
101
111
102 @memoized_property
112 @memoized_property
103 def _module(self):
113 def _module(self):
104
114
105 def gevent_flock(fd, operation):
115 def gevent_flock(fd, operation):
106 """
116 """
107 Gevent compatible flock
117 Gevent compatible flock
108 """
118 """
109 # set non-blocking, this will cause an exception if we cannot acquire a lock
119 # set non-blocking, this will cause an exception if we cannot acquire a lock
110 operation |= fcntl.LOCK_NB
120 operation |= fcntl.LOCK_NB
111 start_lock_time = time.time()
121 start_lock_time = time.time()
112 timeout = 60 * 15 # 15min
122 timeout = 60 * 15 # 15min
113 while True:
123 while True:
114 try:
124 try:
115 flock_org(fd, operation)
125 flock_org(fd, operation)
116 # lock has been acquired
126 # lock has been acquired
117 break
127 break
118 except (OSError, IOError) as e:
128 except (OSError, IOError) as e:
119 # raise on other errors than Resource temporarily unavailable
129 # raise on other errors than Resource temporarily unavailable
120 if e.errno != errno.EAGAIN:
130 if e.errno != errno.EAGAIN:
121 raise
131 raise
122 elif (time.time() - start_lock_time) > timeout:
132 elif (time.time() - start_lock_time) > timeout:
123 # waited to much time on a lock, better fail than loop for ever
133 # waited to much time on a lock, better fail than loop for ever
124 log.error('Failed to acquire lock on `%s` after waiting %ss',
134 log.error('Failed to acquire lock on `%s` after waiting %ss',
125 self.filename, timeout)
135 self.filename, timeout)
126 raise
136 raise
127 wait_timeout = 0.03
137 wait_timeout = 0.03
128 log.debug('Failed to acquire lock on `%s`, retry in %ss',
138 log.debug('Failed to acquire lock on `%s`, retry in %ss',
129 self.filename, wait_timeout)
139 self.filename, wait_timeout)
130 gevent.sleep(wait_timeout)
140 gevent.sleep(wait_timeout)
131
141
132 fcntl.flock = gevent_flock
142 fcntl.flock = gevent_flock
133 return fcntl
143 return fcntl
134
144
135
145
136 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
146 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
137 key_prefix = 'file_backend'
147 key_prefix = 'file_backend'
138
148
139 def __init__(self, arguments):
149 def __init__(self, arguments):
140 arguments['lock_factory'] = CustomLockFactory
150 arguments['lock_factory'] = CustomLockFactory
141 db_file = arguments.get('filename')
151 db_file = arguments.get('filename')
142
152
143 log.debug('initialing cache-backend=%s db in %s', self.__class__.__name__, db_file)
153 log.debug('initialing cache-backend=%s db in %s', self.__class__.__name__, db_file)
144 db_file_dir = os.path.dirname(db_file)
154 db_file_dir = os.path.dirname(db_file)
145 if not os.path.isdir(db_file_dir):
155 if not os.path.isdir(db_file_dir):
146 os.makedirs(db_file_dir)
156 os.makedirs(db_file_dir)
147
157
148 try:
158 try:
149 super().__init__(arguments)
159 super().__init__(arguments)
150 except Exception:
160 except Exception:
151 log.exception('Failed to initialize db at: %s', db_file)
161 log.exception('Failed to initialize db at: %s', db_file)
152 raise
162 raise
153
163
154 def __repr__(self):
164 def __repr__(self):
155 return f'{self.__class__}(file=`{self.filename}`)'
165 return f'{self.__class__}(file=`{self.filename}`)'
156
166
157 def __str__(self):
167 def __str__(self):
158 return self.__repr__()
168 return self.__repr__()
159
169
160 def _get_keys_pattern(self, prefix: bytes = b''):
170 def _get_keys_pattern(self, prefix: bytes = b''):
161 return b'%b:%b' % (safe_bytes(self.key_prefix), safe_bytes(prefix))
171 return b'%b:%b' % (safe_bytes(self.key_prefix), safe_bytes(prefix))
162
172
163 def list_keys(self, prefix: bytes = b''):
173 def list_keys(self, prefix: bytes = b''):
164 prefix = self._get_keys_pattern(prefix)
174 prefix = self._get_keys_pattern(prefix)
165
175
166 def cond(dbm_key: bytes):
176 def cond(dbm_key: bytes):
167 if not prefix:
177 if not prefix:
168 return True
178 return True
169
179
170 if dbm_key.startswith(prefix):
180 if dbm_key.startswith(prefix):
171 return True
181 return True
172 return False
182 return False
173
183
174 with self._dbm_file(True) as dbm:
184 with self._dbm_file(True) as dbm:
175 try:
185 try:
176 return list(filter(cond, dbm.keys()))
186 return list(filter(cond, dbm.keys()))
177 except Exception:
187 except Exception:
178 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
188 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
179 raise
189 raise
180
190
191 def delete_multi_by_prefix(self, prefix):
192 cache_keys = self.list_keys(prefix=prefix)
193 num_affected_keys = len(cache_keys)
194 if num_affected_keys:
195 self.delete_multi(cache_keys)
196 return num_affected_keys
197
181 def get_store(self):
198 def get_store(self):
182 return self.filename
199 return self.filename
183
200
184
201
185 class BaseRedisBackend(redis_backend.RedisBackend):
202 class BaseRedisBackend(redis_backend.RedisBackend):
186 key_prefix = ''
203 key_prefix = ''
187
204
188 def __init__(self, arguments):
205 def __init__(self, arguments):
189 self.db_conn = arguments.get('host', '') or arguments.get('url', '') or 'redis-host'
206 self.db_conn = arguments.get('host', '') or arguments.get('url', '') or 'redis-host'
190 super().__init__(arguments)
207 super().__init__(arguments)
191
208
192 self._lock_timeout = self.lock_timeout
209 self._lock_timeout = self.lock_timeout
193 self._lock_auto_renewal = str2bool(arguments.pop("lock_auto_renewal", True))
210 self._lock_auto_renewal = str2bool(arguments.pop("lock_auto_renewal", True))
194
211
195 if self._lock_auto_renewal and not self._lock_timeout:
212 if self._lock_auto_renewal and not self._lock_timeout:
196 # set default timeout for auto_renewal
213 # set default timeout for auto_renewal
197 self._lock_timeout = 30
214 self._lock_timeout = 30
198
215
199 def __repr__(self):
216 def __repr__(self):
200 return f'{self.__class__}(conn=`{self.db_conn}`)'
217 return f'{self.__class__}(conn=`{self.db_conn}`)'
201
218
202 def __str__(self):
219 def __str__(self):
203 return self.__repr__()
220 return self.__repr__()
204
221
205 def _create_client(self):
222 def _create_client(self):
206 args = {}
223 args = {}
207
224
208 if self.url is not None:
225 if self.url is not None:
209 args.update(url=self.url)
226 args.update(url=self.url)
210
227
211 else:
228 else:
212 args.update(
229 args.update(
213 host=self.host, password=self.password,
230 host=self.host, password=self.password,
214 port=self.port, db=self.db
231 port=self.port, db=self.db
215 )
232 )
216
233
217 connection_pool = redis.ConnectionPool(**args)
234 connection_pool = redis.ConnectionPool(**args)
218 self.writer_client = redis.StrictRedis(
235 self.writer_client = redis.StrictRedis(
219 connection_pool=connection_pool
236 connection_pool=connection_pool
220 )
237 )
221 self.reader_client = self.writer_client
238 self.reader_client = self.writer_client
222
239
223 def _get_keys_pattern(self, prefix: bytes = b''):
240 def _get_keys_pattern(self, prefix: bytes = b''):
224 return b'%b:%b*' % (safe_bytes(self.key_prefix), safe_bytes(prefix))
241 return b'%b:%b*' % (safe_bytes(self.key_prefix), safe_bytes(prefix))
225
242
226 def list_keys(self, prefix: bytes = b''):
243 def list_keys(self, prefix: bytes = b''):
227 prefix = self._get_keys_pattern(prefix)
244 prefix = self._get_keys_pattern(prefix)
228 return self.reader_client.keys(prefix)
245 return self.reader_client.keys(prefix)
229
246
247 def delete_multi_by_prefix(self, prefix, use_lua=False):
248 if use_lua:
249 # high efficient LUA script to delete ALL keys by prefix...
250 lua = """local keys = redis.call('keys', ARGV[1])
251 for i=1,#keys,5000 do
252 redis.call('del', unpack(keys, i, math.min(i+(5000-1), #keys)))
253 end
254 return #keys"""
255 num_affected_keys = self.writer_client.eval(
256 lua,
257 0,
258 f"{prefix}*")
259 else:
260 cache_keys = self.list_keys(prefix=prefix)
261 num_affected_keys = len(cache_keys)
262 if num_affected_keys:
263 self.delete_multi(cache_keys)
264 return num_affected_keys
265
230 def get_store(self):
266 def get_store(self):
231 return self.reader_client.connection_pool
267 return self.reader_client.connection_pool
232
268
233 def get_mutex(self, key):
269 def get_mutex(self, key):
234 if self.distributed_lock:
270 if self.distributed_lock:
235 lock_key = f'_lock_{safe_str(key)}'
271 lock_key = f'_lock_{safe_str(key)}'
236 return get_mutex_lock(
272 return get_mutex_lock(
237 self.writer_client, lock_key,
273 self.writer_client, lock_key,
238 self._lock_timeout,
274 self._lock_timeout,
239 auto_renewal=self._lock_auto_renewal
275 auto_renewal=self._lock_auto_renewal
240 )
276 )
241 else:
277 else:
242 return None
278 return None
243
279
244
280
245 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
281 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
246 key_prefix = 'redis_pickle_backend'
282 key_prefix = 'redis_pickle_backend'
247 pass
283 pass
248
284
249
285
250 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
286 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
251 key_prefix = 'redis_msgpack_backend'
287 key_prefix = 'redis_msgpack_backend'
252 pass
288 pass
253
289
254
290
255 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
291 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
256 from rhodecode.lib._vendor import redis_lock
292 from rhodecode.lib._vendor import redis_lock
257
293
258 class _RedisLockWrapper(object):
294 class _RedisLockWrapper(object):
259 """LockWrapper for redis_lock"""
295 """LockWrapper for redis_lock"""
260
296
261 @classmethod
297 @classmethod
262 def get_lock(cls):
298 def get_lock(cls):
263 return redis_lock.Lock(
299 return redis_lock.Lock(
264 redis_client=client,
300 redis_client=client,
265 name=lock_key,
301 name=lock_key,
266 expire=lock_timeout,
302 expire=lock_timeout,
267 auto_renewal=auto_renewal,
303 auto_renewal=auto_renewal,
268 strict=True,
304 strict=True,
269 )
305 )
270
306
271 def __repr__(self):
307 def __repr__(self):
272 return f"{self.__class__.__name__}:{lock_key}"
308 return f"{self.__class__.__name__}:{lock_key}"
273
309
274 def __str__(self):
310 def __str__(self):
275 return f"{self.__class__.__name__}:{lock_key}"
311 return f"{self.__class__.__name__}:{lock_key}"
276
312
277 def __init__(self):
313 def __init__(self):
278 self.lock = self.get_lock()
314 self.lock = self.get_lock()
279 self.lock_key = lock_key
315 self.lock_key = lock_key
280
316
281 def acquire(self, wait=True):
317 def acquire(self, wait=True):
282 log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
318 log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
283 try:
319 try:
284 acquired = self.lock.acquire(wait)
320 acquired = self.lock.acquire(wait)
285 log.debug('Got lock for key %s, %s', self.lock_key, acquired)
321 log.debug('Got lock for key %s, %s', self.lock_key, acquired)
286 return acquired
322 return acquired
287 except redis_lock.AlreadyAcquired:
323 except redis_lock.AlreadyAcquired:
288 return False
324 return False
289 except redis_lock.AlreadyStarted:
325 except redis_lock.AlreadyStarted:
290 # refresh thread exists, but it also means we acquired the lock
326 # refresh thread exists, but it also means we acquired the lock
291 return True
327 return True
292
328
293 def release(self):
329 def release(self):
294 try:
330 try:
295 self.lock.release()
331 self.lock.release()
296 except redis_lock.NotAcquired:
332 except redis_lock.NotAcquired:
297 pass
333 pass
298
334
299 return _RedisLockWrapper()
335 return _RedisLockWrapper()
@@ -1,406 +1,404 b''
1 # Copyright (C) 2015-2023 RhodeCode GmbH
1 # Copyright (C) 2015-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import functools
19 import functools
20 import logging
20 import logging
21 import os
21 import os
22 import threading
22 import threading
23 import time
23 import time
24
24
25 import decorator
25 import decorator
26 from dogpile.cache import CacheRegion
26 from dogpile.cache import CacheRegion
27
27
28 import rhodecode
28 import rhodecode
29 from rhodecode.lib.hash_utils import sha1
29 from rhodecode.lib.hash_utils import sha1
30 from rhodecode.lib.str_utils import safe_bytes
30 from rhodecode.lib.str_utils import safe_bytes
31 from rhodecode.lib.type_utils import str2bool # noqa :required by imports from .utils
31 from rhodecode.lib.type_utils import str2bool # noqa :required by imports from .utils
32
32
33 from . import region_meta, cache_key_meta
33 from . import region_meta, cache_key_meta
34
34
35 log = logging.getLogger(__name__)
35 log = logging.getLogger(__name__)
36
36
37
37
38 def isCython(func):
38 def isCython(func):
39 """
39 """
40 Private helper that checks if a function is a cython function.
40 Private helper that checks if a function is a cython function.
41 """
41 """
42 return func.__class__.__name__ == 'cython_function_or_method'
42 return func.__class__.__name__ == 'cython_function_or_method'
43
43
44
44
45 class RhodeCodeCacheRegion(CacheRegion):
45 class RhodeCodeCacheRegion(CacheRegion):
46
46
47 def __repr__(self):
47 def __repr__(self):
48 return f'{self.__class__}(name={self.name})'
48 return f'{self.__class__}(name={self.name})'
49
49
50 def conditional_cache_on_arguments(
50 def conditional_cache_on_arguments(
51 self, namespace=None,
51 self, namespace=None,
52 expiration_time=None,
52 expiration_time=None,
53 should_cache_fn=None,
53 should_cache_fn=None,
54 to_str=str,
54 to_str=str,
55 function_key_generator=None,
55 function_key_generator=None,
56 condition=True):
56 condition=True):
57 """
57 """
58 Custom conditional decorator, that will not touch any dogpile internals if
58 Custom conditional decorator, that will not touch any dogpile internals if
59 condition isn't meet. This works a bit different from should_cache_fn
59 condition isn't meet. This works a bit different from should_cache_fn
60 And it's faster in cases we don't ever want to compute cached values
60 And it's faster in cases we don't ever want to compute cached values
61 """
61 """
62 expiration_time_is_callable = callable(expiration_time)
62 expiration_time_is_callable = callable(expiration_time)
63 if not namespace:
63 if not namespace:
64 namespace = getattr(self, '_default_namespace', None)
64 namespace = getattr(self, '_default_namespace', None)
65
65
66 if function_key_generator is None:
66 if function_key_generator is None:
67 function_key_generator = self.function_key_generator
67 function_key_generator = self.function_key_generator
68
68
69 def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):
69 def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):
70
70
71 if not condition:
71 if not condition:
72 log.debug('Calling un-cached method:%s', user_func.__name__)
72 log.debug('Calling un-cached method:%s', user_func.__name__)
73 start = time.time()
73 start = time.time()
74 result = user_func(*arg, **kw)
74 result = user_func(*arg, **kw)
75 total = time.time() - start
75 total = time.time() - start
76 log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
76 log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
77 return result
77 return result
78
78
79 key = func_key_generator(*arg, **kw)
79 key = func_key_generator(*arg, **kw)
80
80
81 timeout = expiration_time() if expiration_time_is_callable \
81 timeout = expiration_time() if expiration_time_is_callable \
82 else expiration_time
82 else expiration_time
83
83
84 log.debug('Calling cached method:`%s`', user_func.__name__)
84 log.debug('Calling cached method:`%s`', user_func.__name__)
85 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
85 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
86
86
87 def cache_decorator(user_func):
87 def cache_decorator(user_func):
88 if to_str is str:
88 if to_str is str:
89 # backwards compatible
89 # backwards compatible
90 key_generator = function_key_generator(namespace, user_func)
90 key_generator = function_key_generator(namespace, user_func)
91 else:
91 else:
92 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
92 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
93
93
94 def refresh(*arg, **kw):
94 def refresh(*arg, **kw):
95 """
95 """
96 Like invalidate, but regenerates the value instead
96 Like invalidate, but regenerates the value instead
97 """
97 """
98 key = key_generator(*arg, **kw)
98 key = key_generator(*arg, **kw)
99 value = user_func(*arg, **kw)
99 value = user_func(*arg, **kw)
100 self.set(key, value)
100 self.set(key, value)
101 return value
101 return value
102
102
103 def invalidate(*arg, **kw):
103 def invalidate(*arg, **kw):
104 key = key_generator(*arg, **kw)
104 key = key_generator(*arg, **kw)
105 self.delete(key)
105 self.delete(key)
106
106
107 def set_(value, *arg, **kw):
107 def set_(value, *arg, **kw):
108 key = key_generator(*arg, **kw)
108 key = key_generator(*arg, **kw)
109 self.set(key, value)
109 self.set(key, value)
110
110
111 def get(*arg, **kw):
111 def get(*arg, **kw):
112 key = key_generator(*arg, **kw)
112 key = key_generator(*arg, **kw)
113 return self.get(key)
113 return self.get(key)
114
114
115 user_func.set = set_
115 user_func.set = set_
116 user_func.invalidate = invalidate
116 user_func.invalidate = invalidate
117 user_func.get = get
117 user_func.get = get
118 user_func.refresh = refresh
118 user_func.refresh = refresh
119 user_func.key_generator = key_generator
119 user_func.key_generator = key_generator
120 user_func.original = user_func
120 user_func.original = user_func
121
121
122 # Use `decorate` to preserve the signature of :param:`user_func`.
122 # Use `decorate` to preserve the signature of :param:`user_func`.
123 return decorator.decorate(user_func, functools.partial(
123 return decorator.decorate(user_func, functools.partial(
124 get_or_create_for_user_func, key_generator))
124 get_or_create_for_user_func, key_generator))
125
125
126 return cache_decorator
126 return cache_decorator
127
127
128
128
129 def make_region(*arg, **kw):
129 def make_region(*arg, **kw):
130 return RhodeCodeCacheRegion(*arg, **kw)
130 return RhodeCodeCacheRegion(*arg, **kw)
131
131
132
132
133 def get_default_cache_settings(settings, prefixes=None):
133 def get_default_cache_settings(settings, prefixes=None):
134 prefixes = prefixes or []
134 prefixes = prefixes or []
135 cache_settings = {}
135 cache_settings = {}
136 for key in settings.keys():
136 for key in settings.keys():
137 for prefix in prefixes:
137 for prefix in prefixes:
138 if key.startswith(prefix):
138 if key.startswith(prefix):
139 name = key.split(prefix)[1].strip()
139 name = key.split(prefix)[1].strip()
140 val = settings[key]
140 val = settings[key]
141 if isinstance(val, str):
141 if isinstance(val, str):
142 val = val.strip()
142 val = val.strip()
143 cache_settings[name] = val
143 cache_settings[name] = val
144 return cache_settings
144 return cache_settings
145
145
146
146
147 def compute_key_from_params(*args):
147 def compute_key_from_params(*args):
148 """
148 """
149 Helper to compute key from given params to be used in cache manager
149 Helper to compute key from given params to be used in cache manager
150 """
150 """
151 return sha1(safe_bytes("_".join(map(str, args))))
151 return sha1(safe_bytes("_".join(map(str, args))))
152
152
153
153
154 def custom_key_generator(backend, namespace, fn):
154 def custom_key_generator(backend, namespace, fn):
155 func_name = fn.__name__
155 func_name = fn.__name__
156
156
157 def generate_key(*args):
157 def generate_key(*args):
158 backend_pref = getattr(backend, 'key_prefix', None) or 'backend_prefix'
158 backend_pref = getattr(backend, 'key_prefix', None) or 'backend_prefix'
159 namespace_pref = namespace or 'default_namespace'
159 namespace_pref = namespace or 'default_namespace'
160 arg_key = compute_key_from_params(*args)
160 arg_key = compute_key_from_params(*args)
161 final_key = f"{backend_pref}:{namespace_pref}:{func_name}_{arg_key}"
161 final_key = f"{backend_pref}:{namespace_pref}:{func_name}_{arg_key}"
162
162
163 return final_key
163 return final_key
164
164
165 return generate_key
165 return generate_key
166
166
167
167
168 def backend_key_generator(backend):
168 def backend_key_generator(backend):
169 """
169 """
170 Special wrapper that also sends over the backend to the key generator
170 Special wrapper that also sends over the backend to the key generator
171 """
171 """
172 def wrapper(namespace, fn):
172 def wrapper(namespace, fn):
173 return custom_key_generator(backend, namespace, fn)
173 return custom_key_generator(backend, namespace, fn)
174 return wrapper
174 return wrapper
175
175
176
176
177 def get_or_create_region(region_name, region_namespace: str = None, use_async_runner=False):
177 def get_or_create_region(region_name, region_namespace: str = None, use_async_runner=False):
178 from .backends import FileNamespaceBackend
178 from .backends import FileNamespaceBackend
179 from . import async_creation_runner
179 from . import async_creation_runner
180
180
181 region_obj = region_meta.dogpile_cache_regions.get(region_name)
181 region_obj = region_meta.dogpile_cache_regions.get(region_name)
182 if not region_obj:
182 if not region_obj:
183 reg_keys = list(region_meta.dogpile_cache_regions.keys())
183 reg_keys = list(region_meta.dogpile_cache_regions.keys())
184 raise OSError(f'Region `{region_name}` not in configured: {reg_keys}.')
184 raise OSError(f'Region `{region_name}` not in configured: {reg_keys}.')
185
185
186 region_uid_name = f'{region_name}:{region_namespace}'
186 region_uid_name = f'{region_name}:{region_namespace}'
187
187
188 # Special case for ONLY the FileNamespaceBackend backend. We register one-file-per-region
188 # Special case for ONLY the FileNamespaceBackend backend. We register one-file-per-region
189 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
189 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
190 if not region_namespace:
190 if not region_namespace:
191 raise ValueError(f'{FileNamespaceBackend} used requires to specify region_namespace param')
191 raise ValueError(f'{FileNamespaceBackend} used requires to specify region_namespace param')
192
192
193 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
193 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
194 if region_exist:
194 if region_exist:
195 log.debug('Using already configured region: %s', region_namespace)
195 log.debug('Using already configured region: %s', region_namespace)
196 return region_exist
196 return region_exist
197
197
198 expiration_time = region_obj.expiration_time
198 expiration_time = region_obj.expiration_time
199
199
200 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
200 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
201 namespace_cache_dir = cache_dir
201 namespace_cache_dir = cache_dir
202
202
203 # we default the namespace_cache_dir to our default cache dir.
203 # we default the namespace_cache_dir to our default cache dir.
204 # however, if this backend is configured with filename= param, we prioritize that
204 # however, if this backend is configured with filename= param, we prioritize that
205 # so all caches within that particular region, even those namespaced end up in the same path
205 # so all caches within that particular region, even those namespaced end up in the same path
206 if region_obj.actual_backend.filename:
206 if region_obj.actual_backend.filename:
207 namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)
207 namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)
208
208
209 if not os.path.isdir(namespace_cache_dir):
209 if not os.path.isdir(namespace_cache_dir):
210 os.makedirs(namespace_cache_dir)
210 os.makedirs(namespace_cache_dir)
211 new_region = make_region(
211 new_region = make_region(
212 name=region_uid_name,
212 name=region_uid_name,
213 function_key_generator=backend_key_generator(region_obj.actual_backend)
213 function_key_generator=backend_key_generator(region_obj.actual_backend)
214 )
214 )
215
215
216 namespace_filename = os.path.join(
216 namespace_filename = os.path.join(
217 namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db")
217 namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db")
218 # special type that allows 1db per namespace
218 # special type that allows 1db per namespace
219 new_region.configure(
219 new_region.configure(
220 backend='dogpile.cache.rc.file_namespace',
220 backend='dogpile.cache.rc.file_namespace',
221 expiration_time=expiration_time,
221 expiration_time=expiration_time,
222 arguments={"filename": namespace_filename}
222 arguments={"filename": namespace_filename}
223 )
223 )
224
224
225 # create and save in region caches
225 # create and save in region caches
226 log.debug('configuring new region: %s', region_uid_name)
226 log.debug('configuring new region: %s', region_uid_name)
227 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
227 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
228
228
229 region_obj._default_namespace = region_namespace
229 region_obj._default_namespace = region_namespace
230 if use_async_runner:
230 if use_async_runner:
231 region_obj.async_creation_runner = async_creation_runner
231 region_obj.async_creation_runner = async_creation_runner
232 return region_obj
232 return region_obj
233
233
234
234
235 def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str):
235 def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str) -> int:
236 from . import CLEAR_DELETE, CLEAR_INVALIDATE
236 from . import CLEAR_DELETE, CLEAR_INVALIDATE
237
237
238 if not isinstance(cache_region, RhodeCodeCacheRegion):
238 if not isinstance(cache_region, RhodeCodeCacheRegion):
239 cache_region = get_or_create_region(cache_region, cache_namespace_uid)
239 cache_region = get_or_create_region(cache_region, cache_namespace_uid)
240 log.debug('clearing cache region: %s with method=%s', cache_region, method)
240 log.debug('clearing cache region: %s [prefix:%s] with method=%s',
241 cache_region, cache_namespace_uid, method)
241
242
242 num_affected_keys = None
243 num_affected_keys = 0
243
244
244 if method == CLEAR_INVALIDATE:
245 if method == CLEAR_INVALIDATE:
245 # NOTE: The CacheRegion.invalidate() method’s default mode of
246 # NOTE: The CacheRegion.invalidate() method’s default mode of
246 # operation is to set a timestamp local to this CacheRegion in this Python process only.
247 # operation is to set a timestamp local to this CacheRegion in this Python process only.
247 # It does not impact other Python processes or regions as the timestamp is only stored locally in memory.
248 # It does not impact other Python processes or regions as the timestamp is only stored locally in memory.
248 cache_region.invalidate(hard=True)
249 cache_region.invalidate(hard=True)
249
250
250 if method == CLEAR_DELETE:
251 if method == CLEAR_DELETE:
251 cache_keys = cache_region.backend.list_keys(prefix=cache_namespace_uid)
252 num_affected_keys = cache_region.backend.delete_multi_by_prefix(prefix=cache_namespace_uid)
252 num_affected_keys = len(cache_keys)
253 if num_affected_keys:
254 cache_region.delete_multi(cache_keys)
255
253
256 return num_affected_keys
254 return num_affected_keys
257
255
258
256
259 class ActiveRegionCache(object):
257 class ActiveRegionCache(object):
260 def __init__(self, context, cache_data):
258 def __init__(self, context, cache_data):
261 self.context = context
259 self.context = context
262 self.cache_data = cache_data
260 self.cache_data = cache_data
263
261
264 def should_invalidate(self):
262 def should_invalidate(self):
265 return False
263 return False
266
264
267
265
268 class FreshRegionCache(object):
266 class FreshRegionCache(object):
269 def __init__(self, context, cache_data):
267 def __init__(self, context, cache_data):
270 self.context = context
268 self.context = context
271 self.cache_data = cache_data
269 self.cache_data = cache_data
272
270
273 def should_invalidate(self):
271 def should_invalidate(self):
274 return True
272 return True
275
273
276
274
277 class InvalidationContext(object):
275 class InvalidationContext(object):
278 """
276 """
279 usage::
277 usage::
280
278
281 from rhodecode.lib import rc_cache
279 from rhodecode.lib import rc_cache
282
280
283 cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
281 cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
284 region = rc_cache.get_or_create_region('some_region', cache_namespace_uid)
282 region = rc_cache.get_or_create_region('some_region', cache_namespace_uid)
285
283
286 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
284 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
287 def heavy_compute(cache_name, param1, param2):
285 def heavy_compute(cache_name, param1, param2):
288 print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
286 print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
289
287
290 # invalidation namespace is shared namespace key for all process caches
288 # invalidation namespace is shared namespace key for all process caches
291 # we use it to send a global signal
289 # we use it to send a global signal
292 invalidation_namespace = 'repo_cache:1'
290 invalidation_namespace = 'repo_cache:1'
293
291
294 inv_context_manager = rc_cache.InvalidationContext(
292 inv_context_manager = rc_cache.InvalidationContext(
295 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
293 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
296 with inv_context_manager as invalidation_context:
294 with inv_context_manager as invalidation_context:
297 args = ('one', 'two')
295 args = ('one', 'two')
298 # re-compute and store cache if we get invalidate signal
296 # re-compute and store cache if we get invalidate signal
299 if invalidation_context.should_invalidate():
297 if invalidation_context.should_invalidate():
300 result = heavy_compute.refresh(*args)
298 result = heavy_compute.refresh(*args)
301 else:
299 else:
302 result = heavy_compute(*args)
300 result = heavy_compute(*args)
303
301
304 compute_time = inv_context_manager.compute_time
302 compute_time = inv_context_manager.compute_time
305 log.debug('result computed in %.4fs', compute_time)
303 log.debug('result computed in %.4fs', compute_time)
306
304
307 # To send global invalidation signal, simply run
305 # To send global invalidation signal, simply run
308 CacheKey.set_invalidate(invalidation_namespace)
306 CacheKey.set_invalidate(invalidation_namespace)
309
307
310 """
308 """
311
309
312 def __repr__(self):
310 def __repr__(self):
313 return f'<InvalidationContext:{self.cache_key}[{self.uid}]>'
311 return f'<InvalidationContext:{self.cache_key}[{self.uid}]>'
314
312
315 def __init__(self, uid, invalidation_namespace='',
313 def __init__(self, uid, invalidation_namespace='',
316 raise_exception=False, thread_scoped=None):
314 raise_exception=False, thread_scoped=None):
317 self.uid = uid
315 self.uid = uid
318 self.invalidation_namespace = invalidation_namespace
316 self.invalidation_namespace = invalidation_namespace
319 self.raise_exception = raise_exception
317 self.raise_exception = raise_exception
320 self.proc_id = rhodecode.CONFIG.get('instance_id') or 'DEFAULT'
318 self.proc_id = rhodecode.CONFIG.get('instance_id') or 'DEFAULT'
321 self.thread_id = 'global'
319 self.thread_id = 'global'
322
320
323 if thread_scoped is None:
321 if thread_scoped is None:
324 # if we set "default" we can override this via .ini settings
322 # if we set "default" we can override this via .ini settings
325 thread_scoped = rhodecode.ConfigGet().get_bool('cache_thread_scoped')
323 thread_scoped = rhodecode.ConfigGet().get_bool('cache_thread_scoped')
326
324
327 # Append the thread id to the cache key if this invalidation context
325 # Append the thread id to the cache key if this invalidation context
328 # should be scoped to the current thread.
326 # should be scoped to the current thread.
329 if thread_scoped is True:
327 if thread_scoped is True:
330 self.thread_id = threading.current_thread().ident
328 self.thread_id = threading.current_thread().ident
331
329
332 self.cache_key = compute_key_from_params(uid)
330 self.cache_key = compute_key_from_params(uid)
333 self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
331 self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
334 self.proc_id, self.thread_id, self.cache_key)
332 self.proc_id, self.thread_id, self.cache_key)
335 self.proc_key = f'proc:{self.proc_id}'
333 self.proc_key = f'proc:{self.proc_id}'
336 self.compute_time = 0
334 self.compute_time = 0
337
335
338 def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''):
336 def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''):
339 from rhodecode.model.db import CacheKey
337 from rhodecode.model.db import CacheKey
340
338
341 invalidation_namespace = invalidation_namespace or self.invalidation_namespace
339 invalidation_namespace = invalidation_namespace or self.invalidation_namespace
342 # fetch all cache keys for this namespace and convert them to a map to find if we
340 # fetch all cache keys for this namespace and convert them to a map to find if we
343 # have specific cache_key object registered. We do this because we want to have
341 # have specific cache_key object registered. We do this because we want to have
344 # all consistent cache_state_uid for newly registered objects
342 # all consistent cache_state_uid for newly registered objects
345 cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace)
343 cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace)
346 cache_obj = cache_obj_map.get(self.cache_key)
344 cache_obj = cache_obj_map.get(self.cache_key)
347 log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)
345 log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)
348
346
349 if not cache_obj:
347 if not cache_obj:
350 new_cache_args = invalidation_namespace
348 new_cache_args = invalidation_namespace
351 first_cache_obj = next(iter(cache_obj_map.values())) if cache_obj_map else None
349 first_cache_obj = next(iter(cache_obj_map.values())) if cache_obj_map else None
352 cache_state_uid = None
350 cache_state_uid = None
353 if first_cache_obj:
351 if first_cache_obj:
354 cache_state_uid = first_cache_obj.cache_state_uid
352 cache_state_uid = first_cache_obj.cache_state_uid
355 cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args,
353 cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args,
356 cache_state_uid=cache_state_uid)
354 cache_state_uid=cache_state_uid)
357 cache_key_meta.cache_keys_by_pid.add(self.proc_key)
355 cache_key_meta.cache_keys_by_pid.add(self.proc_key)
358
356
359 return cache_obj
357 return cache_obj
360
358
361 def __enter__(self):
359 def __enter__(self):
362 """
360 """
363 Test if current object is valid, and return CacheRegion function
361 Test if current object is valid, and return CacheRegion function
364 that does invalidation and calculation
362 that does invalidation and calculation
365 """
363 """
366 log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
364 log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
367 # register or get a new key based on uid
365 # register or get a new key based on uid
368 self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid)
366 self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid)
369 cache_data = self.cache_obj.get_dict()
367 cache_data = self.cache_obj.get_dict()
370 self._start_time = time.time()
368 self._start_time = time.time()
371 if self.cache_obj.cache_active:
369 if self.cache_obj.cache_active:
372 # means our cache obj is existing and marked as it's
370 # means our cache obj is existing and marked as it's
373 # cache is not outdated, we return ActiveRegionCache
371 # cache is not outdated, we return ActiveRegionCache
374 self.skip_cache_active_change = True
372 self.skip_cache_active_change = True
375
373
376 return ActiveRegionCache(context=self, cache_data=cache_data)
374 return ActiveRegionCache(context=self, cache_data=cache_data)
377
375
378 # the key is either not existing or set to False, we return
376 # the key is either not existing or set to False, we return
379 # the real invalidator which re-computes value. We additionally set
377 # the real invalidator which re-computes value. We additionally set
380 # the flag to actually update the Database objects
378 # the flag to actually update the Database objects
381 self.skip_cache_active_change = False
379 self.skip_cache_active_change = False
382 return FreshRegionCache(context=self, cache_data=cache_data)
380 return FreshRegionCache(context=self, cache_data=cache_data)
383
381
384 def __exit__(self, exc_type, exc_val, exc_tb):
382 def __exit__(self, exc_type, exc_val, exc_tb):
385 from rhodecode.model.db import IntegrityError, Session
383 from rhodecode.model.db import IntegrityError, Session
386
384
387 # save compute time
385 # save compute time
388 self.compute_time = time.time() - self._start_time
386 self.compute_time = time.time() - self._start_time
389
387
390 if self.skip_cache_active_change:
388 if self.skip_cache_active_change:
391 return
389 return
392
390
393 try:
391 try:
394 self.cache_obj.cache_active = True
392 self.cache_obj.cache_active = True
395 Session().add(self.cache_obj)
393 Session().add(self.cache_obj)
396 Session().commit()
394 Session().commit()
397 except IntegrityError:
395 except IntegrityError:
398 # if we catch integrity error, it means we inserted this object
396 # if we catch integrity error, it means we inserted this object
399 # assumption is that's really an edge race-condition case and
397 # assumption is that's really an edge race-condition case and
400 # it's safe is to skip it
398 # it's safe is to skip it
401 Session().rollback()
399 Session().rollback()
402 except Exception:
400 except Exception:
403 log.exception('Failed to commit on cache key update')
401 log.exception('Failed to commit on cache key update')
404 Session().rollback()
402 Session().rollback()
405 if self.raise_exception:
403 if self.raise_exception:
406 raise
404 raise
General Comments 0
You need to be logged in to leave comments. Login now