##// END OF EJS Templates
fix(permission-flush): use delete method for permission cache invalidation as it's multi-process safe.
super-admin -
r5266:a1331d35 default
parent child Browse files
Show More
@@ -1,191 +1,191 b''
# required for pushd to work..
SHELL = /bin/bash


# set by: PATH_TO_OUTDATED_PACKAGES=/some/path/outdated_packages.py
# NOTE: recursive (=) assignment so the value is read from the environment
# at the point of use, not at parse time.
OUTDATED_PACKAGES = ${PATH_TO_OUTDATED_PACKAGES}

# Use $(MAKE) (not bare `make`) for sub-invocations: it propagates -n/-j
# and the jobserver to the child make.
.PHONY: clean
## Cleanup compiled and cache py files
clean:
	$(MAKE) test-clean
	find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' -o -iname '*.orig' \) -exec rm '{}' ';'
	find . -type d -name "build" -prune -exec rm -rf '{}' ';'

15 15
# Aggregate target: clean test artifacts, then run the suite.
# $(MAKE) instead of `make` so flags/jobserver propagate to sub-makes.
.PHONY: test
## run test-clean and tests
test:
	$(MAKE) test-clean
	$(MAKE) test-only

22 22
# Removes coverage/report artifacts and Python bytecode caches only;
# does not touch source or build outputs (see `clean` for those).
.PHONY: test-clean
## run test-clean and tests
test-clean:
	rm -rf coverage.xml htmlcov junit.xml pylint.log result
	find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
	find . -type f \( -iname '.coverage.*' \) -exec rm '{}' ';'

30 30
# PYTHONHASHSEED=random surfaces tests that depend on dict/set ordering.
.PHONY: test-only
## Run tests only without cleanup
test-only:
	PYTHONHASHSEED=random \
	py.test -x -vv -r xw -p no:sugar \
		--cov-report=term-missing --cov-report=html \
		--cov=rhodecode rhodecode

39 39
# Same as test-only but overrides the SQLAlchemy URL to a local MySQL.
# NOTE(review): credentials are throwaway local-test values — do not reuse.
.PHONY: test-only-mysql
## run tests against mysql
test-only-mysql:
	PYTHONHASHSEED=random \
	py.test -x -vv -r xw -p no:sugar \
		--cov-report=term-missing --cov-report=html \
		--ini-config-override='{"app:main": {"sqlalchemy.db1.url": "mysql://root:qweqwe@localhost/rhodecode_test?charset=utf8"}}' \
		--cov=rhodecode rhodecode

49 49
# Same as test-only but overrides the SQLAlchemy URL to a local PostgreSQL.
.PHONY: test-only-postgres
## run tests against postgres
test-only-postgres:
	PYTHONHASHSEED=random \
	py.test -x -vv -r xw -p no:sugar \
		--cov-report=term-missing --cov-report=html \
		--ini-config-override='{"app:main": {"sqlalchemy.db1.url": "postgresql://postgres:qweqwe@localhost/rhodecode_test"}}' \
		--cov=rhodecode rhodecode

# Static analysis; several rules are suppressed project-wide and generated
# trees (dbmigrate, .eggs, .dev) are excluded.
.PHONY: ruff-check
## run a ruff analysis
ruff-check:
	ruff check --ignore F401 --ignore I001 --ignore E402 --ignore E501 --ignore F841 --exclude rhodecode/lib/dbmigrate --exclude .eggs --exclude .dev .

64 64
# $(CURDIR) instead of $(PWD): PWD is only inherited from the environment
# and is wrong/empty under `make -C` or non-interactive invocations;
# CURDIR is always set by make itself.
.PHONY: docs
## build docs
docs:
	(cd docs; docker run --rm -v $(CURDIR):/project --workdir=/project/docs sphinx-doc-build-rc make clean html)

70 70
# $(CURDIR) is make-provided and reliable; $(PWD) comes from the environment.
.PHONY: docs-clean
## Cleanup docs
docs-clean:
	(cd docs; docker run --rm -v $(CURDIR):/project --workdir=/project/docs sphinx-doc-build-rc make clean)

76 76
# $(CURDIR) is make-provided and reliable; $(PWD) comes from the environment.
.PHONY: docs-cleanup
## Cleanup docs
docs-cleanup:
	(cd docs; docker run --rm -v $(CURDIR):/project --workdir=/project/docs sphinx-doc-build-rc make cleanup)

82 82
# Builds JS bundles inside the pinned static-files-build image, verifies the
# generated assets, then drops node_modules so it never leaks into the repo.
# $(CURDIR) instead of $(PWD): CURDIR is always set by make itself.
.PHONY: web-build
## Build JS packages static/js
web-build:
	docker run -it --rm -v $(CURDIR):/project --workdir=/project rhodecode/static-files-build:16 -c "npm install && /project/node_modules/.bin/grunt"
	# run static file check
	./rhodecode/tests/scripts/static-file-check.sh rhodecode/public/
	rm -rf node_modules

91 91
# Runs the external report script pointed at by PATH_TO_OUTDATED_PACKAGES.
.PHONY: pip-packages
## Show outdated packages
pip-packages:
	python ${OUTDATED_PACKAGES}

97 97
# PEP 517 build: produces sdist and wheel under dist/.
.PHONY: build
## Build sdist/egg
build:
	python -m build

103 103
# Dev-container shell bootstrap: install zsh + carapace, (re)install
# oh-my-zsh, enable carapace completion, then exec into zsh.
.PHONY: dev-sh
## make dev-sh
dev-sh:
	# `sudo` belongs on tee (the writer), not on echo
	echo "deb [trusted=yes] https://apt.fury.io/rsteube/ /" | sudo tee -a "/etc/apt/sources.list.d/fury.list"
	sudo apt-get update
	sudo apt-get install -y zsh carapace-bin
	rm -rf /home/rhodecode/.oh-my-zsh
	curl https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh | sh
	# fix: zsh reads ~/.zshrc (was ~/.zsrc, never sourced); append (>>) so we
	# don't clobber the rc file the oh-my-zsh installer just generated
	echo "source <(carapace _carapace)" >> /home/rhodecode/.zshrc
	PROMPT='%(?.%F{green}√.%F{red}?%?)%f %B%F{240}%1~%f%b %# ' zsh

115 115
# DESTRUCTIVE: uninstalls every non-editable, non-VCS-pinned package and
# wipes /tmp. Intended only for throwaway dev containers.
.PHONY: dev-cleanup
## Cleanup: pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y
dev-cleanup:
	pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y
	rm -rf /tmp/*

122 122
# Builds the sibling vcsserver dev-env first, pre-builds wheels into the
# shared pip cache, then installs everything offline from that cache.
# $(MAKE) (not `make`) so -j/-n and the jobserver reach the sub-make.
.PHONY: dev-env
## make dev-env based on the requirements files and install develop of packages
dev-env:
	pip install build virtualenv
	pushd ../rhodecode-vcsserver/ && $(MAKE) dev-env && popd
	pip wheel --wheel-dir=/home/rhodecode/.cache/pip/wheels -r requirements.txt -r requirements_rc_tools.txt -r requirements_test.txt -r requirements_debug.txt
	pip install --no-index --find-links=/home/rhodecode/.cache/pip/wheels -r requirements.txt -r requirements_rc_tools.txt -r requirements_test.txt -r requirements_debug.txt
	pip install -e .

132 132
# Convenience wrapper. $(MAKE) for proper flag propagation; && (was ;) so a
# failed dev-env doesn't silently continue into the shell setup.
.PHONY: sh
## shortcut for make dev-sh dev-env
sh:
	($(MAKE) dev-env && $(MAKE) dev-sh)

138 138
# Single-process auto-reloading dev server (pserve); use dev-srv-g for a
# gunicorn multi-worker setup closer to production.
.PHONY: dev-srv
## run develop server instance, docker exec -it $(docker ps -q --filter 'name=dev-enterprise-ce') /bin/bash
dev-srv:
	pserve --reload .dev/dev.ini

144 144
# Gunicorn dev server; worker count comes from .dev/gunicorn_config.py.
# NOTE: the diff dump carried both the old (--workers=2) and new recipe
# lines; only the committed variant with --timeout=120 is kept.
.PHONY: dev-srv-g
## run gunicorn multi process workers
dev-srv-g:
	gunicorn --paste .dev/dev.ini --bind=0.0.0.0:10020 --config=.dev/gunicorn_config.py --timeout=120

151 151 # Default command on calling make
152 152 .DEFAULT_GOAL := show-help
153 153
154 154 .PHONY: show-help
155 155 show-help:
156 156 @echo "$$(tput bold)Available rules:$$(tput sgr0)"
157 157 @echo
158 158 @sed -n -e "/^## / { \
159 159 h; \
160 160 s/.*//; \
161 161 :doc" \
162 162 -e "H; \
163 163 n; \
164 164 s/^## //; \
165 165 t doc" \
166 166 -e "s/:.*//; \
167 167 G; \
168 168 s/\\n## /---/; \
169 169 s/\\n/ /g; \
170 170 p; \
171 171 }" ${MAKEFILE_LIST} \
172 172 | LC_ALL='C' sort --ignore-case \
173 173 | awk -F '---' \
174 174 -v ncol=$$(tput cols) \
175 175 -v indent=19 \
176 176 -v col_on="$$(tput setaf 6)" \
177 177 -v col_off="$$(tput sgr0)" \
178 178 '{ \
179 179 printf "%s%*s%s ", col_on, -indent, $$1, col_off; \
180 180 n = split($$2, words, " "); \
181 181 line_length = ncol - indent; \
182 182 for (i = 1; i <= n; i++) { \
183 183 line_length -= length(words[i]) + 1; \
184 184 if (line_length <= 0) { \
185 185 line_length = ncol - indent - length(words[i]) - 1; \
186 186 printf "\n%*s ", -indent, " "; \
187 187 } \
188 188 printf "%s ", words[i]; \
189 189 } \
190 190 printf "\n"; \
191 191 }'
@@ -1,53 +1,53 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20
21 21 from rhodecode import events
22 22 from rhodecode.lib import rc_cache
23 23
24 24 log = logging.getLogger(__name__)
25 25
# Namespace templates of every permission-related cache region; the trailing
# `{}` placeholder is later filled with a user_id. A permission flush must
# clear all of these for each affected user.
cache_namespaces = [
    f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{{}}',
    f'cache_user_repo_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{{}}',
    f'cache_user_user_group_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{{}}',
    f'cache_user_repo_group_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{{}}'
]
34 34
35 35
def trigger_user_permission_flush(event):
    """
    Subscriber to the `UserPermissionsChange` event. Flushes the permission
    caches of every affected user so changed permissions take effect
    right away.

    Uses ``CLEAR_DELETE`` (hard key removal) rather than ``CLEAR_INVALIDATE``
    because deletion is safe when multiple worker processes share the cache
    backend, whereas invalidation is not (per the fix this change belongs to).

    :param event: ``UserPermissionsChange`` instance exposing ``user_ids``.
    """
    # de-duplicate; the event may report the same user more than once
    affected_user_ids = set(event.user_ids)
    for user_id in affected_user_ids:
        for cache_namespace_uid_tmpl in cache_namespaces:
            cache_namespace_uid = cache_namespace_uid_tmpl.format(user_id)
            del_keys = rc_cache.clear_cache_namespace(
                'cache_perms', cache_namespace_uid, method=rc_cache.CLEAR_DELETE)
            log.debug('Invalidated %s cache keys for user_id: %s and namespace %s',
                      del_keys, user_id, cache_namespace_uid)
51 51
def includeme(config):
    """Pyramid inclusion hook: register the permission-flush subscriber."""
    config.add_subscriber(trigger_user_permission_flush, events.UserPermissionsChange)
@@ -1,1321 +1,1321 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20 import datetime
21 21 import formencode
22 22 import formencode.htmlfill
23 23
24 24 from pyramid.httpexceptions import HTTPFound
25 25 from pyramid.renderers import render
26 26 from pyramid.response import Response
27 27
28 28 from rhodecode import events
29 29 from rhodecode.apps._base import BaseAppView, DataGridAppView, UserAppView
30 30 from rhodecode.apps.ssh_support import SshKeyFileChangeEvent
31 31 from rhodecode.authentication.base import get_authn_registry, RhodeCodeExternalAuthPlugin
32 32 from rhodecode.authentication.plugins import auth_rhodecode
33 33 from rhodecode.events import trigger
34 34 from rhodecode.model.db import true, UserNotice
35 35
36 36 from rhodecode.lib import audit_logger, rc_cache, auth
37 37 from rhodecode.lib.exceptions import (
38 38 UserCreationError, UserOwnsReposException, UserOwnsRepoGroupsException,
39 39 UserOwnsUserGroupsException, UserOwnsPullRequestsException,
40 40 UserOwnsArtifactsException, DefaultUserException)
41 41 from rhodecode.lib import ext_json
42 42 from rhodecode.lib.auth import (
43 43 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
44 44 from rhodecode.lib import helpers as h
45 45 from rhodecode.lib.helpers import SqlPage
46 46 from rhodecode.lib.utils2 import safe_int, safe_str, AttributeDict
47 47 from rhodecode.model.auth_token import AuthTokenModel
48 48 from rhodecode.model.forms import (
49 49 UserForm, UserIndividualPermissionsForm, UserPermissionsForm,
50 50 UserExtraEmailForm, UserExtraIpForm)
51 51 from rhodecode.model.permission import PermissionModel
52 52 from rhodecode.model.repo_group import RepoGroupModel
53 53 from rhodecode.model.ssh_key import SshKeyModel
54 54 from rhodecode.model.user import UserModel
55 55 from rhodecode.model.user_group import UserGroupModel
56 56 from rhodecode.model.db import (
57 57 or_, coalesce,IntegrityError, User, UserGroup, UserIpMap, UserEmailMap,
58 58 UserApiKeys, UserSshKeys, RepoGroup)
59 59 from rhodecode.model.meta import Session
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63
class AdminUsersView(BaseAppView, DataGridAppView):
    """Admin panel views for listing and creating users (hg.admin only)."""

    def load_default_context(self):
        # Minimal template context; list views need no extra data here.
        c = self._get_local_tmpl_context()
        return c

    @LoginRequired()
    @HasPermissionAllDecorator('hg.admin')
    def users_list(self):
        """Render the users-list page shell; rows load via users_list_data."""
        c = self.load_default_context()
        return self._get_template_context(c)

    @LoginRequired()
    @HasPermissionAllDecorator('hg.admin')
    def users_list_data(self):
        """
        DataTables JSON endpoint feeding the users grid: supports paging,
        free-text search over username/email/name, and column sorting.
        The built-in default user is always excluded.
        """
        self.load_default_context()
        # grid column name -> User model attribute name
        column_map = {
            'first_name': 'name',
            'last_name': 'lastname',
        }
        draw, start, limit = self._extract_chunk(self.request)
        search_q, order_by, order_dir = self._extract_ordering(
            self.request, column_map=column_map)
        _render = self.request.get_partial_renderer(
            'rhodecode:templates/data_table/_dt_elements.mako')

        def user_actions(user_id, username):
            # per-row action buttons rendered from the shared mako partial
            return _render("user_actions", user_id, username)

        users_data_total_count = User.query()\
            .filter(User.username != User.DEFAULT_USER) \
            .count()

        users_data_total_inactive_count = User.query()\
            .filter(User.username != User.DEFAULT_USER) \
            .count()

        # json generate
        base_q = User.query().filter(User.username != User.DEFAULT_USER)
        base_inactive_q = base_q.filter(User.active != true())

        if search_q:
            like_expression = '%{}%'.format(safe_str(search_q))
            base_q = base_q.filter(or_(
                User.username.ilike(like_expression),
                User._email.ilike(like_expression),
                User.name.ilike(like_expression),
                User.lastname.ilike(like_expression),
            ))
            # re-derive the inactive query from the filtered base query
            base_inactive_q = base_q.filter(User.active != true())

        users_data_total_filtered_count = base_q.count()
        users_data_total_filtered_inactive_count = base_inactive_q.count()

        sort_col = getattr(User, order_by, None)
        if sort_col:
            if order_dir == 'asc':
                # handle null values properly to order by NULL last
                if order_by in ['last_activity']:
                    sort_col = coalesce(sort_col, datetime.date.max)
                sort_col = sort_col.asc()
            else:
                # handle null values properly to order by NULL last
                if order_by in ['last_activity']:
                    sort_col = coalesce(sort_col, datetime.date.min)
                sort_col = sort_col.desc()

        base_q = base_q.order_by(sort_col)
        base_q = base_q.offset(start).limit(limit)

        users_list = base_q.all()

        users_data = []
        for user in users_list:
            users_data.append({
                "username": h.gravatar_with_user(self.request, user.username),
                "email": user.email,
                "first_name": user.first_name,
                "last_name": user.last_name,
                "last_login": h.format_date(user.last_login),
                "last_activity": h.format_date(user.last_activity),
                "active": h.bool2icon(user.active),
                "active_raw": user.active,
                "admin": h.bool2icon(user.admin),
                "extern_type": user.extern_type,
                "extern_name": user.extern_name,
                "action": user_actions(user.user_id, user.username),
            })
        data = ({
            'draw': draw,
            'data': users_data,
            'recordsTotal': users_data_total_count,
            'recordsFiltered': users_data_total_filtered_count,
            'recordsTotalInactive': users_data_total_inactive_count,
            'recordsFilteredInactive': users_data_total_filtered_inactive_count
        })

        return data

    def _set_personal_repo_group_template_vars(self, c_obj):
        # Populate template vars previewing the personal repo-group name;
        # placeholders are substituted client-side as the form is filled.
        DummyUser = AttributeDict({
            'username': '${username}',
            'user_id': '${user_id}',
        })
        c_obj.default_create_repo_group = RepoGroupModel() \
            .get_default_create_personal_repo_group()
        c_obj.personal_repo_group_name = RepoGroupModel() \
            .get_personal_group_name(DummyUser)

    @LoginRequired()
    @HasPermissionAllDecorator('hg.admin')
    def users_new(self):
        """Render the new-user form."""
        _ = self.request.translate
        c = self.load_default_context()
        c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.uid
        self._set_personal_repo_group_template_vars(c)
        return self._get_template_context(c)

    @LoginRequired()
    @HasPermissionAllDecorator('hg.admin')
    @CSRFRequired()
    def users_create(self):
        """
        POST handler creating a new user from the submitted form.
        On validation errors the form is re-rendered with inline errors;
        otherwise commits, audit-logs, and redirects to the users list.
        """
        _ = self.request.translate
        c = self.load_default_context()
        c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.uid
        user_model = UserModel()
        user_form = UserForm(self.request.translate)()
        try:
            form_result = user_form.to_python(dict(self.request.POST))
            user = user_model.create(form_result)
            # flush so user.user_id is assigned before building links/audit data
            Session().flush()
            creation_data = user.get_api_data()
            username = form_result['username']

            audit_logger.store_web(
                'user.create', action_data={'data': creation_data},
                user=c.rhodecode_user)

            user_link = h.link_to(
                h.escape(username),
                h.route_path('user_edit', user_id=user.user_id))
            h.flash(h.literal(_('Created user %(user_link)s')
                              % {'user_link': user_link}), category='success')
            Session().commit()
        except formencode.Invalid as errors:
            self._set_personal_repo_group_template_vars(c)
            data = render(
                'rhodecode:templates/admin/users/user_add.mako',
                self._get_template_context(c), self.request)
            html = formencode.htmlfill.render(
                data,
                defaults=errors.value,
                errors=errors.unpack_errors() or {},
                prefix_error=False,
                encoding="UTF-8",
                force_defaults=False
            )
            return Response(html)
        except UserCreationError as e:
            h.flash(safe_str(e), 'error')
        except Exception:
            log.exception("Exception creation of user")
            h.flash(_('Error occurred during creation of user %s')
                    % self.request.POST.get('username'), category='error')
        raise HTTPFound(h.route_path('users'))

231 231
class UsersView(UserAppView):
    """
    This view has alternative version inside EE, if modified please take a look
    in there as well.
    """
    # fix: the docstring previously sat AFTER this attribute, making it a
    # dead bare-string statement instead of the class ``__doc__``.
    # Controls whether auth tokens may be scoped to a repo (EE overrides).
    ALLOW_SCOPED_TOKENS = False

    def get_auth_plugins(self):
        """
        Return auth plugins selectable as a user's ``extern_type``:
        every enabled external plugin plus the builtin 'rhodecode' one.
        If the user's current plugin is disabled it is still appended so
        the edit form can display the existing binding.
        """
        valid_plugins = []
        authn_registry = get_authn_registry(self.request.registry)
        for plugin in authn_registry.get_plugins_for_authentication():
            if isinstance(plugin, RhodeCodeExternalAuthPlugin):
                valid_plugins.append(plugin)
            elif plugin.name == 'rhodecode':
                valid_plugins.append(plugin)

        # extend our choices if user has set a bound plugin which isn't enabled at the
        # moment
        extern_type = self.db_user.extern_type
        if extern_type not in [x.uid for x in valid_plugins]:
            try:
                plugin = authn_registry.get_plugin_by_uid(extern_type)
                if plugin:
                    valid_plugins.append(plugin)

            except Exception:
                # best-effort: an unknown/broken plugin uid must not break the form
                log.exception(
                    f'Could not extend user plugins with `{extern_type}`')
        return valid_plugins
261 261
    def load_default_context(self):
        """
        Build the shared template context for all user-edit views:
        language and extern-type choices, and global permission choices.
        """
        req = self.request

        c = self._get_local_tmpl_context()
        c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS
        c.allowed_languages = [
            ('en', 'English (en)'),
            ('de', 'German (de)'),
            ('fr', 'French (fr)'),
            ('it', 'Italian (it)'),
            ('ja', 'Japanese (ja)'),
            ('pl', 'Polish (pl)'),
            ('pt', 'Portuguese (pt)'),
            ('ru', 'Russian (ru)'),
            ('zh', 'Chinese (zh)'),
        ]

        c.allowed_extern_types = [
            (x.uid, x.get_display_name()) for x in self.get_auth_plugins()
        ]
        perms = req.registry.settings.get('available_permissions')
        if not perms:
            # inject info about available permissions
            auth.set_available_permissions(req.registry.settings)

        c.available_permissions = req.registry.settings['available_permissions']
        PermissionModel().set_global_permission_choices(
            c, gettext_translator=req.translate)

        return c
292 292
    @LoginRequired()
    @HasPermissionAllDecorator('hg.admin')
    @CSRFRequired()
    def user_update(self):
        """
        POST handler updating an existing user's profile. Re-renders the
        edit form with inline errors on validation failure; otherwise
        commits, audit-logs, and redirects back to the edit page.
        """
        _ = self.request.translate
        c = self.load_default_context()

        user_id = self.db_user_id
        c.user = self.db_user

        c.active = 'profile'
        c.extern_type = c.user.extern_type
        c.extern_name = c.user.extern_name
        c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
        available_languages = [x[0] for x in c.allowed_languages]
        _form = UserForm(self.request.translate, edit=True,
                         available_languages=available_languages,
                         old_data={'user_id': user_id,
                                   'email': c.user.email})()

        # explicit edit mode (?edit=1) lifts the external-account username lock
        c.edit_mode = self.request.POST.get('edit') == '1'
        form_result = {}
        old_values = c.user.get_api_data()
        try:
            form_result = _form.to_python(dict(self.request.POST))
            skip_attrs = ['extern_name']
            # TODO: plugin should define if username can be updated

            if c.extern_type != "rhodecode" and not c.edit_mode:
                # forbid updating username for external accounts
                skip_attrs.append('username')

            UserModel().update_user(
                user_id, skip_attrs=skip_attrs, **form_result)

            audit_logger.store_web(
                'user.edit', action_data={'old_data': old_values},
                user=c.rhodecode_user)

            Session().commit()
            h.flash(_('User updated successfully'), category='success')
        except formencode.Invalid as errors:
            data = render(
                'rhodecode:templates/admin/users/user_edit.mako',
                self._get_template_context(c), self.request)
            html = formencode.htmlfill.render(
                data,
                defaults=errors.value,
                errors=errors.unpack_errors() or {},
                prefix_error=False,
                encoding="UTF-8",
                force_defaults=False
            )
            return Response(html)
        except UserCreationError as e:
            h.flash(safe_str(e), 'error')
        except Exception:
            log.exception("Exception updating user")
            h.flash(_('Error occurred during update of user %s')
                    % form_result.get('username'), category='error')
        raise HTTPFound(h.route_path('user_edit', user_id=user_id))
354 354
    @LoginRequired()
    @HasPermissionAllDecorator('hg.admin')
    @CSRFRequired()
    def user_delete(self):
        """
        POST handler deleting a user. Objects the user owns (repos, repo
        groups, user groups, pull requests, artifacts) are either detached
        to a new owner or deleted, as selected in the form; a matching
        flash message is emitted per handled category. Always redirects
        to the users list.
        """
        _ = self.request.translate
        c = self.load_default_context()
        c.user = self.db_user

        # counts of owned objects, used by the flash helpers below
        _repos = len(c.user.repositories)
        _repo_groups = len(c.user.repository_groups)
        _user_groups = len(c.user.user_groups)
        _pull_requests = len(c.user.user_pull_requests)
        _artifacts = len(c.user.artifacts)

        # None means "nothing to handle"; otherwise 'detach' or 'delete'
        handle_repos = None
        handle_repo_groups = None
        handle_user_groups = None
        handle_pull_requests = None
        handle_artifacts = None

        # calls for flash of handle based on handle case detach or delete
        def set_handle_flash_repos():
            handle = handle_repos
            if handle == 'detach':
                h.flash(_('Detached %s repositories') % _repos,
                        category='success')
            elif handle == 'delete':
                h.flash(_('Deleted %s repositories') % _repos,
                        category='success')

        def set_handle_flash_repo_groups():
            handle = handle_repo_groups
            if handle == 'detach':
                h.flash(_('Detached %s repository groups') % _repo_groups,
                        category='success')
            elif handle == 'delete':
                h.flash(_('Deleted %s repository groups') % _repo_groups,
                        category='success')

        def set_handle_flash_user_groups():
            handle = handle_user_groups
            if handle == 'detach':
                h.flash(_('Detached %s user groups') % _user_groups,
                        category='success')
            elif handle == 'delete':
                h.flash(_('Deleted %s user groups') % _user_groups,
                        category='success')

        def set_handle_flash_pull_requests():
            handle = handle_pull_requests
            if handle == 'detach':
                h.flash(_('Detached %s pull requests') % _pull_requests,
                        category='success')
            elif handle == 'delete':
                h.flash(_('Deleted %s pull requests') % _pull_requests,
                        category='success')

        def set_handle_flash_artifacts():
            handle = handle_artifacts
            if handle == 'detach':
                h.flash(_('Detached %s artifacts') % _artifacts,
                        category='success')
            elif handle == 'delete':
                h.flash(_('Deleted %s artifacts') % _artifacts,
                        category='success')

        # detached objects default to the first super-admin as new owner
        handle_user = User.get_first_super_admin()
        handle_user_id = safe_int(self.request.POST.get('detach_user_id'))
        if handle_user_id:
            # NOTE(marcink): we get new owner for objects...
            handle_user = User.get_or_404(handle_user_id)

        if _repos and self.request.POST.get('user_repos'):
            handle_repos = self.request.POST['user_repos']

        if _repo_groups and self.request.POST.get('user_repo_groups'):
            handle_repo_groups = self.request.POST['user_repo_groups']

        if _user_groups and self.request.POST.get('user_user_groups'):
            handle_user_groups = self.request.POST['user_user_groups']

        if _pull_requests and self.request.POST.get('user_pull_requests'):
            handle_pull_requests = self.request.POST['user_pull_requests']

        if _artifacts and self.request.POST.get('user_artifacts'):
            handle_artifacts = self.request.POST['user_artifacts']

        old_values = c.user.get_api_data()

        try:

            UserModel().delete(
                c.user,
                handle_repos=handle_repos,
                handle_repo_groups=handle_repo_groups,
                handle_user_groups=handle_user_groups,
                handle_pull_requests=handle_pull_requests,
                handle_artifacts=handle_artifacts,
                handle_new_owner=handle_user
            )

            audit_logger.store_web(
                'user.delete', action_data={'old_data': old_values},
                user=c.rhodecode_user)

            Session().commit()
            set_handle_flash_repos()
            set_handle_flash_repo_groups()
            set_handle_flash_user_groups()
            set_handle_flash_pull_requests()
            set_handle_flash_artifacts()
            username = h.escape(old_values['username'])
            h.flash(_('Successfully deleted user `{}`').format(username), category='success')
        except (UserOwnsReposException, UserOwnsRepoGroupsException,
                UserOwnsUserGroupsException, UserOwnsPullRequestsException,
                UserOwnsArtifactsException, DefaultUserException) as e:
            # user still owns objects that were not handled — warn, don't fail
            h.flash(safe_str(e), category='warning')
        except Exception:
            log.exception("Exception during deletion of user")
            h.flash(_('An error occurred during deletion of user'),
                    category='error')
        raise HTTPFound(h.route_path('users'))
478 478
    @LoginRequired()
    @HasPermissionAllDecorator('hg.admin')
    def user_edit(self):
        """Render the user profile edit form, pre-filled via htmlfill."""
        _ = self.request.translate
        c = self.load_default_context()
        c.user = self.db_user

        c.active = 'profile'
        c.extern_type = c.user.extern_type
        c.extern_name = c.user.extern_name
        c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
        c.edit_mode = self.request.GET.get('edit') == '1'

        defaults = c.user.get_dict()
        defaults.update({'language': c.user.user_data.get('language')})

        data = render(
            'rhodecode:templates/admin/users/user_edit.mako',
            self._get_template_context(c), self.request)
        html = formencode.htmlfill.render(
            data,
            defaults=defaults,
            encoding="UTF-8",
            force_defaults=False
        )
        return Response(html)
505 505
    @LoginRequired()
    @HasPermissionAllDecorator('hg.admin')
    def user_edit_advanced(self):
        """
        Render the 'advanced' user-edit tab: personal repo group info,
        review-rule memberships, and whether the user can be deleted
        (blocked while they are a reviewer on open pull requests).
        """
        _ = self.request.translate
        c = self.load_default_context()

        user_id = self.db_user_id
        c.user = self.db_user

        c.detach_user = User.get_first_super_admin()
        detach_user_id = safe_int(self.request.GET.get('detach_user_id'))
        if detach_user_id:
            c.detach_user = User.get_or_404(detach_user_id)

        c.active = 'advanced'
        c.personal_repo_group = RepoGroup.get_user_personal_repo_group(user_id)
        c.personal_repo_group_name = RepoGroupModel()\
            .get_personal_group_name(c.user)

        c.user_to_review_rules = sorted(
            (x.user for x in c.user.user_review_rules),
            key=lambda u: u.username.lower())

        defaults = c.user.get_dict()

        # Interim workaround if the user participated on any pull requests as a
        # reviewer.
        has_review = len(c.user.reviewer_pull_requests)
        c.can_delete_user = not has_review
        c.can_delete_user_message = ''
        inactive_link = h.link_to(
            'inactive', h.route_path('user_edit', user_id=user_id, _anchor='active'))
        if has_review == 1:
            # singular wording for exactly one pull request
            c.can_delete_user_message = h.literal(_(
                'The user participates as reviewer in {} pull request and '
                'cannot be deleted. \nYou can set the user to '
                '"{}" instead of deleting it.').format(
                has_review, inactive_link))
        elif has_review:
            c.can_delete_user_message = h.literal(_(
                'The user participates as reviewer in {} pull requests and '
                'cannot be deleted. \nYou can set the user to '
                '"{}" instead of deleting it.').format(
                has_review, inactive_link))

        data = render(
            'rhodecode:templates/admin/users/user_edit.mako',
            self._get_template_context(c), self.request)
        html = formencode.htmlfill.render(
            data,
            defaults=defaults,
            encoding="UTF-8",
            force_defaults=False
        )
        return Response(html)
561 561
    @LoginRequired()
    @HasPermissionAllDecorator('hg.admin')
    def user_edit_global_perms(self):
        """
        Render the global-permissions tab; defaults are the default user's
        permissions layered under the user's own overrides.
        """
        _ = self.request.translate
        c = self.load_default_context()
        c.user = self.db_user

        c.active = 'global_perms'

        c.default_user = User.get_default_user()
        defaults = c.user.get_dict()
        defaults.update(c.default_user.get_default_perms(suffix='_inherited'))
        defaults.update(c.default_user.get_default_perms())
        # user-specific perms last so they win over inherited defaults
        defaults.update(c.user.get_default_perms())

        data = render(
            'rhodecode:templates/admin/users/user_edit.mako',
            self._get_template_context(c), self.request)
        html = formencode.htmlfill.render(
            data,
            defaults=defaults,
            encoding="UTF-8",
            force_defaults=False
        )
        return Response(html)
587 587
    @LoginRequired()
    @HasPermissionAllDecorator('hg.admin')
    @CSRFRequired()
    def user_edit_global_perms_update(self):
        """
        POST handler saving the global-permissions tab. First persists the
        inherit-defaults checkbox; only when inheritance is switched off are
        the individual permission selections validated and stored. Always
        flushes the user's permission caches before redirecting back.
        """
        _ = self.request.translate
        c = self.load_default_context()

        user_id = self.db_user_id
        c.user = self.db_user

        c.active = 'global_perms'
        try:
            # first stage that verifies the checkbox
            _form = UserIndividualPermissionsForm(self.request.translate)
            form_result = _form.to_python(dict(self.request.POST))
            inherit_perms = form_result['inherit_default_permissions']
            c.user.inherit_default_permissions = inherit_perms
            Session().add(c.user)

            if not inherit_perms:
                # only update the individual ones if we un check the flag
                _form = UserPermissionsForm(
                    self.request.translate,
                    [x[0] for x in c.repo_create_choices],
                    [x[0] for x in c.repo_create_on_write_choices],
                    [x[0] for x in c.repo_group_create_choices],
                    [x[0] for x in c.user_group_create_choices],
                    [x[0] for x in c.fork_choices],
                    [x[0] for x in c.inherit_default_permission_choices])()

                form_result = _form.to_python(dict(self.request.POST))
                form_result.update({'perm_user_id': c.user.user_id})

                PermissionModel().update_user_permissions(form_result)

                # TODO(marcink): implement global permissions
                # audit_log.store_web('user.edit.permissions')

                Session().commit()

                h.flash(_('User global permissions updated successfully'),
                        category='success')

        except formencode.Invalid as errors:
            data = render(
                'rhodecode:templates/admin/users/user_edit.mako',
                self._get_template_context(c), self.request)
            html = formencode.htmlfill.render(
                data,
                defaults=errors.value,
                errors=errors.unpack_errors() or {},
                prefix_error=False,
                encoding="UTF-8",
                force_defaults=False
            )
            return Response(html)
        except Exception:
            log.exception("Exception during permissions saving")
            h.flash(_('An error occurred during permissions saving'),
                    category='error')

        # flush caches even on error so stale permissions never linger
        affected_user_ids = [user_id]
        PermissionModel().trigger_permission_flush(affected_user_ids)
        raise HTTPFound(h.route_path('user_edit_global_perms', user_id=user_id))
652 652
653 653 @LoginRequired()
654 654 @HasPermissionAllDecorator('hg.admin')
655 655 @CSRFRequired()
656 656 def user_enable_force_password_reset(self):
657 657 _ = self.request.translate
658 658 c = self.load_default_context()
659 659
660 660 user_id = self.db_user_id
661 661 c.user = self.db_user
662 662
663 663 try:
664 664 c.user.update_userdata(force_password_change=True)
665 665
666 666 msg = _('Force password change enabled for user')
667 667 audit_logger.store_web('user.edit.password_reset.enabled',
668 668 user=c.rhodecode_user)
669 669
670 670 Session().commit()
671 671 h.flash(msg, category='success')
672 672 except Exception:
673 673 log.exception("Exception during password reset for user")
674 674 h.flash(_('An error occurred during password reset for user'),
675 675 category='error')
676 676
677 677 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
678 678
679 679 @LoginRequired()
680 680 @HasPermissionAllDecorator('hg.admin')
681 681 @CSRFRequired()
682 682 def user_disable_force_password_reset(self):
683 683 _ = self.request.translate
684 684 c = self.load_default_context()
685 685
686 686 user_id = self.db_user_id
687 687 c.user = self.db_user
688 688
689 689 try:
690 690 c.user.update_userdata(force_password_change=False)
691 691
692 692 msg = _('Force password change disabled for user')
693 693 audit_logger.store_web(
694 694 'user.edit.password_reset.disabled',
695 695 user=c.rhodecode_user)
696 696
697 697 Session().commit()
698 698 h.flash(msg, category='success')
699 699 except Exception:
700 700 log.exception("Exception during password reset for user")
701 701 h.flash(_('An error occurred during password reset for user'),
702 702 category='error')
703 703
704 704 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
705 705
706 706 @LoginRequired()
707 707 @HasPermissionAllDecorator('hg.admin')
708 708 @CSRFRequired()
709 709 def user_notice_dismiss(self):
710 710 _ = self.request.translate
711 711 c = self.load_default_context()
712 712
713 713 user_id = self.db_user_id
714 714 c.user = self.db_user
715 715 user_notice_id = safe_int(self.request.POST.get('notice_id'))
716 716 notice = UserNotice().query()\
717 717 .filter(UserNotice.user_id == user_id)\
718 718 .filter(UserNotice.user_notice_id == user_notice_id)\
719 719 .scalar()
720 720 read = False
721 721 if notice:
722 722 notice.notice_read = True
723 723 Session().add(notice)
724 724 Session().commit()
725 725 read = True
726 726
727 727 return {'notice': user_notice_id, 'read': read}
728 728
729 729 @LoginRequired()
730 730 @HasPermissionAllDecorator('hg.admin')
731 731 @CSRFRequired()
732 732 def user_create_personal_repo_group(self):
733 733 """
734 734 Create personal repository group for this user
735 735 """
736 736 from rhodecode.model.repo_group import RepoGroupModel
737 737
738 738 _ = self.request.translate
739 739 c = self.load_default_context()
740 740
741 741 user_id = self.db_user_id
742 742 c.user = self.db_user
743 743
744 744 personal_repo_group = RepoGroup.get_user_personal_repo_group(
745 745 c.user.user_id)
746 746 if personal_repo_group:
747 747 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
748 748
749 749 personal_repo_group_name = RepoGroupModel().get_personal_group_name(c.user)
750 750 named_personal_group = RepoGroup.get_by_group_name(
751 751 personal_repo_group_name)
752 752 try:
753 753
754 754 if named_personal_group and named_personal_group.user_id == c.user.user_id:
755 755 # migrate the same named group, and mark it as personal
756 756 named_personal_group.personal = True
757 757 Session().add(named_personal_group)
758 758 Session().commit()
759 759 msg = _('Linked repository group `{}` as personal'.format(
760 760 personal_repo_group_name))
761 761 h.flash(msg, category='success')
762 762 elif not named_personal_group:
763 763 RepoGroupModel().create_personal_repo_group(c.user)
764 764
765 765 msg = _('Created repository group `{}`'.format(
766 766 personal_repo_group_name))
767 767 h.flash(msg, category='success')
768 768 else:
769 769 msg = _('Repository group `{}` is already taken'.format(
770 770 personal_repo_group_name))
771 771 h.flash(msg, category='warning')
772 772 except Exception:
773 773 log.exception("Exception during repository group creation")
774 774 msg = _(
775 775 'An error occurred during repository group creation for user')
776 776 h.flash(msg, category='error')
777 777 Session().rollback()
778 778
779 779 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
780 780
781 781 @LoginRequired()
782 782 @HasPermissionAllDecorator('hg.admin')
783 783 def auth_tokens(self):
784 784 _ = self.request.translate
785 785 c = self.load_default_context()
786 786 c.user = self.db_user
787 787
788 788 c.active = 'auth_tokens'
789 789
790 790 c.lifetime_values = AuthTokenModel.get_lifetime_values(translator=_)
791 791 c.role_values = [
792 792 (x, AuthTokenModel.cls._get_role_name(x))
793 793 for x in AuthTokenModel.cls.ROLES]
794 794 c.role_options = [(c.role_values, _("Role"))]
795 795 c.user_auth_tokens = AuthTokenModel().get_auth_tokens(
796 796 c.user.user_id, show_expired=True)
797 797 c.role_vcs = AuthTokenModel.cls.ROLE_VCS
798 798 return self._get_template_context(c)
799 799
800 800 @LoginRequired()
801 801 @HasPermissionAllDecorator('hg.admin')
802 802 def auth_tokens_view(self):
803 803 _ = self.request.translate
804 804 c = self.load_default_context()
805 805 c.user = self.db_user
806 806
807 807 auth_token_id = self.request.POST.get('auth_token_id')
808 808
809 809 if auth_token_id:
810 810 token = UserApiKeys.get_or_404(auth_token_id)
811 811
812 812 return {
813 813 'auth_token': token.api_key
814 814 }
815 815
816 816 def maybe_attach_token_scope(self, token):
817 817 # implemented in EE edition
818 818 pass
819 819
820 820 @LoginRequired()
821 821 @HasPermissionAllDecorator('hg.admin')
822 822 @CSRFRequired()
823 823 def auth_tokens_add(self):
824 824 _ = self.request.translate
825 825 c = self.load_default_context()
826 826
827 827 user_id = self.db_user_id
828 828 c.user = self.db_user
829 829
830 830 user_data = c.user.get_api_data()
831 831 lifetime = safe_int(self.request.POST.get('lifetime'), -1)
832 832 description = self.request.POST.get('description')
833 833 role = self.request.POST.get('role')
834 834
835 835 token = UserModel().add_auth_token(
836 836 user=c.user.user_id,
837 837 lifetime_minutes=lifetime, role=role, description=description,
838 838 scope_callback=self.maybe_attach_token_scope)
839 839 token_data = token.get_api_data()
840 840
841 841 audit_logger.store_web(
842 842 'user.edit.token.add', action_data={
843 843 'data': {'token': token_data, 'user': user_data}},
844 844 user=self._rhodecode_user, )
845 845 Session().commit()
846 846
847 847 h.flash(_("Auth token successfully created"), category='success')
848 848 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
849 849
850 850 @LoginRequired()
851 851 @HasPermissionAllDecorator('hg.admin')
852 852 @CSRFRequired()
853 853 def auth_tokens_delete(self):
854 854 _ = self.request.translate
855 855 c = self.load_default_context()
856 856
857 857 user_id = self.db_user_id
858 858 c.user = self.db_user
859 859
860 860 user_data = c.user.get_api_data()
861 861
862 862 del_auth_token = self.request.POST.get('del_auth_token')
863 863
864 864 if del_auth_token:
865 865 token = UserApiKeys.get_or_404(del_auth_token)
866 866 token_data = token.get_api_data()
867 867
868 868 AuthTokenModel().delete(del_auth_token, c.user.user_id)
869 869 audit_logger.store_web(
870 870 'user.edit.token.delete', action_data={
871 871 'data': {'token': token_data, 'user': user_data}},
872 872 user=self._rhodecode_user,)
873 873 Session().commit()
874 874 h.flash(_("Auth token successfully deleted"), category='success')
875 875
876 876 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
877 877
878 878 @LoginRequired()
879 879 @HasPermissionAllDecorator('hg.admin')
880 880 def ssh_keys(self):
881 881 _ = self.request.translate
882 882 c = self.load_default_context()
883 883 c.user = self.db_user
884 884
885 885 c.active = 'ssh_keys'
886 886 c.default_key = self.request.GET.get('default_key')
887 887 c.user_ssh_keys = SshKeyModel().get_ssh_keys(c.user.user_id)
888 888 return self._get_template_context(c)
889 889
890 890 @LoginRequired()
891 891 @HasPermissionAllDecorator('hg.admin')
892 892 def ssh_keys_generate_keypair(self):
893 893 _ = self.request.translate
894 894 c = self.load_default_context()
895 895
896 896 c.user = self.db_user
897 897
898 898 c.active = 'ssh_keys_generate'
899 899 comment = 'RhodeCode-SSH {}'.format(c.user.email or '')
900 900 private_format = self.request.GET.get('private_format') \
901 901 or SshKeyModel.DEFAULT_PRIVATE_KEY_FORMAT
902 902 c.private, c.public = SshKeyModel().generate_keypair(
903 903 comment=comment, private_format=private_format)
904 904
905 905 return self._get_template_context(c)
906 906
907 907 @LoginRequired()
908 908 @HasPermissionAllDecorator('hg.admin')
909 909 @CSRFRequired()
910 910 def ssh_keys_add(self):
911 911 _ = self.request.translate
912 912 c = self.load_default_context()
913 913
914 914 user_id = self.db_user_id
915 915 c.user = self.db_user
916 916
917 917 user_data = c.user.get_api_data()
918 918 key_data = self.request.POST.get('key_data')
919 919 description = self.request.POST.get('description')
920 920
921 921 fingerprint = 'unknown'
922 922 try:
923 923 if not key_data:
924 924 raise ValueError('Please add a valid public key')
925 925
926 926 key = SshKeyModel().parse_key(key_data.strip())
927 927 fingerprint = key.hash_md5()
928 928
929 929 ssh_key = SshKeyModel().create(
930 930 c.user.user_id, fingerprint, key.keydata, description)
931 931 ssh_key_data = ssh_key.get_api_data()
932 932
933 933 audit_logger.store_web(
934 934 'user.edit.ssh_key.add', action_data={
935 935 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
936 936 user=self._rhodecode_user, )
937 937 Session().commit()
938 938
939 939 # Trigger an event on change of keys.
940 940 trigger(SshKeyFileChangeEvent(), self.request.registry)
941 941
942 942 h.flash(_("Ssh Key successfully created"), category='success')
943 943
944 944 except IntegrityError:
945 945 log.exception("Exception during ssh key saving")
946 946 err = 'Such key with fingerprint `{}` already exists, ' \
947 947 'please use a different one'.format(fingerprint)
948 948 h.flash(_('An error occurred during ssh key saving: {}').format(err),
949 949 category='error')
950 950 except Exception as e:
951 951 log.exception("Exception during ssh key saving")
952 952 h.flash(_('An error occurred during ssh key saving: {}').format(e),
953 953 category='error')
954 954
955 955 return HTTPFound(
956 956 h.route_path('edit_user_ssh_keys', user_id=user_id))
957 957
958 958 @LoginRequired()
959 959 @HasPermissionAllDecorator('hg.admin')
960 960 @CSRFRequired()
961 961 def ssh_keys_delete(self):
962 962 _ = self.request.translate
963 963 c = self.load_default_context()
964 964
965 965 user_id = self.db_user_id
966 966 c.user = self.db_user
967 967
968 968 user_data = c.user.get_api_data()
969 969
970 970 del_ssh_key = self.request.POST.get('del_ssh_key')
971 971
972 972 if del_ssh_key:
973 973 ssh_key = UserSshKeys.get_or_404(del_ssh_key)
974 974 ssh_key_data = ssh_key.get_api_data()
975 975
976 976 SshKeyModel().delete(del_ssh_key, c.user.user_id)
977 977 audit_logger.store_web(
978 978 'user.edit.ssh_key.delete', action_data={
979 979 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
980 980 user=self._rhodecode_user,)
981 981 Session().commit()
982 982 # Trigger an event on change of keys.
983 983 trigger(SshKeyFileChangeEvent(), self.request.registry)
984 984 h.flash(_("Ssh key successfully deleted"), category='success')
985 985
986 986 return HTTPFound(h.route_path('edit_user_ssh_keys', user_id=user_id))
987 987
988 988 @LoginRequired()
989 989 @HasPermissionAllDecorator('hg.admin')
990 990 def emails(self):
991 991 _ = self.request.translate
992 992 c = self.load_default_context()
993 993 c.user = self.db_user
994 994
995 995 c.active = 'emails'
996 996 c.user_email_map = UserEmailMap.query() \
997 997 .filter(UserEmailMap.user == c.user).all()
998 998
999 999 return self._get_template_context(c)
1000 1000
1001 1001 @LoginRequired()
1002 1002 @HasPermissionAllDecorator('hg.admin')
1003 1003 @CSRFRequired()
1004 1004 def emails_add(self):
1005 1005 _ = self.request.translate
1006 1006 c = self.load_default_context()
1007 1007
1008 1008 user_id = self.db_user_id
1009 1009 c.user = self.db_user
1010 1010
1011 1011 email = self.request.POST.get('new_email')
1012 1012 user_data = c.user.get_api_data()
1013 1013 try:
1014 1014
1015 1015 form = UserExtraEmailForm(self.request.translate)()
1016 1016 data = form.to_python({'email': email})
1017 1017 email = data['email']
1018 1018
1019 1019 UserModel().add_extra_email(c.user.user_id, email)
1020 1020 audit_logger.store_web(
1021 1021 'user.edit.email.add',
1022 1022 action_data={'email': email, 'user': user_data},
1023 1023 user=self._rhodecode_user)
1024 1024 Session().commit()
1025 1025 h.flash(_("Added new email address `%s` for user account") % email,
1026 1026 category='success')
1027 1027 except formencode.Invalid as error:
1028 1028 msg = error.unpack_errors()['email']
1029 1029 h.flash(h.escape(msg), category='error')
1030 1030 except IntegrityError:
1031 1031 log.warning("Email %s already exists", email)
1032 1032 h.flash(_('Email `{}` is already registered for another user.').format(email),
1033 1033 category='error')
1034 1034 except Exception:
1035 1035 log.exception("Exception during email saving")
1036 1036 h.flash(_('An error occurred during email saving'),
1037 1037 category='error')
1038 1038 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
1039 1039
1040 1040 @LoginRequired()
1041 1041 @HasPermissionAllDecorator('hg.admin')
1042 1042 @CSRFRequired()
1043 1043 def emails_delete(self):
1044 1044 _ = self.request.translate
1045 1045 c = self.load_default_context()
1046 1046
1047 1047 user_id = self.db_user_id
1048 1048 c.user = self.db_user
1049 1049
1050 1050 email_id = self.request.POST.get('del_email_id')
1051 1051 user_model = UserModel()
1052 1052
1053 1053 email = UserEmailMap.query().get(email_id).email
1054 1054 user_data = c.user.get_api_data()
1055 1055 user_model.delete_extra_email(c.user.user_id, email_id)
1056 1056 audit_logger.store_web(
1057 1057 'user.edit.email.delete',
1058 1058 action_data={'email': email, 'user': user_data},
1059 1059 user=self._rhodecode_user)
1060 1060 Session().commit()
1061 1061 h.flash(_("Removed email address from user account"),
1062 1062 category='success')
1063 1063 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
1064 1064
1065 1065 @LoginRequired()
1066 1066 @HasPermissionAllDecorator('hg.admin')
1067 1067 def ips(self):
1068 1068 _ = self.request.translate
1069 1069 c = self.load_default_context()
1070 1070 c.user = self.db_user
1071 1071
1072 1072 c.active = 'ips'
1073 1073 c.user_ip_map = UserIpMap.query() \
1074 1074 .filter(UserIpMap.user == c.user).all()
1075 1075
1076 1076 c.inherit_default_ips = c.user.inherit_default_permissions
1077 1077 c.default_user_ip_map = UserIpMap.query() \
1078 1078 .filter(UserIpMap.user == User.get_default_user()).all()
1079 1079
1080 1080 return self._get_template_context(c)
1081 1081
1082 1082 @LoginRequired()
1083 1083 @HasPermissionAllDecorator('hg.admin')
1084 1084 @CSRFRequired()
1085 1085 # NOTE(marcink): this view is allowed for default users, as we can
1086 1086 # edit their IP white list
1087 1087 def ips_add(self):
1088 1088 _ = self.request.translate
1089 1089 c = self.load_default_context()
1090 1090
1091 1091 user_id = self.db_user_id
1092 1092 c.user = self.db_user
1093 1093
1094 1094 user_model = UserModel()
1095 1095 desc = self.request.POST.get('description')
1096 1096 try:
1097 1097 ip_list = user_model.parse_ip_range(
1098 1098 self.request.POST.get('new_ip'))
1099 1099 except Exception as e:
1100 1100 ip_list = []
1101 1101 log.exception("Exception during ip saving")
1102 1102 h.flash(_('An error occurred during ip saving:%s' % (e,)),
1103 1103 category='error')
1104 1104 added = []
1105 1105 user_data = c.user.get_api_data()
1106 1106 for ip in ip_list:
1107 1107 try:
1108 1108 form = UserExtraIpForm(self.request.translate)()
1109 1109 data = form.to_python({'ip': ip})
1110 1110 ip = data['ip']
1111 1111
1112 1112 user_model.add_extra_ip(c.user.user_id, ip, desc)
1113 1113 audit_logger.store_web(
1114 1114 'user.edit.ip.add',
1115 1115 action_data={'ip': ip, 'user': user_data},
1116 1116 user=self._rhodecode_user)
1117 1117 Session().commit()
1118 1118 added.append(ip)
1119 1119 except formencode.Invalid as error:
1120 1120 msg = error.unpack_errors()['ip']
1121 1121 h.flash(msg, category='error')
1122 1122 except Exception:
1123 1123 log.exception("Exception during ip saving")
1124 1124 h.flash(_('An error occurred during ip saving'),
1125 1125 category='error')
1126 1126 if added:
1127 1127 h.flash(
1128 1128 _("Added ips %s to user whitelist") % (', '.join(ip_list), ),
1129 1129 category='success')
1130 1130 if 'default_user' in self.request.POST:
1131 1131 # case for editing global IP list we do it for 'DEFAULT' user
1132 1132 raise HTTPFound(h.route_path('admin_permissions_ips'))
1133 1133 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1134 1134
1135 1135 @LoginRequired()
1136 1136 @HasPermissionAllDecorator('hg.admin')
1137 1137 @CSRFRequired()
1138 1138 # NOTE(marcink): this view is allowed for default users, as we can
1139 1139 # edit their IP white list
1140 1140 def ips_delete(self):
1141 1141 _ = self.request.translate
1142 1142 c = self.load_default_context()
1143 1143
1144 1144 user_id = self.db_user_id
1145 1145 c.user = self.db_user
1146 1146
1147 1147 ip_id = self.request.POST.get('del_ip_id')
1148 1148 user_model = UserModel()
1149 1149 user_data = c.user.get_api_data()
1150 1150 ip = UserIpMap.query().get(ip_id).ip_addr
1151 1151 user_model.delete_extra_ip(c.user.user_id, ip_id)
1152 1152 audit_logger.store_web(
1153 1153 'user.edit.ip.delete', action_data={'ip': ip, 'user': user_data},
1154 1154 user=self._rhodecode_user)
1155 1155 Session().commit()
1156 1156 h.flash(_("Removed ip address from user whitelist"), category='success')
1157 1157
1158 1158 if 'default_user' in self.request.POST:
1159 1159 # case for editing global IP list we do it for 'DEFAULT' user
1160 1160 raise HTTPFound(h.route_path('admin_permissions_ips'))
1161 1161 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1162 1162
1163 1163 @LoginRequired()
1164 1164 @HasPermissionAllDecorator('hg.admin')
1165 1165 def groups_management(self):
1166 1166 c = self.load_default_context()
1167 1167 c.user = self.db_user
1168 1168 c.data = c.user.group_member
1169 1169
1170 1170 groups = [UserGroupModel.get_user_groups_as_dict(group.users_group)
1171 1171 for group in c.user.group_member]
1172 1172 c.groups = ext_json.str_json(groups)
1173 1173 c.active = 'groups'
1174 1174
1175 1175 return self._get_template_context(c)
1176 1176
1177 1177 @LoginRequired()
1178 1178 @HasPermissionAllDecorator('hg.admin')
1179 1179 @CSRFRequired()
1180 1180 def groups_management_updates(self):
1181 1181 _ = self.request.translate
1182 1182 c = self.load_default_context()
1183 1183
1184 1184 user_id = self.db_user_id
1185 1185 c.user = self.db_user
1186 1186
1187 1187 user_groups = set(self.request.POST.getall('users_group_id'))
1188 1188 user_groups_objects = []
1189 1189
1190 1190 for ugid in user_groups:
1191 1191 user_groups_objects.append(
1192 1192 UserGroupModel().get_group(safe_int(ugid)))
1193 1193 user_group_model = UserGroupModel()
1194 1194 added_to_groups, removed_from_groups = \
1195 1195 user_group_model.change_groups(c.user, user_groups_objects)
1196 1196
1197 1197 user_data = c.user.get_api_data()
1198 1198 for user_group_id in added_to_groups:
1199 1199 user_group = UserGroup.get(user_group_id)
1200 1200 old_values = user_group.get_api_data()
1201 1201 audit_logger.store_web(
1202 1202 'user_group.edit.member.add',
1203 1203 action_data={'user': user_data, 'old_data': old_values},
1204 1204 user=self._rhodecode_user)
1205 1205
1206 1206 for user_group_id in removed_from_groups:
1207 1207 user_group = UserGroup.get(user_group_id)
1208 1208 old_values = user_group.get_api_data()
1209 1209 audit_logger.store_web(
1210 1210 'user_group.edit.member.delete',
1211 1211 action_data={'user': user_data, 'old_data': old_values},
1212 1212 user=self._rhodecode_user)
1213 1213
1214 1214 Session().commit()
1215 1215 c.active = 'user_groups_management'
1216 1216 h.flash(_("Groups successfully changed"), category='success')
1217 1217
1218 1218 return HTTPFound(h.route_path(
1219 1219 'edit_user_groups_management', user_id=user_id))
1220 1220
1221 1221 @LoginRequired()
1222 1222 @HasPermissionAllDecorator('hg.admin')
1223 1223 def user_audit_logs(self):
1224 1224 _ = self.request.translate
1225 1225 c = self.load_default_context()
1226 1226 c.user = self.db_user
1227 1227
1228 1228 c.active = 'audit'
1229 1229
1230 1230 p = safe_int(self.request.GET.get('page', 1), 1)
1231 1231
1232 1232 filter_term = self.request.GET.get('filter')
1233 1233 user_log = UserModel().get_user_log(c.user, filter_term)
1234 1234
1235 1235 def url_generator(page_num):
1236 1236 query_params = {
1237 1237 'page': page_num
1238 1238 }
1239 1239 if filter_term:
1240 1240 query_params['filter'] = filter_term
1241 1241 return self.request.current_route_path(_query=query_params)
1242 1242
1243 1243 c.audit_logs = SqlPage(
1244 1244 user_log, page=p, items_per_page=10, url_maker=url_generator)
1245 1245 c.filter_term = filter_term
1246 1246 return self._get_template_context(c)
1247 1247
1248 1248 @LoginRequired()
1249 1249 @HasPermissionAllDecorator('hg.admin')
1250 1250 def user_audit_logs_download(self):
1251 1251 _ = self.request.translate
1252 1252 c = self.load_default_context()
1253 1253 c.user = self.db_user
1254 1254
1255 1255 user_log = UserModel().get_user_log(c.user, filter_term=None)
1256 1256
1257 1257 audit_log_data = {}
1258 1258 for entry in user_log:
1259 1259 audit_log_data[entry.user_log_id] = entry.get_dict()
1260 1260
1261 1261 response = Response(ext_json.formatted_str_json(audit_log_data))
1262 1262 response.content_disposition = f'attachment; filename=user_{c.user.user_id}_audit_logs.json'
1263 1263 response.content_type = 'application/json'
1264 1264
1265 1265 return response
1266 1266
1267 1267 @LoginRequired()
1268 1268 @HasPermissionAllDecorator('hg.admin')
1269 1269 def user_perms_summary(self):
1270 1270 _ = self.request.translate
1271 1271 c = self.load_default_context()
1272 1272 c.user = self.db_user
1273 1273
1274 1274 c.active = 'perms_summary'
1275 1275 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1276 1276
1277 1277 return self._get_template_context(c)
1278 1278
1279 1279 @LoginRequired()
1280 1280 @HasPermissionAllDecorator('hg.admin')
1281 1281 def user_perms_summary_json(self):
1282 1282 self.load_default_context()
1283 1283 perm_user = self.db_user.AuthUser(ip_addr=self.request.remote_addr)
1284 1284
1285 1285 return perm_user.permissions
1286 1286
1287 1287 @LoginRequired()
1288 1288 @HasPermissionAllDecorator('hg.admin')
1289 1289 def user_caches(self):
1290 1290 _ = self.request.translate
1291 1291 c = self.load_default_context()
1292 1292 c.user = self.db_user
1293 1293
1294 1294 c.active = 'caches'
1295 1295 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1296 1296
1297 1297 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{self.db_user.user_id}'
1298 1298 c.region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1299 1299 c.backend = c.region.backend
1300 1300 c.user_keys = sorted(c.region.backend.list_keys(prefix=cache_namespace_uid))
1301 1301
1302 1302 return self._get_template_context(c)
1303 1303
1304 1304 @LoginRequired()
1305 1305 @HasPermissionAllDecorator('hg.admin')
1306 1306 @CSRFRequired()
1307 1307 def user_caches_update(self):
1308 1308 _ = self.request.translate
1309 1309 c = self.load_default_context()
1310 1310 c.user = self.db_user
1311 1311
1312 1312 c.active = 'caches'
1313 1313 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1314 1314
1315 1315 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{self.db_user.user_id}'
1316 del_keys = rc_cache.clear_cache_namespace('cache_perms', cache_namespace_uid)
1316 del_keys = rc_cache.clear_cache_namespace('cache_perms', cache_namespace_uid, method=rc_cache.CLEAR_DELETE)
1317 1317
1318 1318 h.flash(_("Deleted {} cache keys").format(del_keys), category='success')
1319 1319
1320 1320 return HTTPFound(h.route_path(
1321 1321 'edit_user_caches', user_id=c.user.user_id))
@@ -1,299 +1,335 b''
1 1 # Copyright (C) 2015-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import errno
20 20 import fcntl
21 21 import functools
22 22 import logging
23 23 import os
24 24 import pickle
25 25 import time
26 26
27 27 import gevent
28 28 import msgpack
29 29 import redis
30 30
31 31 flock_org = fcntl.flock
32 32 from typing import Union
33 33
34 34 from dogpile.cache.api import Deserializer, Serializer
35 35 from dogpile.cache.backends import file as file_backend
36 36 from dogpile.cache.backends import memory as memory_backend
37 37 from dogpile.cache.backends import redis as redis_backend
38 38 from dogpile.cache.backends.file import FileLock
39 39 from dogpile.cache.util import memoized_property
40 40
41 41 from rhodecode.lib.memory_lru_dict import LRUDict, LRUDictDebug
42 42 from rhodecode.lib.str_utils import safe_bytes, safe_str
43 43 from rhodecode.lib.type_utils import str2bool
44 44
45 45 _default_max_size = 1024
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
class LRUMemoryBackend(memory_backend.MemoryBackend):
    """
    In-process dogpile memory backend bounded by an LRU dict.

    Values are stored un-pickled (`pickle_values = False`); eviction happens
    once `max_size` entries are exceeded.
    """
    key_prefix = 'lru_mem_backend'
    pickle_values = False

    def __init__(self, arguments):
        # maximum number of cached entries before LRU eviction kicks in
        self.max_size = arguments.pop('max_size', _default_max_size)

        LRUDictClass = LRUDict
        if arguments.pop('log_key_count', None):
            # debug variant logs key counts
            LRUDictClass = LRUDictDebug

        arguments['cache_dict'] = LRUDictClass(self.max_size)
        super().__init__(arguments)

    def __repr__(self):
        return f'{self.__class__}(maxsize=`{self.max_size}`)'

    def __str__(self):
        return self.__repr__()

    def list_keys(self, prefix=''):
        """
        List cached keys, restricted to `prefix` namespace when given.

        BUGFIX: previously the `prefix` argument was ignored and ALL keys were
        returned, which made delete_multi_by_prefix() wipe unrelated
        namespaces sharing this backend. Keys are generated as
        `<key_prefix>:<namespace>:<func>_<hash>` (see custom_key_generator),
        so we match on `<key_prefix>:<prefix>`.
        """
        if prefix:
            pattern = f'{self.key_prefix}:{prefix}'
            return [key for key in self._cache.keys() if key.startswith(pattern)]
        return list(self._cache.keys())

    def delete(self, key):
        try:
            del self._cache[key]
        except KeyError:
            # we don't care if key isn't there at deletion
            pass

    def delete_multi(self, keys):
        for key in keys:
            self.delete(key)

    def delete_multi_by_prefix(self, prefix):
        """Delete all keys under `prefix` namespace; returns number deleted."""
        cache_keys = self.list_keys(prefix=prefix)
        num_affected_keys = len(cache_keys)
        if num_affected_keys:
            self.delete_multi(cache_keys)
        return num_affected_keys
90
81 91
class PickleSerializer:
    # dogpile (de)serializer pair using pickle with the highest protocol
    serializer: None | Serializer = staticmethod(  # type: ignore
        functools.partial(pickle.dumps, protocol=pickle.HIGHEST_PROTOCOL)
    )
    deserializer: None | Deserializer = staticmethod(  # type: ignore
        functools.partial(pickle.loads)
    )
90 100
class MsgPackSerializer(object):
    # dogpile (de)serializer pair using msgpack; use_list=False keeps
    # sequences as tuples on round-trip instead of converting to lists
    serializer: None | Serializer = staticmethod(  # type: ignore
        msgpack.packb
    )
    deserializer: None | Deserializer = staticmethod(  # type: ignore
        functools.partial(msgpack.unpackb, use_list=False)
    )
98 108
99 109
class CustomLockFactory(FileLock):
    # FileLock whose flock is gevent-cooperative: non-blocking attempts with
    # gevent.sleep() between retries, so other greenlets keep running

    @memoized_property
    def _module(self):

        def gevent_flock(fd, operation):
            """
            Gevent compatible flock
            """
            # set non-blocking, this will cause an exception if we cannot acquire a lock
            operation |= fcntl.LOCK_NB
            start_lock_time = time.time()
            timeout = 60 * 15  # 15min
            while True:
                try:
                    flock_org(fd, operation)
                    # lock has been acquired
                    break
                except (OSError, IOError) as e:
                    # raise on other errors than Resource temporarily unavailable
                    if e.errno != errno.EAGAIN:
                        raise
                    elif (time.time() - start_lock_time) > timeout:
                        # waited too much time on a lock, better fail than loop forever
                        log.error('Failed to acquire lock on `%s` after waiting %ss',
                                  self.filename, timeout)
                        raise
                    wait_timeout = 0.03
                    log.debug('Failed to acquire lock on `%s`, retry in %ss',
                              self.filename, wait_timeout)
                    gevent.sleep(wait_timeout)

        # monkey-patch the module-level flock; memoized so it happens once
        fcntl.flock = gevent_flock
        return fcntl
134 144
135 145
class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
    # file (DBM) based dogpile backend; RhodeCode registers one db file
    # per region namespace (see get_or_create_region)
    key_prefix = 'file_backend'

    def __init__(self, arguments):
        # use the gevent-safe lock factory for the DBM file lock
        arguments['lock_factory'] = CustomLockFactory
        db_file = arguments.get('filename')

        log.debug('initialing cache-backend=%s db in %s', self.__class__.__name__, db_file)
        db_file_dir = os.path.dirname(db_file)
        if not os.path.isdir(db_file_dir):
            os.makedirs(db_file_dir)

        try:
            super().__init__(arguments)
        except Exception:
            log.exception('Failed to initialize db at: %s', db_file)
            raise

    def __repr__(self):
        return f'{self.__class__}(file=`{self.filename}`)'

    def __str__(self):
        return self.__repr__()

    def _get_keys_pattern(self, prefix: bytes = b''):
        # stored keys look like b'<key_prefix>:<namespace>...'
        return b'%b:%b' % (safe_bytes(self.key_prefix), safe_bytes(prefix))

    def list_keys(self, prefix: bytes = b''):
        """List DBM keys, optionally restricted to the given namespace prefix."""
        prefix = self._get_keys_pattern(prefix)

        def cond(dbm_key: bytes):
            if not prefix:
                return True

            if dbm_key.startswith(prefix):
                return True
            return False

        with self._dbm_file(True) as dbm:
            try:
                return list(filter(cond, dbm.keys()))
            except Exception:
                log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
                raise

    def delete_multi_by_prefix(self, prefix):
        """Delete all keys in `prefix` namespace; returns the number deleted."""
        cache_keys = self.list_keys(prefix=prefix)
        num_affected_keys = len(cache_keys)
        if num_affected_keys:
            self.delete_multi(cache_keys)
        return num_affected_keys

    def get_store(self):
        # for the file backend the store identifier is the db filename
        return self.filename
183 200
184 201
class BaseRedisBackend(redis_backend.RedisBackend):
    # common base for the redis dogpile backends; subclasses set key_prefix
    # and mix in a serializer
    key_prefix = ''

    def __init__(self, arguments):
        # kept only for __repr__ diagnostics
        self.db_conn = arguments.get('host', '') or arguments.get('url', '') or 'redis-host'
        super().__init__(arguments)

        self._lock_timeout = self.lock_timeout
        self._lock_auto_renewal = str2bool(arguments.pop("lock_auto_renewal", True))

        if self._lock_auto_renewal and not self._lock_timeout:
            # set default timeout for auto_renewal
            self._lock_timeout = 30

    def __repr__(self):
        return f'{self.__class__}(conn=`{self.db_conn}`)'

    def __str__(self):
        return self.__repr__()

    def _create_client(self):
        args = {}

        if self.url is not None:
            args.update(url=self.url)

        else:
            args.update(
                host=self.host, password=self.password,
                port=self.port, db=self.db
            )

        connection_pool = redis.ConnectionPool(**args)
        # reader and writer share one client/pool
        self.writer_client = redis.StrictRedis(
            connection_pool=connection_pool
        )
        self.reader_client = self.writer_client

    def _get_keys_pattern(self, prefix: bytes = b''):
        # glob pattern for redis KEYS: '<key_prefix>:<prefix>*'
        return b'%b:%b*' % (safe_bytes(self.key_prefix), safe_bytes(prefix))

    def list_keys(self, prefix: bytes = b''):
        prefix = self._get_keys_pattern(prefix)
        return self.reader_client.keys(prefix)

    def delete_multi_by_prefix(self, prefix, use_lua=False):
        """
        Delete all keys matching `prefix`; returns the number of deleted keys.

        With use_lua=True the deletion runs server-side in batches of 5000,
        avoiding a large KEYS round-trip.
        NOTE(review): the lua path matches `{prefix}*` WITHOUT the backend
        key_prefix, unlike list_keys — confirm this is intended.
        """
        if use_lua:
            # high efficient LUA script to delete ALL keys by prefix...
            lua = """local keys = redis.call('keys', ARGV[1])
            for i=1,#keys,5000 do
                redis.call('del', unpack(keys, i, math.min(i+(5000-1), #keys)))
            end
            return #keys"""
            num_affected_keys = self.writer_client.eval(
                lua,
                0,
                f"{prefix}*")
        else:
            cache_keys = self.list_keys(prefix=prefix)
            num_affected_keys = len(cache_keys)
            if num_affected_keys:
                self.delete_multi(cache_keys)
        return num_affected_keys

    def get_store(self):
        return self.reader_client.connection_pool

    def get_mutex(self, key):
        # distributed (cross-process) lock via redis, or no mutex at all
        if self.distributed_lock:
            lock_key = f'_lock_{safe_str(key)}'
            return get_mutex_lock(
                self.writer_client, lock_key,
                self._lock_timeout,
                auto_renewal=self._lock_auto_renewal
            )
        else:
            return None
243 279
244 280
class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
    """Redis backend storing values serialized with pickle."""
    # removed redundant `pass` — the class body already has a statement
    key_prefix = 'redis_pickle_backend'
248 284
249 285
class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
    """Redis backend storing values serialized with msgpack."""
    # removed redundant `pass` — the class body already has a statement
    key_prefix = 'redis_msgpack_backend'
253 289
254 290
def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
    """
    Build a dogpile-compatible mutex backed by the vendored redis_lock.

    :param client: redis client used to hold the lock
    :param lock_key: name of the lock key
    :param lock_timeout: lock expiry in seconds
    :param auto_renewal: keep extending the lock while held
    """
    from rhodecode.lib._vendor import redis_lock

    class _RedisLockWrapper(object):
        """LockWrapper for redis_lock"""

        @classmethod
        def get_lock(cls):
            return redis_lock.Lock(
                redis_client=client,
                name=lock_key,
                expire=lock_timeout,
                auto_renewal=auto_renewal,
                strict=True,
            )

        def __repr__(self):
            return f"{self.__class__.__name__}:{lock_key}"

        def __str__(self):
            return f"{self.__class__.__name__}:{lock_key}"

        def __init__(self):
            self.lock = self.get_lock()
            self.lock_key = lock_key

        def acquire(self, wait=True):
            """Return True when the lock is held by us, False otherwise."""
            log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
            try:
                acquired = self.lock.acquire(wait)
                log.debug('Got lock for key %s, %s', self.lock_key, acquired)
                return acquired
            except redis_lock.AlreadyAcquired:
                return False
            except redis_lock.AlreadyStarted:
                # refresh thread exists, but it also means we acquired the lock
                return True

        def release(self):
            try:
                self.lock.release()
            except redis_lock.NotAcquired:
                # releasing a lock we don't hold is a no-op
                pass

    return _RedisLockWrapper()
@@ -1,406 +1,404 b''
1 1 # Copyright (C) 2015-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import functools
20 20 import logging
21 21 import os
22 22 import threading
23 23 import time
24 24
25 25 import decorator
26 26 from dogpile.cache import CacheRegion
27 27
28 28 import rhodecode
29 29 from rhodecode.lib.hash_utils import sha1
30 30 from rhodecode.lib.str_utils import safe_bytes
31 31 from rhodecode.lib.type_utils import str2bool # noqa :required by imports from .utils
32 32
33 33 from . import region_meta, cache_key_meta
34 34
35 35 log = logging.getLogger(__name__)
36 36
37 37
def isCython(func):
    """
    Private helper that checks if a function is a cython function.
    """
    cython_type_name = 'cython_function_or_method'
    return type(func).__name__ == cython_type_name
43 43
44 44
class RhodeCodeCacheRegion(CacheRegion):
    # CacheRegion with a conditional decorator that can bypass dogpile entirely

    def __repr__(self):
        return f'{self.__class__}(name={self.name})'

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=str,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator, that will not touch any dogpile internals if
        condition isn't meet. This works a bit different from should_cache_fn
        And it's faster in cases we don't ever want to compute cached values
        """
        expiration_time_is_callable = callable(expiration_time)
        if not namespace:
            # fall back to the namespace stored on the region (set in
            # get_or_create_region)
            namespace = getattr(self, '_default_namespace', None)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):

            if not condition:
                # caching disabled: call through and only log the timing
                log.debug('Calling un-cached method:%s', user_func.__name__)
                start = time.time()
                result = user_func(*arg, **kw)
                total = time.time() - start
                log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
                return result

            key = func_key_generator(*arg, **kw)

            timeout = expiration_time() if expiration_time_is_callable \
                else expiration_time

            log.debug('Calling cached method:`%s`', user_func.__name__)
            return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))

        def cache_decorator(user_func):
            if to_str is str:
                # backwards compatible
                key_generator = function_key_generator(namespace, user_func)
            else:
                key_generator = function_key_generator(namespace, user_func, to_str=to_str)

            def refresh(*arg, **kw):
                """
                Like invalidate, but regenerates the value instead
                """
                key = key_generator(*arg, **kw)
                value = user_func(*arg, **kw)
                self.set(key, value)
                return value

            def invalidate(*arg, **kw):
                # drop the cached value for these arguments
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                # store an explicit value for these arguments
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                # fetch the cached value without computing it
                key = key_generator(*arg, **kw)
                return self.get(key)

            # expose cache-management helpers on the decorated function,
            # mirroring dogpile's cache_on_arguments API
            user_func.set = set_
            user_func.invalidate = invalidate
            user_func.get = get
            user_func.refresh = refresh
            user_func.key_generator = key_generator
            user_func.original = user_func

            # Use `decorate` to preserve the signature of :param:`user_func`.
            return decorator.decorate(user_func, functools.partial(
                get_or_create_for_user_func, key_generator))

        return cache_decorator
127 127
128 128
def make_region(*arg, **kw):
    # factory for our CacheRegion subclass; mirrors dogpile's make_region
    return RhodeCodeCacheRegion(*arg, **kw)
131 131
132 132
def get_default_cache_settings(settings, prefixes=None):
    """
    Extract settings whose keys start with any of the given prefixes,
    returning them keyed by the stripped remainder of the key.
    String values are stripped of surrounding whitespace.
    """
    prefixes = prefixes or []
    extracted = {}
    for full_key, value in settings.items():
        for candidate in prefixes:
            if not full_key.startswith(candidate):
                continue
            short_name = full_key.split(candidate)[1].strip()
            if isinstance(value, str):
                value = value.strip()
            extracted[short_name] = value
    return extracted
145 145
146 146
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    joined_params = "_".join(str(arg) for arg in args)
    return sha1(safe_bytes(joined_params))
152 152
153 153
def custom_key_generator(backend, namespace, fn):
    """
    Build a dogpile key generator producing keys of the form
    `<backend_prefix>:<namespace>:<func_name>_<args_hash>`.
    """
    fn_name = fn.__name__

    def generate_key(*args):
        prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
        ns = namespace or 'default_namespace'
        args_hash = compute_key_from_params(*args)
        return f"{prefix}:{ns}:{fn_name}_{args_hash}"

    return generate_key
166 166
167 167
def backend_key_generator(backend):
    """
    Special wrapper that also sends over the backend to the key generator
    """
    # partial application: the returned callable still takes (namespace, fn)
    return functools.partial(custom_key_generator, backend)
175 175
176 176
def get_or_create_region(region_name, region_namespace: str = None, use_async_runner=False):
    """
    Return a configured cache region by name.

    For the FileNamespaceBackend a dedicated region (one db file per
    namespace) is created and memoized; other backends return the
    region configured at startup.

    :raises OSError: when `region_name` was never configured
    """
    from .backends import FileNamespaceBackend
    from . import async_creation_runner

    region_obj = region_meta.dogpile_cache_regions.get(region_name)
    if not region_obj:
        reg_keys = list(region_meta.dogpile_cache_regions.keys())
        raise OSError(f'Region `{region_name}` not in configured: {reg_keys}.')

    region_uid_name = f'{region_name}:{region_namespace}'

    # Special case for ONLY the FileNamespaceBackend backend. We register one-file-per-region
    if isinstance(region_obj.actual_backend, FileNamespaceBackend):
        if not region_namespace:
            raise ValueError(f'{FileNamespaceBackend} used requires to specify region_namespace param')

        region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
        if region_exist:
            log.debug('Using already configured region: %s', region_namespace)
            return region_exist

        expiration_time = region_obj.expiration_time

        cache_dir = region_meta.dogpile_config_defaults['cache_dir']
        namespace_cache_dir = cache_dir

        # we default the namespace_cache_dir to our default cache dir.
        # however, if this backend is configured with filename= param, we prioritize that
        # so all caches within that particular region, even those namespaced end up in the same path
        if region_obj.actual_backend.filename:
            namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)

        if not os.path.isdir(namespace_cache_dir):
            os.makedirs(namespace_cache_dir)
        new_region = make_region(
            name=region_uid_name,
            function_key_generator=backend_key_generator(region_obj.actual_backend)
        )

        namespace_filename = os.path.join(
            namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db")
        # special type that allows 1db per namespace
        new_region.configure(
            backend='dogpile.cache.rc.file_namespace',
            expiration_time=expiration_time,
            arguments={"filename": namespace_filename}
        )

        # create and save in region caches
        log.debug('configuring new region: %s', region_uid_name)
        region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region

    # remember the namespace so conditional_cache_on_arguments can default to it
    region_obj._default_namespace = region_namespace
    if use_async_runner:
        region_obj.async_creation_runner = async_creation_runner
    return region_obj
233 233
234 234
def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str) -> int:
    """
    Clear all cache entries under `cache_namespace_uid` in the given region.

    :param method: CLEAR_DELETE removes keys from the backend (multi-process
        safe); CLEAR_INVALIDATE only marks this process's region stale.
    :return: number of deleted keys (0 for CLEAR_INVALIDATE)
    """
    from . import CLEAR_DELETE, CLEAR_INVALIDATE

    if not isinstance(cache_region, RhodeCodeCacheRegion):
        cache_region = get_or_create_region(cache_region, cache_namespace_uid)
    log.debug('clearing cache region: %s [prefix:%s] with method=%s',
              cache_region, cache_namespace_uid, method)

    num_affected_keys = 0

    if method == CLEAR_INVALIDATE:
        # NOTE: The CacheRegion.invalidate() method’s default mode of
        # operation is to set a timestamp local to this CacheRegion in this Python process only.
        # It does not impact other Python processes or regions as the timestamp is only stored locally in memory.
        cache_region.invalidate(hard=True)

    if method == CLEAR_DELETE:
        # backend-level deletion works across all processes sharing the store
        num_affected_keys = cache_region.backend.delete_multi_by_prefix(prefix=cache_namespace_uid)

    return num_affected_keys
257 255
258 256
class ActiveRegionCache(object):
    """Returned from InvalidationContext when the cached value is still valid."""

    def __init__(self, context, cache_data):
        self.context = context
        self.cache_data = cache_data

    def should_invalidate(self):
        # cache is active: callers may keep using the cached value
        return False
266 264
267 265
class FreshRegionCache(object):
    """Returned from InvalidationContext when the value must be recomputed."""

    def __init__(self, context, cache_data):
        self.context = context
        self.cache_data = cache_data

    def should_invalidate(self):
        # cache is stale/missing: callers should refresh the value
        return True
275 273
276 274
class InvalidationContext(object):
    """
    Context manager coordinating cache recomputation across processes via
    the CacheKey database table.

    usage::

        from rhodecode.lib import rc_cache

        cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
        region = rc_cache.get_or_create_region('some_region', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
        def heavy_compute(cache_name, param1, param2):
            print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))

        # invalidation namespace is shared namespace key for all process caches
        # we use it to send a global signal
        invalidation_namespace = 'repo_cache:1'

        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
        with inv_context_manager as invalidation_context:
            args = ('one', 'two')
            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                result = heavy_compute.refresh(*args)
            else:
                result = heavy_compute(*args)

            compute_time = inv_context_manager.compute_time
            log.debug('result computed in %.4fs', compute_time)

        # To send global invalidation signal, simply run
        CacheKey.set_invalidate(invalidation_namespace)

    """

    def __repr__(self):
        return f'<InvalidationContext:{self.cache_key}[{self.uid}]>'

    def __init__(self, uid, invalidation_namespace='',
                 raise_exception=False, thread_scoped=None):
        self.uid = uid
        self.invalidation_namespace = invalidation_namespace
        # when True, __exit__ re-raises DB commit failures
        self.raise_exception = raise_exception
        self.proc_id = rhodecode.CONFIG.get('instance_id') or 'DEFAULT'
        self.thread_id = 'global'

        if thread_scoped is None:
            # if we set "default" we can override this via .ini settings
            thread_scoped = rhodecode.ConfigGet().get_bool('cache_thread_scoped')

        # Append the thread id to the cache key if this invalidation context
        # should be scoped to the current thread.
        if thread_scoped is True:
            self.thread_id = threading.current_thread().ident

        self.cache_key = compute_key_from_params(uid)
        self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
            self.proc_id, self.thread_id, self.cache_key)
        self.proc_key = f'proc:{self.proc_id}'
        self.compute_time = 0

    def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''):
        """Fetch (or lazily create) the CacheKey DB row for this context."""
        from rhodecode.model.db import CacheKey

        invalidation_namespace = invalidation_namespace or self.invalidation_namespace
        # fetch all cache keys for this namespace and convert them to a map to find if we
        # have specific cache_key object registered. We do this because we want to have
        # all consistent cache_state_uid for newly registered objects
        cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace)
        cache_obj = cache_obj_map.get(self.cache_key)
        log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)

        if not cache_obj:
            new_cache_args = invalidation_namespace
            first_cache_obj = next(iter(cache_obj_map.values())) if cache_obj_map else None
            cache_state_uid = None
            if first_cache_obj:
                # reuse the namespace's existing state uid for consistency
                cache_state_uid = first_cache_obj.cache_state_uid
            cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args,
                                 cache_state_uid=cache_state_uid)
            cache_key_meta.cache_keys_by_pid.add(self.proc_key)

        return cache_obj

    def __enter__(self):
        """
        Test if current object is valid, and return CacheRegion function
        that does invalidation and calculation
        """
        log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
        # register or get a new key based on uid
        self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid)
        cache_data = self.cache_obj.get_dict()
        self._start_time = time.time()
        if self.cache_obj.cache_active:
            # means our cache obj is existing and marked as it's
            # cache is not outdated, we return ActiveRegionCache
            self.skip_cache_active_change = True

            return ActiveRegionCache(context=self, cache_data=cache_data)

        # the key is either not existing or set to False, we return
        # the real invalidator which re-computes value. We additionally set
        # the flag to actually update the Database objects
        self.skip_cache_active_change = False
        return FreshRegionCache(context=self, cache_data=cache_data)

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Mark the cache key active in the DB (unless it already was)."""
        from rhodecode.model.db import IntegrityError, Session

        # save compute time
        self.compute_time = time.time() - self._start_time

        if self.skip_cache_active_change:
            return

        try:
            self.cache_obj.cache_active = True
            Session().add(self.cache_obj)
            Session().commit()
        except IntegrityError:
            # if we catch integrity error, it means we inserted this object
            # assumption is that's really an edge race-condition case and
            # it's safe is to skip it
            Session().rollback()
        except Exception:
            log.exception('Failed to commit on cache key update')
            Session().rollback()
            if self.raise_exception:
                raise
General Comments 0
You need to be logged in to leave comments. Login now