feat(upgrade): added feature to bulk upgrade hooks from 4.X -> 5.X, fixes RCCE-34
super-admin -
r5275:c32427b8 default
@@ -1,191 +1,191 b''
1 1 # required for pushd to work..
2 2 SHELL = /bin/bash
3 3
4 4
5 5 # set by: PATH_TO_OUTDATED_PACKAGES=/some/path/outdated_packages.py
6 6 OUTDATED_PACKAGES = ${PATH_TO_OUTDATED_PACKAGES}
7 7
8 8 .PHONY: clean
9 9 ## Cleanup compiled and cached py files
10 10 clean:
11 11 make test-clean
12 12 find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' -o -iname '*.orig' \) -exec rm '{}' ';'
13 13 find . -type d -name "build" -prune -exec rm -rf '{}' ';'
14 14
15 15
16 16 .PHONY: test
17 17 ## run test-clean and tests
18 18 test:
19 19 make test-clean
20 20 make test-only
21 21
22 22
23 23 .PHONY: test-clean
24 24 ## Cleanup test artifacts: coverage, junit, pylint and cache files
25 25 test-clean:
26 26 rm -rf coverage.xml htmlcov junit.xml pylint.log result
27 27 find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
28 28 find . -type f \( -iname '.coverage.*' \) -exec rm '{}' ';'
29 29
30 30
31 31 .PHONY: test-only
32 32 ## Run tests only without cleanup
33 33 test-only:
34 34 PYTHONHASHSEED=random \
35 35 py.test -x -vv -r xw -p no:sugar \
36 36 --cov-report=term-missing --cov-report=html \
37 37 --cov=rhodecode rhodecode
38 38
39 39
40 40 .PHONY: test-only-mysql
41 41 ## run tests against mysql
42 42 test-only-mysql:
43 43 PYTHONHASHSEED=random \
44 44 py.test -x -vv -r xw -p no:sugar \
45 45 --cov-report=term-missing --cov-report=html \
46 46 --ini-config-override='{"app:main": {"sqlalchemy.db1.url": "mysql://root:qweqwe@localhost/rhodecode_test?charset=utf8"}}' \
47 47 --cov=rhodecode rhodecode
48 48
49 49
50 50 .PHONY: test-only-postgres
51 51 ## run tests against postgres
52 52 test-only-postgres:
53 53 PYTHONHASHSEED=random \
54 54 py.test -x -vv -r xw -p no:sugar \
55 55 --cov-report=term-missing --cov-report=html \
56 56 --ini-config-override='{"app:main": {"sqlalchemy.db1.url": "postgresql://postgres:qweqwe@localhost/rhodecode_test"}}' \
57 57 --cov=rhodecode rhodecode
58 58
59 59 .PHONY: ruff-check
60 60 ## run a ruff analysis
61 61 ruff-check:
62 62 ruff check --ignore F401 --ignore I001 --ignore E402 --ignore E501 --ignore F841 --exclude rhodecode/lib/dbmigrate --exclude .eggs --exclude .dev .
63 63
64 64
65 65 .PHONY: docs
66 66 ## build docs
67 67 docs:
68 68 (cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make clean html)
69 69
70 70
71 71 .PHONY: docs-clean
72 72 ## Cleanup docs
73 73 docs-clean:
74 74 (cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make clean)
75 75
76 76
77 77 .PHONY: docs-cleanup
78 78 ## Cleanup docs
79 79 docs-cleanup:
80 80 (cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make cleanup)
81 81
82 82
83 83 .PHONY: web-build
84 84 ## Build JS packages static/js
85 85 web-build:
86 86 docker run -it --rm -v $(PWD):/project --workdir=/project rhodecode/static-files-build:16 -c "npm install && /project/node_modules/.bin/grunt"
87 87 # run static file check
88 88 ./rhodecode/tests/scripts/static-file-check.sh rhodecode/public/
89 89 rm -rf node_modules
90 90
91 91
92 92 .PHONY: pip-packages
93 93 ## Show outdated packages
94 94 pip-packages:
95 95 python ${OUTDATED_PACKAGES}
96 96
97 97
98 98 .PHONY: build
99 99 ## Build sdist/egg
100 100 build:
101 101 python -m build
102 102
103 103
104 104 .PHONY: dev-sh
105 105 ## Install and start an interactive dev shell (zsh + oh-my-zsh + carapace)
106 106 dev-sh:
107 107 sudo echo "deb [trusted=yes] https://apt.fury.io/rsteube/ /" | sudo tee -a "/etc/apt/sources.list.d/fury.list"
108 108 sudo apt-get update
109 109 sudo apt-get install -y zsh carapace-bin
110 110 rm -rf /home/rhodecode/.oh-my-zsh
111 111 curl https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh | sh
112 112 echo "source <(carapace _carapace)" > /home/rhodecode/.zshrc
113 113 PROMPT='%(?.%F{green}√.%F{red}?%?)%f %B%F{240}%1~%f%b %# ' zsh
114 114
115 115
116 116 .PHONY: dev-cleanup
117 117 ## Cleanup: pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y
118 118 dev-cleanup:
119 119 pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y
120 120 rm -rf /tmp/*
121 121
122 122
123 123 .PHONY: dev-env
124 124 ## make dev-env based on the requirements files and install develop of packages
125 125 dev-env:
126 126 pip install build virtualenv
127 127 pushd ../rhodecode-vcsserver/ && make dev-env && popd
128 128 pip wheel --wheel-dir=/home/rhodecode/.cache/pip/wheels -r requirements.txt -r requirements_rc_tools.txt -r requirements_test.txt -r requirements_debug.txt
129 129 pip install --no-index --find-links=/home/rhodecode/.cache/pip/wheels -r requirements.txt -r requirements_rc_tools.txt -r requirements_test.txt -r requirements_debug.txt
130 130 pip install -e .
131 131
132 132
133 133 .PHONY: sh
134 134 ## shortcut for make dev-env dev-sh
135 135 sh:
136 136 (make dev-env; make dev-sh)
137 137
138 138
139 139 .PHONY: dev-srv
140 140 ## run develop server instance; attach with: docker exec -it $(docker ps -q --filter 'name=dev-enterprise-ce') /bin/bash
141 141 dev-srv:
142 142 pserve --reload .dev/dev.ini
143 143
144 144
145 145 .PHONY: dev-srv-g
146 146 ## run gunicorn multi process workers
147 147 dev-srv-g:
148 gunicorn --paste .dev/dev.ini --bind=0.0.0.0:10020 --config=.dev/gunicorn_config.py --timeout=120
148 gunicorn --paste .dev/dev.ini --bind=0.0.0.0:10020 --config=.dev/gunicorn_config.py --timeout=120 --reload
149 149
150 150
151 151 # Default command on calling make
152 152 .DEFAULT_GOAL := show-help
153 153
154 154 .PHONY: show-help
155 155 show-help:
156 156 @echo "$$(tput bold)Available rules:$$(tput sgr0)"
157 157 @echo
158 158 @sed -n -e "/^## / { \
159 159 h; \
160 160 s/.*//; \
161 161 :doc" \
162 162 -e "H; \
163 163 n; \
164 164 s/^## //; \
165 165 t doc" \
166 166 -e "s/:.*//; \
167 167 G; \
168 168 s/\\n## /---/; \
169 169 s/\\n/ /g; \
170 170 p; \
171 171 }" ${MAKEFILE_LIST} \
172 172 | LC_ALL='C' sort --ignore-case \
173 173 | awk -F '---' \
174 174 -v ncol=$$(tput cols) \
175 175 -v indent=19 \
176 176 -v col_on="$$(tput setaf 6)" \
177 177 -v col_off="$$(tput sgr0)" \
178 178 '{ \
179 179 printf "%s%*s%s ", col_on, -indent, $$1, col_off; \
180 180 n = split($$2, words, " "); \
181 181 line_length = ncol - indent; \
182 182 for (i = 1; i <= n; i++) { \
183 183 line_length -= length(words[i]) + 1; \
184 184 if (line_length <= 0) { \
185 185 line_length = ncol - indent - length(words[i]) - 1; \
186 186 printf "\n%*s ", -indent, " "; \
187 187 } \
188 188 printf "%s ", words[i]; \
189 189 } \
190 190 printf "\n"; \
191 191 }'
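The pip-packages target above only runs `python ${OUTDATED_PACKAGES}`; the script itself is configured via PATH_TO_OUTDATED_PACKAGES and is not part of this change. A minimal sketch of what such a report script could look like, assuming it only needs to shell out to pip (the file name and output format here are hypothetical):

# outdated_packages.py -- hypothetical sketch of the script referenced by
# PATH_TO_OUTDATED_PACKAGES; it shells out to pip and prints a short report.
import json
import subprocess
import sys


def main() -> int:
    # `pip list --outdated --format=json` yields entries with
    # name / version / latest_version keys.
    proc = subprocess.run(
        [sys.executable, "-m", "pip", "list", "--outdated", "--format=json"],
        capture_output=True, text=True, check=True)
    for pkg in json.loads(proc.stdout or "[]"):
        print(f"{pkg['name']}: {pkg['version']} -> {pkg['latest_version']}")
    return 0


if __name__ == "__main__":
    sys.exit(main())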
@@ -1,424 +1,424 b''
1 1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20 import itertools
21 21 import base64
22 22
23 23 from rhodecode.api import (
24 24 jsonrpc_method, JSONRPCError, JSONRPCForbidden, find_methods)
25 25
26 26 from rhodecode.api.utils import (
27 27 Optional, OAttr, has_superadmin_permission, get_user_or_error)
28 28 from rhodecode.lib.utils import repo2db_mapper
29 29 from rhodecode.lib import system_info
30 30 from rhodecode.lib import user_sessions
31 31 from rhodecode.lib import exc_tracking
32 32 from rhodecode.lib.ext_json import json
33 33 from rhodecode.lib.utils2 import safe_int
34 34 from rhodecode.model.db import UserIpMap
35 35 from rhodecode.model.scm import ScmModel
36 36 from rhodecode.model.settings import VcsSettingsModel
37 37 from rhodecode.apps.file_store import utils
38 38 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
39 39 FileOverSizeException
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43
44 44 @jsonrpc_method()
45 45 def get_server_info(request, apiuser):
46 46 """
47 47 Returns the |RCE| server information.
48 48
49 49 This includes the running version of |RCE| and all installed
50 50 packages. This command takes the following options:
51 51
52 52 :param apiuser: This is filled automatically from the |authtoken|.
53 53 :type apiuser: AuthUser
54 54
55 55 Example output:
56 56
57 57 .. code-block:: bash
58 58
59 59 id : <id_given_in_input>
60 60 result : {
61 61 'modules': [<module name>,...]
62 62 'py_version': <python version>,
63 63 'platform': <platform type>,
64 64 'rhodecode_version': <rhodecode version>
65 65 }
66 66 error : null
67 67 """
68 68
69 69 if not has_superadmin_permission(apiuser):
70 70 raise JSONRPCForbidden()
71 71
72 72 server_info = ScmModel().get_server_info(request.environ)
73 73 # rhodecode-index requires those
74 74
75 75 server_info['index_storage'] = server_info['search']['value']['location']
76 76 server_info['storage'] = server_info['storage']['value']['path']
77 77
78 78 return server_info
79 79
80 80
81 81 @jsonrpc_method()
82 82 def get_repo_store(request, apiuser):
83 83 """
84 84 Returns the |RCE| repository storage information.
85 85
86 86 :param apiuser: This is filled automatically from the |authtoken|.
87 87 :type apiuser: AuthUser
88 88
89 89 Example output:
90 90
91 91 .. code-block:: bash
92 92
93 93 id : <id_given_in_input>
94 94 result : {
95 95 'modules': [<module name>,...]
96 96 'py_version': <python version>,
97 97 'platform': <platform type>,
98 98 'rhodecode_version': <rhodecode version>
99 99 }
100 100 error : null
101 101 """
102 102
103 103 if not has_superadmin_permission(apiuser):
104 104 raise JSONRPCForbidden()
105 105
106 106 path = VcsSettingsModel().get_repos_location()
107 107 return {"path": path}
108 108
109 109
110 110 @jsonrpc_method()
111 111 def get_ip(request, apiuser, userid=Optional(OAttr('apiuser'))):
112 112 """
113 113 Displays the IP Address as seen from the |RCE| server.
114 114
115 115 * This command displays the IP Address, as well as all the defined IP
116 116 addresses for the specified user. If the ``userid`` is not set, the
117 117 data returned is for the user calling the method.
118 118
119 119 This command can only be run using an |authtoken| with admin rights to
120 120 the specified repository.
121 121
122 122 This command takes the following options:
123 123
124 124 :param apiuser: This is filled automatically from |authtoken|.
125 125 :type apiuser: AuthUser
126 126 :param userid: Sets the userid for which associated IP Address data
127 127 is returned.
128 128 :type userid: Optional(str or int)
129 129
130 130 Example output:
131 131
132 132 .. code-block:: bash
133 133
134 134 id : <id_given_in_input>
135 135 result : {
136 136 "server_ip_addr": "<ip_from_clien>",
137 137 "user_ips": [
138 138 {
139 139 "ip_addr": "<ip_with_mask>",
140 140 "ip_range": ["<start_ip>", "<end_ip>"],
141 141 },
142 142 ...
143 143 ]
144 144 }
145 145
146 146 """
147 147 if not has_superadmin_permission(apiuser):
148 148 raise JSONRPCForbidden()
149 149
150 150 userid = Optional.extract(userid, evaluate_locals=locals())
151 151 userid = getattr(userid, 'user_id', userid)
152 152
153 153 user = get_user_or_error(userid)
154 154 ips = UserIpMap.query().filter(UserIpMap.user == user).all()
155 155 return {
156 156 'server_ip_addr': request.rpc_ip_addr,
157 157 'user_ips': ips
158 158 }
159 159
160 160
161 161 @jsonrpc_method()
162 162 def rescan_repos(request, apiuser, remove_obsolete=Optional(False)):
163 163 """
164 164 Triggers a rescan of the specified repositories.
165 165
166 166 * If the ``remove_obsolete`` option is set, it also deletes repositories
167 167 that are found in the database but not on the file system, cleaning up
168 168 so-called "zombies".
169 169
170 170 This command can only be run using an |authtoken| with admin rights to
171 171 the specified repository.
172 172
173 173 This command takes the following options:
174 174
175 175 :param apiuser: This is filled automatically from the |authtoken|.
176 176 :type apiuser: AuthUser
177 177 :param remove_obsolete: Deletes repositories from the database that
178 178 are not found on the filesystem.
179 179 :type remove_obsolete: Optional(``True`` | ``False``)
180 180
181 181 Example output:
182 182
183 183 .. code-block:: bash
184 184
185 185 id : <id_given_in_input>
186 186 result : {
187 187 'added': [<added repository name>,...]
188 188 'removed': [<removed repository name>,...]
189 189 }
190 190 error : null
191 191
192 192 Example error output:
193 193
194 194 .. code-block:: bash
195 195
196 196 id : <id_given_in_input>
197 197 result : null
198 198 error : {
199 199 'Error occurred during rescan repositories action'
200 200 }
201 201
202 202 """
203 203 if not has_superadmin_permission(apiuser):
204 204 raise JSONRPCForbidden()
205 205
206 206 try:
207 207 rm_obsolete = Optional.extract(remove_obsolete)
208 208 added, removed = repo2db_mapper(ScmModel().repo_scan(),
209 remove_obsolete=rm_obsolete)
209 remove_obsolete=rm_obsolete, force_hooks_rebuild=True)
210 210 return {'added': added, 'removed': removed}
211 211 except Exception:
212 212 log.exception('Failed to run repo rescan')
213 213 raise JSONRPCError(
214 214 'Error occurred during rescan repositories action'
215 215 )
216 216
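rescan_repos is the API-side entry point for the change in this commit: repo2db_mapper is now called with force_hooks_rebuild=True, so a rescan also rewrites per-repository hooks (the bulk 4.X -> 5.X hook upgrade from the commit message). A minimal sketch of triggering it over the JSON-RPC API; the server URL and auth token are placeholders, and the payload follows the id/auth_token/method/args convention shown in the API docstrings:

# Hedged sketch: trigger a repository rescan (which now also rebuilds hooks)
# through the JSON-RPC API. Endpoint URL and auth token are placeholders.
import requests

API_URL = "https://rhodecode.example.com/_admin/api"  # assumed endpoint
AUTH_TOKEN = "<super-admin-auth-token>"

payload = {
    "id": 1,
    "auth_token": AUTH_TOKEN,
    "method": "rescan_repos",
    "args": {"remove_obsolete": False},
}
response = requests.post(API_URL, json=payload, timeout=300).json()
# Expected shape per the docstring above:
# {"id": 1, "result": {"added": [...], "removed": [...]}, "error": null}
print(response.get("result"), response.get("error"))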
217 217
218 218 @jsonrpc_method()
219 219 def cleanup_sessions(request, apiuser, older_then=Optional(60)):
220 220 """
221 221 Triggers a session cleanup action.
222 222
223 223 If the ``older_then`` option is set, only sessions that haven't been
224 224 accessed in the given number of days will be removed.
225 225
226 226 This command can only be run using an |authtoken| with admin rights to
227 227 the specified repository.
228 228
229 229 This command takes the following options:
230 230
231 231 :param apiuser: This is filled automatically from the |authtoken|.
232 232 :type apiuser: AuthUser
233 233 :param older_then: Deletes sessions that haven't been accessed
234 234 in the given number of days.
235 235 :type older_then: Optional(int)
236 236
237 237 Example output:
238 238
239 239 .. code-block:: bash
240 240
241 241 id : <id_given_in_input>
242 242 result: {
243 243 "backend": "<type of backend>",
244 244 "sessions_removed": <number_of_removed_sessions>
245 245 }
246 246 error : null
247 247
248 248 Example error output:
249 249
250 250 .. code-block:: bash
251 251
252 252 id : <id_given_in_input>
253 253 result : null
254 254 error : {
255 255 'Error occurred during session cleanup'
256 256 }
257 257
258 258 """
259 259 if not has_superadmin_permission(apiuser):
260 260 raise JSONRPCForbidden()
261 261
262 262 older_then = safe_int(Optional.extract(older_then)) or 60
263 263 older_than_seconds = 60 * 60 * 24 * older_then
264 264
265 265 config = system_info.rhodecode_config().get_value()['value']['config']
266 266 session_model = user_sessions.get_session_handler(
267 267 config.get('beaker.session.type', 'memory'))(config)
268 268
269 269 backend = session_model.SESSION_TYPE
270 270 try:
271 271 cleaned = session_model.clean_sessions(
272 272 older_than_seconds=older_than_seconds)
273 273 return {'sessions_removed': cleaned, 'backend': backend}
274 274 except user_sessions.CleanupCommand as msg:
275 275 return {'cleanup_command': str(msg), 'backend': backend}
276 276 except Exception as e:
277 277 log.exception('Failed session cleanup')
278 278 raise JSONRPCError(
279 279 'Error occurred during session cleanup'
280 280 )
281 281
282 282
283 283 @jsonrpc_method()
284 284 def get_method(request, apiuser, pattern=Optional('*')):
285 285 """
286 286 Returns a list of all available API methods. By default the match
287 287 pattern is "*", but any other pattern can be specified, e.g. *comment*
288 288 will return all methods containing "comment". If just a single method
289 289 is matched, the returned data also includes the method specification.
290 290
291 291 This command can only be run using an |authtoken| with admin rights to
292 292 the specified repository.
293 293
294 294 This command takes the following options:
295 295
296 296 :param apiuser: This is filled automatically from the |authtoken|.
297 297 :type apiuser: AuthUser
298 298 :param pattern: pattern to match method names against
299 299 :type pattern: Optional("*")
300 300
301 301 Example output:
302 302
303 303 .. code-block:: bash
304 304
305 305 id : <id_given_in_input>
306 306 "result": [
307 307 "changeset_comment",
308 308 "comment_pull_request",
309 309 "comment_commit"
310 310 ]
311 311 error : null
312 312
313 313 .. code-block:: bash
314 314
315 315 id : <id_given_in_input>
316 316 "result": [
317 317 "comment_commit",
318 318 {
319 319 "apiuser": "<RequiredType>",
320 320 "comment_type": "<Optional:u'note'>",
321 321 "commit_id": "<RequiredType>",
322 322 "message": "<RequiredType>",
323 323 "repoid": "<RequiredType>",
324 324 "request": "<RequiredType>",
325 325 "resolves_comment_id": "<Optional:None>",
326 326 "status": "<Optional:None>",
327 327 "userid": "<Optional:<OptionalAttr:apiuser>>"
328 328 }
329 329 ]
330 330 error : null
331 331 """
332 332 from rhodecode.config.patches import inspect_getargspec
333 333 inspect = inspect_getargspec()
334 334
335 335 if not has_superadmin_permission(apiuser):
336 336 raise JSONRPCForbidden()
337 337
338 338 pattern = Optional.extract(pattern)
339 339
340 340 matches = find_methods(request.registry.jsonrpc_methods, pattern)
341 341
342 342 args_desc = []
343 343 matches_keys = list(matches.keys())
344 344 if len(matches_keys) == 1:
345 345 func = matches[matches_keys[0]]
346 346
347 347 argspec = inspect.getargspec(func)
348 348 arglist = argspec[0]
349 349 defaults = list(map(repr, argspec[3] or []))
350 350
351 351 default_empty = '<RequiredType>'
352 352
353 353 # kw arguments required by this method
354 354 func_kwargs = dict(itertools.zip_longest(
355 355 reversed(arglist), reversed(defaults), fillvalue=default_empty))
356 356 args_desc.append(func_kwargs)
357 357
358 358 return matches_keys + args_desc
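The argspec handling above pairs every argument name with its default by zipping the reversed lists and padding missing defaults with '<RequiredType>'. A small standalone illustration of that pairing; the argument names and defaults below are invented for the example:

# Standalone illustration of the zip_longest pairing used in get_method above;
# the arglist/defaults values are made up.
import itertools

arglist = ['request', 'apiuser', 'repoid', 'comment_type', 'status']
defaults = [repr('note'), repr(None)]  # only the trailing params have defaults
default_empty = '<RequiredType>'

func_kwargs = dict(itertools.zip_longest(
    reversed(arglist), reversed(defaults), fillvalue=default_empty))
print(func_kwargs)
# {'status': 'None', 'comment_type': "'note'", 'repoid': '<RequiredType>',
#  'apiuser': '<RequiredType>', 'request': '<RequiredType>'}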
359 359
360 360
361 361 @jsonrpc_method()
362 362 def store_exception(request, apiuser, exc_data_json, prefix=Optional('rhodecode')):
363 363 """
364 364 Stores the sent exception inside the built-in exception tracker of the |RCE| server.
365 365
366 366 This command can only be run using an |authtoken| with admin rights to
367 367 the specified repository.
368 368
369 369 This command takes the following options:
370 370
371 371 :param apiuser: This is filled automatically from the |authtoken|.
372 372 :type apiuser: AuthUser
373 373
374 374 :param exc_data_json: JSON data with the exception, e.g.
375 375 {"exc_traceback": "Value `1` is not allowed", "exc_type_name": "ValueError"}
376 376 :type exc_data_json: JSON data
377 377
378 378 :param prefix: prefix for the error type, e.g. 'rhodecode', 'vcsserver', 'rhodecode-tools'
379 379 :type prefix: Optional("rhodecode")
380 380
381 381 Example output:
382 382
383 383 .. code-block:: bash
384 384
385 385 id : <id_given_in_input>
386 386 "result": {
387 387 "exc_id": 139718459226384,
388 388 "exc_url": "http://localhost:8080/_admin/settings/exceptions/139718459226384"
389 389 }
390 390 error : null
391 391 """
392 392 if not has_superadmin_permission(apiuser):
393 393 raise JSONRPCForbidden()
394 394
395 395 prefix = Optional.extract(prefix)
396 396 exc_id = exc_tracking.generate_id()
397 397
398 398 try:
399 399 exc_data = json.loads(exc_data_json)
400 400 except Exception:
401 401 log.error('Failed to parse JSON: %r', exc_data_json)
402 402 raise JSONRPCError('Failed to parse JSON data from exc_data_json field. '
403 403 'Please make sure it contains a valid JSON.')
404 404
405 405 try:
406 406 exc_traceback = exc_data['exc_traceback']
407 407 exc_type_name = exc_data['exc_type_name']
408 408 exc_value = ''
409 409 except KeyError as err:
410 410 raise JSONRPCError(
411 411 f'Missing exc_traceback, or exc_type_name '
412 412 f'in exc_data_json field. Missing: {err}')
413 413
414 414 class ExcType:
415 415 __name__ = exc_type_name
416 416
417 417 exc_info = (ExcType(), exc_value, exc_traceback)
418 418
419 419 exc_tracking._store_exception(
420 420 exc_id=exc_id, exc_info=exc_info, prefix=prefix)
421 421
422 422 exc_url = request.route_url(
423 423 'admin_settings_exception_tracker_show', exception_id=exc_id)
424 424 return {'exc_id': exc_id, 'exc_url': exc_url}
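store_exception expects exc_data_json to carry at least exc_traceback and exc_type_name. A hedged sketch of building that payload from a caught exception and posting it, reusing the same placeholder endpoint and token conventions as the rescan example above:

# Hedged sketch: build the exc_data_json payload expected by store_exception
# from a caught exception. Endpoint URL and auth token are placeholders.
import json
import traceback

import requests

API_URL = "https://rhodecode.example.com/_admin/api"  # assumed endpoint
AUTH_TOKEN = "<super-admin-auth-token>"

try:
    raise ValueError("Value `1` is not allowed")
except ValueError as exc:
    exc_data = {
        "exc_traceback": traceback.format_exc(),
        "exc_type_name": type(exc).__name__,
    }

payload = {
    "id": 1,
    "auth_token": AUTH_TOKEN,
    "method": "store_exception",
    "args": {"exc_data_json": json.dumps(exc_data), "prefix": "rhodecode"},
}
print(requests.post(API_URL, json=payload, timeout=30).json())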
@@ -1,714 +1,714 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 import logging
21 21 import collections
22 22
23 23 import datetime
24 24 import formencode
25 25 import formencode.htmlfill
26 26
27 27 import rhodecode
28 28
29 29 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
30 30 from pyramid.renderers import render
31 31 from pyramid.response import Response
32 32
33 33 from rhodecode.apps._base import BaseAppView
34 34 from rhodecode.apps._base.navigation import navigation_list
35 35 from rhodecode.apps.svn_support.config_keys import generate_config
36 36 from rhodecode.lib import helpers as h
37 37 from rhodecode.lib.auth import (
38 38 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
39 39 from rhodecode.lib.celerylib import tasks, run_task
40 40 from rhodecode.lib.str_utils import safe_str
41 41 from rhodecode.lib.utils import repo2db_mapper
42 42 from rhodecode.lib.utils2 import str2bool, AttributeDict
43 43 from rhodecode.lib.index import searcher_from_config
44 44
45 45 from rhodecode.model.db import RhodeCodeUi, Repository
46 46 from rhodecode.model.forms import (ApplicationSettingsForm,
47 47 ApplicationUiSettingsForm, ApplicationVisualisationForm,
48 48 LabsSettingsForm, IssueTrackerPatternsForm)
49 49 from rhodecode.model.permission import PermissionModel
50 50 from rhodecode.model.repo_group import RepoGroupModel
51 51
52 52 from rhodecode.model.scm import ScmModel
53 53 from rhodecode.model.notification import EmailNotificationModel
54 54 from rhodecode.model.meta import Session
55 55 from rhodecode.model.settings import (
56 56 IssueTrackerSettingsModel, VcsSettingsModel, SettingNotFound,
57 57 SettingsModel)
58 58
59 59
60 60 log = logging.getLogger(__name__)
61 61
62 62
63 63 class AdminSettingsView(BaseAppView):
64 64
65 65 def load_default_context(self):
66 66 c = self._get_local_tmpl_context()
67 67 c.labs_active = str2bool(
68 68 rhodecode.CONFIG.get('labs_settings_active', 'true'))
69 69 c.navlist = navigation_list(self.request)
70 70 return c
71 71
72 72 @classmethod
73 73 def _get_ui_settings(cls):
74 74 ret = RhodeCodeUi.query().all()
75 75
76 76 if not ret:
77 77 raise Exception('Could not get application ui settings !')
78 78 settings = {}
79 79 for each in ret:
80 80 k = each.ui_key
81 81 v = each.ui_value
82 82 if k == '/':
83 83 k = 'root_path'
84 84
85 85 if k in ['push_ssl', 'publish', 'enabled']:
86 86 v = str2bool(v)
87 87
88 88 if k.find('.') != -1:
89 89 k = k.replace('.', '_')
90 90
91 91 if each.ui_section in ['hooks', 'extensions']:
92 92 v = each.ui_active
93 93
94 94 settings[each.ui_section + '_' + k] = v
95 95 return settings
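To make the key mangling in _get_ui_settings easier to follow, here is a standalone rendition of the same loop on made-up rows: the section name is prefixed, the '/' key becomes root_path, dots turn into underscores, selected keys are coerced to booleans, and hooks/extensions report their active flag instead of their value (str2bool is replaced by a simple stand-in).

# Standalone rendition of the _get_ui_settings key mangling; the rows below
# are made-up examples of (ui_section, ui_key, ui_value, ui_active).
example_rows = [
    ('paths', '/', '/srv/repos', True),
    ('web', 'push_ssl', 'false', True),
    ('hooks', 'changegroup.repo_size', 'python:...', True),
]

settings = {}
for section, key, value, active in example_rows:
    k = 'root_path' if key == '/' else key
    v = value
    if k in ['push_ssl', 'publish', 'enabled']:
        v = v.lower() in ('true', '1')  # stand-in for str2bool
    k = k.replace('.', '_')
    if section in ['hooks', 'extensions']:
        v = active
    settings[section + '_' + k] = v

print(settings)
# {'paths_root_path': '/srv/repos', 'web_push_ssl': False,
#  'hooks_changegroup_repo_size': True}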
96 96
97 97 @classmethod
98 98 def _form_defaults(cls):
99 99 defaults = SettingsModel().get_all_settings()
100 100 defaults.update(cls._get_ui_settings())
101 101
102 102 defaults.update({
103 103 'new_svn_branch': '',
104 104 'new_svn_tag': '',
105 105 })
106 106 return defaults
107 107
108 108 @LoginRequired()
109 109 @HasPermissionAllDecorator('hg.admin')
110 110 def settings_vcs(self):
111 111 c = self.load_default_context()
112 112 c.active = 'vcs'
113 113 model = VcsSettingsModel()
114 114 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
115 115 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
116 116
117 117 settings = self.request.registry.settings
118 118 c.svn_proxy_generate_config = settings[generate_config]
119 119
120 120 defaults = self._form_defaults()
121 121
122 122 model.create_largeobjects_dirs_if_needed(defaults['paths_root_path'])
123 123
124 124 data = render('rhodecode:templates/admin/settings/settings.mako',
125 125 self._get_template_context(c), self.request)
126 126 html = formencode.htmlfill.render(
127 127 data,
128 128 defaults=defaults,
129 129 encoding="UTF-8",
130 130 force_defaults=False
131 131 )
132 132 return Response(html)
133 133
134 134 @LoginRequired()
135 135 @HasPermissionAllDecorator('hg.admin')
136 136 @CSRFRequired()
137 137 def settings_vcs_update(self):
138 138 _ = self.request.translate
139 139 c = self.load_default_context()
140 140 c.active = 'vcs'
141 141
142 142 model = VcsSettingsModel()
143 143 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
144 144 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
145 145
146 146 settings = self.request.registry.settings
147 147 c.svn_proxy_generate_config = settings[generate_config]
148 148
149 149 application_form = ApplicationUiSettingsForm(self.request.translate)()
150 150
151 151 try:
152 152 form_result = application_form.to_python(dict(self.request.POST))
153 153 except formencode.Invalid as errors:
154 154 h.flash(
155 155 _("Some form inputs contain invalid data."),
156 156 category='error')
157 157 data = render('rhodecode:templates/admin/settings/settings.mako',
158 158 self._get_template_context(c), self.request)
159 159 html = formencode.htmlfill.render(
160 160 data,
161 161 defaults=errors.value,
162 162 errors=errors.unpack_errors() or {},
163 163 prefix_error=False,
164 164 encoding="UTF-8",
165 165 force_defaults=False
166 166 )
167 167 return Response(html)
168 168
169 169 try:
170 170 if c.visual.allow_repo_location_change:
171 171 model.update_global_path_setting(form_result['paths_root_path'])
172 172
173 173 model.update_global_ssl_setting(form_result['web_push_ssl'])
174 174 model.update_global_hook_settings(form_result)
175 175
176 176 model.create_or_update_global_svn_settings(form_result)
177 177 model.create_or_update_global_hg_settings(form_result)
178 178 model.create_or_update_global_git_settings(form_result)
179 179 model.create_or_update_global_pr_settings(form_result)
180 180 except Exception:
181 181 log.exception("Exception while updating settings")
182 182 h.flash(_('Error occurred during updating '
183 183 'application settings'), category='error')
184 184 else:
185 185 Session().commit()
186 186 h.flash(_('Updated VCS settings'), category='success')
187 187 raise HTTPFound(h.route_path('admin_settings_vcs'))
188 188
189 189 data = render('rhodecode:templates/admin/settings/settings.mako',
190 190 self._get_template_context(c), self.request)
191 191 html = formencode.htmlfill.render(
192 192 data,
193 193 defaults=self._form_defaults(),
194 194 encoding="UTF-8",
195 195 force_defaults=False
196 196 )
197 197 return Response(html)
198 198
199 199 @LoginRequired()
200 200 @HasPermissionAllDecorator('hg.admin')
201 201 @CSRFRequired()
202 202 def settings_vcs_delete_svn_pattern(self):
203 203 delete_pattern_id = self.request.POST.get('delete_svn_pattern')
204 204 model = VcsSettingsModel()
205 205 try:
206 206 model.delete_global_svn_pattern(delete_pattern_id)
207 207 except SettingNotFound:
208 208 log.exception(
209 209 'Failed to delete svn_pattern with id %s', delete_pattern_id)
210 210 raise HTTPNotFound()
211 211
212 212 Session().commit()
213 213 return True
214 214
215 215 @LoginRequired()
216 216 @HasPermissionAllDecorator('hg.admin')
217 217 def settings_mapping(self):
218 218 c = self.load_default_context()
219 219 c.active = 'mapping'
220
220 c.storage_path = VcsSettingsModel().get_repos_location()
221 221 data = render('rhodecode:templates/admin/settings/settings.mako',
222 222 self._get_template_context(c), self.request)
223 223 html = formencode.htmlfill.render(
224 224 data,
225 225 defaults=self._form_defaults(),
226 226 encoding="UTF-8",
227 227 force_defaults=False
228 228 )
229 229 return Response(html)
230 230
231 231 @LoginRequired()
232 232 @HasPermissionAllDecorator('hg.admin')
233 233 @CSRFRequired()
234 234 def settings_mapping_update(self):
235 235 _ = self.request.translate
236 236 c = self.load_default_context()
237 237 c.active = 'mapping'
238 238 rm_obsolete = self.request.POST.get('destroy', False)
239 239 invalidate_cache = self.request.POST.get('invalidate', False)
240 240 log.debug('rescanning repo location with destroy obsolete=%s', rm_obsolete)
241 241
242 242 if invalidate_cache:
243 243 log.debug('invalidating all repositories cache')
244 244 for repo in Repository.get_all():
245 245 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
246 246
247 247 filesystem_repos = ScmModel().repo_scan()
248 added, removed = repo2db_mapper(filesystem_repos, rm_obsolete)
248 added, removed = repo2db_mapper(filesystem_repos, rm_obsolete, force_hooks_rebuild=True)
249 249 PermissionModel().trigger_permission_flush()
250 250
251 251 def _repr(rm_repo):
252 252 return ', '.join(map(safe_str, rm_repo)) or '-'
253 253
254 254 h.flash(_('Repositories successfully '
255 255 'rescanned added: %s ; removed: %s') %
256 256 (_repr(added), _repr(removed)),
257 257 category='success')
258 258 raise HTTPFound(h.route_path('admin_settings_mapping'))
259 259
260 260 @LoginRequired()
261 261 @HasPermissionAllDecorator('hg.admin')
262 262 def settings_global(self):
263 263 c = self.load_default_context()
264 264 c.active = 'global'
265 265 c.personal_repo_group_default_pattern = RepoGroupModel()\
266 266 .get_personal_group_name_pattern()
267 267
268 268 data = render('rhodecode:templates/admin/settings/settings.mako',
269 269 self._get_template_context(c), self.request)
270 270 html = formencode.htmlfill.render(
271 271 data,
272 272 defaults=self._form_defaults(),
273 273 encoding="UTF-8",
274 274 force_defaults=False
275 275 )
276 276 return Response(html)
277 277
278 278 @LoginRequired()
279 279 @HasPermissionAllDecorator('hg.admin')
280 280 @CSRFRequired()
281 281 def settings_global_update(self):
282 282 _ = self.request.translate
283 283 c = self.load_default_context()
284 284 c.active = 'global'
285 285 c.personal_repo_group_default_pattern = RepoGroupModel()\
286 286 .get_personal_group_name_pattern()
287 287 application_form = ApplicationSettingsForm(self.request.translate)()
288 288 try:
289 289 form_result = application_form.to_python(dict(self.request.POST))
290 290 except formencode.Invalid as errors:
291 291 h.flash(
292 292 _("Some form inputs contain invalid data."),
293 293 category='error')
294 294 data = render('rhodecode:templates/admin/settings/settings.mako',
295 295 self._get_template_context(c), self.request)
296 296 html = formencode.htmlfill.render(
297 297 data,
298 298 defaults=errors.value,
299 299 errors=errors.unpack_errors() or {},
300 300 prefix_error=False,
301 301 encoding="UTF-8",
302 302 force_defaults=False
303 303 )
304 304 return Response(html)
305 305
306 306 settings = [
307 307 ('title', 'rhodecode_title', 'unicode'),
308 308 ('realm', 'rhodecode_realm', 'unicode'),
309 309 ('pre_code', 'rhodecode_pre_code', 'unicode'),
310 310 ('post_code', 'rhodecode_post_code', 'unicode'),
311 311 ('captcha_public_key', 'rhodecode_captcha_public_key', 'unicode'),
312 312 ('captcha_private_key', 'rhodecode_captcha_private_key', 'unicode'),
313 313 ('create_personal_repo_group', 'rhodecode_create_personal_repo_group', 'bool'),
314 314 ('personal_repo_group_pattern', 'rhodecode_personal_repo_group_pattern', 'unicode'),
315 315 ]
316 316
317 317 try:
318 318 for setting, form_key, type_ in settings:
319 319 sett = SettingsModel().create_or_update_setting(
320 320 setting, form_result[form_key], type_)
321 321 Session().add(sett)
322 322
323 323 Session().commit()
324 324 SettingsModel().invalidate_settings_cache()
325 325 h.flash(_('Updated application settings'), category='success')
326 326 except Exception:
327 327 log.exception("Exception while updating application settings")
328 328 h.flash(
329 329 _('Error occurred during updating application settings'),
330 330 category='error')
331 331
332 332 raise HTTPFound(h.route_path('admin_settings_global'))
333 333
334 334 @LoginRequired()
335 335 @HasPermissionAllDecorator('hg.admin')
336 336 def settings_visual(self):
337 337 c = self.load_default_context()
338 338 c.active = 'visual'
339 339
340 340 data = render('rhodecode:templates/admin/settings/settings.mako',
341 341 self._get_template_context(c), self.request)
342 342 html = formencode.htmlfill.render(
343 343 data,
344 344 defaults=self._form_defaults(),
345 345 encoding="UTF-8",
346 346 force_defaults=False
347 347 )
348 348 return Response(html)
349 349
350 350 @LoginRequired()
351 351 @HasPermissionAllDecorator('hg.admin')
352 352 @CSRFRequired()
353 353 def settings_visual_update(self):
354 354 _ = self.request.translate
355 355 c = self.load_default_context()
356 356 c.active = 'visual'
357 357 application_form = ApplicationVisualisationForm(self.request.translate)()
358 358 try:
359 359 form_result = application_form.to_python(dict(self.request.POST))
360 360 except formencode.Invalid as errors:
361 361 h.flash(
362 362 _("Some form inputs contain invalid data."),
363 363 category='error')
364 364 data = render('rhodecode:templates/admin/settings/settings.mako',
365 365 self._get_template_context(c), self.request)
366 366 html = formencode.htmlfill.render(
367 367 data,
368 368 defaults=errors.value,
369 369 errors=errors.unpack_errors() or {},
370 370 prefix_error=False,
371 371 encoding="UTF-8",
372 372 force_defaults=False
373 373 )
374 374 return Response(html)
375 375
376 376 try:
377 377 settings = [
378 378 ('show_public_icon', 'rhodecode_show_public_icon', 'bool'),
379 379 ('show_private_icon', 'rhodecode_show_private_icon', 'bool'),
380 380 ('stylify_metatags', 'rhodecode_stylify_metatags', 'bool'),
381 381 ('repository_fields', 'rhodecode_repository_fields', 'bool'),
382 382 ('dashboard_items', 'rhodecode_dashboard_items', 'int'),
383 383 ('admin_grid_items', 'rhodecode_admin_grid_items', 'int'),
384 384 ('show_version', 'rhodecode_show_version', 'bool'),
385 385 ('use_gravatar', 'rhodecode_use_gravatar', 'bool'),
386 386 ('markup_renderer', 'rhodecode_markup_renderer', 'unicode'),
387 387 ('gravatar_url', 'rhodecode_gravatar_url', 'unicode'),
388 388 ('clone_uri_tmpl', 'rhodecode_clone_uri_tmpl', 'unicode'),
389 389 ('clone_uri_id_tmpl', 'rhodecode_clone_uri_id_tmpl', 'unicode'),
390 390 ('clone_uri_ssh_tmpl', 'rhodecode_clone_uri_ssh_tmpl', 'unicode'),
391 391 ('support_url', 'rhodecode_support_url', 'unicode'),
392 392 ('show_revision_number', 'rhodecode_show_revision_number', 'bool'),
393 393 ('show_sha_length', 'rhodecode_show_sha_length', 'int'),
394 394 ]
395 395 for setting, form_key, type_ in settings:
396 396 sett = SettingsModel().create_or_update_setting(
397 397 setting, form_result[form_key], type_)
398 398 Session().add(sett)
399 399
400 400 Session().commit()
401 401 SettingsModel().invalidate_settings_cache()
402 402 h.flash(_('Updated visualisation settings'), category='success')
403 403 except Exception:
404 404 log.exception("Exception updating visualization settings")
405 405 h.flash(_('Error occurred during updating '
406 406 'visualisation settings'),
407 407 category='error')
408 408
409 409 raise HTTPFound(h.route_path('admin_settings_visual'))
410 410
411 411 @LoginRequired()
412 412 @HasPermissionAllDecorator('hg.admin')
413 413 def settings_issuetracker(self):
414 414 c = self.load_default_context()
415 415 c.active = 'issuetracker'
416 416 defaults = c.rc_config
417 417
418 418 entry_key = 'rhodecode_issuetracker_pat_'
419 419
420 420 c.issuetracker_entries = {}
421 421 for k, v in defaults.items():
422 422 if k.startswith(entry_key):
423 423 uid = k[len(entry_key):]
424 424 c.issuetracker_entries[uid] = None
425 425
426 426 for uid in c.issuetracker_entries:
427 427 c.issuetracker_entries[uid] = AttributeDict({
428 428 'pat': defaults.get('rhodecode_issuetracker_pat_' + uid),
429 429 'url': defaults.get('rhodecode_issuetracker_url_' + uid),
430 430 'pref': defaults.get('rhodecode_issuetracker_pref_' + uid),
431 431 'desc': defaults.get('rhodecode_issuetracker_desc_' + uid),
432 432 })
433 433
434 434 return self._get_template_context(c)
435 435
436 436 @LoginRequired()
437 437 @HasPermissionAllDecorator('hg.admin')
438 438 @CSRFRequired()
439 439 def settings_issuetracker_test(self):
440 440 error_container = []
441 441
442 442 urlified_commit = h.urlify_commit_message(
443 443 self.request.POST.get('test_text', ''),
444 444 'repo_group/test_repo1', error_container=error_container)
445 445 if error_container:
446 446 def converter(inp):
447 447 return h.html_escape(inp)
448 448
449 449 return 'ERRORS: ' + '\n'.join(map(converter, error_container))
450 450
451 451 return urlified_commit
452 452
453 453 @LoginRequired()
454 454 @HasPermissionAllDecorator('hg.admin')
455 455 @CSRFRequired()
456 456 def settings_issuetracker_update(self):
457 457 _ = self.request.translate
458 458 self.load_default_context()
459 459 settings_model = IssueTrackerSettingsModel()
460 460
461 461 try:
462 462 form = IssueTrackerPatternsForm(self.request.translate)()
463 463 data = form.to_python(self.request.POST)
464 464 except formencode.Invalid as errors:
465 465 log.exception('Failed to add new pattern')
466 466 error = errors
467 467 h.flash(_(f'Invalid issue tracker pattern: {error}'),
468 468 category='error')
469 469 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
470 470
471 471 if data:
472 472 for uid in data.get('delete_patterns', []):
473 473 settings_model.delete_entries(uid)
474 474
475 475 for pattern in data.get('patterns', []):
476 476 for setting, value, type_ in pattern:
477 477 sett = settings_model.create_or_update_setting(
478 478 setting, value, type_)
479 479 Session().add(sett)
480 480
481 481 Session().commit()
482 482
483 483 SettingsModel().invalidate_settings_cache()
484 484 h.flash(_('Updated issue tracker entries'), category='success')
485 485 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
486 486
487 487 @LoginRequired()
488 488 @HasPermissionAllDecorator('hg.admin')
489 489 @CSRFRequired()
490 490 def settings_issuetracker_delete(self):
491 491 _ = self.request.translate
492 492 self.load_default_context()
493 493 uid = self.request.POST.get('uid')
494 494 try:
495 495 IssueTrackerSettingsModel().delete_entries(uid)
496 496 except Exception:
497 497 log.exception('Failed to delete issue tracker setting %s', uid)
498 498 raise HTTPNotFound()
499 499
500 500 SettingsModel().invalidate_settings_cache()
501 501 h.flash(_('Removed issue tracker entry.'), category='success')
502 502
503 503 return {'deleted': uid}
504 504
505 505 @LoginRequired()
506 506 @HasPermissionAllDecorator('hg.admin')
507 507 def settings_email(self):
508 508 c = self.load_default_context()
509 509 c.active = 'email'
510 510 c.rhodecode_ini = rhodecode.CONFIG
511 511
512 512 data = render('rhodecode:templates/admin/settings/settings.mako',
513 513 self._get_template_context(c), self.request)
514 514 html = formencode.htmlfill.render(
515 515 data,
516 516 defaults=self._form_defaults(),
517 517 encoding="UTF-8",
518 518 force_defaults=False
519 519 )
520 520 return Response(html)
521 521
522 522 @LoginRequired()
523 523 @HasPermissionAllDecorator('hg.admin')
524 524 @CSRFRequired()
525 525 def settings_email_update(self):
526 526 _ = self.request.translate
527 527 c = self.load_default_context()
528 528 c.active = 'email'
529 529
530 530 test_email = self.request.POST.get('test_email')
531 531
532 532 if not test_email:
533 533 h.flash(_('Please enter email address'), category='error')
534 534 raise HTTPFound(h.route_path('admin_settings_email'))
535 535
536 536 email_kwargs = {
537 537 'date': datetime.datetime.now(),
538 538 'user': self._rhodecode_db_user
539 539 }
540 540
541 541 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
542 542 EmailNotificationModel.TYPE_EMAIL_TEST, **email_kwargs)
543 543
544 544 recipients = [test_email] if test_email else None
545 545
546 546 run_task(tasks.send_email, recipients, subject,
547 547 email_body_plaintext, email_body)
548 548
549 549 h.flash(_('Send email task created'), category='success')
550 550 raise HTTPFound(h.route_path('admin_settings_email'))
551 551
552 552 @LoginRequired()
553 553 @HasPermissionAllDecorator('hg.admin')
554 554 def settings_hooks(self):
555 555 c = self.load_default_context()
556 556 c.active = 'hooks'
557 557
558 558 model = SettingsModel()
559 559 c.hooks = model.get_builtin_hooks()
560 560 c.custom_hooks = model.get_custom_hooks()
561 561
562 562 data = render('rhodecode:templates/admin/settings/settings.mako',
563 563 self._get_template_context(c), self.request)
564 564 html = formencode.htmlfill.render(
565 565 data,
566 566 defaults=self._form_defaults(),
567 567 encoding="UTF-8",
568 568 force_defaults=False
569 569 )
570 570 return Response(html)
571 571
572 572 @LoginRequired()
573 573 @HasPermissionAllDecorator('hg.admin')
574 574 @CSRFRequired()
575 575 def settings_hooks_update(self):
576 576 _ = self.request.translate
577 577 c = self.load_default_context()
578 578 c.active = 'hooks'
579 579 if c.visual.allow_custom_hooks_settings:
580 580 ui_key = self.request.POST.get('new_hook_ui_key')
581 581 ui_value = self.request.POST.get('new_hook_ui_value')
582 582
583 583 hook_id = self.request.POST.get('hook_id')
584 584 new_hook = False
585 585
586 586 model = SettingsModel()
587 587 try:
588 588 if ui_value and ui_key:
589 589 model.create_or_update_hook(ui_key, ui_value)
590 590 h.flash(_('Added new hook'), category='success')
591 591 new_hook = True
592 592 elif hook_id:
593 593 RhodeCodeUi.delete(hook_id)
594 594 Session().commit()
595 595
596 596 # check for edits
597 597 update = False
598 598 _d = self.request.POST.dict_of_lists()
599 599 for k, v in zip(_d.get('hook_ui_key', []),
600 600 _d.get('hook_ui_value_new', [])):
601 601 model.create_or_update_hook(k, v)
602 602 update = True
603 603
604 604 if update and not new_hook:
605 605 h.flash(_('Updated hooks'), category='success')
606 606 Session().commit()
607 607 except Exception:
608 608 log.exception("Exception during hook creation")
609 609 h.flash(_('Error occurred during hook creation'),
610 610 category='error')
611 611
612 612 raise HTTPFound(h.route_path('admin_settings_hooks'))
613 613
614 614 @LoginRequired()
615 615 @HasPermissionAllDecorator('hg.admin')
616 616 def settings_search(self):
617 617 c = self.load_default_context()
618 618 c.active = 'search'
619 619
620 620 c.searcher = searcher_from_config(self.request.registry.settings)
621 621 c.statistics = c.searcher.statistics(self.request.translate)
622 622
623 623 return self._get_template_context(c)
624 624
625 625 @LoginRequired()
626 626 @HasPermissionAllDecorator('hg.admin')
627 627 def settings_labs(self):
628 628 c = self.load_default_context()
629 629 if not c.labs_active:
630 630 raise HTTPFound(h.route_path('admin_settings'))
631 631
632 632 c.active = 'labs'
633 633 c.lab_settings = _LAB_SETTINGS
634 634
635 635 data = render('rhodecode:templates/admin/settings/settings.mako',
636 636 self._get_template_context(c), self.request)
637 637 html = formencode.htmlfill.render(
638 638 data,
639 639 defaults=self._form_defaults(),
640 640 encoding="UTF-8",
641 641 force_defaults=False
642 642 )
643 643 return Response(html)
644 644
645 645 @LoginRequired()
646 646 @HasPermissionAllDecorator('hg.admin')
647 647 @CSRFRequired()
648 648 def settings_labs_update(self):
649 649 _ = self.request.translate
650 650 c = self.load_default_context()
651 651 c.active = 'labs'
652 652
653 653 application_form = LabsSettingsForm(self.request.translate)()
654 654 try:
655 655 form_result = application_form.to_python(dict(self.request.POST))
656 656 except formencode.Invalid as errors:
657 657 h.flash(
658 658 _("Some form inputs contain invalid data."),
659 659 category='error')
660 660 data = render('rhodecode:templates/admin/settings/settings.mako',
661 661 self._get_template_context(c), self.request)
662 662 html = formencode.htmlfill.render(
663 663 data,
664 664 defaults=errors.value,
665 665 errors=errors.unpack_errors() or {},
666 666 prefix_error=False,
667 667 encoding="UTF-8",
668 668 force_defaults=False
669 669 )
670 670 return Response(html)
671 671
672 672 try:
673 673 session = Session()
674 674 for setting in _LAB_SETTINGS:
675 675 setting_name = setting.key[len('rhodecode_'):]
676 676 sett = SettingsModel().create_or_update_setting(
677 677 setting_name, form_result[setting.key], setting.type)
678 678 session.add(sett)
679 679
680 680 except Exception:
681 681 log.exception('Exception while updating lab settings')
682 682 h.flash(_('Error occurred during updating labs settings'),
683 683 category='error')
684 684 else:
685 685 Session().commit()
686 686 SettingsModel().invalidate_settings_cache()
687 687 h.flash(_('Updated Labs settings'), category='success')
688 688 raise HTTPFound(h.route_path('admin_settings_labs'))
689 689
690 690 data = render('rhodecode:templates/admin/settings/settings.mako',
691 691 self._get_template_context(c), self.request)
692 692 html = formencode.htmlfill.render(
693 693 data,
694 694 defaults=self._form_defaults(),
695 695 encoding="UTF-8",
696 696 force_defaults=False
697 697 )
698 698 return Response(html)
699 699
700 700
701 701 # :param key: name of the setting including the 'rhodecode_' prefix
702 702 # :param type: the RhodeCodeSetting type to use.
703 703 # :param group: the i18ned group in which we should display this setting
704 704 # :param label: the i18ned label we should display for this setting
705 705 # :param help: the i18ned help we should display for this setting
706 706 LabSetting = collections.namedtuple(
707 707 'LabSetting', ('key', 'type', 'group', 'label', 'help'))
708 708
709 709
710 710 # This list has to be kept in sync with the form
711 711 # rhodecode.model.forms.LabsSettingsForm.
712 712 _LAB_SETTINGS = [
713 713
714 714 ]
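_LAB_SETTINGS ships empty here, but the LabSetting namedtuple and the update loop in settings_labs_update define what an entry has to look like: the key carries the 'rhodecode_' prefix, the type is a RhodeCodeSetting type such as 'bool', and a matching field must exist in rhodecode.model.forms.LabsSettingsForm. A hypothetical example entry (the setting name and texts are invented; real entries would wrap group/label/help in the translation helpers):

# Hypothetical lab setting entry -- the key and texts are invented and would
# also need a matching field in rhodecode.model.forms.LabsSettingsForm.
_LAB_SETTINGS = [
    LabSetting(
        key='rhodecode_hooks_bulk_rebuild',  # 'rhodecode_' prefix is required
        type='bool',                         # RhodeCodeSetting type
        group='experimental',
        label='Rebuild repo hooks on rescan',
        help='Force hook rebuild during repository rescans.',
    ),
]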
@@ -1,807 +1,808 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Utilities library for RhodeCode
21 21 """
22 22
23 23 import datetime
24 24 import decorator
25 25 import logging
26 26 import os
27 27 import re
28 28 import sys
29 29 import shutil
30 30 import socket
31 31 import tempfile
32 32 import traceback
33 33 import tarfile
34 34 import warnings
35 35 from os.path import join as jn
36 36
37 37 import paste
38 38 import pkg_resources
39 39 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
40 40
41 41 from mako import exceptions
42 42
43 43 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
44 44 from rhodecode.lib.str_utils import safe_bytes, safe_str
45 45 from rhodecode.lib.vcs.backends.base import Config
46 46 from rhodecode.lib.vcs.exceptions import VCSError
47 47 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
48 48 from rhodecode.lib.ext_json import sjson as json
49 49 from rhodecode.model import meta
50 50 from rhodecode.model.db import (
51 51 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
52 52 from rhodecode.model.meta import Session
53 53
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
58 58
59 59 # String which contains characters that are not allowed in slug names for
60 60 # repositories or repository groups. It is properly escaped to use it in
61 61 # regular expressions.
62 62 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
63 63
64 64 # Regex that matches forbidden characters in repo/group slugs.
65 65 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
66 66
67 67 # Regex that matches allowed characters in repo/group slugs.
68 68 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
69 69
70 70 # Regex that matches whole repo/group slugs.
71 71 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
72 72
73 73 _license_cache = None
74 74
75 75
76 76 def repo_name_slug(value):
77 77 """
78 78 Return a slug of the repository name.
79 79 This function is called on each repository creation/modification
80 80 to prevent bad names.
81 81 """
82 82
83 83 replacement_char = '-'
84 84
85 85 slug = strip_tags(value)
86 86 slug = convert_accented_entities(slug)
87 87 slug = convert_misc_entities(slug)
88 88
89 89 slug = SLUG_BAD_CHAR_RE.sub('', slug)
90 90 slug = re.sub(r'[\s]+', '-', slug)
91 91 slug = collapse(slug, replacement_char)
92 92
93 93 return slug
94 94
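A quick usage sketch of repo_name_slug (requires a RhodeCode environment on the import path); the input string is only an illustration and the exact output depends on the SLUG_BAD_CHARS set defined above:

# Usage sketch; the result shown is illustrative and depends on SLUG_BAD_CHARS.
from rhodecode.lib.utils import repo_name_slug

print(repo_name_slug('My  Repo! (2024)'))  # something like 'My-Repo-2024'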
95 95
96 96 #==============================================================================
97 97 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
98 98 #==============================================================================
99 99 def get_repo_slug(request):
100 100 _repo = ''
101 101
102 102 if hasattr(request, 'db_repo_name'):
103 103 # if our request has a db reference set, use it for the name; this
104 104 # translates example.com/_<id> into proper repo names
105 105 _repo = request.db_repo_name
106 106 elif getattr(request, 'matchdict', None):
107 107 # pyramid
108 108 _repo = request.matchdict.get('repo_name')
109 109
110 110 if _repo:
111 111 _repo = _repo.rstrip('/')
112 112 return _repo
113 113
114 114
115 115 def get_repo_group_slug(request):
116 116 _group = ''
117 117 if hasattr(request, 'db_repo_group'):
118 118 # if our request has a db reference set, use it for the name; this
119 119 # translates example.com/_<id> into proper repo group names
120 120 _group = request.db_repo_group.group_name
121 121 elif getattr(request, 'matchdict', None):
122 122 # pyramid
123 123 _group = request.matchdict.get('repo_group_name')
124 124
125 125 if _group:
126 126 _group = _group.rstrip('/')
127 127 return _group
128 128
129 129
130 130 def get_user_group_slug(request):
131 131 _user_group = ''
132 132
133 133 if hasattr(request, 'db_user_group'):
134 134 _user_group = request.db_user_group.users_group_name
135 135 elif getattr(request, 'matchdict', None):
136 136 # pyramid
137 137 _user_group = request.matchdict.get('user_group_id')
138 138 _user_group_name = request.matchdict.get('user_group_name')
139 139 try:
140 140 if _user_group:
141 141 _user_group = UserGroup.get(_user_group)
142 142 elif _user_group_name:
143 143 _user_group = UserGroup.get_by_group_name(_user_group_name)
144 144
145 145 if _user_group:
146 146 _user_group = _user_group.users_group_name
147 147 except Exception:
148 148 log.exception('Failed to get user group by id and name')
149 149 # catch all failures here
150 150 return None
151 151
152 152 return _user_group
153 153
154 154
155 155 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
156 156 """
157 157 Scans the given path for repos and returns (name, (type, path)) tuples
158 158
159 159 :param path: path to scan for repositories
160 160 :param recursive: recursive search and return names with subdirs in front
161 161 """
162 162
163 163 # remove ending slash for better results
164 164 path = path.rstrip(os.sep)
165 165 log.debug('now scanning in %s location recursive:%s...', path, recursive)
166 166
167 167 def _get_repos(p):
168 168 dirpaths = get_dirpaths(p)
169 169 if not _is_dir_writable(p):
170 170 log.warning('repo path without write access: %s', p)
171 171
172 172 for dirpath in dirpaths:
173 173 if os.path.isfile(os.path.join(p, dirpath)):
174 174 continue
175 175 cur_path = os.path.join(p, dirpath)
176 176
177 177 # skip removed repos
178 178 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
179 179 continue
180 180
181 181 # skip .<something> dirs
182 182 if dirpath.startswith('.'):
183 183 continue
184 184
185 185 try:
186 186 scm_info = get_scm(cur_path)
187 187 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
188 188 except VCSError:
189 189 if not recursive:
190 190 continue
191 191 # check if this dir contains other repos for recursive scan
192 192 rec_path = os.path.join(p, dirpath)
193 193 if os.path.isdir(rec_path):
194 194 yield from _get_repos(rec_path)
195 195
196 196 return _get_repos(path)
197 197
198 198
199 199 def get_dirpaths(p: str) -> list:
200 200 try:
201 201 # OS-independent way of checking if we have at least read-only
202 202 # access or not.
203 203 dirpaths = os.listdir(p)
204 204 except OSError:
205 205 log.warning('ignoring repo path without read access: %s', p)
206 206 return []
207 207
208 208 # os.listdir has a tweak: If a unicode is passed into it, then it tries to
209 209 # decode paths and suddenly returns unicode objects itself. The items it
210 210 # cannot decode are returned as strings and cause issues.
211 211 #
212 212 # Those paths are ignored here until a solid solution for path handling has
213 213 # been built.
214 214 expected_type = type(p)
215 215
216 216 def _has_correct_type(item):
217 217 if type(item) is not expected_type:
218 218 log.error(
219 219 "Ignoring path %s since it cannot be decoded into str.",
220 220 # Using "repr" to make sure that we see the byte value in case
221 221 # of support.
222 222 repr(item))
223 223 return False
224 224 return True
225 225
226 226 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
227 227
228 228 return dirpaths
229 229
230 230
231 231 def _is_dir_writable(path):
232 232 """
233 233 Probe if `path` is writable.
234 234
235 235 Due to trouble on Cygwin / Windows, this is actually probing if it is
236 236 possible to create a file inside of `path`, stat does not produce reliable
237 237 results in this case.
238 238 """
239 239 try:
240 240 with tempfile.TemporaryFile(dir=path):
241 241 pass
242 242 except OSError:
243 243 return False
244 244 return True
245 245
246 246
247 247 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
248 248 """
249 249 Returns True if given path is a valid repository False otherwise.
250 250 If the expect_scm param is given, also check whether the detected scm
251 251 matches the expected one. If explicit_scm is given, don't try to
252 252 detect the scm; just use the given one to check if the repo is valid.
253 253
254 254 :param repo_name:
255 255 :param base_path:
256 256 :param expect_scm:
257 257 :param explicit_scm:
258 258 :param config:
259 259
260 260 :return True: if given path is a valid repository
261 261 """
262 262 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
263 263 log.debug('Checking if `%s` is a valid path for repository. '
264 264 'Explicit type: %s', repo_name, explicit_scm)
265 265
266 266 try:
267 267 if explicit_scm:
268 268 detected_scms = [get_scm_backend(explicit_scm)(
269 269 full_path, config=config).alias]
270 270 else:
271 271 detected_scms = get_scm(full_path)
272 272
273 273 if expect_scm:
274 274 return detected_scms[0] == expect_scm
275 275 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
276 276 return True
277 277 except VCSError:
278 278 log.debug('path: %s is not a valid repo !', full_path)
279 279 return False
280 280
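A minimal usage sketch of the check above; the repository name and base path are hypothetical illustration values, not taken from this codebase:

    # hypothetical values, for illustration only
    store = '/var/opt/rhodecode_repo_store'
    if is_valid_repo('my-repo', store, expect_scm='git'):
        log.debug('my-repo is recognised as a git repository under the store')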
281 281
282 282 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
283 283 """
284 284 Returns True if a given path is a repository group, False otherwise
285 285
286 286 :param repo_group_name:
287 287 :param base_path:
288 288 """
289 289 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
290 290 log.debug('Checking if `%s` is a valid path for repository group',
291 291 repo_group_name)
292 292
293 293 # check if it's not a repo
294 294 if is_valid_repo(repo_group_name, base_path):
295 295 log.debug('Repo called %s exists, it is not a valid repo group', repo_group_name)
296 296 return False
297 297
298 298 try:
299 299 # we need to check bare git repos at higher level
300 300 # since we might match branches/hooks/info/objects or possibly
301 301 # other things inside bare git repo
302 302 maybe_repo = os.path.dirname(full_path)
303 303 if maybe_repo == base_path:
304 304 # skip root level repo check; we know root location CANNOT BE a repo group
305 305 return False
306 306
307 307 scm_ = get_scm(maybe_repo)
308 308 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
309 309 return False
310 310 except VCSError:
311 311 pass
312 312
313 313 # check if it's a valid path
314 314 if skip_path_check or os.path.isdir(full_path):
315 315 log.debug('path: %s is a valid repo group !', full_path)
316 316 return True
317 317
318 318 log.debug('path: %s is not a valid repo group !', full_path)
319 319 return False
320 320
321 321
322 322 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
323 323 while True:
324 324 ok = input(prompt)
325 325 if ok.lower() in ('y', 'ye', 'yes'):
326 326 return True
327 327 if ok.lower() in ('n', 'no', 'nop', 'nope'):
328 328 return False
329 329 retries = retries - 1
330 330 if retries < 0:
331 331 raise OSError
332 332 print(complaint)
333 333
334 334 # propagated from mercurial documentation
335 335 ui_sections = [
336 336 'alias', 'auth',
337 337 'decode/encode', 'defaults',
338 338 'diff', 'email',
339 339 'extensions', 'format',
340 340 'merge-patterns', 'merge-tools',
341 341 'hooks', 'http_proxy',
342 342 'smtp', 'patch',
343 343 'paths', 'profiling',
344 344 'server', 'trusted',
345 345 'ui', 'web', ]
346 346
347 347
348 348 def config_data_from_db(clear_session=True, repo=None):
349 349 """
350 350 Read the configuration data from the database and return configuration
351 351 tuples.
352 352 """
353 353 from rhodecode.model.settings import VcsSettingsModel
354 354
355 355 config = []
356 356
357 357 sa = meta.Session()
358 358 settings_model = VcsSettingsModel(repo=repo, sa=sa)
359 359
360 360 ui_settings = settings_model.get_ui_settings()
361 361
362 362 ui_data = []
363 363 for setting in ui_settings:
364 364 if setting.active:
365 365 ui_data.append((setting.section, setting.key, setting.value))
366 366 config.append((
367 367 safe_str(setting.section), safe_str(setting.key),
368 368 safe_str(setting.value)))
369 369 if setting.key == 'push_ssl':
370 370 # force set push_ssl requirement to False, rhodecode
371 371 # handles that
372 372 config.append((
373 373 safe_str(setting.section), safe_str(setting.key), False))
374 374 log.debug(
375 375 'settings ui from db@repo[%s]: %s',
376 376 repo,
377 377 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
378 378 if clear_session:
379 379 meta.Session.remove()
380 380
381 381 # TODO: mikhail: probably it makes no sense to re-read hooks information.
382 382 # It's already there and activated/deactivated
383 383 skip_entries = []
384 384 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
385 385 if 'pull' not in enabled_hook_classes:
386 386 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
387 387 if 'push' not in enabled_hook_classes:
388 388 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
389 389 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
390 390 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
391 391
392 392 config = [entry for entry in config if entry[:2] not in skip_entries]
393 393
394 394 return config
395 395
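A hedged sketch of consuming the (section, option, value) tuples returned above; the loop is illustrative only, and clear_session=False is used so the sketch does not tear down the active session:

    # hypothetical consumer of the configuration tuples
    for section, option, value in config_data_from_db(clear_session=False):
        log.debug('vcs setting [%s] %s=%s', section, option, value)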
396 396
397 397 def make_db_config(clear_session=True, repo=None):
398 398 """
399 399 Create a :class:`Config` instance based on the values in the database.
400 400 """
401 401 config = Config()
402 402 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
403 403 for section, option, value in config_data:
404 404 config.set(section, option, value)
405 405 return config
406 406
407 407
408 408 def get_enabled_hook_classes(ui_settings):
409 409 """
410 410 Return the enabled hook classes.
411 411
412 412 :param ui_settings: List of ui_settings as returned
413 413 by :meth:`VcsSettingsModel.get_ui_settings`
414 414
415 415 :return: a list with the enabled hook classes. The order is not guaranteed.
416 416 :rtype: list
417 417 """
418 418 enabled_hooks = []
419 419 active_hook_keys = [
420 420 key for section, key, value, active in ui_settings
421 421 if section == 'hooks' and active]
422 422
423 423 hook_names = {
424 424 RhodeCodeUi.HOOK_PUSH: 'push',
425 425 RhodeCodeUi.HOOK_PULL: 'pull',
426 426 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
427 427 }
428 428
429 429 for key in active_hook_keys:
430 430 hook = hook_names.get(key)
431 431 if hook:
432 432 enabled_hooks.append(hook)
433 433
434 434 return enabled_hooks
435 435
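A short sketch of the mapping above, using hypothetical (section, key, value, active) tuples rather than real database rows; only active 'hooks' entries contribute:

    # hypothetical ui_settings entries
    sample_ui_settings = [
        ('hooks', RhodeCodeUi.HOOK_PUSH, 'python:...', True),
        ('hooks', RhodeCodeUi.HOOK_PULL, 'python:...', False),
        ('web', 'push_ssl', 'false', True),
    ]
    assert get_enabled_hook_classes(sample_ui_settings) == ['push']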
436 436
437 437 def set_rhodecode_config(config):
438 438 """
439 439 Updates pyramid config with new settings from database
440 440
441 441 :param config:
442 442 """
443 443 from rhodecode.model.settings import SettingsModel
444 444 app_settings = SettingsModel().get_all_settings()
445 445
446 446 for k, v in list(app_settings.items()):
447 447 config[k] = v
448 448
449 449
450 450 def get_rhodecode_realm():
451 451 """
452 452 Return the rhodecode realm from database.
453 453 """
454 454 from rhodecode.model.settings import SettingsModel
455 455 realm = SettingsModel().get_setting_by_name('realm')
456 456 return safe_str(realm.app_settings_value)
457 457
458 458
459 459 def get_rhodecode_base_path():
460 460 """
461 461 Returns the base path. The base path is the filesystem path which points
462 462 to the repository store.
463 463 """
464 464
465 465 import rhodecode
466 466 return rhodecode.CONFIG['default_base_path']
467 467
468 468
469 469 def map_groups(path):
470 470 """
471 471 Given a full path to a repository, create all nested groups that this
472 472 repo is inside. This function creates parent-child relationships between
473 473 groups and creates default perms for all new groups.
474 474
475 475 :param path: full path to the repository
476 476 """
477 477 from rhodecode.model.repo_group import RepoGroupModel
478 478 sa = meta.Session()
479 479 groups = path.split(Repository.NAME_SEP)
480 480 parent = None
481 481 group = None
482 482
483 483 # last element is repo in nested groups structure
484 484 groups = groups[:-1]
485 485 rgm = RepoGroupModel(sa)
486 486 owner = User.get_first_super_admin()
487 487 for lvl, group_name in enumerate(groups):
488 488 group_name = '/'.join(groups[:lvl] + [group_name])
489 489 group = RepoGroup.get_by_group_name(group_name)
490 490 desc = '%s group' % group_name
491 491
492 492 # skip folders that are now removed repos
493 493 if REMOVED_REPO_PAT.match(group_name):
494 494 break
495 495
496 496 if group is None:
497 497 log.debug('creating group level: %s group_name: %s',
498 498 lvl, group_name)
499 499 group = RepoGroup(group_name, parent)
500 500 group.group_description = desc
501 501 group.user = owner
502 502 sa.add(group)
503 503 perm_obj = rgm._create_default_perms(group)
504 504 sa.add(perm_obj)
505 505 sa.flush()
506 506
507 507 parent = group
508 508 return group
509 509
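For illustration, a hedged sketch of what the function above does for a nested path; the repository path is hypothetical:

    # hypothetical nested repo path; creates the groups 'web' and 'web/projects'
    # (if missing) and returns the most nested group, 'web/projects'
    group = map_groups('web/projects/my-repo')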
510 510
511 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
511 def repo2db_mapper(initial_repo_list, remove_obsolete=False, force_hooks_rebuild=False):
512 512 """
513 513 Maps all repos given in initial_repo_list; non-existing repositories
514 514 are created. If remove_obsolete is True it also checks for db entries
515 515 that are not in initial_repo_list and removes them.
516 516
517 517 :param initial_repo_list: list of repositories found by scanning methods
518 518 :param remove_obsolete: check for obsolete entries in database
519 519 """
520 520 from rhodecode.model.repo import RepoModel
521 521 from rhodecode.model.repo_group import RepoGroupModel
522 522 from rhodecode.model.settings import SettingsModel
523 523
524 524 sa = meta.Session()
525 525 repo_model = RepoModel()
526 526 user = User.get_first_super_admin()
527 527 added = []
528 528
529 529 # creation defaults
530 530 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
531 531 enable_statistics = defs.get('repo_enable_statistics')
532 532 enable_locking = defs.get('repo_enable_locking')
533 533 enable_downloads = defs.get('repo_enable_downloads')
534 534 private = defs.get('repo_private')
535 535
536 536 for name, repo in list(initial_repo_list.items()):
537 537 group = map_groups(name)
538 538 str_name = safe_str(name)
539 539 db_repo = repo_model.get_by_repo_name(str_name)
540
540 541 # found a repo that is on the filesystem but not in the RhodeCode database
541 542 if not db_repo:
542 log.info('repository %s not found, creating now', name)
543 log.info('repository `%s` not found in the database, creating now', name)
543 544 added.append(name)
544 545 desc = (repo.description
545 546 if repo.description != 'unknown'
546 547 else '%s repository' % name)
547 548
548 549 db_repo = repo_model._create_repo(
549 550 repo_name=name,
550 551 repo_type=repo.alias,
551 552 description=desc,
552 553 repo_group=getattr(group, 'group_id', None),
553 554 owner=user,
554 555 enable_locking=enable_locking,
555 556 enable_downloads=enable_downloads,
556 557 enable_statistics=enable_statistics,
557 558 private=private,
558 559 state=Repository.STATE_CREATED
559 560 )
560 561 sa.commit()
561 562 # we added that repo just now; make sure we update the server info
562 563 if db_repo.repo_type == 'git':
563 564 git_repo = db_repo.scm_instance()
564 565 # update repository server-info
565 566 log.debug('Running update server info')
566 567 git_repo._update_server_info()
567 568
568 569 db_repo.update_commit_cache()
569 570
570 571 config = db_repo._config
571 572 config.set('extensions', 'largefiles', '')
572 573 repo = db_repo.scm_instance(config=config)
573 repo.install_hooks()
574 repo.install_hooks(force=force_hooks_rebuild)
574 575
575 576 removed = []
576 577 if remove_obsolete:
577 578 # remove from database those repositories that are not in the filesystem
578 579 for repo in sa.query(Repository).all():
579 580 if repo.repo_name not in list(initial_repo_list.keys()):
580 581 log.debug("Removing non-existing repository found in db `%s`",
581 582 repo.repo_name)
582 583 try:
583 584 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
584 585 sa.commit()
585 586 removed.append(repo.repo_name)
586 587 except Exception:
587 588 # don't hold further removals on error
588 589 log.error(traceback.format_exc())
589 590 sa.rollback()
590 591
591 592 def splitter(full_repo_name):
592 593 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
593 594 gr_name = None
594 595 if len(_parts) == 2:
595 596 gr_name = _parts[0]
596 597 return gr_name
597 598
598 599 initial_repo_group_list = [splitter(x) for x in
599 600 list(initial_repo_list.keys()) if splitter(x)]
600 601
601 602 # remove from the database those repository groups that are not on the
602 603 # filesystem; due to parent-child relationships we need to delete them
603 604 # in a specific order, most nested first
604 605 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
605 606 def nested_sort(gr):
606 607 return len(gr.split('/'))
607 608 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
608 609 if group_name not in initial_repo_group_list:
609 610 repo_group = RepoGroup.get_by_group_name(group_name)
610 611 if (repo_group.children.all() or
611 612 not RepoGroupModel().check_exist_filesystem(
612 613 group_name=group_name, exc_on_failure=False)):
613 614 continue
614 615
615 616 log.info(
616 617 'Removing non-existing repository group found in db `%s`',
617 618 group_name)
618 619 try:
619 620 RepoGroupModel(sa).delete(group_name, fs_remove=False)
620 621 sa.commit()
621 622 removed.append(group_name)
622 623 except Exception:
623 624 # don't hold further removals on error
624 625 log.exception(
625 626 'Unable to remove repository group `%s`',
626 627 group_name)
627 628 sa.rollback()
628 629 raise
629 630
630 631 return added, removed
631 632
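A hedged sketch of how the new force_hooks_rebuild flag could be used to refresh hooks for a repository that is already registered; 'my-repo' is an illustration name and this is not a prescribed upgrade procedure:

    from rhodecode.model.repo import RepoModel

    # hypothetical single-repo example: re-map it and force hook re-installation
    db_repo = RepoModel().get_by_repo_name('my-repo')
    repo2db_mapper({db_repo.repo_name: db_repo.scm_instance()}, force_hooks_rebuild=True)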
632 633
633 634 def load_rcextensions(root_path):
634 635 import rhodecode
635 636 from rhodecode.config import conf
636 637
637 638 path = os.path.join(root_path)
638 639 sys.path.append(path)
639 640
640 641 try:
641 642 rcextensions = __import__('rcextensions')
642 643 except ImportError:
643 644 if os.path.isdir(os.path.join(path, 'rcextensions')):
644 645 log.warning('Unable to load rcextensions from %s', path)
645 646 rcextensions = None
646 647
647 648 if rcextensions:
648 649 log.info('Loaded rcextensions from %s...', rcextensions)
649 650 rhodecode.EXTENSIONS = rcextensions
650 651
651 652 # Additional mappings that are not present in the pygments lexers
652 653 conf.LANGUAGES_EXTENSIONS_MAP.update(
653 654 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
654 655
655 656
656 657 def get_custom_lexer(extension):
657 658 """
658 659 returns a custom lexer if it is defined in rcextensions module, or None
659 660 if there's no custom lexer defined
660 661 """
661 662 import rhodecode
662 663 from pygments import lexers
663 664
664 665 # custom override made by RhodeCode
665 666 if extension in ['mako']:
666 667 return lexers.get_lexer_by_name('html+mako')
667 668
668 669 # check if this extension was defined as another lexer
669 670 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
670 671 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
671 672 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
672 673 return lexers.get_lexer_by_name(_lexer_name)
673 674
674 675
675 676 #==============================================================================
676 677 # TEST FUNCTIONS AND CREATORS
677 678 #==============================================================================
678 679 def create_test_index(repo_location, config):
679 680 """
680 681 Makes default test index.
681 682 """
682 683 try:
683 684 import rc_testdata
684 685 except ImportError:
685 686 raise ImportError('Failed to import rc_testdata, '
686 687 'please make sure this package is installed from requirements_test.txt')
687 688 rc_testdata.extract_search_index(
688 689 'vcs_search_index', os.path.dirname(config['search.location']))
689 690
690 691
691 692 def create_test_directory(test_path):
692 693 """
693 694 Create test directory if it doesn't exist.
694 695 """
695 696 if not os.path.isdir(test_path):
696 697 log.debug('Creating testdir %s', test_path)
697 698 os.makedirs(test_path)
698 699
699 700
700 701 def create_test_database(test_path, config):
701 702 """
702 703 Makes a fresh database.
703 704 """
704 705 from rhodecode.lib.db_manage import DbManage
705 706 from rhodecode.lib.utils2 import get_encryption_key
706 707
707 708 # PART ONE create db
708 709 dbconf = config['sqlalchemy.db1.url']
709 710 enc_key = get_encryption_key(config)
710 711
711 712 log.debug('making test db %s', dbconf)
712 713
713 714 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
714 715 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
715 716 dbmanage.create_tables(override=True)
716 717 dbmanage.set_db_version()
717 718 # for tests dynamically set new root paths based on generated content
718 719 dbmanage.create_settings(dbmanage.config_prompt(test_path))
719 720 dbmanage.create_default_user()
720 721 dbmanage.create_test_admin_and_users()
721 722 dbmanage.create_permissions()
722 723 dbmanage.populate_default_permissions()
723 724 Session().commit()
724 725
725 726
726 727 def create_test_repositories(test_path, config):
727 728 """
728 729 Creates test repositories in the temporary directory. Repositories are
729 730 extracted from archives within the rc_testdata package.
730 731 """
731 732 import rc_testdata
732 733 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
733 734
734 735 log.debug('making test vcs repositories')
735 736
736 737 idx_path = config['search.location']
737 738 data_path = config['cache_dir']
738 739
739 740 # clean index and data
740 741 if idx_path and os.path.exists(idx_path):
741 742 log.debug('remove %s', idx_path)
742 743 shutil.rmtree(idx_path)
743 744
744 745 if data_path and os.path.exists(data_path):
745 746 log.debug('remove %s', data_path)
746 747 shutil.rmtree(data_path)
747 748
748 749 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
749 750 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
750 751
751 752 # Note: Subversion is in the process of being integrated with the system;
752 753 # until we have a properly packaged version of the test svn repository, this
753 754 # tries to copy over the repo from the "rc_testdata" package
754 755 svn_repo_path = rc_testdata.get_svn_repo_archive()
755 756 with tarfile.open(svn_repo_path) as tar:
756 757 tar.extractall(jn(test_path, SVN_REPO))
757 758
758 759
759 760 def password_changed(auth_user, session):
760 761 # Never report password change in case of default user or anonymous user.
761 762 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
762 763 return False
763 764
764 765 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
765 766 rhodecode_user = session.get('rhodecode_user', {})
766 767 session_password_hash = rhodecode_user.get('password', '')
767 768 return password_hash != session_password_hash
768 769
769 770
770 771 def read_opensource_licenses():
771 772 global _license_cache
772 773
773 774 if not _license_cache:
774 775 licenses = pkg_resources.resource_string(
775 776 'rhodecode', 'config/licenses.json')
776 777 _license_cache = json.loads(licenses)
777 778
778 779 return _license_cache
779 780
780 781
781 782 def generate_platform_uuid():
782 783 """
783 784 Generates a platform UUID based on the platform's name
784 785 """
785 786 import platform
786 787
787 788 try:
788 789 uuid_list = [platform.platform()]
789 790 return sha256_safe(':'.join(uuid_list))
790 791 except Exception as e:
791 792 log.error('Failed to generate host uuid: %s', e)
792 793 return 'UNDEFINED'
793 794
794 795
795 796 def send_test_email(recipients, email_body='TEST EMAIL'):
796 797 """
797 798 Simple code for generating test emails.
798 799 Usage::
799 800
800 801 from rhodecode.lib import utils
801 802 utils.send_test_email(['test@example.com'])
802 803 """
803 804 from rhodecode.lib.celerylib import tasks, run_task
804 805
805 806 email_body = email_body_plaintext = email_body
806 807 subject = f'SUBJECT FROM: {socket.gethostname()}'
807 808 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
@@ -1,28 +1,33 b''
1 1 ${h.secure_form(h.route_path('admin_settings_mapping_update'), request=request)}
2 2
3 3 <div class="panel panel-default">
4 4 <div class="panel-heading">
5 5 <h3 class="panel-title">${_('Import New Groups or Repositories')}</h3>
6 6 </div>
7 7 <div class="panel-body">
8
9 <p>
10 ${_('This function will scan all data under the current storage path location at')} <code>${c.storage_path}</code>
11 </p>
12
8 13 <div class="checkbox">
9 14 ${h.checkbox('destroy',True)}
10 15 <label for="destroy">${_('Destroy old data')}</label>
11 16 </div>
12 17 <span class="help-block">${_('In case a repository or a group was deleted from the filesystem and it still exists in the database, check this option to remove obsolete data from the database.')}</span>
13 18
14 19 <div class="checkbox">
15 20 ${h.checkbox('invalidate',True)}
16 21 <label for="invalidate"> ${_('Invalidate cache for all repositories')}</label>
17 22 </div>
18 23 <span class="help-block">${_('Cached data for each repository will be cleared when this option is selected. Use this to reload data and clear cache keys.')}</span>
19 24
20 25 <div class="buttons">
21 26 ${h.submit('rescan',_('Rescan Filesystem'),class_="btn")}
22 27 </div>
23 28
24 29 </div>
25 30 </div>
26 31
27 32
28 33 ${h.end_form()}
@@ -1,1750 +1,1750 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import collections
21 21 import datetime
22 22 import os
23 23 import re
24 24 import pprint
25 25 import shutil
26 26 import socket
27 27 import subprocess
28 28 import time
29 29 import uuid
30 30 import dateutil.tz
31 31 import logging
32 32 import functools
33 33
34 34 import mock
35 35 import pyramid.testing
36 36 import pytest
37 37 import colander
38 38 import requests
39 39 import pyramid.paster
40 40
41 41 import rhodecode
42 42 import rhodecode.lib
43 43 from rhodecode.model.changeset_status import ChangesetStatusModel
44 44 from rhodecode.model.comment import CommentsModel
45 45 from rhodecode.model.db import (
46 46 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
47 47 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 48 from rhodecode.model.meta import Session
49 49 from rhodecode.model.pull_request import PullRequestModel
50 50 from rhodecode.model.repo import RepoModel
51 51 from rhodecode.model.repo_group import RepoGroupModel
52 52 from rhodecode.model.user import UserModel
53 53 from rhodecode.model.settings import VcsSettingsModel
54 54 from rhodecode.model.user_group import UserGroupModel
55 55 from rhodecode.model.integration import IntegrationModel
56 56 from rhodecode.integrations import integration_type_registry
57 57 from rhodecode.integrations.types.base import IntegrationTypeBase
58 58 from rhodecode.lib.utils import repo2db_mapper
59 59 from rhodecode.lib.str_utils import safe_bytes
60 60 from rhodecode.lib.hash_utils import sha1_safe
61 61 from rhodecode.lib.vcs.backends import get_backend
62 62 from rhodecode.lib.vcs.nodes import FileNode
63 63 from rhodecode.tests import (
64 64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 66 TEST_USER_REGULAR_PASS)
67 67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 68 from rhodecode.tests.fixture import Fixture
69 69 from rhodecode.config import utils as config_utils
70 70
71 71 log = logging.getLogger(__name__)
72 72
73 73
74 74 def cmp(a, b):
75 75 # backport cmp from python2 so we can still use it in the custom code in this module
76 76 return (a > b) - (a < b)
77 77
78 78
79 79 @pytest.fixture(scope='session', autouse=True)
80 80 def activate_example_rcextensions(request):
81 81 """
82 82 Patch in an example rcextensions module which verifies passed in kwargs.
83 83 """
84 84 from rhodecode.config import rcextensions
85 85
86 86 old_extensions = rhodecode.EXTENSIONS
87 87 rhodecode.EXTENSIONS = rcextensions
88 88 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
89 89
90 90 @request.addfinalizer
91 91 def cleanup():
92 92 rhodecode.EXTENSIONS = old_extensions
93 93
94 94
95 95 @pytest.fixture()
96 96 def capture_rcextensions():
97 97 """
98 98 Returns the recorded calls to entry points in rcextensions.
99 99 """
100 100 calls = rhodecode.EXTENSIONS.calls
101 101 calls.clear()
102 102 # Note: At this moment, it is still the empty dict, but that will
103 103 # be filled during the test run and since it is a reference this
104 104 # is enough to make it work.
105 105 return calls
106 106
107 107
108 108 @pytest.fixture(scope='session')
109 109 def http_environ_session():
110 110 """
111 111 Allow to use "http_environ" in session scope.
112 112 """
113 113 return plain_http_environ()
114 114
115 115
116 116 def plain_http_host_stub():
117 117 """
118 118 Value of HTTP_HOST in the test run.
119 119 """
120 120 return 'example.com:80'
121 121
122 122
123 123 @pytest.fixture()
124 124 def http_host_stub():
125 125 """
126 126 Value of HTTP_HOST in the test run.
127 127 """
128 128 return plain_http_host_stub()
129 129
130 130
131 131 def plain_http_host_only_stub():
132 132 """
133 133 Value of HTTP_HOST in the test run.
134 134 """
135 135 return plain_http_host_stub().split(':')[0]
136 136
137 137
138 138 @pytest.fixture()
139 139 def http_host_only_stub():
140 140 """
141 141 Value of HTTP_HOST in the test run.
142 142 """
143 143 return plain_http_host_only_stub()
144 144
145 145
146 146 def plain_http_environ():
147 147 """
148 148 HTTP extra environ keys.
149 149
150 150 Used by the test application as well as for setting up the pylons
151 151 environment. In the case of the fixture "app" it should be possible
152 152 to override this for a specific test case.
153 153 """
154 154 return {
155 155 'SERVER_NAME': plain_http_host_only_stub(),
156 156 'SERVER_PORT': plain_http_host_stub().split(':')[1],
157 157 'HTTP_HOST': plain_http_host_stub(),
158 158 'HTTP_USER_AGENT': 'rc-test-agent',
159 159 'REQUEST_METHOD': 'GET'
160 160 }
161 161
162 162
163 163 @pytest.fixture()
164 164 def http_environ():
165 165 """
166 166 HTTP extra environ keys.
167 167
168 168 Used by the test application as well as for setting up the pylons
169 169 environment. In the case of the fixture "app" it should be possible
170 170 to override this for a specific test case.
171 171 """
172 172 return plain_http_environ()
173 173
174 174
175 175 @pytest.fixture(scope='session')
176 176 def baseapp(ini_config, vcsserver, http_environ_session):
177 177 from rhodecode.lib.pyramid_utils import get_app_config
178 178 from rhodecode.config.middleware import make_pyramid_app
179 179
180 180 log.info("Using the RhodeCode configuration:{}".format(ini_config))
181 181 pyramid.paster.setup_logging(ini_config)
182 182
183 183 settings = get_app_config(ini_config)
184 184 app = make_pyramid_app({'__file__': ini_config}, **settings)
185 185
186 186 return app
187 187
188 188
189 189 @pytest.fixture(scope='function')
190 190 def app(request, config_stub, baseapp, http_environ):
191 191 app = CustomTestApp(
192 192 baseapp,
193 193 extra_environ=http_environ)
194 194 if request.cls:
195 195 request.cls.app = app
196 196 return app
197 197
198 198
199 199 @pytest.fixture(scope='session')
200 200 def app_settings(baseapp, ini_config):
201 201 """
202 202 Settings dictionary used to create the app.
203 203
204 204 Parses the ini file and passes the result through the sanitize and apply
205 205 defaults mechanism in `rhodecode.config.middleware`.
206 206 """
207 207 return baseapp.config.get_settings()
208 208
209 209
210 210 @pytest.fixture(scope='session')
211 211 def db_connection(ini_settings):
212 212 # Initialize the database connection.
213 213 config_utils.initialize_database(ini_settings)
214 214
215 215
216 216 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
217 217
218 218
219 219 def _autologin_user(app, *args):
220 220 session = login_user_session(app, *args)
221 221 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
222 222 return LoginData(csrf_token, session['rhodecode_user'])
223 223
224 224
225 225 @pytest.fixture()
226 226 def autologin_user(app):
227 227 """
228 228 Utility fixture which makes sure that the admin user is logged in
229 229 """
230 230 return _autologin_user(app)
231 231
232 232
233 233 @pytest.fixture()
234 234 def autologin_regular_user(app):
235 235 """
236 236 Utility fixture which makes sure that the regular user is logged in
237 237 """
238 238 return _autologin_user(
239 239 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
240 240
241 241
242 242 @pytest.fixture(scope='function')
243 243 def csrf_token(request, autologin_user):
244 244 return autologin_user.csrf_token
245 245
246 246
247 247 @pytest.fixture(scope='function')
248 248 def xhr_header(request):
249 249 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
250 250
251 251
252 252 @pytest.fixture()
253 253 def real_crypto_backend(monkeypatch):
254 254 """
255 255 Switch the production crypto backend on for this test.
256 256
257 257 During the test run the crypto backend is replaced with a faster
258 258 implementation based on the MD5 algorithm.
259 259 """
260 260 monkeypatch.setattr(rhodecode, 'is_test', False)
261 261
262 262
263 263 @pytest.fixture(scope='class')
264 264 def index_location(request, baseapp):
265 265 index_location = baseapp.config.get_settings()['search.location']
266 266 if request.cls:
267 267 request.cls.index_location = index_location
268 268 return index_location
269 269
270 270
271 271 @pytest.fixture(scope='session', autouse=True)
272 272 def tests_tmp_path(request):
273 273 """
274 274 Create temporary directory to be used during the test session.
275 275 """
276 276 if not os.path.exists(TESTS_TMP_PATH):
277 277 os.makedirs(TESTS_TMP_PATH)
278 278
279 279 if not request.config.getoption('--keep-tmp-path'):
280 280 @request.addfinalizer
281 281 def remove_tmp_path():
282 282 shutil.rmtree(TESTS_TMP_PATH)
283 283
284 284 return TESTS_TMP_PATH
285 285
286 286
287 287 @pytest.fixture()
288 288 def test_repo_group(request):
289 289 """
290 290 Create a temporary repository group, and destroy it after
291 291 usage automatically
292 292 """
293 293 fixture = Fixture()
294 294 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
295 295 repo_group = fixture.create_repo_group(repogroupid)
296 296
297 297 def _cleanup():
298 298 fixture.destroy_repo_group(repogroupid)
299 299
300 300 request.addfinalizer(_cleanup)
301 301 return repo_group
302 302
303 303
304 304 @pytest.fixture()
305 305 def test_user_group(request):
306 306 """
307 307 Create a temporary user group, and destroy it after
308 308 usage automatically
309 309 """
310 310 fixture = Fixture()
311 311 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
312 312 user_group = fixture.create_user_group(usergroupid)
313 313
314 314 def _cleanup():
315 315 fixture.destroy_user_group(user_group)
316 316
317 317 request.addfinalizer(_cleanup)
318 318 return user_group
319 319
320 320
321 321 @pytest.fixture(scope='session')
322 322 def test_repo(request):
323 323 container = TestRepoContainer()
324 324 request.addfinalizer(container._cleanup)
325 325 return container
326 326
327 327
328 328 class TestRepoContainer(object):
329 329 """
330 330 Container for test repositories which are used read-only.
331 331
332 332 Repositories will be created on demand and re-used during the lifetime
333 333 of this object.
334 334
335 335 Usage to get the svn test repository "minimal"::
336 336
337 337 test_repo = TestRepoContainer()
338 338 repo = test_repo('minimal', 'svn')
339 339
340 340 """
341 341
342 342 dump_extractors = {
343 343 'git': utils.extract_git_repo_from_dump,
344 344 'hg': utils.extract_hg_repo_from_dump,
345 345 'svn': utils.extract_svn_repo_from_dump,
346 346 }
347 347
348 348 def __init__(self):
349 349 self._cleanup_repos = []
350 350 self._fixture = Fixture()
351 351 self._repos = {}
352 352
353 353 def __call__(self, dump_name, backend_alias, config=None):
354 354 key = (dump_name, backend_alias)
355 355 if key not in self._repos:
356 356 repo = self._create_repo(dump_name, backend_alias, config)
357 357 self._repos[key] = repo.repo_id
358 358 return Repository.get(self._repos[key])
359 359
360 360 def _create_repo(self, dump_name, backend_alias, config):
361 repo_name = '%s-%s' % (backend_alias, dump_name)
361 repo_name = f'{backend_alias}-{dump_name}'
362 362 backend = get_backend(backend_alias)
363 363 dump_extractor = self.dump_extractors[backend_alias]
364 364 repo_path = dump_extractor(dump_name, repo_name)
365 365
366 366 vcs_repo = backend(repo_path, config=config)
367 367 repo2db_mapper({repo_name: vcs_repo})
368 368
369 369 repo = RepoModel().get_by_repo_name(repo_name)
370 370 self._cleanup_repos.append(repo_name)
371 371 return repo
372 372
373 373 def _cleanup(self):
374 374 for repo_name in reversed(self._cleanup_repos):
375 375 self._fixture.destroy_repo(repo_name)
376 376
377 377
378 378 def backend_base(request, backend_alias, baseapp, test_repo):
379 379 if backend_alias not in request.config.getoption('--backends'):
380 380 pytest.skip("Backend %s not selected." % (backend_alias, ))
381 381
382 382 utils.check_xfail_backends(request.node, backend_alias)
383 383 utils.check_skip_backends(request.node, backend_alias)
384 384
385 385 repo_name = 'vcs_test_%s' % (backend_alias, )
386 386 backend = Backend(
387 387 alias=backend_alias,
388 388 repo_name=repo_name,
389 389 test_name=request.node.name,
390 390 test_repo_container=test_repo)
391 391 request.addfinalizer(backend.cleanup)
392 392 return backend
393 393
394 394
395 395 @pytest.fixture()
396 396 def backend(request, backend_alias, baseapp, test_repo):
397 397 """
398 398 Parametrized fixture which represents a single backend implementation.
399 399
400 400 It respects the option `--backends` to focus the test run on specific
401 401 backend implementations.
402 402
403 403 It also supports `pytest.mark.xfail_backends` to mark tests as failing
404 404 for specific backends. This is intended as a utility for incremental
405 405 development of a new backend implementation.
406 406 """
407 407 return backend_base(request, backend_alias, baseapp, test_repo)
408 408
409 409
410 410 @pytest.fixture()
411 411 def backend_git(request, baseapp, test_repo):
412 412 return backend_base(request, 'git', baseapp, test_repo)
413 413
414 414
415 415 @pytest.fixture()
416 416 def backend_hg(request, baseapp, test_repo):
417 417 return backend_base(request, 'hg', baseapp, test_repo)
418 418
419 419
420 420 @pytest.fixture()
421 421 def backend_svn(request, baseapp, test_repo):
422 422 return backend_base(request, 'svn', baseapp, test_repo)
423 423
424 424
425 425 @pytest.fixture()
426 426 def backend_random(backend_git):
427 427 """
428 428 Use this to express that your tests need a backend.
429 429
430 430 A few of our tests need a backend, so that we can run the code. This
431 431 fixture is intended to be used for such cases. It will pick one of the
432 432 backends and run the tests.
433 433
434 434 The fixture `backend` would run the test multiple times for each
435 435 available backend which is a pure waste of time if the test is
436 436 independent of the backend type.
437 437 """
438 438 # TODO: johbo: Change this to pick a random backend
439 439 return backend_git
440 440
441 441
442 442 @pytest.fixture()
443 443 def backend_stub(backend_git):
444 444 """
445 445 Use this to express that your tests need a backend stub
446 446
447 447 TODO: mikhail: Implement a real stub logic instead of returning
448 448 a git backend
449 449 """
450 450 return backend_git
451 451
452 452
453 453 @pytest.fixture()
454 454 def repo_stub(backend_stub):
455 455 """
456 456 Use this to express that your tests need a repository stub
457 457 """
458 458 return backend_stub.create_repo()
459 459
460 460
461 461 class Backend(object):
462 462 """
463 463 Represents the test configuration for one supported backend
464 464
465 465 Provides easy access to different test repositories based on
466 466 `__getitem__`. Such repositories will only be created once per test
467 467 session.
468 468 """
469 469
470 470 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
471 471 _master_repo = None
472 472 _master_repo_path = ''
473 473 _commit_ids = {}
474 474
475 475 def __init__(self, alias, repo_name, test_name, test_repo_container):
476 476 self.alias = alias
477 477 self.repo_name = repo_name
478 478 self._cleanup_repos = []
479 479 self._test_name = test_name
480 480 self._test_repo_container = test_repo_container
481 481 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
482 482 # Fixture will survive in the end.
483 483 self._fixture = Fixture()
484 484
485 485 def __getitem__(self, key):
486 486 return self._test_repo_container(key, self.alias)
487 487
488 488 def create_test_repo(self, key, config=None):
489 489 return self._test_repo_container(key, self.alias, config)
490 490
491 491 @property
492 492 def repo_id(self):
493 493 # just fake some repo_id
494 494 return self.repo.repo_id
495 495
496 496 @property
497 497 def repo(self):
498 498 """
499 499 Returns the "current" repository. This is the vcs_test repo or the
500 500 last repo which has been created with `create_repo`.
501 501 """
502 502 from rhodecode.model.db import Repository
503 503 return Repository.get_by_repo_name(self.repo_name)
504 504
505 505 @property
506 506 def default_branch_name(self):
507 507 VcsRepository = get_backend(self.alias)
508 508 return VcsRepository.DEFAULT_BRANCH_NAME
509 509
510 510 @property
511 511 def default_head_id(self):
512 512 """
513 513 Returns the default head id of the underlying backend.
514 514
515 515 This will be the default branch name in case the backend does have a
516 516 default branch. In the other cases it will point to a valid head
517 517 which can serve as the base to create a new commit on top of it.
518 518 """
519 519 vcsrepo = self.repo.scm_instance()
520 520 head_id = (
521 521 vcsrepo.DEFAULT_BRANCH_NAME or
522 522 vcsrepo.commit_ids[-1])
523 523 return head_id
524 524
525 525 @property
526 526 def commit_ids(self):
527 527 """
528 528 Returns the list of commits for the last created repository
529 529 """
530 530 return self._commit_ids
531 531
532 532 def create_master_repo(self, commits):
533 533 """
534 534 Create a repository and remember it as a template.
535 535
536 536 This allows to easily create derived repositories to construct
537 537 more complex scenarios for diff, compare and pull requests.
538 538
539 539 Returns a commit map which maps from commit message to raw_id.
540 540 """
541 541 self._master_repo = self.create_repo(commits=commits)
542 542 self._master_repo_path = self._master_repo.repo_full_path
543 543
544 544 return self._commit_ids
545 545
546 546 def create_repo(
547 547 self, commits=None, number_of_commits=0, heads=None,
548 548 name_suffix='', bare=False, **kwargs):
549 549 """
550 550 Create a repository and record it for later cleanup.
551 551
552 552 :param commits: Optional. A sequence of dict instances.
553 553 Will add a commit per entry to the new repository.
554 554 :param number_of_commits: Optional. If set to a number, this number of
555 555 commits will be added to the new repository.
556 556 :param heads: Optional. Can be set to a sequence of commit
557 557 names which shall be pulled in from the master repository.
558 558 :param name_suffix: adds special suffix to generated repo name
559 559 :param bare: set a repo as bare (no checkout)
560 560 """
561 561 self.repo_name = self._next_repo_name() + name_suffix
562 562 repo = self._fixture.create_repo(
563 563 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
564 564 self._cleanup_repos.append(repo.repo_name)
565 565
566 566 commits = commits or [
567 567 {'message': f'Commit {x} of {self.repo_name}'}
568 568 for x in range(number_of_commits)]
569 569 vcs_repo = repo.scm_instance()
570 570 vcs_repo.count()
571 571 self._add_commits_to_repo(vcs_repo, commits)
572 572 if heads:
573 573 self.pull_heads(repo, heads)
574 574
575 575 return repo
576 576
577 577 def pull_heads(self, repo, heads, do_fetch=False):
578 578 """
579 579 Make sure that repo contains all commits mentioned in `heads`
580 580 """
581 581 vcsrepo = repo.scm_instance()
582 582 vcsrepo.config.clear_section('hooks')
583 583 commit_ids = [self._commit_ids[h] for h in heads]
584 584 if do_fetch:
585 585 vcsrepo.fetch(self._master_repo_path, commit_ids=commit_ids)
586 586 vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)
587 587
588 588 def create_fork(self):
589 589 repo_to_fork = self.repo_name
590 590 self.repo_name = self._next_repo_name()
591 591 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
592 592 self._cleanup_repos.append(self.repo_name)
593 593 return repo
594 594
595 595 def new_repo_name(self, suffix=''):
596 596 self.repo_name = self._next_repo_name() + suffix
597 597 self._cleanup_repos.append(self.repo_name)
598 598 return self.repo_name
599 599
600 600 def _next_repo_name(self):
601 601 return "%s_%s" % (
602 602 self.invalid_repo_name.sub('_', self._test_name), len(self._cleanup_repos))
603 603
604 604 def ensure_file(self, filename, content=b'Test content\n'):
605 605 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
606 606 commits = [
607 607 {'added': [
608 608 FileNode(filename, content=content),
609 609 ]},
610 610 ]
611 611 self._add_commits_to_repo(self.repo.scm_instance(), commits)
612 612
613 613 def enable_downloads(self):
614 614 repo = self.repo
615 615 repo.enable_downloads = True
616 616 Session().add(repo)
617 617 Session().commit()
618 618
619 619 def cleanup(self):
620 620 for repo_name in reversed(self._cleanup_repos):
621 621 self._fixture.destroy_repo(repo_name)
622 622
623 623 def _add_commits_to_repo(self, repo, commits):
624 624 commit_ids = _add_commits_to_repo(repo, commits)
625 625 if not commit_ids:
626 626 return
627 627 self._commit_ids = commit_ids
628 628
629 629 # Creating refs for Git to allow fetching them from remote repository
630 630 if self.alias == 'git':
631 631 refs = {}
632 632 for message in self._commit_ids:
633 633 cleanup_message = message.replace(' ', '')
634 634 ref_name = f'refs/test-refs/{cleanup_message}'
635 635 refs[ref_name] = self._commit_ids[message]
636 636 self._create_refs(repo, refs)
637 637
638 638 def _create_refs(self, repo, refs):
639 639 for ref_name, ref_val in refs.items():
640 640 repo.set_refs(ref_name, ref_val)
641 641
642 642
643 643 class VcsBackend(object):
644 644 """
645 645 Represents the test configuration for one supported vcs backend.
646 646 """
647 647
648 648 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
649 649
650 650 def __init__(self, alias, repo_path, test_name, test_repo_container):
651 651 self.alias = alias
652 652 self._repo_path = repo_path
653 653 self._cleanup_repos = []
654 654 self._test_name = test_name
655 655 self._test_repo_container = test_repo_container
656 656
657 657 def __getitem__(self, key):
658 658 return self._test_repo_container(key, self.alias).scm_instance()
659 659
660 660 def __repr__(self):
661 661 return f'{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})'
662 662
663 663 @property
664 664 def repo(self):
665 665 """
666 666 Returns the "current" repository. This is the vcs_test repo or the last
667 667 repo which has been created.
668 668 """
669 669 Repository = get_backend(self.alias)
670 670 return Repository(self._repo_path)
671 671
672 672 @property
673 673 def backend(self):
674 674 """
675 675 Returns the backend implementation class.
676 676 """
677 677 return get_backend(self.alias)
678 678
679 679 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
680 680 bare=False):
681 681 repo_name = self._next_repo_name()
682 682 self._repo_path = get_new_dir(repo_name)
683 683 repo_class = get_backend(self.alias)
684 684 src_url = None
685 685 if _clone_repo:
686 686 src_url = _clone_repo.path
687 687 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
688 688 self._cleanup_repos.append(repo)
689 689
690 690 commits = commits or [
691 691 {'message': 'Commit %s of %s' % (x, repo_name)}
692 692 for x in range(number_of_commits)]
693 693 _add_commits_to_repo(repo, commits)
694 694 return repo
695 695
696 696 def clone_repo(self, repo):
697 697 return self.create_repo(_clone_repo=repo)
698 698
699 699 def cleanup(self):
700 700 for repo in self._cleanup_repos:
701 701 shutil.rmtree(repo.path)
702 702
703 703 def new_repo_path(self):
704 704 repo_name = self._next_repo_name()
705 705 self._repo_path = get_new_dir(repo_name)
706 706 return self._repo_path
707 707
708 708 def _next_repo_name(self):
709 709
710 710 return "{}_{}".format(
711 711 self.invalid_repo_name.sub('_', self._test_name),
712 712 len(self._cleanup_repos)
713 713 )
714 714
715 715 def add_file(self, repo, filename, content='Test content\n'):
716 716 imc = repo.in_memory_commit
717 717 imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content)))
718 718 imc.commit(
719 719 message='Automatic commit from vcsbackend fixture',
720 720 author='Automatic <automatic@rhodecode.com>')
721 721
722 722 def ensure_file(self, filename, content='Test content\n'):
723 723 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
724 724 self.add_file(self.repo, filename, content)
725 725
726 726
727 727 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend:
728 728 if backend_alias not in request.config.getoption('--backends'):
729 729 pytest.skip("Backend %s not selected." % (backend_alias, ))
730 730
731 731 utils.check_xfail_backends(request.node, backend_alias)
732 732 utils.check_skip_backends(request.node, backend_alias)
733 733
734 734 repo_name = f'vcs_test_{backend_alias}'
735 735 repo_path = os.path.join(tests_tmp_path, repo_name)
736 736 backend = VcsBackend(
737 737 alias=backend_alias,
738 738 repo_path=repo_path,
739 739 test_name=request.node.name,
740 740 test_repo_container=test_repo)
741 741 request.addfinalizer(backend.cleanup)
742 742 return backend
743 743
744 744
745 745 @pytest.fixture()
746 746 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
747 747 """
748 748 Parametrized fixture which represents a single vcs backend implementation.
749 749
750 750 See the fixture `backend` for more details. This one implements the same
751 751 concept, but on vcs level. So it does not provide model instances etc.
752 752
753 753 Parameters are generated dynamically, see :func:`pytest_generate_tests`
754 754 for how this works.
755 755 """
756 756 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
757 757
758 758
759 759 @pytest.fixture()
760 760 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
761 761 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
762 762
763 763
764 764 @pytest.fixture()
765 765 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
766 766 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
767 767
768 768
769 769 @pytest.fixture()
770 770 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
771 771 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
772 772
773 773
774 774 @pytest.fixture()
775 775 def vcsbackend_stub(vcsbackend_git):
776 776 """
777 777 Use this to express that your test just needs a stub of a vcsbackend.
778 778
779 779 Plan is to eventually implement an in-memory stub to speed tests up.
780 780 """
781 781 return vcsbackend_git
782 782
783 783
784 784 def _add_commits_to_repo(vcs_repo, commits):
785 785 commit_ids = {}
786 786 if not commits:
787 787 return commit_ids
788 788
789 789 imc = vcs_repo.in_memory_commit
790 790
791 791 for idx, commit in enumerate(commits):
792 792 message = str(commit.get('message', f'Commit {idx}'))
793 793
794 794 for node in commit.get('added', []):
795 795 imc.add(FileNode(safe_bytes(node.path), content=node.content))
796 796 for node in commit.get('changed', []):
797 797 imc.change(FileNode(safe_bytes(node.path), content=node.content))
798 798 for node in commit.get('removed', []):
799 799 imc.remove(FileNode(safe_bytes(node.path)))
800 800
801 801 parents = [
802 802 vcs_repo.get_commit(commit_id=commit_ids[p])
803 803 for p in commit.get('parents', [])]
804 804
805 805 operations = ('added', 'changed', 'removed')
806 806 if not any((commit.get(o) for o in operations)):
807 807 imc.add(FileNode(b'file_%b' % safe_bytes(str(idx)), content=safe_bytes(message)))
808 808
809 809 commit = imc.commit(
810 810 message=message,
811 811 author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
812 812 date=commit.get('date'),
813 813 branch=commit.get('branch'),
814 814 parents=parents)
815 815
816 816 commit_ids[commit.message] = commit.raw_id
817 817
818 818 return commit_ids
819 819
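For reference, a hedged sketch of the commits structure the helper above accepts; file names and messages are hypothetical:

    # hypothetical commits description consumed by _add_commits_to_repo /
    # Backend.create_repo; each dict may carry 'message', 'added', 'changed',
    # 'removed', 'parents', 'author', 'date' and 'branch'
    sample_commits = [
        {'message': 'add readme',
         'added': [FileNode(b'README.rst', content=b'initial\n')]},
        {'message': 'change readme',
         'changed': [FileNode(b'README.rst', content=b'updated\n')]},
    ]
    # e.g. vcsbackend.create_repo(commits=sample_commits) would then yield a two-commit repo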
820 820
821 821 @pytest.fixture()
822 822 def reposerver(request):
823 823 """
824 824 Allows serving a backend repository
825 825 """
826 826
827 827 repo_server = RepoServer()
828 828 request.addfinalizer(repo_server.cleanup)
829 829 return repo_server
830 830
831 831
832 832 class RepoServer(object):
833 833 """
834 834 Utility to serve a local repository for the duration of a test case.
835 835
836 836 Supports only Subversion so far.
837 837 """
838 838
839 839 url = None
840 840
841 841 def __init__(self):
842 842 self._cleanup_servers = []
843 843
844 844 def serve(self, vcsrepo):
845 845 if vcsrepo.alias != 'svn':
846 846 raise TypeError("Backend %s not supported" % vcsrepo.alias)
847 847
848 848 proc = subprocess.Popen(
849 849 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
850 850 '--root', vcsrepo.path])
851 851 self._cleanup_servers.append(proc)
852 852 self.url = 'svn://localhost'
853 853
854 854 def cleanup(self):
855 855 for proc in self._cleanup_servers:
856 856 proc.terminate()
857 857
858 858
859 859 @pytest.fixture()
860 860 def pr_util(backend, request, config_stub):
861 861 """
862 862 Utility for tests of models and for functional tests around pull requests.
863 863
864 864 It gives an instance of :class:`PRTestUtility` which provides various
865 865 utility methods around one pull request.
866 866
867 867 This fixture uses `backend` and inherits its parameterization.
868 868 """
869 869
870 870 util = PRTestUtility(backend)
871 871 request.addfinalizer(util.cleanup)
872 872
873 873 return util
874 874
875 875
876 876 class PRTestUtility(object):
877 877
878 878 pull_request = None
879 879 pull_request_id = None
880 880 mergeable_patcher = None
881 881 mergeable_mock = None
882 882 notification_patcher = None
883 883 commit_ids: dict
884 884
885 885 def __init__(self, backend):
886 886 self.backend = backend
887 887
888 888 def create_pull_request(
889 889 self, commits=None, target_head=None, source_head=None,
890 890 revisions=None, approved=False, author=None, mergeable=False,
891 891 enable_notifications=True, name_suffix='', reviewers=None, observers=None,
892 892 title="Test", description="Description"):
893 893 self.set_mergeable(mergeable)
894 894 if not enable_notifications:
895 895 # mock notification side effect
896 896 self.notification_patcher = mock.patch(
897 897 'rhodecode.model.notification.NotificationModel.create')
898 898 self.notification_patcher.start()
899 899
900 900 if not self.pull_request:
901 901 if not commits:
902 902 commits = [
903 903 {'message': 'c1'},
904 904 {'message': 'c2'},
905 905 {'message': 'c3'},
906 906 ]
907 907 target_head = 'c1'
908 908 source_head = 'c2'
909 909 revisions = ['c2']
910 910
911 911 self.commit_ids = self.backend.create_master_repo(commits)
912 912 self.target_repository = self.backend.create_repo(
913 913 heads=[target_head], name_suffix=name_suffix)
914 914 self.source_repository = self.backend.create_repo(
915 915 heads=[source_head], name_suffix=name_suffix)
916 916 self.author = author or UserModel().get_by_username(
917 917 TEST_USER_ADMIN_LOGIN)
918 918
919 919 model = PullRequestModel()
920 920 self.create_parameters = {
921 921 'created_by': self.author,
922 922 'source_repo': self.source_repository.repo_name,
923 923 'source_ref': self._default_branch_reference(source_head),
924 924 'target_repo': self.target_repository.repo_name,
925 925 'target_ref': self._default_branch_reference(target_head),
926 926 'revisions': [self.commit_ids[r] for r in revisions],
927 927 'reviewers': reviewers or self._get_reviewers(),
928 928 'observers': observers or self._get_observers(),
929 929 'title': title,
930 930 'description': description,
931 931 }
932 932 self.pull_request = model.create(**self.create_parameters)
933 933 assert model.get_versions(self.pull_request) == []
934 934
935 935 self.pull_request_id = self.pull_request.pull_request_id
936 936
937 937 if approved:
938 938 self.approve()
939 939
940 940 Session().add(self.pull_request)
941 941 Session().commit()
942 942
943 943 return self.pull_request
944 944
945 945 def approve(self):
946 946 self.create_status_votes(
947 947 ChangesetStatus.STATUS_APPROVED,
948 948 *self.pull_request.reviewers)
949 949
950 950 def close(self):
951 951 PullRequestModel().close_pull_request(self.pull_request, self.author)
952 952
953 953 def _default_branch_reference(self, commit_message, branch: str = None) -> str:
954 954 default_branch = branch or self.backend.default_branch_name
955 955 message = self.commit_ids[commit_message]
956 956 reference = f'branch:{default_branch}:{message}'
957 957
958 958 return reference
959 959
960 960 def _get_reviewers(self):
961 961 role = PullRequestReviewers.ROLE_REVIEWER
962 962 return [
963 963 (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
964 964 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
965 965 ]
966 966
967 967 def _get_observers(self):
968 968 return [
969 969
970 970 ]
971 971
972 972 def update_source_repository(self, head=None, do_fetch=False):
973 973 heads = [head or 'c3']
974 974 self.backend.pull_heads(self.source_repository, heads=heads, do_fetch=do_fetch)
975 975
976 976 def update_target_repository(self, head=None, do_fetch=False):
977 977 heads = [head or 'c3']
978 978 self.backend.pull_heads(self.target_repository, heads=heads, do_fetch=do_fetch)
979 979
980 980 def set_pr_target_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
981 981 full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
982 982 self.pull_request.target_ref = full_ref
983 983 return full_ref
984 984
985 985 def set_pr_source_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
986 986 full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
987 987 self.pull_request.source_ref = full_ref
988 988 return full_ref
989 989
990 990 def add_one_commit(self, head=None):
991 991 self.update_source_repository(head=head)
992 992 old_commit_ids = set(self.pull_request.revisions)
993 993 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
994 994 commit_ids = set(self.pull_request.revisions)
995 995 new_commit_ids = commit_ids - old_commit_ids
996 996 assert len(new_commit_ids) == 1
997 997 return new_commit_ids.pop()
998 998
999 999 def remove_one_commit(self):
1000 1000 assert len(self.pull_request.revisions) == 2
1001 1001 source_vcs = self.source_repository.scm_instance()
1002 1002 removed_commit_id = source_vcs.commit_ids[-1]
1003 1003
1004 1004 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1005 1005 # remove the if once that's sorted out.
1006 1006 if self.backend.alias == "git":
1007 1007 kwargs = {'branch_name': self.backend.default_branch_name}
1008 1008 else:
1009 1009 kwargs = {}
1010 1010 source_vcs.strip(removed_commit_id, **kwargs)
1011 1011
1012 1012 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
1013 1013 assert len(self.pull_request.revisions) == 1
1014 1014 return removed_commit_id
1015 1015
1016 1016 def create_comment(self, linked_to=None):
1017 1017 comment = CommentsModel().create(
1018 1018 text="Test comment",
1019 1019 repo=self.target_repository.repo_name,
1020 1020 user=self.author,
1021 1021 pull_request=self.pull_request)
1022 1022 assert comment.pull_request_version_id is None
1023 1023
1024 1024 if linked_to:
1025 1025 PullRequestModel()._link_comments_to_version(linked_to)
1026 1026
1027 1027 return comment
1028 1028
1029 1029 def create_inline_comment(
1030 1030 self, linked_to=None, line_no='n1', file_path='file_1'):
1031 1031 comment = CommentsModel().create(
1032 1032 text="Test comment",
1033 1033 repo=self.target_repository.repo_name,
1034 1034 user=self.author,
1035 1035 line_no=line_no,
1036 1036 f_path=file_path,
1037 1037 pull_request=self.pull_request)
1038 1038 assert comment.pull_request_version_id is None
1039 1039
1040 1040 if linked_to:
1041 1041 PullRequestModel()._link_comments_to_version(linked_to)
1042 1042
1043 1043 return comment
1044 1044
1045 1045 def create_version_of_pull_request(self):
1046 1046 pull_request = self.create_pull_request()
1047 1047 version = PullRequestModel()._create_version_from_snapshot(
1048 1048 pull_request)
1049 1049 return version
1050 1050
1051 1051 def create_status_votes(self, status, *reviewers):
1052 1052 for reviewer in reviewers:
1053 1053 ChangesetStatusModel().set_status(
1054 1054 repo=self.pull_request.target_repo,
1055 1055 status=status,
1056 1056 user=reviewer.user_id,
1057 1057 pull_request=self.pull_request)
1058 1058
1059 1059 def set_mergeable(self, value):
1060 1060 if not self.mergeable_patcher:
1061 1061 self.mergeable_patcher = mock.patch.object(
1062 1062 VcsSettingsModel, 'get_general_settings')
1063 1063 self.mergeable_mock = self.mergeable_patcher.start()
1064 1064 self.mergeable_mock.return_value = {
1065 1065 'rhodecode_pr_merge_enabled': value}
1066 1066
1067 1067 def cleanup(self):
1068 1068 # In case the source repository is already cleaned up, the pull
1069 1069 # request will already be deleted.
1070 1070 pull_request = PullRequest().get(self.pull_request_id)
1071 1071 if pull_request:
1072 1072 PullRequestModel().delete(pull_request, pull_request.author)
1073 1073 Session().commit()
1074 1074
1075 1075 if self.notification_patcher:
1076 1076 self.notification_patcher.stop()
1077 1077
1078 1078 if self.mergeable_patcher:
1079 1079 self.mergeable_patcher.stop()
1080 1080
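# A minimal usage sketch, illustrative only: it assumes the helper above is
# exposed to tests through a `pr_util`-style fixture defined earlier in this
# module, and that create_pull_request() needs no extra arguments here.
def test_example_pull_request_flow(pr_util):
    pull_request = pr_util.create_pull_request()
    # pull one extra head into the source repo and refresh the PR commits
    new_commit = pr_util.add_one_commit()
    assert new_commit in pull_request.revisions
    pr_util.close()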
1081 1081
1082 1082 @pytest.fixture()
1083 1083 def user_admin(baseapp):
1084 1084 """
1085 1085 Provides the default admin test user as an instance of `db.User`.
1086 1086 """
1087 1087 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1088 1088 return user
1089 1089
1090 1090
1091 1091 @pytest.fixture()
1092 1092 def user_regular(baseapp):
1093 1093 """
1094 1094 Provides the default regular test user as an instance of `db.User`.
1095 1095 """
1096 1096 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1097 1097 return user
1098 1098
1099 1099
1100 1100 @pytest.fixture()
1101 1101 def user_util(request, db_connection):
1102 1102 """
1103 1103 Provides a wired instance of `UserUtility` with integrated cleanup.
1104 1104 """
1105 1105 utility = UserUtility(test_name=request.node.name)
1106 1106 request.addfinalizer(utility.cleanup)
1107 1107 return utility
1108 1108
1109 1109
1110 1110 # TODO: johbo: Split this up into utilities per domain or something similar
1111 1111 class UserUtility(object):
1112 1112
1113 1113 def __init__(self, test_name="test"):
1114 1114 self._test_name = self._sanitize_name(test_name)
1115 1115 self.fixture = Fixture()
1116 1116 self.repo_group_ids = []
1117 1117 self.repos_ids = []
1118 1118 self.user_ids = []
1119 1119 self.user_group_ids = []
1120 1120 self.user_repo_permission_ids = []
1121 1121 self.user_group_repo_permission_ids = []
1122 1122 self.user_repo_group_permission_ids = []
1123 1123 self.user_group_repo_group_permission_ids = []
1124 1124 self.user_user_group_permission_ids = []
1125 1125 self.user_group_user_group_permission_ids = []
1126 1126 self.user_permissions = []
1127 1127
1128 1128 def _sanitize_name(self, name):
1129 1129 for char in ['[', ']']:
1130 1130 name = name.replace(char, '_')
1131 1131 return name
1132 1132
1133 1133 def create_repo_group(
1134 1134 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1135 1135 group_name = "{prefix}_repogroup_{count}".format(
1136 1136 prefix=self._test_name,
1137 1137 count=len(self.repo_group_ids))
1138 1138 repo_group = self.fixture.create_repo_group(
1139 1139 group_name, cur_user=owner)
1140 1140 if auto_cleanup:
1141 1141 self.repo_group_ids.append(repo_group.group_id)
1142 1142 return repo_group
1143 1143
1144 1144 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1145 1145 auto_cleanup=True, repo_type='hg', bare=False):
1146 1146 repo_name = "{prefix}_repository_{count}".format(
1147 1147 prefix=self._test_name,
1148 1148 count=len(self.repos_ids))
1149 1149
1150 1150 repository = self.fixture.create_repo(
1151 1151 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1152 1152 if auto_cleanup:
1153 1153 self.repos_ids.append(repository.repo_id)
1154 1154 return repository
1155 1155
1156 1156 def create_user(self, auto_cleanup=True, **kwargs):
1157 1157 user_name = "{prefix}_user_{count}".format(
1158 1158 prefix=self._test_name,
1159 1159 count=len(self.user_ids))
1160 1160 user = self.fixture.create_user(user_name, **kwargs)
1161 1161 if auto_cleanup:
1162 1162 self.user_ids.append(user.user_id)
1163 1163 return user
1164 1164
1165 1165 def create_additional_user_email(self, user, email):
1166 1166 uem = self.fixture.create_additional_user_email(user=user, email=email)
1167 1167 return uem
1168 1168
1169 1169 def create_user_with_group(self):
1170 1170 user = self.create_user()
1171 1171 user_group = self.create_user_group(members=[user])
1172 1172 return user, user_group
1173 1173
1174 1174 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1175 1175 auto_cleanup=True, **kwargs):
1176 1176 group_name = "{prefix}_usergroup_{count}".format(
1177 1177 prefix=self._test_name,
1178 1178 count=len(self.user_group_ids))
1179 1179 user_group = self.fixture.create_user_group(
1180 1180 group_name, cur_user=owner, **kwargs)
1181 1181
1182 1182 if auto_cleanup:
1183 1183 self.user_group_ids.append(user_group.users_group_id)
1184 1184 if members:
1185 1185 for user in members:
1186 1186 UserGroupModel().add_user_to_group(user_group, user)
1187 1187 return user_group
1188 1188
1189 1189 def grant_user_permission(self, user_name, permission_name):
1190 1190 self.inherit_default_user_permissions(user_name, False)
1191 1191 self.user_permissions.append((user_name, permission_name))
1192 1192
1193 1193 def grant_user_permission_to_repo_group(
1194 1194 self, repo_group, user, permission_name):
1195 1195 permission = RepoGroupModel().grant_user_permission(
1196 1196 repo_group, user, permission_name)
1197 1197 self.user_repo_group_permission_ids.append(
1198 1198 (repo_group.group_id, user.user_id))
1199 1199 return permission
1200 1200
1201 1201 def grant_user_group_permission_to_repo_group(
1202 1202 self, repo_group, user_group, permission_name):
1203 1203 permission = RepoGroupModel().grant_user_group_permission(
1204 1204 repo_group, user_group, permission_name)
1205 1205 self.user_group_repo_group_permission_ids.append(
1206 1206 (repo_group.group_id, user_group.users_group_id))
1207 1207 return permission
1208 1208
1209 1209 def grant_user_permission_to_repo(
1210 1210 self, repo, user, permission_name):
1211 1211 permission = RepoModel().grant_user_permission(
1212 1212 repo, user, permission_name)
1213 1213 self.user_repo_permission_ids.append(
1214 1214 (repo.repo_id, user.user_id))
1215 1215 return permission
1216 1216
1217 1217 def grant_user_group_permission_to_repo(
1218 1218 self, repo, user_group, permission_name):
1219 1219 permission = RepoModel().grant_user_group_permission(
1220 1220 repo, user_group, permission_name)
1221 1221 self.user_group_repo_permission_ids.append(
1222 1222 (repo.repo_id, user_group.users_group_id))
1223 1223 return permission
1224 1224
1225 1225 def grant_user_permission_to_user_group(
1226 1226 self, target_user_group, user, permission_name):
1227 1227 permission = UserGroupModel().grant_user_permission(
1228 1228 target_user_group, user, permission_name)
1229 1229 self.user_user_group_permission_ids.append(
1230 1230 (target_user_group.users_group_id, user.user_id))
1231 1231 return permission
1232 1232
1233 1233 def grant_user_group_permission_to_user_group(
1234 1234 self, target_user_group, user_group, permission_name):
1235 1235 permission = UserGroupModel().grant_user_group_permission(
1236 1236 target_user_group, user_group, permission_name)
1237 1237 self.user_group_user_group_permission_ids.append(
1238 1238 (target_user_group.users_group_id, user_group.users_group_id))
1239 1239 return permission
1240 1240
1241 1241 def revoke_user_permission(self, user_name, permission_name):
1242 1242 self.inherit_default_user_permissions(user_name, True)
1243 1243 UserModel().revoke_perm(user_name, permission_name)
1244 1244
1245 1245 def inherit_default_user_permissions(self, user_name, value):
1246 1246 user = UserModel().get_by_username(user_name)
1247 1247 user.inherit_default_permissions = value
1248 1248 Session().add(user)
1249 1249 Session().commit()
1250 1250
1251 1251 def cleanup(self):
1252 1252 self._cleanup_permissions()
1253 1253 self._cleanup_repos()
1254 1254 self._cleanup_repo_groups()
1255 1255 self._cleanup_user_groups()
1256 1256 self._cleanup_users()
1257 1257
1258 1258 def _cleanup_permissions(self):
1259 1259 if self.user_permissions:
1260 1260 for user_name, permission_name in self.user_permissions:
1261 1261 self.revoke_user_permission(user_name, permission_name)
1262 1262
1263 1263 for permission in self.user_repo_permission_ids:
1264 1264 RepoModel().revoke_user_permission(*permission)
1265 1265
1266 1266 for permission in self.user_group_repo_permission_ids:
1267 1267 RepoModel().revoke_user_group_permission(*permission)
1268 1268
1269 1269 for permission in self.user_repo_group_permission_ids:
1270 1270 RepoGroupModel().revoke_user_permission(*permission)
1271 1271
1272 1272 for permission in self.user_group_repo_group_permission_ids:
1273 1273 RepoGroupModel().revoke_user_group_permission(*permission)
1274 1274
1275 1275 for permission in self.user_user_group_permission_ids:
1276 1276 UserGroupModel().revoke_user_permission(*permission)
1277 1277
1278 1278 for permission in self.user_group_user_group_permission_ids:
1279 1279 UserGroupModel().revoke_user_group_permission(*permission)
1280 1280
1281 1281 def _cleanup_repo_groups(self):
1282 1282 def _repo_group_compare(first_group_id, second_group_id):
1283 1283 """
1284 1284 Gives higher priority to the groups with the most complex paths
1285 1285 """
1286 1286 first_group = RepoGroup.get(first_group_id)
1287 1287 second_group = RepoGroup.get(second_group_id)
1288 1288 first_group_parts = (
1289 1289 len(first_group.group_name.split('/')) if first_group else 0)
1290 1290 second_group_parts = (
1291 1291 len(second_group.group_name.split('/')) if second_group else 0)
1292 1292 return second_group_parts - first_group_parts
1293 1293
1294 1294 sorted_repo_group_ids = sorted(
1295 1295 self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
1296 1296 for repo_group_id in sorted_repo_group_ids:
1297 1297 self.fixture.destroy_repo_group(repo_group_id)
1298 1298
1299 1299 def _cleanup_repos(self):
1300 1300 sorted_repos_ids = sorted(self.repos_ids)
1301 1301 for repo_id in sorted_repos_ids:
1302 1302 self.fixture.destroy_repo(repo_id)
1303 1303
1304 1304 def _cleanup_user_groups(self):
1305 1305 def _user_group_compare(first_group_id, second_group_id):
1306 1306 """
1307 1307 Gives higher priority to the groups with the most complex paths
1308 1308 """
1309 1309 first_group = UserGroup.get(first_group_id)
1310 1310 second_group = UserGroup.get(second_group_id)
1311 1311 first_group_parts = (
1312 1312 len(first_group.users_group_name.split('/'))
1313 1313 if first_group else 0)
1314 1314 second_group_parts = (
1315 1315 len(second_group.users_group_name.split('/'))
1316 1316 if second_group else 0)
1317 1317 return second_group_parts - first_group_parts
1318 1318
1319 1319 sorted_user_group_ids = sorted(
1320 1320 self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
1321 1321 for user_group_id in sorted_user_group_ids:
1322 1322 self.fixture.destroy_user_group(user_group_id)
1323 1323
1324 1324 def _cleanup_users(self):
1325 1325 for user_id in self.user_ids:
1326 1326 self.fixture.destroy_user(user_id)
1327 1327
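# A minimal sketch of how a test could consume the `user_util` fixture above;
# the test body and the 'repository.read' permission string are illustrative
# assumptions, not part of this module.
def test_example_repo_read_access(user_util):
    user = user_util.create_user()
    repo = user_util.create_repo(owner=user.username)
    user_util.grant_user_permission_to_repo(repo, user, 'repository.read')
    # everything created here is destroyed by user_util.cleanup() through the
    # finalizer registered in the fixture
    assert repo.repo_id in user_util.repos_ids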
1328 1328
1329 1329 @pytest.fixture(scope='session')
1330 1330 def testrun():
1331 1331 return {
1332 1332 'uuid': uuid.uuid4(),
1333 1333 'start': datetime.datetime.utcnow().isoformat(),
1334 1334 'timestamp': int(time.time()),
1335 1335 }
1336 1336
1337 1337
1338 1338 class AppenlightClient(object):
1339 1339
1340 1340 url_template = '{url}?protocol_version=0.5'
1341 1341
1342 1342 def __init__(
1343 1343 self, url, api_key, add_server=True, add_timestamp=True,
1344 1344 namespace=None, request=None, testrun=None):
1345 1345 self.url = self.url_template.format(url=url)
1346 1346 self.api_key = api_key
1347 1347 self.add_server = add_server
1348 1348 self.add_timestamp = add_timestamp
1349 1349 self.namespace = namespace
1350 1350 self.request = request
1351 1351 self.server = socket.getfqdn(socket.gethostname())
1352 1352 self.tags_before = {}
1353 1353 self.tags_after = {}
1354 1354 self.stats = []
1355 1355 self.testrun = testrun or {}
1356 1356
1357 1357 def tag_before(self, tag, value):
1358 1358 self.tags_before[tag] = value
1359 1359
1360 1360 def tag_after(self, tag, value):
1361 1361 self.tags_after[tag] = value
1362 1362
1363 1363 def collect(self, data):
1364 1364 if self.add_server:
1365 1365 data.setdefault('server', self.server)
1366 1366 if self.add_timestamp:
1367 1367 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1368 1368 if self.namespace:
1369 1369 data.setdefault('namespace', self.namespace)
1370 1370 if self.request:
1371 1371 data.setdefault('request', self.request)
1372 1372 self.stats.append(data)
1373 1373
1374 1374 def send_stats(self):
1375 1375 tags = [
1376 1376 ('testrun', self.request),
1377 1377 ('testrun.start', self.testrun['start']),
1378 1378 ('testrun.timestamp', self.testrun['timestamp']),
1379 1379 ('test', self.namespace),
1380 1380 ]
1381 1381 for key, value in self.tags_before.items():
1382 1382 tags.append((key + '.before', value))
1383 1383 try:
1384 1384 delta = self.tags_after[key] - value
1385 1385 tags.append((key + '.delta', delta))
1386 1386 except Exception:
1387 1387 pass
1388 1388 for key, value in self.tags_after.items():
1389 1389 tags.append((key + '.after', value))
1390 1390 self.collect({
1391 1391 'message': "Collected tags",
1392 1392 'tags': tags,
1393 1393 })
1394 1394
1395 1395 response = requests.post(
1396 1396 self.url,
1397 1397 headers={
1398 1398 'X-appenlight-api-key': self.api_key},
1399 1399 json=self.stats,
1400 1400 )
1401 1401
1402 1402 if response.status_code != 200:
1403 1403 pprint.pprint(self.stats)
1404 1404 print(response.headers)
1405 1405 print(response.text)
1406 1406 raise Exception('Sending to appenlight failed')
1407 1407
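# A rough usage sketch for AppenlightClient; the endpoint, API key and tag
# values below are placeholders, not real configuration.
def example_send_appenlight_stats(testrun):
    client = AppenlightClient(
        url='https://appenlight.example.com/api/general_metrics',
        api_key='REPLACE_ME',
        namespace='rhodecode.tests.example',
        request='example-testrun',
        testrun=testrun)
    client.tag_before('repo_count', 10)
    client.tag_after('repo_count', 12)  # send_stats() also reports the delta
    client.send_stats()                 # raises if the server does not answer 200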
1408 1408
1409 1409 @pytest.fixture()
1410 1410 def gist_util(request, db_connection):
1411 1411 """
1412 1412 Provides a wired instance of `GistUtility` with integrated cleanup.
1413 1413 """
1414 1414 utility = GistUtility()
1415 1415 request.addfinalizer(utility.cleanup)
1416 1416 return utility
1417 1417
1418 1418
1419 1419 class GistUtility(object):
1420 1420 def __init__(self):
1421 1421 self.fixture = Fixture()
1422 1422 self.gist_ids = []
1423 1423
1424 1424 def create_gist(self, **kwargs):
1425 1425 gist = self.fixture.create_gist(**kwargs)
1426 1426 self.gist_ids.append(gist.gist_id)
1427 1427 return gist
1428 1428
1429 1429 def cleanup(self):
1430 1430 for id_ in self.gist_ids:
1431 1431 self.fixture.destroy_gists(str(id_))
1432 1432
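# A short illustrative sketch of the `gist_util` fixture; any keyword
# arguments would simply be forwarded to Fixture.create_gist.
def test_example_gist(gist_util):
    gist = gist_util.create_gist()
    assert gist.gist_id in gist_util.gist_ids
    # gist_util.cleanup() destroys the gist again via the request finalizer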
1433 1433
1434 1434 @pytest.fixture()
1435 1435 def enabled_backends(request):
1436 1436 backends = request.config.option.backends
1437 1437 return backends[:]
1438 1438
1439 1439
1440 1440 @pytest.fixture()
1441 1441 def settings_util(request, db_connection):
1442 1442 """
1443 1443 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1444 1444 """
1445 1445 utility = SettingsUtility()
1446 1446 request.addfinalizer(utility.cleanup)
1447 1447 return utility
1448 1448
1449 1449
1450 1450 class SettingsUtility(object):
1451 1451 def __init__(self):
1452 1452 self.rhodecode_ui_ids = []
1453 1453 self.rhodecode_setting_ids = []
1454 1454 self.repo_rhodecode_ui_ids = []
1455 1455 self.repo_rhodecode_setting_ids = []
1456 1456
1457 1457 def create_repo_rhodecode_ui(
1458 1458 self, repo, section, value, key=None, active=True, cleanup=True):
1459 1459 key = key or sha1_safe(f'{section}{value}{repo.repo_id}')
1460 1460
1461 1461 setting = RepoRhodeCodeUi()
1462 1462 setting.repository_id = repo.repo_id
1463 1463 setting.ui_section = section
1464 1464 setting.ui_value = value
1465 1465 setting.ui_key = key
1466 1466 setting.ui_active = active
1467 1467 Session().add(setting)
1468 1468 Session().commit()
1469 1469
1470 1470 if cleanup:
1471 1471 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1472 1472 return setting
1473 1473
1474 1474 def create_rhodecode_ui(
1475 1475 self, section, value, key=None, active=True, cleanup=True):
1476 1476 key = key or sha1_safe(f'{section}{value}')
1477 1477
1478 1478 setting = RhodeCodeUi()
1479 1479 setting.ui_section = section
1480 1480 setting.ui_value = value
1481 1481 setting.ui_key = key
1482 1482 setting.ui_active = active
1483 1483 Session().add(setting)
1484 1484 Session().commit()
1485 1485
1486 1486 if cleanup:
1487 1487 self.rhodecode_ui_ids.append(setting.ui_id)
1488 1488 return setting
1489 1489
1490 1490 def create_repo_rhodecode_setting(
1491 1491 self, repo, name, value, type_, cleanup=True):
1492 1492 setting = RepoRhodeCodeSetting(
1493 1493 repo.repo_id, key=name, val=value, type=type_)
1494 1494 Session().add(setting)
1495 1495 Session().commit()
1496 1496
1497 1497 if cleanup:
1498 1498 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1499 1499 return setting
1500 1500
1501 1501 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1502 1502 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1503 1503 Session().add(setting)
1504 1504 Session().commit()
1505 1505
1506 1506 if cleanup:
1507 1507 self.rhodecode_setting_ids.append(setting.app_settings_id)
1508 1508
1509 1509 return setting
1510 1510
1511 1511 def cleanup(self):
1512 1512 for id_ in self.rhodecode_ui_ids:
1513 1513 setting = RhodeCodeUi.get(id_)
1514 1514 Session().delete(setting)
1515 1515
1516 1516 for id_ in self.rhodecode_setting_ids:
1517 1517 setting = RhodeCodeSetting.get(id_)
1518 1518 Session().delete(setting)
1519 1519
1520 1520 for id_ in self.repo_rhodecode_ui_ids:
1521 1521 setting = RepoRhodeCodeUi.get(id_)
1522 1522 Session().delete(setting)
1523 1523
1524 1524 for id_ in self.repo_rhodecode_setting_ids:
1525 1525 setting = RepoRhodeCodeSetting.get(id_)
1526 1526 Session().delete(setting)
1527 1527
1528 1528 Session().commit()
1529 1529
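# An illustrative sketch of `settings_util`, reusing the `repo_stub` fixture
# used elsewhere in this module; the section and value strings are arbitrary
# examples, and the key defaults to a sha1 of section + value + repo_id as
# implemented above.
def test_example_repo_ui_setting(settings_util, repo_stub):
    setting = settings_util.create_repo_rhodecode_ui(
        repo_stub, section='extensions', value='largefiles=')
    assert setting.ui_active
    # settings_util.cleanup() deletes the row again after the test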
1530 1530
1531 1531 @pytest.fixture()
1532 1532 def no_notifications(request):
1533 1533 notification_patcher = mock.patch(
1534 1534 'rhodecode.model.notification.NotificationModel.create')
1535 1535 notification_patcher.start()
1536 1536 request.addfinalizer(notification_patcher.stop)
1537 1537
1538 1538
1539 1539 @pytest.fixture(scope='session')
1540 1540 def repeat(request):
1541 1541 """
1542 1542 The number of repetitions is based on this fixture.
1543 1543
1544 1544 Slower calls may divide it by 10 or 100. The value is chosen so that the
1545 1545 default test suite does not run too slowly.
1546 1546 """
1547 1547 return request.config.getoption('--repeat')
1548 1548
1549 1549
1550 1550 @pytest.fixture()
1551 1551 def rhodecode_fixtures():
1552 1552 return Fixture()
1553 1553
1554 1554
1555 1555 @pytest.fixture()
1556 1556 def context_stub():
1557 1557 """
1558 1558 Stub context object.
1559 1559 """
1560 1560 context = pyramid.testing.DummyResource()
1561 1561 return context
1562 1562
1563 1563
1564 1564 @pytest.fixture()
1565 1565 def request_stub():
1566 1566 """
1567 1567 Stub request object.
1568 1568 """
1569 1569 from rhodecode.lib.base import bootstrap_request
1570 1570 request = bootstrap_request(scheme='https')
1571 1571 return request
1572 1572
1573 1573
1574 1574 @pytest.fixture()
1575 1575 def config_stub(request, request_stub):
1576 1576 """
1577 1577 Set up pyramid.testing and return the Configurator.
1578 1578 """
1579 1579 from rhodecode.lib.base import bootstrap_config
1580 1580 config = bootstrap_config(request=request_stub)
1581 1581
1582 1582 @request.addfinalizer
1583 1583 def cleanup():
1584 1584 pyramid.testing.tearDown()
1585 1585
1586 1586 return config
1587 1587
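# An illustrative sketch of `config_stub`, assuming bootstrap_config returns a
# regular pyramid Configurator; the route name and pattern are arbitrary.
def test_example_config_stub(config_stub):
    config_stub.add_route('example_route', '/example')
    config_stub.commit()
    # pyramid.testing.tearDown() runs in the finalizer registered above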
1588 1588
1589 1589 @pytest.fixture()
1590 1590 def StubIntegrationType():
1591 1591 class _StubIntegrationType(IntegrationTypeBase):
1592 1592 """ Test integration type class """
1593 1593
1594 1594 key = 'test'
1595 1595 display_name = 'Test integration type'
1596 1596 description = 'A test integration type for testing'
1597 1597
1598 1598 @classmethod
1599 1599 def icon(cls):
1600 1600 return 'test_icon_html_image'
1601 1601
1602 1602 def __init__(self, settings):
1603 1603 super(_StubIntegrationType, self).__init__(settings)
1604 1604 self.sent_events = [] # for testing
1605 1605
1606 1606 def send_event(self, event):
1607 1607 self.sent_events.append(event)
1608 1608
1609 1609 def settings_schema(self):
1610 1610 class SettingsSchema(colander.Schema):
1611 1611 test_string_field = colander.SchemaNode(
1612 1612 colander.String(),
1613 1613 missing=colander.required,
1614 1614 title='test string field',
1615 1615 )
1616 1616 test_int_field = colander.SchemaNode(
1617 1617 colander.Int(),
1618 1618 title='some integer setting',
1619 1619 )
1620 1620 return SettingsSchema()
1621 1621
1622 1622
1623 1623 integration_type_registry.register_integration_type(_StubIntegrationType)
1624 1624 return _StubIntegrationType
1625 1625
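# An illustrative sketch exercising the stub's colander schema directly; the
# deserialized values mirror `stub_integration_settings` defined below.
def test_example_stub_schema(StubIntegrationType, stub_integration_settings):
    integration = StubIntegrationType(settings=stub_integration_settings)
    schema = integration.settings_schema()
    cleaned = schema.deserialize({
        'test_string_field': 'some data',
        'test_int_field': '100',  # colander.Int() coerces the string to int
    })
    assert cleaned['test_int_field'] == 100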
1626 1626
1627 1627 @pytest.fixture()
1628 1628 def stub_integration_settings():
1629 1629 return {
1630 1630 'test_string_field': 'some data',
1631 1631 'test_int_field': 100,
1632 1632 }
1633 1633
1634 1634
1635 1635 @pytest.fixture()
1636 1636 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1637 1637 stub_integration_settings):
1638 1638 integration = IntegrationModel().create(
1639 1639 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1640 1640 name='test repo integration',
1641 1641 repo=repo_stub, repo_group=None, child_repos_only=None)
1642 1642
1643 1643 @request.addfinalizer
1644 1644 def cleanup():
1645 1645 IntegrationModel().delete(integration)
1646 1646
1647 1647 return integration
1648 1648
1649 1649
1650 1650 @pytest.fixture()
1651 1651 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1652 1652 stub_integration_settings):
1653 1653 integration = IntegrationModel().create(
1654 1654 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1655 1655 name='test repogroup integration',
1656 1656 repo=None, repo_group=test_repo_group, child_repos_only=True)
1657 1657
1658 1658 @request.addfinalizer
1659 1659 def cleanup():
1660 1660 IntegrationModel().delete(integration)
1661 1661
1662 1662 return integration
1663 1663
1664 1664
1665 1665 @pytest.fixture()
1666 1666 def repogroup_recursive_integration_stub(request, test_repo_group,
1667 1667 StubIntegrationType, stub_integration_settings):
1668 1668 integration = IntegrationModel().create(
1669 1669 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1670 1670 name='test recursive repogroup integration',
1671 1671 repo=None, repo_group=test_repo_group, child_repos_only=False)
1672 1672
1673 1673 @request.addfinalizer
1674 1674 def cleanup():
1675 1675 IntegrationModel().delete(integration)
1676 1676
1677 1677 return integration
1678 1678
1679 1679
1680 1680 @pytest.fixture()
1681 1681 def global_integration_stub(request, StubIntegrationType,
1682 1682 stub_integration_settings):
1683 1683 integration = IntegrationModel().create(
1684 1684 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1685 1685 name='test global integration',
1686 1686 repo=None, repo_group=None, child_repos_only=None)
1687 1687
1688 1688 @request.addfinalizer
1689 1689 def cleanup():
1690 1690 IntegrationModel().delete(integration)
1691 1691
1692 1692 return integration
1693 1693
1694 1694
1695 1695 @pytest.fixture()
1696 1696 def root_repos_integration_stub(request, StubIntegrationType,
1697 1697 stub_integration_settings):
1698 1698 integration = IntegrationModel().create(
1699 1699 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1700 1700 name='test root repos integration',
1701 1701 repo=None, repo_group=None, child_repos_only=True)
1702 1702
1703 1703 @request.addfinalizer
1704 1704 def cleanup():
1705 1705 IntegrationModel().delete(integration)
1706 1706
1707 1707 return integration
1708 1708
1709 1709
1710 1710 @pytest.fixture()
1711 1711 def local_dt_to_utc():
1712 1712 def _factory(dt):
1713 1713 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1714 1714 dateutil.tz.tzutc()).replace(tzinfo=None)
1715 1715 return _factory
1716 1716
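# A brief illustrative example of the factory above; the datetime is arbitrary.
def test_example_local_dt_to_utc(local_dt_to_utc):
    naive_local = datetime.datetime(2023, 1, 1, 12, 0, 0)
    as_utc = local_dt_to_utc(naive_local)
    # the result is naive again, shifted from local time to UTC
    assert as_utc.tzinfo is None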
1717 1717
1718 1718 @pytest.fixture()
1719 1719 def disable_anonymous_user(request, baseapp):
1720 1720 set_anonymous_access(False)
1721 1721
1722 1722 @request.addfinalizer
1723 1723 def cleanup():
1724 1724 set_anonymous_access(True)
1725 1725
1726 1726
1727 1727 @pytest.fixture(scope='module')
1728 1728 def rc_fixture(request):
1729 1729 return Fixture()
1730 1730
1731 1731
1732 1732 @pytest.fixture()
1733 1733 def repo_groups(request):
1734 1734 fixture = Fixture()
1735 1735
1736 1736 session = Session()
1737 1737 zombie_group = fixture.create_repo_group('zombie')
1738 1738 parent_group = fixture.create_repo_group('parent')
1739 1739 child_group = fixture.create_repo_group('parent/child')
1740 1740 groups_in_db = session.query(RepoGroup).all()
1741 1741 assert len(groups_in_db) == 3
1742 1742 assert child_group.group_parent_id == parent_group.group_id
1743 1743
1744 1744 @request.addfinalizer
1745 1745 def cleanup():
1746 1746 fixture.destroy_repo_group(zombie_group)
1747 1747 fixture.destroy_repo_group(child_group)
1748 1748 fixture.destroy_repo_group(parent_group)
1749 1749
1750 1750 return zombie_group, parent_group, child_group
@@ -1,491 +1,489 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import multiprocessing
21 21 import os
22 22
23 23 import mock
24 24 import py
25 25 import pytest
26 26
27 27 from rhodecode.lib import caching_query
28 28 from rhodecode.lib import utils
29 29 from rhodecode.lib.str_utils import safe_bytes
30 30 from rhodecode.model import settings
31 31 from rhodecode.model import db
32 32 from rhodecode.model import meta
33 33 from rhodecode.model.repo import RepoModel
34 34 from rhodecode.model.repo_group import RepoGroupModel
35 35 from rhodecode.model.settings import UiSetting, SettingsModel
36 36 from rhodecode.tests.fixture import Fixture
37 37 from rhodecode_tools.lib.hash_utils import md5_safe
38 38 from rhodecode.lib.ext_json import json
39 39
40 40 fixture = Fixture()
41 41
42 42
43 43 def extract_hooks(config):
44 44 """Return a dictionary with the hook entries of the given config."""
45 45 hooks = {}
46 46 config_items = config.serialize()
47 47 for section, name, value in config_items:
48 48 if section != 'hooks':
49 49 continue
50 50 hooks[name] = value
51 51
52 52 return hooks
53 53
54 54
55 55 def disable_hooks(request, hooks):
56 56 """Disables the given hooks from the UI settings."""
57 57 session = meta.Session()
58 58
59 59 model = SettingsModel()
60 60 for hook_key in hooks:
61 61 sett = model.get_ui_by_key(hook_key)
62 62 sett.ui_active = False
63 63 session.add(sett)
64 64
65 65 # Invalidate cache
66 66 ui_settings = session.query(db.RhodeCodeUi).options(
67 67 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
68 68
69 69 meta.cache.invalidate(
70 70 ui_settings, {},
71 71 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
72 72
73 73 ui_settings = session.query(db.RhodeCodeUi).options(
74 74 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
75 75
76 76 meta.cache.invalidate(
77 77 ui_settings, {},
78 78 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
79 79
80 80 @request.addfinalizer
81 81 def rollback():
82 82 session.rollback()
83 83
84 84
85 85 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
86 86 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
87 87 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
88 88 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
89 89 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
90 90 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
91 91 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
92 92
93 93 HG_HOOKS = frozenset(
94 94 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
95 95 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
96 96
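# An illustrative sketch tying the helpers above together; it mirrors the
# first parametrized case below, where nothing is disabled and every Mercurial
# hook key from HG_HOOKS should be present in the generated config.
def test_example_all_hg_hooks_enabled(baseapp):
    config = utils.make_db_config()
    hooks = extract_hooks(config)
    assert HG_HOOKS.issubset(hooks.keys())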
97 97
98 98 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
99 99 ([], HG_HOOKS),
100 100 (HG_HOOKS, []),
101 101
102 102 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
103 103
104 104 # When a pull/push hook is disabled, its pre-pull/push counterpart should
105 105 # be disabled too.
106 106 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
107 107 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
108 108 HOOK_PUSH_KEY]),
109 109 ])
110 110 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
111 111 expected_hooks):
112 112 disable_hooks(request, disabled_hooks)
113 113
114 114 config = utils.make_db_config()
115 115 hooks = extract_hooks(config)
116 116
117 117 assert set(hooks.keys()).intersection(HG_HOOKS) == set(expected_hooks)
118 118
119 119
120 120 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
121 121 ([], ['pull', 'push']),
122 122 ([HOOK_PUSH], ['pull']),
123 123 ([HOOK_PULL], ['push']),
124 124 ([HOOK_PULL, HOOK_PUSH], []),
125 125 ])
126 126 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
127 127 hook_keys = (HOOK_PUSH, HOOK_PULL)
128 128 ui_settings = [
129 129 ('hooks', key, 'some value', key not in disabled_hooks)
130 130 for key in hook_keys]
131 131
132 132 result = utils.get_enabled_hook_classes(ui_settings)
133 133 assert sorted(result) == expected_hooks
134 134
135 135
136 136 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
137 137 _stub_git_repo(tmpdir.ensure('repo', dir=True))
138 138 repos = list(utils.get_filesystem_repos(str(tmpdir)))
139 139 assert repos == [('repo', ('git', tmpdir.join('repo')))]
140 140
141 141
142 142 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
143 143 tmpdir.ensure('not-a-repo', dir=True)
144 144 repos = list(utils.get_filesystem_repos(str(tmpdir)))
145 145 assert repos == []
146 146
147 147
148 148 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
149 149 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
150 150 repos = list(utils.get_filesystem_repos(str(tmpdir)))
151 151 assert repos == []
152 152
153 153
154 154 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
155 155 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
156 156 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
157 157 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
158 158
159 159
160 160 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
161 161 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
162 162 repos = list(utils.get_filesystem_repos(str(tmpdir)))
163 163 assert repos == []
164 164
165 165
166 166 def test_get_filesystem_repos_skips_files(tmpdir):
167 167 tmpdir.ensure('test-file')
168 168 repos = list(utils.get_filesystem_repos(str(tmpdir)))
169 169 assert repos == []
170 170
171 171
172 172 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
173 173 removed_repo_name = 'rm__00000000_000000_000000__.stub'
174 174 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
175 175 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
176 176 repos = list(utils.get_filesystem_repos(str(tmpdir)))
177 177 assert repos == []
178 178
179 179
180 180 def _stub_git_repo(repo_path):
181 181 """
182 182 Make `repo_path` look like a Git repository.
183 183 """
184 184 repo_path.ensure('.git', dir=True)
185 185
186 186
187 187 def test_get_dirpaths_returns_all_paths_on_str(tmpdir):
188 188 tmpdir.ensure('test-file')
189 189 tmpdir.ensure('test-file-1')
190 190 tmp_path = str(tmpdir)
191 191 dirpaths = utils.get_dirpaths(tmp_path)
192 192 assert list(sorted(dirpaths)) == ['test-file', 'test-file-1']
193 193
194 194
195 195 def test_get_dirpaths_returns_all_paths_on_bytes(tmpdir):
196 196 tmpdir.ensure('test-file-bytes')
197 197 tmp_path = str(tmpdir)
198 198 dirpaths = utils.get_dirpaths(safe_bytes(tmp_path))
199 199 assert list(sorted(dirpaths)) == [b'test-file-bytes']
200 200
201 201
202 202 def test_get_dirpaths_returns_all_paths_bytes(
203 203 tmpdir, platform_encodes_filenames):
204 204 if platform_encodes_filenames:
205 205 pytest.skip("This platform seems to encode filenames.")
206 206 tmpdir.ensure('repo-a-umlaut-\xe4')
207 207 dirpaths = utils.get_dirpaths(str(tmpdir))
208 208 assert dirpaths == ['repo-a-umlaut-\xe4']
209 209
210 210
211 211 def test_get_dirpaths_skips_paths_it_cannot_decode(
212 212 tmpdir, platform_encodes_filenames):
213 213 if platform_encodes_filenames:
214 214 pytest.skip("This platform seems to encode filenames.")
215 215 path_with_latin1 = 'repo-a-umlaut-\xe4'
216 216 tmp_path = str(tmpdir.ensure(path_with_latin1))
217 217 dirpaths = utils.get_dirpaths(tmp_path)
218 218 assert dirpaths == []
219 219
220 220
221 221 @pytest.fixture(scope='session')
222 222 def platform_encodes_filenames():
223 223 """
224 224 Boolean indicator if the current platform changes filename encodings.
225 225 """
226 226 path_with_latin1 = 'repo-a-umlaut-\xe4'
227 227 tmpdir = py.path.local.mkdtemp()
228 228 tmpdir.ensure(path_with_latin1)
229 229 read_path = tmpdir.listdir()[0].basename
230 230 tmpdir.remove()
231 231 return path_with_latin1 != read_path
232 232
233 233
234
235
236 234 def test_repo2db_mapper_groups(repo_groups):
237 235 session = meta.Session()
238 236 zombie_group, parent_group, child_group = repo_groups
239 237 zombie_path = os.path.join(
240 238 RepoGroupModel().repos_path, zombie_group.full_path)
241 239 os.rmdir(zombie_path)
242 240
243 241 # Avoid removing test repos when calling repo2db_mapper
244 242 repo_list = {
245 243 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
246 244 }
247 245 utils.repo2db_mapper(repo_list, remove_obsolete=True)
248 246
249 247 groups_in_db = session.query(db.RepoGroup).all()
250 248 assert child_group in groups_in_db
251 249 assert parent_group in groups_in_db
252 250 assert zombie_path not in groups_in_db
253 251
254 252
255 253 def test_repo2db_mapper_enables_largefiles(backend):
256 254 repo = backend.create_repo()
257 255 repo_list = {repo.repo_name: 'test'}
258 256 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
259 257 utils.repo2db_mapper(repo_list, remove_obsolete=False)
260 258 _, kwargs = scm_mock.call_args
261 259 assert kwargs['config'].get('extensions', 'largefiles') == ''
262 260
263 261
264 262 @pytest.mark.backends("git", "svn")
265 263 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
266 264 repo = backend.create_repo()
267 265 repo_list = {repo.repo_name: 'test'}
268 266 utils.repo2db_mapper(repo_list, remove_obsolete=False)
269 267
270 268
271 269 @pytest.mark.backends("git", "svn")
272 270 def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
273 271 repo = backend.create_repo()
274 272 RepoModel().delete(repo, fs_remove=False)
275 273 meta.Session().commit()
276 274 repo_list = {repo.repo_name: repo.scm_instance()}
277 275 utils.repo2db_mapper(repo_list, remove_obsolete=False)
278 276
279 277
280 278 class TestPasswordChanged(object):
281 279
282 280 def setup_method(self):
283 281 self.session = {
284 282 'rhodecode_user': {
285 283 'password': '0cc175b9c0f1b6a831c399e269772661'
286 284 }
287 285 }
288 286 self.auth_user = mock.Mock()
289 287 self.auth_user.username = 'test'
290 288 self.auth_user.password = 'abc123'
291 289
292 290 def test_returns_false_for_default_user(self):
293 291 self.auth_user.username = db.User.DEFAULT_USER
294 292 result = utils.password_changed(self.auth_user, self.session)
295 293 assert result is False
296 294
297 295 def test_returns_false_if_password_was_not_changed(self):
298 296 self.session['rhodecode_user']['password'] = md5_safe(
299 297 self.auth_user.password)
300 298 result = utils.password_changed(self.auth_user, self.session)
301 299 assert result is False
302 300
303 301 def test_returns_true_if_password_was_changed(self):
304 302 result = utils.password_changed(self.auth_user, self.session)
305 303 assert result is True
306 304
307 305 def test_returns_true_if_auth_user_password_is_empty(self):
308 306 self.auth_user.password = None
309 307 result = utils.password_changed(self.auth_user, self.session)
310 308 assert result is True
311 309
312 310 def test_returns_true_if_session_password_is_empty(self):
313 311 self.session['rhodecode_user'].pop('password')
314 312 result = utils.password_changed(self.auth_user, self.session)
315 313 assert result is True
316 314
317 315
318 316 class TestReadOpenSourceLicenses(object):
319 317 def test_success(self):
320 318 utils._license_cache = None
321 319 json_data = '''
322 320 {
323 321 "python2.7-pytest-2.7.1": {"UNKNOWN": null},
324 322 "python2.7-Markdown-2.6.2": {
325 323 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
326 324 }
327 325 }
328 326 '''
329 327 resource_string_patch = mock.patch.object(
330 328 utils.pkg_resources, 'resource_string', return_value=json_data)
331 329 with resource_string_patch:
332 330 result = utils.read_opensource_licenses()
333 331 assert result == json.loads(json_data)
334 332
335 333 def test_caching(self):
336 334 utils._license_cache = {
337 335 "python2.7-pytest-2.7.1": {
338 336 "UNKNOWN": None
339 337 },
340 338 "python2.7-Markdown-2.6.2": {
341 339 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
342 340 }
343 341 }
344 342 resource_patch = mock.patch.object(
345 343 utils.pkg_resources, 'resource_string', side_effect=Exception)
346 344 json_patch = mock.patch.object(
347 345 utils.json, 'loads', side_effect=Exception)
348 346
349 347 with resource_patch as resource_mock, json_patch as json_mock:
350 348 result = utils.read_opensource_licenses()
351 349
352 350 assert resource_mock.call_count == 0
353 351 assert json_mock.call_count == 0
354 352 assert result == utils._license_cache
355 353
356 354 def test_licenses_file_contains_no_unknown_licenses(self):
357 355 utils._license_cache = None
358 356 result = utils.read_opensource_licenses()
359 357
360 358 for license_data in result:
361 359 if isinstance(license_data["license"], list):
362 360 for lic_data in license_data["license"]:
363 361 assert 'UNKNOWN' not in lic_data["fullName"]
364 362 else:
365 363 full_name = license_data.get("fullName") or license_data
366 364 assert 'UNKNOWN' not in full_name
367 365
368 366
369 367 class TestMakeDbConfig(object):
370 368 def test_data_from_config_data_from_db_returned(self):
371 369 test_data = [
372 370 ('section1', 'option1', 'value1'),
373 371 ('section2', 'option2', 'value2'),
374 372 ('section3', 'option3', 'value3'),
375 373 ]
376 374 with mock.patch.object(utils, 'config_data_from_db') as config_mock:
377 375 config_mock.return_value = test_data
378 376 kwargs = {'clear_session': False, 'repo': 'test_repo'}
379 377 result = utils.make_db_config(**kwargs)
380 378 config_mock.assert_called_once_with(**kwargs)
381 379 for section, option, expected_value in test_data:
382 380 value = result.get(section, option)
383 381 assert value == expected_value
384 382
385 383
386 384 class TestConfigDataFromDb(object):
387 385 def test_config_data_from_db_returns_active_settings(self):
388 386 test_data = [
389 387 UiSetting('section1', 'option1', 'value1', True),
390 388 UiSetting('section2', 'option2', 'value2', True),
391 389 UiSetting('section3', 'option3', 'value3', False),
392 390 ]
393 391 repo_name = 'test_repo'
394 392
395 393 model_patch = mock.patch.object(settings, 'VcsSettingsModel')
396 394 hooks_patch = mock.patch.object(
397 395 utils, 'get_enabled_hook_classes',
398 396 return_value=['pull', 'push', 'repo_size'])
399 397 with model_patch as model_mock, hooks_patch:
400 398 instance_mock = mock.Mock()
401 399 model_mock.return_value = instance_mock
402 400 instance_mock.get_ui_settings.return_value = test_data
403 401 result = utils.config_data_from_db(
404 402 clear_session=False, repo=repo_name)
405 403
406 404 self._assert_repo_name_passed(model_mock, repo_name)
407 405
408 406 expected_result = [
409 407 ('section1', 'option1', 'value1'),
410 408 ('section2', 'option2', 'value2'),
411 409 ]
412 410 assert result == expected_result
413 411
414 412 def _assert_repo_name_passed(self, model_mock, repo_name):
415 413 assert model_mock.call_count == 1
416 414 call_args, call_kwargs = model_mock.call_args
417 415 assert call_kwargs['repo'] == repo_name
418 416
419 417
420 418 class TestIsDirWritable(object):
421 419 def test_returns_false_when_not_writable(self):
422 420 with mock.patch('builtins.open', side_effect=OSError):
423 421 assert not utils._is_dir_writable('/stub-path')
424 422
425 423 def test_returns_true_when_writable(self, tmpdir):
426 424 assert utils._is_dir_writable(str(tmpdir))
427 425
428 426 def test_is_safe_against_race_conditions(self, tmpdir):
429 427 workers = multiprocessing.Pool()
430 428 directories = [str(tmpdir)] * 10
431 429 workers.map(utils._is_dir_writable, directories)
432 430
433 431
434 432 class TestGetEnabledHooks(object):
435 433 def test_only_active_hooks_are_enabled(self):
436 434 ui_settings = [
437 435 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
438 436 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
439 437 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False)
440 438 ]
441 439 result = utils.get_enabled_hook_classes(ui_settings)
442 440 assert result == ['push', 'repo_size']
443 441
444 442 def test_all_hooks_are_enabled(self):
445 443 ui_settings = [
446 444 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
447 445 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
448 446 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True)
449 447 ]
450 448 result = utils.get_enabled_hook_classes(ui_settings)
451 449 assert result == ['push', 'repo_size', 'pull']
452 450
453 451 def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
454 452 ui_settings = []
455 453 result = utils.get_enabled_hook_classes(ui_settings)
456 454 assert result == []
457 455
458 456
459 457 def test_obfuscate_url_pw():
460 458 from rhodecode.lib.utils2 import obfuscate_url_pw
461 459 engine = u'/home/repos/malmö'
462 460 assert obfuscate_url_pw(engine)
463 461
464 462
465 463 @pytest.mark.parametrize("test_ua, expected", [
466 464 ("", ""),
467 465 ('"quoted"', 'quoted'),
468 466 ('internal-merge', 'internal-merge'),
469 467 ('hg/internal-merge', 'hg/internal-merge'),
470 468 ('git/internal-merge', 'git/internal-merge'),
471 469
472 470 # git
473 471 ('git/2.10.1 (Apple Git-78)', 'git/2.10.1'),
474 472 ('GiT/2.37.2.windows.2', 'git/2.37.2'),
475 473 ('git/2.35.1 (Microsoft Windows NT 10.0.19044.0; Win32NT x64) CLR/4.0.30319 VS16/16.0.0', 'git/2.35.1'),
476 474 ('ssh-user-agent', 'ssh-user-agent'),
477 475 ('git/ssh-user-agent', 'git/ssh-user-agent'),
478 476
479 477
480 478 # hg
481 479 ('mercurial/proto-1.0 (Mercurial 4.2)', 'mercurial/4.2'),
482 480 ('mercurial/proto-1.0', ''),
483 481 ('mercurial/proto-1.0 (Mercurial 3.9.2)', 'mercurial/3.9.2'),
484 482 ('mercurial/ssh-user-agent', 'mercurial/ssh-user-agent'),
485 483 ('mercurial/proto-1.0 (Mercurial 5.8rc0)', 'mercurial/5.8rc0'),
486 484
487 485
488 486 ])
489 487 def test_user_agent_normalizer(test_ua, expected):
490 488 from rhodecode.lib.utils2 import user_agent_normalizer
491 489 assert user_agent_normalizer(test_ua, safe=False) == expected