feat(upgrade): added feature to bulk-upgrade hooks from 4.X -> 5.X; fixes RCCE-34
super-admin
r5275:c32427b8 default
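The core of this change threads a force_hooks_rebuild flag through repo2db_mapper in both the rescan_repos API call and the admin "mapping" rescan view, so a repository rescan also rewrites each repository's hook scripts, which is what upgrades 4.X-era hooks to the 5.X format in bulk. As a rough sketch of how a super-admin might trigger that after upgrading (the server URL and token below are placeholders, and the request envelope follows the usual RhodeCode JSON-RPC API conventions):

# Hypothetical sketch only: trigger the repo rescan, which now also
# force-rebuilds per-repository hooks. URL and token are placeholders.
import requests

API_URL = "https://rhodecode.example.com/_admin/api"   # placeholder
AUTH_TOKEN = "<super-admin auth token>"                 # placeholder

payload = {
    "id": 1,
    "auth_token": AUTH_TOKEN,
    "method": "rescan_repos",
    "args": {"remove_obsolete": False},
}

response = requests.post(API_URL, json=payload, timeout=120)
response.raise_for_status()
# On success the added/removed repository names are returned under "result".
print(response.json()["result"])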
@@ -1,191 +1,191 @@
# required for pushd to work..
SHELL = /bin/bash


# set by: PATH_TO_OUTDATED_PACKAGES=/some/path/outdated_packages.py
OUTDATED_PACKAGES = ${PATH_TO_OUTDATED_PACKAGES}

.PHONY: clean
## Cleanup compiled and cache py files
clean:
	make test-clean
	find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' -o -iname '*.orig' \) -exec rm '{}' ';'
	find . -type d -name "build" -prune -exec rm -rf '{}' ';'


.PHONY: test
## run test-clean and tests
test:
	make test-clean
	make test-only


.PHONY: test-clean
## run test-clean and tests
test-clean:
	rm -rf coverage.xml htmlcov junit.xml pylint.log result
	find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
	find . -type f \( -iname '.coverage.*' \) -exec rm '{}' ';'


.PHONY: test-only
## Run tests only without cleanup
test-only:
	PYTHONHASHSEED=random \
	py.test -x -vv -r xw -p no:sugar \
	--cov-report=term-missing --cov-report=html \
	--cov=rhodecode rhodecode


.PHONY: test-only-mysql
## run tests against mysql
test-only-mysql:
	PYTHONHASHSEED=random \
	py.test -x -vv -r xw -p no:sugar \
	--cov-report=term-missing --cov-report=html \
	--ini-config-override='{"app:main": {"sqlalchemy.db1.url": "mysql://root:qweqwe@localhost/rhodecode_test?charset=utf8"}}' \
	--cov=rhodecode rhodecode


.PHONY: test-only-postgres
## run tests against postgres
test-only-postgres:
	PYTHONHASHSEED=random \
	py.test -x -vv -r xw -p no:sugar \
	--cov-report=term-missing --cov-report=html \
	--ini-config-override='{"app:main": {"sqlalchemy.db1.url": "postgresql://postgres:qweqwe@localhost/rhodecode_test"}}' \
	--cov=rhodecode rhodecode

.PHONY: ruff-check
## run a ruff analysis
ruff-check:
	ruff check --ignore F401 --ignore I001 --ignore E402 --ignore E501 --ignore F841 --exclude rhodecode/lib/dbmigrate --exclude .eggs --exclude .dev .


.PHONY: docs
## build docs
docs:
	(cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make clean html)


.PHONY: docs-clean
## Cleanup docs
docs-clean:
	(cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make clean)


.PHONY: docs-cleanup
## Cleanup docs
docs-cleanup:
	(cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make cleanup)


.PHONY: web-build
## Build JS packages static/js
web-build:
	docker run -it --rm -v $(PWD):/project --workdir=/project rhodecode/static-files-build:16 -c "npm install && /project/node_modules/.bin/grunt"
	# run static file check
	./rhodecode/tests/scripts/static-file-check.sh rhodecode/public/
	rm -rf node_modules


.PHONY: pip-packages
## Show outdated packages
pip-packages:
	python ${OUTDATED_PACKAGES}


.PHONY: build
## Build sdist/egg
build:
	python -m build


.PHONY: dev-sh
## make dev-sh
dev-sh:
	sudo echo "deb [trusted=yes] https://apt.fury.io/rsteube/ /" | sudo tee -a "/etc/apt/sources.list.d/fury.list"
	sudo apt-get update
	sudo apt-get install -y zsh carapace-bin
	rm -rf /home/rhodecode/.oh-my-zsh
	curl https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh | sh
	echo "source <(carapace _carapace)" > /home/rhodecode/.zsrc
	PROMPT='%(?.%F{green}√.%F{red}?%?)%f %B%F{240}%1~%f%b %# ' zsh


.PHONY: dev-cleanup
## Cleanup: pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y
dev-cleanup:
	pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y
	rm -rf /tmp/*


.PHONY: dev-env
## make dev-env based on the requirements files and install develop of packages
dev-env:
	pip install build virtualenv
	pushd ../rhodecode-vcsserver/ && make dev-env && popd
	pip wheel --wheel-dir=/home/rhodecode/.cache/pip/wheels -r requirements.txt -r requirements_rc_tools.txt -r requirements_test.txt -r requirements_debug.txt
	pip install --no-index --find-links=/home/rhodecode/.cache/pip/wheels -r requirements.txt -r requirements_rc_tools.txt -r requirements_test.txt -r requirements_debug.txt
	pip install -e .


.PHONY: sh
## shortcut for make dev-sh dev-env
sh:
	(make dev-env; make dev-sh)


.PHONY: dev-srv
## run develop server instance, docker exec -it $(docker ps -q --filter 'name=dev-enterprise-ce') /bin/bash
dev-srv:
	pserve --reload .dev/dev.ini


.PHONY: dev-srv-g
## run gunicorn multi process workers
dev-srv-g:
-	gunicorn --paste .dev/dev.ini --bind=0.0.0.0:10020 --config=.dev/gunicorn_config.py --timeout=120
+	gunicorn --paste .dev/dev.ini --bind=0.0.0.0:10020 --config=.dev/gunicorn_config.py --timeout=120 --reload


# Default command on calling make
.DEFAULT_GOAL := show-help

.PHONY: show-help
show-help:
	@echo "$$(tput bold)Available rules:$$(tput sgr0)"
	@echo
	@sed -n -e "/^## / { \
	h; \
	s/.*//; \
	:doc" \
	-e "H; \
	n; \
	s/^## //; \
	t doc" \
	-e "s/:.*//; \
	G; \
	s/\\n## /---/; \
	s/\\n/ /g; \
	p; \
	}" ${MAKEFILE_LIST} \
	| LC_ALL='C' sort --ignore-case \
	| awk -F '---' \
	-v ncol=$$(tput cols) \
	-v indent=19 \
	-v col_on="$$(tput setaf 6)" \
	-v col_off="$$(tput sgr0)" \
	'{ \
	printf "%s%*s%s ", col_on, -indent, $$1, col_off; \
	n = split($$2, words, " "); \
	line_length = ncol - indent; \
	for (i = 1; i <= n; i++) { \
	line_length -= length(words[i]) + 1; \
	if (line_length <= 0) { \
	line_length = ncol - indent - length(words[i]) - 1; \
	printf "\n%*s ", -indent, " "; \
	} \
	printf "%s ", words[i]; \
	} \
	printf "\n"; \
	}'
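For orientation, the show-help target above implements the common self-documenting Makefile pattern: every "## description" comment line directly above a target becomes an entry in the default help output (.DEFAULT_GOAL := show-help). A rough standalone sketch of what the sed/awk pipeline extracts, written in Python rather than shell and ignoring the sorting and column wrapping; the Makefile path is an assumption:

# Rough sketch only: approximates what the sed/awk pipeline in `show-help`
# does; it is not the Makefile's actual implementation.
import re

def collect_help(makefile_path="Makefile"):  # path is an assumption
    entries = []
    doc_lines = []
    with open(makefile_path) as fh:
        for raw in fh:
            line = raw.rstrip("\n")
            if line.startswith("## "):
                doc_lines.append(line[3:])           # accumulate doc comment(s)
            elif doc_lines and re.match(r"^[A-Za-z0-9_.-]+:", line):
                target = line.split(":", 1)[0]       # target name before ':'
                entries.append((target, " ".join(doc_lines)))
                doc_lines = []
            else:
                doc_lines = []
    return entries

if __name__ == "__main__":
    for target, doc in collect_help():
        print(f"{target:<19} {doc}")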
@@ -1,424 +1,424 @@
# Copyright (C) 2011-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import logging
import itertools
import base64

from rhodecode.api import (
    jsonrpc_method, JSONRPCError, JSONRPCForbidden, find_methods)

from rhodecode.api.utils import (
    Optional, OAttr, has_superadmin_permission, get_user_or_error)
from rhodecode.lib.utils import repo2db_mapper
from rhodecode.lib import system_info
from rhodecode.lib import user_sessions
from rhodecode.lib import exc_tracking
from rhodecode.lib.ext_json import json
from rhodecode.lib.utils2 import safe_int
from rhodecode.model.db import UserIpMap
from rhodecode.model.scm import ScmModel
from rhodecode.model.settings import VcsSettingsModel
from rhodecode.apps.file_store import utils
from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
    FileOverSizeException

log = logging.getLogger(__name__)


@jsonrpc_method()
def get_server_info(request, apiuser):
    """
    Returns the |RCE| server information.

    This includes the running version of |RCE| and all installed
    packages. This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        'modules': [<module name>,...]
        'py_version': <python version>,
        'platform': <platform type>,
        'rhodecode_version': <rhodecode version>
      }
      error : null
    """

    if not has_superadmin_permission(apiuser):
        raise JSONRPCForbidden()

    server_info = ScmModel().get_server_info(request.environ)
    # rhodecode-index requires those

    server_info['index_storage'] = server_info['search']['value']['location']
    server_info['storage'] = server_info['storage']['value']['path']

    return server_info


@jsonrpc_method()
def get_repo_store(request, apiuser):
    """
    Returns the |RCE| repository storage information.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        'modules': [<module name>,...]
        'py_version': <python version>,
        'platform': <platform type>,
        'rhodecode_version': <rhodecode version>
      }
      error : null
    """

    if not has_superadmin_permission(apiuser):
        raise JSONRPCForbidden()

    path = VcsSettingsModel().get_repos_location()
    return {"path": path}


@jsonrpc_method()
def get_ip(request, apiuser, userid=Optional(OAttr('apiuser'))):
    """
    Displays the IP Address as seen from the |RCE| server.

    * This command displays the IP Address, as well as all the defined IP
      addresses for the specified user. If the ``userid`` is not set, the
      data returned is for the user calling the method.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from |authtoken|.
    :type apiuser: AuthUser
    :param userid: Sets the userid for which associated IP Address data
        is returned.
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        "server_ip_addr": "<ip_from_clien>",
        "user_ips": [
          {
            "ip_addr": "<ip_with_mask>",
            "ip_range": ["<start_ip>", "<end_ip>"],
          },
          ...
        ]
      }

    """
    if not has_superadmin_permission(apiuser):
        raise JSONRPCForbidden()

    userid = Optional.extract(userid, evaluate_locals=locals())
    userid = getattr(userid, 'user_id', userid)

    user = get_user_or_error(userid)
    ips = UserIpMap.query().filter(UserIpMap.user == user).all()
    return {
        'server_ip_addr': request.rpc_ip_addr,
        'user_ips': ips
    }


@jsonrpc_method()
def rescan_repos(request, apiuser, remove_obsolete=Optional(False)):
    """
    Triggers a rescan of the specified repositories.

    * If the ``remove_obsolete`` option is set, it also deletes repositories
      that are found in the database but not on the file system, so called
      "clean zombies".

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param remove_obsolete: Deletes repositories from the database that
        are not found on the filesystem.
    :type remove_obsolete: Optional(``True`` | ``False``)

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        'added': [<added repository name>,...]
        'removed': [<removed repository name>,...]
      }
      error : null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error : {
        'Error occurred during rescan repositories action'
      }

    """
    if not has_superadmin_permission(apiuser):
        raise JSONRPCForbidden()

    try:
        rm_obsolete = Optional.extract(remove_obsolete)
        added, removed = repo2db_mapper(ScmModel().repo_scan(),
-                                        remove_obsolete=rm_obsolete)
+                                        remove_obsolete=rm_obsolete, force_hooks_rebuild=True)
        return {'added': added, 'removed': removed}
    except Exception:
        log.exception('Failed to run repo rescann')
        raise JSONRPCError(
            'Error occurred during rescan repositories action'
        )


@jsonrpc_method()
def cleanup_sessions(request, apiuser, older_then=Optional(60)):
    """
    Triggers a session cleanup action.

    If the ``older_then`` option is set, only sessions that hasn't been
    accessed in the given number of days will be removed.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param older_then: Deletes session that hasn't been accessed
        in given number of days.
    :type older_then: Optional(int)

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
        "backend": "<type of backend>",
        "sessions_removed": <number_of_removed_sessions>
      }
      error : null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error : {
        'Error occurred during session cleanup'
      }

    """
    if not has_superadmin_permission(apiuser):
        raise JSONRPCForbidden()

    older_then = safe_int(Optional.extract(older_then)) or 60
    older_than_seconds = 60 * 60 * 24 * older_then

    config = system_info.rhodecode_config().get_value()['value']['config']
    session_model = user_sessions.get_session_handler(
        config.get('beaker.session.type', 'memory'))(config)

    backend = session_model.SESSION_TYPE
    try:
        cleaned = session_model.clean_sessions(
            older_than_seconds=older_than_seconds)
        return {'sessions_removed': cleaned, 'backend': backend}
    except user_sessions.CleanupCommand as msg:
        return {'cleanup_command': str(msg), 'backend': backend}
    except Exception as e:
        log.exception('Failed session cleanup')
        raise JSONRPCError(
            'Error occurred during session cleanup'
        )


@jsonrpc_method()
def get_method(request, apiuser, pattern=Optional('*')):
    """
    Returns list of all available API methods. By default match pattern
    os "*" but any other pattern can be specified. eg *comment* will return
    all methods with comment inside them. If just single method is matched
    returned data will also include method specification

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param pattern: pattern to match method names against
    :type pattern: Optional("*")

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      "result": [
        "changeset_comment",
        "comment_pull_request",
        "comment_commit"
      ]
      error : null

    .. code-block:: bash

      id : <id_given_in_input>
      "result": [
        "comment_commit",
        {
          "apiuser": "<RequiredType>",
          "comment_type": "<Optional:u'note'>",
          "commit_id": "<RequiredType>",
          "message": "<RequiredType>",
          "repoid": "<RequiredType>",
          "request": "<RequiredType>",
          "resolves_comment_id": "<Optional:None>",
          "status": "<Optional:None>",
          "userid": "<Optional:<OptionalAttr:apiuser>>"
        }
      ]
      error : null
    """
    from rhodecode.config.patches import inspect_getargspec
    inspect = inspect_getargspec()

    if not has_superadmin_permission(apiuser):
        raise JSONRPCForbidden()

    pattern = Optional.extract(pattern)

    matches = find_methods(request.registry.jsonrpc_methods, pattern)

    args_desc = []
    matches_keys = list(matches.keys())
    if len(matches_keys) == 1:
        func = matches[matches_keys[0]]

        argspec = inspect.getargspec(func)
        arglist = argspec[0]
        defaults = list(map(repr, argspec[3] or []))

        default_empty = '<RequiredType>'

        # kw arguments required by this method
        func_kwargs = dict(itertools.zip_longest(
            reversed(arglist), reversed(defaults), fillvalue=default_empty))
        args_desc.append(func_kwargs)

    return matches_keys + args_desc


@jsonrpc_method()
def store_exception(request, apiuser, exc_data_json, prefix=Optional('rhodecode')):
    """
    Stores sent exception inside the built-in exception tracker in |RCE| server.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser

    :param exc_data_json: JSON data with exception e.g
      {"exc_traceback": "Value `1` is not allowed", "exc_type_name": "ValueError"}
    :type exc_data_json: JSON data

    :param prefix: prefix for error type, e.g 'rhodecode', 'vcsserver', 'rhodecode-tools'
    :type prefix: Optional("rhodecode")

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      "result": {
        "exc_id": 139718459226384,
        "exc_url": "http://localhost:8080/_admin/settings/exceptions/139718459226384"
      }
      error : null
    """
    if not has_superadmin_permission(apiuser):
        raise JSONRPCForbidden()

    prefix = Optional.extract(prefix)
    exc_id = exc_tracking.generate_id()

    try:
        exc_data = json.loads(exc_data_json)
    except Exception:
        log.error('Failed to parse JSON: %r', exc_data_json)
        raise JSONRPCError('Failed to parse JSON data from exc_data_json field. '
                           'Please make sure it contains a valid JSON.')

    try:
        exc_traceback = exc_data['exc_traceback']
        exc_type_name = exc_data['exc_type_name']
        exc_value = ''
    except KeyError as err:
        raise JSONRPCError(
            f'Missing exc_traceback, or exc_type_name '
            f'in exc_data_json field. Missing: {err}')

    class ExcType:
        __name__ = exc_type_name

    exc_info = (ExcType(), exc_value, exc_traceback)

    exc_tracking._store_exception(
        exc_id=exc_id, exc_info=exc_info, prefix=prefix)

    exc_url = request.route_url(
        'admin_settings_exception_tracker_show', exception_id=exc_id)
    return {'exc_id': exc_id, 'exc_url': exc_url}
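A note on get_method above: the itertools.zip_longest over the reversed argument list and reversed defaults is what pairs each keyword argument with its default and marks everything left over as <RequiredType>, since defaults always align with the last arguments. A standalone sketch of that pairing (the function signature used here is illustrative only):

# Illustrative only: mimic how get_method pairs arguments with defaults for,
# say, def comment_commit(request, apiuser, repoid, commit_id, message, status=None)
import itertools

arglist = ['request', 'apiuser', 'repoid', 'commit_id', 'message', 'status']
defaults = [repr(None)]            # only 'status' has a default
default_empty = '<RequiredType>'

# Defaults belong to the *last* arguments, hence the double reversal;
# arguments without a default get the <RequiredType> fill value.
func_kwargs = dict(itertools.zip_longest(
    reversed(arglist), reversed(defaults), fillvalue=default_empty))

print(func_kwargs)
# {'status': 'None', 'message': '<RequiredType>', 'commit_id': '<RequiredType>',
#  'repoid': '<RequiredType>', 'apiuser': '<RequiredType>', 'request': '<RequiredType>'}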
@@ -1,714 +1,714 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 import logging
20 import logging
21 import collections
21 import collections
22
22
23 import datetime
23 import datetime
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26
26
27 import rhodecode
27 import rhodecode
28
28
29 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
29 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31 from pyramid.response import Response
31 from pyramid.response import Response
32
32
33 from rhodecode.apps._base import BaseAppView
33 from rhodecode.apps._base import BaseAppView
34 from rhodecode.apps._base.navigation import navigation_list
34 from rhodecode.apps._base.navigation import navigation_list
35 from rhodecode.apps.svn_support.config_keys import generate_config
35 from rhodecode.apps.svn_support.config_keys import generate_config
36 from rhodecode.lib import helpers as h
36 from rhodecode.lib import helpers as h
37 from rhodecode.lib.auth import (
37 from rhodecode.lib.auth import (
38 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
38 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
39 from rhodecode.lib.celerylib import tasks, run_task
39 from rhodecode.lib.celerylib import tasks, run_task
40 from rhodecode.lib.str_utils import safe_str
40 from rhodecode.lib.str_utils import safe_str
41 from rhodecode.lib.utils import repo2db_mapper
41 from rhodecode.lib.utils import repo2db_mapper
42 from rhodecode.lib.utils2 import str2bool, AttributeDict
42 from rhodecode.lib.utils2 import str2bool, AttributeDict
43 from rhodecode.lib.index import searcher_from_config
43 from rhodecode.lib.index import searcher_from_config
44
44
45 from rhodecode.model.db import RhodeCodeUi, Repository
45 from rhodecode.model.db import RhodeCodeUi, Repository
46 from rhodecode.model.forms import (ApplicationSettingsForm,
46 from rhodecode.model.forms import (ApplicationSettingsForm,
47 ApplicationUiSettingsForm, ApplicationVisualisationForm,
47 ApplicationUiSettingsForm, ApplicationVisualisationForm,
48 LabsSettingsForm, IssueTrackerPatternsForm)
48 LabsSettingsForm, IssueTrackerPatternsForm)
49 from rhodecode.model.permission import PermissionModel
49 from rhodecode.model.permission import PermissionModel
50 from rhodecode.model.repo_group import RepoGroupModel
50 from rhodecode.model.repo_group import RepoGroupModel
51
51
52 from rhodecode.model.scm import ScmModel
52 from rhodecode.model.scm import ScmModel
53 from rhodecode.model.notification import EmailNotificationModel
53 from rhodecode.model.notification import EmailNotificationModel
54 from rhodecode.model.meta import Session
54 from rhodecode.model.meta import Session
55 from rhodecode.model.settings import (
55 from rhodecode.model.settings import (
56 IssueTrackerSettingsModel, VcsSettingsModel, SettingNotFound,
56 IssueTrackerSettingsModel, VcsSettingsModel, SettingNotFound,
57 SettingsModel)
57 SettingsModel)
58
58
59
59
60 log = logging.getLogger(__name__)
60 log = logging.getLogger(__name__)
61
61
62
62
63 class AdminSettingsView(BaseAppView):
63 class AdminSettingsView(BaseAppView):
64
64
65 def load_default_context(self):
65 def load_default_context(self):
66 c = self._get_local_tmpl_context()
66 c = self._get_local_tmpl_context()
67 c.labs_active = str2bool(
67 c.labs_active = str2bool(
68 rhodecode.CONFIG.get('labs_settings_active', 'true'))
68 rhodecode.CONFIG.get('labs_settings_active', 'true'))
69 c.navlist = navigation_list(self.request)
69 c.navlist = navigation_list(self.request)
70 return c
70 return c
71
71
72 @classmethod
72 @classmethod
73 def _get_ui_settings(cls):
73 def _get_ui_settings(cls):
74 ret = RhodeCodeUi.query().all()
74 ret = RhodeCodeUi.query().all()
75
75
76 if not ret:
76 if not ret:
77 raise Exception('Could not get application ui settings !')
77 raise Exception('Could not get application ui settings !')
78 settings = {}
78 settings = {}
79 for each in ret:
79 for each in ret:
80 k = each.ui_key
80 k = each.ui_key
81 v = each.ui_value
81 v = each.ui_value
82 if k == '/':
82 if k == '/':
83 k = 'root_path'
83 k = 'root_path'
84
84
85 if k in ['push_ssl', 'publish', 'enabled']:
85 if k in ['push_ssl', 'publish', 'enabled']:
86 v = str2bool(v)
86 v = str2bool(v)
87
87
88 if k.find('.') != -1:
88 if k.find('.') != -1:
89 k = k.replace('.', '_')
89 k = k.replace('.', '_')
90
90
91 if each.ui_section in ['hooks', 'extensions']:
91 if each.ui_section in ['hooks', 'extensions']:
92 v = each.ui_active
92 v = each.ui_active
93
93
94 settings[each.ui_section + '_' + k] = v
94 settings[each.ui_section + '_' + k] = v
95 return settings
95 return settings
96
96
97 @classmethod
97 @classmethod
98 def _form_defaults(cls):
98 def _form_defaults(cls):
99 defaults = SettingsModel().get_all_settings()
99 defaults = SettingsModel().get_all_settings()
100 defaults.update(cls._get_ui_settings())
100 defaults.update(cls._get_ui_settings())
101
101
102 defaults.update({
102 defaults.update({
103 'new_svn_branch': '',
103 'new_svn_branch': '',
104 'new_svn_tag': '',
104 'new_svn_tag': '',
105 })
105 })
106 return defaults
106 return defaults
107
107
108 @LoginRequired()
108 @LoginRequired()
109 @HasPermissionAllDecorator('hg.admin')
109 @HasPermissionAllDecorator('hg.admin')
110 def settings_vcs(self):
110 def settings_vcs(self):
111 c = self.load_default_context()
111 c = self.load_default_context()
112 c.active = 'vcs'
112 c.active = 'vcs'
113 model = VcsSettingsModel()
113 model = VcsSettingsModel()
114 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
114 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
115 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
115 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
116
116
117 settings = self.request.registry.settings
117 settings = self.request.registry.settings
118 c.svn_proxy_generate_config = settings[generate_config]
118 c.svn_proxy_generate_config = settings[generate_config]
119
119
120 defaults = self._form_defaults()
120 defaults = self._form_defaults()
121
121
122 model.create_largeobjects_dirs_if_needed(defaults['paths_root_path'])
122 model.create_largeobjects_dirs_if_needed(defaults['paths_root_path'])
123
123
124 data = render('rhodecode:templates/admin/settings/settings.mako',
124 data = render('rhodecode:templates/admin/settings/settings.mako',
125 self._get_template_context(c), self.request)
125 self._get_template_context(c), self.request)
126 html = formencode.htmlfill.render(
126 html = formencode.htmlfill.render(
127 data,
127 data,
128 defaults=defaults,
128 defaults=defaults,
129 encoding="UTF-8",
129 encoding="UTF-8",
130 force_defaults=False
130 force_defaults=False
131 )
131 )
132 return Response(html)
132 return Response(html)
133
133
134 @LoginRequired()
134 @LoginRequired()
135 @HasPermissionAllDecorator('hg.admin')
135 @HasPermissionAllDecorator('hg.admin')
136 @CSRFRequired()
136 @CSRFRequired()
137 def settings_vcs_update(self):
137 def settings_vcs_update(self):
138 _ = self.request.translate
138 _ = self.request.translate
139 c = self.load_default_context()
139 c = self.load_default_context()
140 c.active = 'vcs'
140 c.active = 'vcs'
141
141
142 model = VcsSettingsModel()
142 model = VcsSettingsModel()
143 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
143 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
144 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
144 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
145
145
146 settings = self.request.registry.settings
146 settings = self.request.registry.settings
147 c.svn_proxy_generate_config = settings[generate_config]
147 c.svn_proxy_generate_config = settings[generate_config]
148
148
149 application_form = ApplicationUiSettingsForm(self.request.translate)()
149 application_form = ApplicationUiSettingsForm(self.request.translate)()
150
150
151 try:
151 try:
152 form_result = application_form.to_python(dict(self.request.POST))
152 form_result = application_form.to_python(dict(self.request.POST))
153 except formencode.Invalid as errors:
153 except formencode.Invalid as errors:
154 h.flash(
154 h.flash(
155 _("Some form inputs contain invalid data."),
155 _("Some form inputs contain invalid data."),
156 category='error')
156 category='error')
157 data = render('rhodecode:templates/admin/settings/settings.mako',
157 data = render('rhodecode:templates/admin/settings/settings.mako',
158 self._get_template_context(c), self.request)
158 self._get_template_context(c), self.request)
159 html = formencode.htmlfill.render(
159 html = formencode.htmlfill.render(
160 data,
160 data,
161 defaults=errors.value,
161 defaults=errors.value,
162 errors=errors.unpack_errors() or {},
162 errors=errors.unpack_errors() or {},
163 prefix_error=False,
163 prefix_error=False,
164 encoding="UTF-8",
164 encoding="UTF-8",
165 force_defaults=False
165 force_defaults=False
166 )
166 )
167 return Response(html)
167 return Response(html)
168
168
169 try:
169 try:
170 if c.visual.allow_repo_location_change:
170 if c.visual.allow_repo_location_change:
171 model.update_global_path_setting(form_result['paths_root_path'])
171 model.update_global_path_setting(form_result['paths_root_path'])
172
172
173 model.update_global_ssl_setting(form_result['web_push_ssl'])
173 model.update_global_ssl_setting(form_result['web_push_ssl'])
174 model.update_global_hook_settings(form_result)
174 model.update_global_hook_settings(form_result)
175
175
176 model.create_or_update_global_svn_settings(form_result)
176 model.create_or_update_global_svn_settings(form_result)
177 model.create_or_update_global_hg_settings(form_result)
177 model.create_or_update_global_hg_settings(form_result)
178 model.create_or_update_global_git_settings(form_result)
178 model.create_or_update_global_git_settings(form_result)
179 model.create_or_update_global_pr_settings(form_result)
179 model.create_or_update_global_pr_settings(form_result)
180 except Exception:
180 except Exception:
181 log.exception("Exception while updating settings")
181 log.exception("Exception while updating settings")
182 h.flash(_('Error occurred during updating '
182 h.flash(_('Error occurred during updating '
183 'application settings'), category='error')
183 'application settings'), category='error')
184 else:
184 else:
185 Session().commit()
185 Session().commit()
186 h.flash(_('Updated VCS settings'), category='success')
186 h.flash(_('Updated VCS settings'), category='success')
187 raise HTTPFound(h.route_path('admin_settings_vcs'))
187 raise HTTPFound(h.route_path('admin_settings_vcs'))
188
188
189 data = render('rhodecode:templates/admin/settings/settings.mako',
189 data = render('rhodecode:templates/admin/settings/settings.mako',
190 self._get_template_context(c), self.request)
190 self._get_template_context(c), self.request)
191 html = formencode.htmlfill.render(
191 html = formencode.htmlfill.render(
192 data,
192 data,
193 defaults=self._form_defaults(),
193 defaults=self._form_defaults(),
194 encoding="UTF-8",
194 encoding="UTF-8",
195 force_defaults=False
195 force_defaults=False
196 )
196 )
197 return Response(html)
197 return Response(html)
198
198
199 @LoginRequired()
199 @LoginRequired()
200 @HasPermissionAllDecorator('hg.admin')
200 @HasPermissionAllDecorator('hg.admin')
201 @CSRFRequired()
201 @CSRFRequired()
202 def settings_vcs_delete_svn_pattern(self):
202 def settings_vcs_delete_svn_pattern(self):
203 delete_pattern_id = self.request.POST.get('delete_svn_pattern')
203 delete_pattern_id = self.request.POST.get('delete_svn_pattern')
204 model = VcsSettingsModel()
204 model = VcsSettingsModel()
205 try:
205 try:
206 model.delete_global_svn_pattern(delete_pattern_id)
206 model.delete_global_svn_pattern(delete_pattern_id)
207 except SettingNotFound:
207 except SettingNotFound:
208 log.exception(
208 log.exception(
209 'Failed to delete svn_pattern with id %s', delete_pattern_id)
209 'Failed to delete svn_pattern with id %s', delete_pattern_id)
210 raise HTTPNotFound()
210 raise HTTPNotFound()
211
211
212 Session().commit()
212 Session().commit()
213 return True
213 return True
214
214
215 @LoginRequired()
215 @LoginRequired()
216 @HasPermissionAllDecorator('hg.admin')
216 @HasPermissionAllDecorator('hg.admin')
217 def settings_mapping(self):
217 def settings_mapping(self):
218 c = self.load_default_context()
218 c = self.load_default_context()
219 c.active = 'mapping'
219 c.active = 'mapping'
220
220 c.storage_path = VcsSettingsModel().get_repos_location()
221 data = render('rhodecode:templates/admin/settings/settings.mako',
221 data = render('rhodecode:templates/admin/settings/settings.mako',
222 self._get_template_context(c), self.request)
222 self._get_template_context(c), self.request)
223 html = formencode.htmlfill.render(
223 html = formencode.htmlfill.render(
224 data,
224 data,
225 defaults=self._form_defaults(),
225 defaults=self._form_defaults(),
226 encoding="UTF-8",
226 encoding="UTF-8",
227 force_defaults=False
227 force_defaults=False
228 )
228 )
229 return Response(html)
229 return Response(html)
230
230
231 @LoginRequired()
231 @LoginRequired()
232 @HasPermissionAllDecorator('hg.admin')
232 @HasPermissionAllDecorator('hg.admin')
233 @CSRFRequired()
233 @CSRFRequired()
234 def settings_mapping_update(self):
234 def settings_mapping_update(self):
235 _ = self.request.translate
235 _ = self.request.translate
236 c = self.load_default_context()
236 c = self.load_default_context()
237 c.active = 'mapping'
237 c.active = 'mapping'
238 rm_obsolete = self.request.POST.get('destroy', False)
238 rm_obsolete = self.request.POST.get('destroy', False)
239 invalidate_cache = self.request.POST.get('invalidate', False)
239 invalidate_cache = self.request.POST.get('invalidate', False)
240 log.debug('rescanning repo location with destroy obsolete=%s', rm_obsolete)
240 log.debug('rescanning repo location with destroy obsolete=%s', rm_obsolete)
241
241
242 if invalidate_cache:
242 if invalidate_cache:
243 log.debug('invalidating all repositories cache')
243 log.debug('invalidating all repositories cache')
244 for repo in Repository.get_all():
244 for repo in Repository.get_all():
245 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
245 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
246
246
247 filesystem_repos = ScmModel().repo_scan()
247 filesystem_repos = ScmModel().repo_scan()
248 added, removed = repo2db_mapper(filesystem_repos, rm_obsolete)
248 added, removed = repo2db_mapper(filesystem_repos, rm_obsolete, force_hooks_rebuild=True)
249 PermissionModel().trigger_permission_flush()
249 PermissionModel().trigger_permission_flush()
250
250
251 def _repr(rm_repo):
251 def _repr(rm_repo):
252 return ', '.join(map(safe_str, rm_repo)) or '-'
252 return ', '.join(map(safe_str, rm_repo)) or '-'
253
253
254 h.flash(_('Repositories successfully '
254 h.flash(_('Repositories successfully '
255 'rescanned added: %s ; removed: %s') %
255 'rescanned added: %s ; removed: %s') %
256 (_repr(added), _repr(removed)),
256 (_repr(added), _repr(removed)),
257 category='success')
257 category='success')
258 raise HTTPFound(h.route_path('admin_settings_mapping'))
258 raise HTTPFound(h.route_path('admin_settings_mapping'))
259
259
260 @LoginRequired()
260 @LoginRequired()
261 @HasPermissionAllDecorator('hg.admin')
261 @HasPermissionAllDecorator('hg.admin')
262 def settings_global(self):
262 def settings_global(self):
263 c = self.load_default_context()
263 c = self.load_default_context()
264 c.active = 'global'
264 c.active = 'global'
265 c.personal_repo_group_default_pattern = RepoGroupModel()\
265 c.personal_repo_group_default_pattern = RepoGroupModel()\
266 .get_personal_group_name_pattern()
266 .get_personal_group_name_pattern()
267
267
268 data = render('rhodecode:templates/admin/settings/settings.mako',
268 data = render('rhodecode:templates/admin/settings/settings.mako',
269 self._get_template_context(c), self.request)
269 self._get_template_context(c), self.request)
270 html = formencode.htmlfill.render(
270 html = formencode.htmlfill.render(
271 data,
271 data,
272 defaults=self._form_defaults(),
272 defaults=self._form_defaults(),
273 encoding="UTF-8",
273 encoding="UTF-8",
274 force_defaults=False
274 force_defaults=False
275 )
275 )
276 return Response(html)
276 return Response(html)
277
277
278 @LoginRequired()
278 @LoginRequired()
279 @HasPermissionAllDecorator('hg.admin')
279 @HasPermissionAllDecorator('hg.admin')
280 @CSRFRequired()
280 @CSRFRequired()
281 def settings_global_update(self):
281 def settings_global_update(self):
282 _ = self.request.translate
282 _ = self.request.translate
283 c = self.load_default_context()
283 c = self.load_default_context()
284 c.active = 'global'
284 c.active = 'global'
285 c.personal_repo_group_default_pattern = RepoGroupModel()\
285 c.personal_repo_group_default_pattern = RepoGroupModel()\
286 .get_personal_group_name_pattern()
286 .get_personal_group_name_pattern()
287 application_form = ApplicationSettingsForm(self.request.translate)()
287 application_form = ApplicationSettingsForm(self.request.translate)()
288 try:
288 try:
289 form_result = application_form.to_python(dict(self.request.POST))
289 form_result = application_form.to_python(dict(self.request.POST))
290 except formencode.Invalid as errors:
290 except formencode.Invalid as errors:
291 h.flash(
291 h.flash(
292 _("Some form inputs contain invalid data."),
292 _("Some form inputs contain invalid data."),
293 category='error')
293 category='error')
294 data = render('rhodecode:templates/admin/settings/settings.mako',
294 data = render('rhodecode:templates/admin/settings/settings.mako',
295 self._get_template_context(c), self.request)
295 self._get_template_context(c), self.request)
296 html = formencode.htmlfill.render(
296 html = formencode.htmlfill.render(
297 data,
297 data,
298 defaults=errors.value,
298 defaults=errors.value,
299 errors=errors.unpack_errors() or {},
299 errors=errors.unpack_errors() or {},
300 prefix_error=False,
300 prefix_error=False,
301 encoding="UTF-8",
301 encoding="UTF-8",
302 force_defaults=False
302 force_defaults=False
303 )
303 )
304 return Response(html)
304 return Response(html)
305
305
306 settings = [
306 settings = [
307 ('title', 'rhodecode_title', 'unicode'),
307 ('title', 'rhodecode_title', 'unicode'),
308 ('realm', 'rhodecode_realm', 'unicode'),
308 ('realm', 'rhodecode_realm', 'unicode'),
309 ('pre_code', 'rhodecode_pre_code', 'unicode'),
309 ('pre_code', 'rhodecode_pre_code', 'unicode'),
310 ('post_code', 'rhodecode_post_code', 'unicode'),
310 ('post_code', 'rhodecode_post_code', 'unicode'),
311 ('captcha_public_key', 'rhodecode_captcha_public_key', 'unicode'),
311 ('captcha_public_key', 'rhodecode_captcha_public_key', 'unicode'),
312 ('captcha_private_key', 'rhodecode_captcha_private_key', 'unicode'),
312 ('captcha_private_key', 'rhodecode_captcha_private_key', 'unicode'),
313 ('create_personal_repo_group', 'rhodecode_create_personal_repo_group', 'bool'),
313 ('create_personal_repo_group', 'rhodecode_create_personal_repo_group', 'bool'),
314 ('personal_repo_group_pattern', 'rhodecode_personal_repo_group_pattern', 'unicode'),
314 ('personal_repo_group_pattern', 'rhodecode_personal_repo_group_pattern', 'unicode'),
315 ]
315 ]
316
316
317 try:
317 try:
318 for setting, form_key, type_ in settings:
318 for setting, form_key, type_ in settings:
319 sett = SettingsModel().create_or_update_setting(
319 sett = SettingsModel().create_or_update_setting(
320 setting, form_result[form_key], type_)
320 setting, form_result[form_key], type_)
321 Session().add(sett)
321 Session().add(sett)
322
322
323 Session().commit()
323 Session().commit()
324 SettingsModel().invalidate_settings_cache()
324 SettingsModel().invalidate_settings_cache()
325 h.flash(_('Updated application settings'), category='success')
325 h.flash(_('Updated application settings'), category='success')
326 except Exception:
326 except Exception:
327 log.exception("Exception while updating application settings")
327 log.exception("Exception while updating application settings")
328 h.flash(
328 h.flash(
329 _('Error occurred while updating application settings'),
329 _('Error occurred while updating application settings'),
330 category='error')
330 category='error')
331
331
332 raise HTTPFound(h.route_path('admin_settings_global'))
332 raise HTTPFound(h.route_path('admin_settings_global'))
333
333
334 @LoginRequired()
334 @LoginRequired()
335 @HasPermissionAllDecorator('hg.admin')
335 @HasPermissionAllDecorator('hg.admin')
336 def settings_visual(self):
336 def settings_visual(self):
337 c = self.load_default_context()
337 c = self.load_default_context()
338 c.active = 'visual'
338 c.active = 'visual'
339
339
340 data = render('rhodecode:templates/admin/settings/settings.mako',
340 data = render('rhodecode:templates/admin/settings/settings.mako',
341 self._get_template_context(c), self.request)
341 self._get_template_context(c), self.request)
342 html = formencode.htmlfill.render(
342 html = formencode.htmlfill.render(
343 data,
343 data,
344 defaults=self._form_defaults(),
344 defaults=self._form_defaults(),
345 encoding="UTF-8",
345 encoding="UTF-8",
346 force_defaults=False
346 force_defaults=False
347 )
347 )
348 return Response(html)
348 return Response(html)
349
349
350 @LoginRequired()
350 @LoginRequired()
351 @HasPermissionAllDecorator('hg.admin')
351 @HasPermissionAllDecorator('hg.admin')
352 @CSRFRequired()
352 @CSRFRequired()
353 def settings_visual_update(self):
353 def settings_visual_update(self):
354 _ = self.request.translate
354 _ = self.request.translate
355 c = self.load_default_context()
355 c = self.load_default_context()
356 c.active = 'visual'
356 c.active = 'visual'
357 application_form = ApplicationVisualisationForm(self.request.translate)()
357 application_form = ApplicationVisualisationForm(self.request.translate)()
358 try:
358 try:
359 form_result = application_form.to_python(dict(self.request.POST))
359 form_result = application_form.to_python(dict(self.request.POST))
360 except formencode.Invalid as errors:
360 except formencode.Invalid as errors:
361 h.flash(
361 h.flash(
362 _("Some form inputs contain invalid data."),
362 _("Some form inputs contain invalid data."),
363 category='error')
363 category='error')
364 data = render('rhodecode:templates/admin/settings/settings.mako',
364 data = render('rhodecode:templates/admin/settings/settings.mako',
365 self._get_template_context(c), self.request)
365 self._get_template_context(c), self.request)
366 html = formencode.htmlfill.render(
366 html = formencode.htmlfill.render(
367 data,
367 data,
368 defaults=errors.value,
368 defaults=errors.value,
369 errors=errors.unpack_errors() or {},
369 errors=errors.unpack_errors() or {},
370 prefix_error=False,
370 prefix_error=False,
371 encoding="UTF-8",
371 encoding="UTF-8",
372 force_defaults=False
372 force_defaults=False
373 )
373 )
374 return Response(html)
374 return Response(html)
375
375
376 try:
376 try:
377 settings = [
377 settings = [
378 ('show_public_icon', 'rhodecode_show_public_icon', 'bool'),
378 ('show_public_icon', 'rhodecode_show_public_icon', 'bool'),
379 ('show_private_icon', 'rhodecode_show_private_icon', 'bool'),
379 ('show_private_icon', 'rhodecode_show_private_icon', 'bool'),
380 ('stylify_metatags', 'rhodecode_stylify_metatags', 'bool'),
380 ('stylify_metatags', 'rhodecode_stylify_metatags', 'bool'),
381 ('repository_fields', 'rhodecode_repository_fields', 'bool'),
381 ('repository_fields', 'rhodecode_repository_fields', 'bool'),
382 ('dashboard_items', 'rhodecode_dashboard_items', 'int'),
382 ('dashboard_items', 'rhodecode_dashboard_items', 'int'),
383 ('admin_grid_items', 'rhodecode_admin_grid_items', 'int'),
383 ('admin_grid_items', 'rhodecode_admin_grid_items', 'int'),
384 ('show_version', 'rhodecode_show_version', 'bool'),
384 ('show_version', 'rhodecode_show_version', 'bool'),
385 ('use_gravatar', 'rhodecode_use_gravatar', 'bool'),
385 ('use_gravatar', 'rhodecode_use_gravatar', 'bool'),
386 ('markup_renderer', 'rhodecode_markup_renderer', 'unicode'),
386 ('markup_renderer', 'rhodecode_markup_renderer', 'unicode'),
387 ('gravatar_url', 'rhodecode_gravatar_url', 'unicode'),
387 ('gravatar_url', 'rhodecode_gravatar_url', 'unicode'),
388 ('clone_uri_tmpl', 'rhodecode_clone_uri_tmpl', 'unicode'),
388 ('clone_uri_tmpl', 'rhodecode_clone_uri_tmpl', 'unicode'),
389 ('clone_uri_id_tmpl', 'rhodecode_clone_uri_id_tmpl', 'unicode'),
389 ('clone_uri_id_tmpl', 'rhodecode_clone_uri_id_tmpl', 'unicode'),
390 ('clone_uri_ssh_tmpl', 'rhodecode_clone_uri_ssh_tmpl', 'unicode'),
390 ('clone_uri_ssh_tmpl', 'rhodecode_clone_uri_ssh_tmpl', 'unicode'),
391 ('support_url', 'rhodecode_support_url', 'unicode'),
391 ('support_url', 'rhodecode_support_url', 'unicode'),
392 ('show_revision_number', 'rhodecode_show_revision_number', 'bool'),
392 ('show_revision_number', 'rhodecode_show_revision_number', 'bool'),
393 ('show_sha_length', 'rhodecode_show_sha_length', 'int'),
393 ('show_sha_length', 'rhodecode_show_sha_length', 'int'),
394 ]
394 ]
395 for setting, form_key, type_ in settings:
395 for setting, form_key, type_ in settings:
396 sett = SettingsModel().create_or_update_setting(
396 sett = SettingsModel().create_or_update_setting(
397 setting, form_result[form_key], type_)
397 setting, form_result[form_key], type_)
398 Session().add(sett)
398 Session().add(sett)
399
399
400 Session().commit()
400 Session().commit()
401 SettingsModel().invalidate_settings_cache()
401 SettingsModel().invalidate_settings_cache()
402 h.flash(_('Updated visualisation settings'), category='success')
402 h.flash(_('Updated visualisation settings'), category='success')
403 except Exception:
403 except Exception:
404 log.exception("Exception updating visualization settings")
404 log.exception("Exception updating visualization settings")
405 h.flash(_('Error occurred during updating '
405 h.flash(_('Error occurred during updating '
406 'visualisation settings'),
406 'visualisation settings'),
407 category='error')
407 category='error')
408
408
409 raise HTTPFound(h.route_path('admin_settings_visual'))
409 raise HTTPFound(h.route_path('admin_settings_visual'))
410
410
411 @LoginRequired()
411 @LoginRequired()
412 @HasPermissionAllDecorator('hg.admin')
412 @HasPermissionAllDecorator('hg.admin')
413 def settings_issuetracker(self):
413 def settings_issuetracker(self):
414 c = self.load_default_context()
414 c = self.load_default_context()
415 c.active = 'issuetracker'
415 c.active = 'issuetracker'
416 defaults = c.rc_config
416 defaults = c.rc_config
417
417
418 entry_key = 'rhodecode_issuetracker_pat_'
418 entry_key = 'rhodecode_issuetracker_pat_'
419
419
420 c.issuetracker_entries = {}
420 c.issuetracker_entries = {}
421 for k, v in defaults.items():
421 for k, v in defaults.items():
422 if k.startswith(entry_key):
422 if k.startswith(entry_key):
423 uid = k[len(entry_key):]
423 uid = k[len(entry_key):]
424 c.issuetracker_entries[uid] = None
424 c.issuetracker_entries[uid] = None
425
425
426 for uid in c.issuetracker_entries:
426 for uid in c.issuetracker_entries:
427 c.issuetracker_entries[uid] = AttributeDict({
427 c.issuetracker_entries[uid] = AttributeDict({
428 'pat': defaults.get('rhodecode_issuetracker_pat_' + uid),
428 'pat': defaults.get('rhodecode_issuetracker_pat_' + uid),
429 'url': defaults.get('rhodecode_issuetracker_url_' + uid),
429 'url': defaults.get('rhodecode_issuetracker_url_' + uid),
430 'pref': defaults.get('rhodecode_issuetracker_pref_' + uid),
430 'pref': defaults.get('rhodecode_issuetracker_pref_' + uid),
431 'desc': defaults.get('rhodecode_issuetracker_desc_' + uid),
431 'desc': defaults.get('rhodecode_issuetracker_desc_' + uid),
432 })
432 })
433
433
434 return self._get_template_context(c)
434 return self._get_template_context(c)
435
435
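# For illustration, a minimal sketch of the flat key scheme this view reads, assuming
# a hypothetical entry uid 'abc123' and invented values (not part of this module):
#
#   defaults = {
#       'rhodecode_issuetracker_pat_abc123': r'#(?P<issue_id>\d+)',
#       'rhodecode_issuetracker_url_abc123': 'https://tracker.example.com/issues/',
#       'rhodecode_issuetracker_pref_abc123': 'EX-',
#       'rhodecode_issuetracker_desc_abc123': 'Example tracker',
#   }
#
# The loops above would then expose c.issuetracker_entries['abc123'] as an
# AttributeDict with .pat, .url, .pref and .desc taken from those keys.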
436 @LoginRequired()
436 @LoginRequired()
437 @HasPermissionAllDecorator('hg.admin')
437 @HasPermissionAllDecorator('hg.admin')
438 @CSRFRequired()
438 @CSRFRequired()
439 def settings_issuetracker_test(self):
439 def settings_issuetracker_test(self):
440 error_container = []
440 error_container = []
441
441
442 urlified_commit = h.urlify_commit_message(
442 urlified_commit = h.urlify_commit_message(
443 self.request.POST.get('test_text', ''),
443 self.request.POST.get('test_text', ''),
444 'repo_group/test_repo1', error_container=error_container)
444 'repo_group/test_repo1', error_container=error_container)
445 if error_container:
445 if error_container:
446 def converter(inp):
446 def converter(inp):
447 return h.html_escape(inp)
447 return h.html_escape(inp)
448
448
449 return 'ERRORS: ' + '\n'.join(map(converter, error_container))
449 return 'ERRORS: ' + '\n'.join(map(converter, error_container))
450
450
451 return urlified_commit
451 return urlified_commit
452
452
453 @LoginRequired()
453 @LoginRequired()
454 @HasPermissionAllDecorator('hg.admin')
454 @HasPermissionAllDecorator('hg.admin')
455 @CSRFRequired()
455 @CSRFRequired()
456 def settings_issuetracker_update(self):
456 def settings_issuetracker_update(self):
457 _ = self.request.translate
457 _ = self.request.translate
458 self.load_default_context()
458 self.load_default_context()
459 settings_model = IssueTrackerSettingsModel()
459 settings_model = IssueTrackerSettingsModel()
460
460
461 try:
461 try:
462 form = IssueTrackerPatternsForm(self.request.translate)()
462 form = IssueTrackerPatternsForm(self.request.translate)()
463 data = form.to_python(self.request.POST)
463 data = form.to_python(self.request.POST)
464 except formencode.Invalid as errors:
464 except formencode.Invalid as errors:
465 log.exception('Failed to add new pattern')
465 log.exception('Failed to add new pattern')
466 error = errors
466 error = errors
467 h.flash(_(f'Invalid issue tracker pattern: {error}'),
467 h.flash(_(f'Invalid issue tracker pattern: {error}'),
468 category='error')
468 category='error')
469 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
469 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
470
470
471 if data:
471 if data:
472 for uid in data.get('delete_patterns', []):
472 for uid in data.get('delete_patterns', []):
473 settings_model.delete_entries(uid)
473 settings_model.delete_entries(uid)
474
474
475 for pattern in data.get('patterns', []):
475 for pattern in data.get('patterns', []):
476 for setting, value, type_ in pattern:
476 for setting, value, type_ in pattern:
477 sett = settings_model.create_or_update_setting(
477 sett = settings_model.create_or_update_setting(
478 setting, value, type_)
478 setting, value, type_)
479 Session().add(sett)
479 Session().add(sett)
480
480
481 Session().commit()
481 Session().commit()
482
482
483 SettingsModel().invalidate_settings_cache()
483 SettingsModel().invalidate_settings_cache()
484 h.flash(_('Updated issue tracker entries'), category='success')
484 h.flash(_('Updated issue tracker entries'), category='success')
485 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
485 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
486
486
487 @LoginRequired()
487 @LoginRequired()
488 @HasPermissionAllDecorator('hg.admin')
488 @HasPermissionAllDecorator('hg.admin')
489 @CSRFRequired()
489 @CSRFRequired()
490 def settings_issuetracker_delete(self):
490 def settings_issuetracker_delete(self):
491 _ = self.request.translate
491 _ = self.request.translate
492 self.load_default_context()
492 self.load_default_context()
493 uid = self.request.POST.get('uid')
493 uid = self.request.POST.get('uid')
494 try:
494 try:
495 IssueTrackerSettingsModel().delete_entries(uid)
495 IssueTrackerSettingsModel().delete_entries(uid)
496 except Exception:
496 except Exception:
497 log.exception('Failed to delete issue tracker setting %s', uid)
497 log.exception('Failed to delete issue tracker setting %s', uid)
498 raise HTTPNotFound()
498 raise HTTPNotFound()
499
499
500 SettingsModel().invalidate_settings_cache()
500 SettingsModel().invalidate_settings_cache()
501 h.flash(_('Removed issue tracker entry.'), category='success')
501 h.flash(_('Removed issue tracker entry.'), category='success')
502
502
503 return {'deleted': uid}
503 return {'deleted': uid}
504
504
505 @LoginRequired()
505 @LoginRequired()
506 @HasPermissionAllDecorator('hg.admin')
506 @HasPermissionAllDecorator('hg.admin')
507 def settings_email(self):
507 def settings_email(self):
508 c = self.load_default_context()
508 c = self.load_default_context()
509 c.active = 'email'
509 c.active = 'email'
510 c.rhodecode_ini = rhodecode.CONFIG
510 c.rhodecode_ini = rhodecode.CONFIG
511
511
512 data = render('rhodecode:templates/admin/settings/settings.mako',
512 data = render('rhodecode:templates/admin/settings/settings.mako',
513 self._get_template_context(c), self.request)
513 self._get_template_context(c), self.request)
514 html = formencode.htmlfill.render(
514 html = formencode.htmlfill.render(
515 data,
515 data,
516 defaults=self._form_defaults(),
516 defaults=self._form_defaults(),
517 encoding="UTF-8",
517 encoding="UTF-8",
518 force_defaults=False
518 force_defaults=False
519 )
519 )
520 return Response(html)
520 return Response(html)
521
521
522 @LoginRequired()
522 @LoginRequired()
523 @HasPermissionAllDecorator('hg.admin')
523 @HasPermissionAllDecorator('hg.admin')
524 @CSRFRequired()
524 @CSRFRequired()
525 def settings_email_update(self):
525 def settings_email_update(self):
526 _ = self.request.translate
526 _ = self.request.translate
527 c = self.load_default_context()
527 c = self.load_default_context()
528 c.active = 'email'
528 c.active = 'email'
529
529
530 test_email = self.request.POST.get('test_email')
530 test_email = self.request.POST.get('test_email')
531
531
532 if not test_email:
532 if not test_email:
533 h.flash(_('Please enter email address'), category='error')
533 h.flash(_('Please enter email address'), category='error')
534 raise HTTPFound(h.route_path('admin_settings_email'))
534 raise HTTPFound(h.route_path('admin_settings_email'))
535
535
536 email_kwargs = {
536 email_kwargs = {
537 'date': datetime.datetime.now(),
537 'date': datetime.datetime.now(),
538 'user': self._rhodecode_db_user
538 'user': self._rhodecode_db_user
539 }
539 }
540
540
541 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
541 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
542 EmailNotificationModel.TYPE_EMAIL_TEST, **email_kwargs)
542 EmailNotificationModel.TYPE_EMAIL_TEST, **email_kwargs)
543
543
544 recipients = [test_email] if test_email else None
544 recipients = [test_email] if test_email else None
545
545
546 run_task(tasks.send_email, recipients, subject,
546 run_task(tasks.send_email, recipients, subject,
547 email_body_plaintext, email_body)
547 email_body_plaintext, email_body)
548
548
549 h.flash(_('Send email task created'), category='success')
549 h.flash(_('Send email task created'), category='success')
550 raise HTTPFound(h.route_path('admin_settings_email'))
550 raise HTTPFound(h.route_path('admin_settings_email'))
551
551
552 @LoginRequired()
552 @LoginRequired()
553 @HasPermissionAllDecorator('hg.admin')
553 @HasPermissionAllDecorator('hg.admin')
554 def settings_hooks(self):
554 def settings_hooks(self):
555 c = self.load_default_context()
555 c = self.load_default_context()
556 c.active = 'hooks'
556 c.active = 'hooks'
557
557
558 model = SettingsModel()
558 model = SettingsModel()
559 c.hooks = model.get_builtin_hooks()
559 c.hooks = model.get_builtin_hooks()
560 c.custom_hooks = model.get_custom_hooks()
560 c.custom_hooks = model.get_custom_hooks()
561
561
562 data = render('rhodecode:templates/admin/settings/settings.mako',
562 data = render('rhodecode:templates/admin/settings/settings.mako',
563 self._get_template_context(c), self.request)
563 self._get_template_context(c), self.request)
564 html = formencode.htmlfill.render(
564 html = formencode.htmlfill.render(
565 data,
565 data,
566 defaults=self._form_defaults(),
566 defaults=self._form_defaults(),
567 encoding="UTF-8",
567 encoding="UTF-8",
568 force_defaults=False
568 force_defaults=False
569 )
569 )
570 return Response(html)
570 return Response(html)
571
571
572 @LoginRequired()
572 @LoginRequired()
573 @HasPermissionAllDecorator('hg.admin')
573 @HasPermissionAllDecorator('hg.admin')
574 @CSRFRequired()
574 @CSRFRequired()
575 def settings_hooks_update(self):
575 def settings_hooks_update(self):
576 _ = self.request.translate
576 _ = self.request.translate
577 c = self.load_default_context()
577 c = self.load_default_context()
578 c.active = 'hooks'
578 c.active = 'hooks'
579 if c.visual.allow_custom_hooks_settings:
579 if c.visual.allow_custom_hooks_settings:
580 ui_key = self.request.POST.get('new_hook_ui_key')
580 ui_key = self.request.POST.get('new_hook_ui_key')
581 ui_value = self.request.POST.get('new_hook_ui_value')
581 ui_value = self.request.POST.get('new_hook_ui_value')
582
582
583 hook_id = self.request.POST.get('hook_id')
583 hook_id = self.request.POST.get('hook_id')
584 new_hook = False
584 new_hook = False
585
585
586 model = SettingsModel()
586 model = SettingsModel()
587 try:
587 try:
588 if ui_value and ui_key:
588 if ui_value and ui_key:
589 model.create_or_update_hook(ui_key, ui_value)
589 model.create_or_update_hook(ui_key, ui_value)
590 h.flash(_('Added new hook'), category='success')
590 h.flash(_('Added new hook'), category='success')
591 new_hook = True
591 new_hook = True
592 elif hook_id:
592 elif hook_id:
593 RhodeCodeUi.delete(hook_id)
593 RhodeCodeUi.delete(hook_id)
594 Session().commit()
594 Session().commit()
595
595
596 # check for edits
596 # check for edits
597 update = False
597 update = False
598 _d = self.request.POST.dict_of_lists()
598 _d = self.request.POST.dict_of_lists()
599 for k, v in zip(_d.get('hook_ui_key', []),
599 for k, v in zip(_d.get('hook_ui_key', []),
600 _d.get('hook_ui_value_new', [])):
600 _d.get('hook_ui_value_new', [])):
601 model.create_or_update_hook(k, v)
601 model.create_or_update_hook(k, v)
602 update = True
602 update = True
603
603
604 if update and not new_hook:
604 if update and not new_hook:
605 h.flash(_('Updated hooks'), category='success')
605 h.flash(_('Updated hooks'), category='success')
606 Session().commit()
606 Session().commit()
607 except Exception:
607 except Exception:
608 log.exception("Exception during hook creation")
608 log.exception("Exception during hook creation")
609 h.flash(_('Error occurred during hook creation'),
609 h.flash(_('Error occurred during hook creation'),
610 category='error')
610 category='error')
611
611
612 raise HTTPFound(h.route_path('admin_settings_hooks'))
612 raise HTTPFound(h.route_path('admin_settings_hooks'))
613
613
614 @LoginRequired()
614 @LoginRequired()
615 @HasPermissionAllDecorator('hg.admin')
615 @HasPermissionAllDecorator('hg.admin')
616 def settings_search(self):
616 def settings_search(self):
617 c = self.load_default_context()
617 c = self.load_default_context()
618 c.active = 'search'
618 c.active = 'search'
619
619
620 c.searcher = searcher_from_config(self.request.registry.settings)
620 c.searcher = searcher_from_config(self.request.registry.settings)
621 c.statistics = c.searcher.statistics(self.request.translate)
621 c.statistics = c.searcher.statistics(self.request.translate)
622
622
623 return self._get_template_context(c)
623 return self._get_template_context(c)
624
624
625 @LoginRequired()
625 @LoginRequired()
626 @HasPermissionAllDecorator('hg.admin')
626 @HasPermissionAllDecorator('hg.admin')
627 def settings_labs(self):
627 def settings_labs(self):
628 c = self.load_default_context()
628 c = self.load_default_context()
629 if not c.labs_active:
629 if not c.labs_active:
630 raise HTTPFound(h.route_path('admin_settings'))
630 raise HTTPFound(h.route_path('admin_settings'))
631
631
632 c.active = 'labs'
632 c.active = 'labs'
633 c.lab_settings = _LAB_SETTINGS
633 c.lab_settings = _LAB_SETTINGS
634
634
635 data = render('rhodecode:templates/admin/settings/settings.mako',
635 data = render('rhodecode:templates/admin/settings/settings.mako',
636 self._get_template_context(c), self.request)
636 self._get_template_context(c), self.request)
637 html = formencode.htmlfill.render(
637 html = formencode.htmlfill.render(
638 data,
638 data,
639 defaults=self._form_defaults(),
639 defaults=self._form_defaults(),
640 encoding="UTF-8",
640 encoding="UTF-8",
641 force_defaults=False
641 force_defaults=False
642 )
642 )
643 return Response(html)
643 return Response(html)
644
644
645 @LoginRequired()
645 @LoginRequired()
646 @HasPermissionAllDecorator('hg.admin')
646 @HasPermissionAllDecorator('hg.admin')
647 @CSRFRequired()
647 @CSRFRequired()
648 def settings_labs_update(self):
648 def settings_labs_update(self):
649 _ = self.request.translate
649 _ = self.request.translate
650 c = self.load_default_context()
650 c = self.load_default_context()
651 c.active = 'labs'
651 c.active = 'labs'
652
652
653 application_form = LabsSettingsForm(self.request.translate)()
653 application_form = LabsSettingsForm(self.request.translate)()
654 try:
654 try:
655 form_result = application_form.to_python(dict(self.request.POST))
655 form_result = application_form.to_python(dict(self.request.POST))
656 except formencode.Invalid as errors:
656 except formencode.Invalid as errors:
657 h.flash(
657 h.flash(
658 _("Some form inputs contain invalid data."),
658 _("Some form inputs contain invalid data."),
659 category='error')
659 category='error')
660 data = render('rhodecode:templates/admin/settings/settings.mako',
660 data = render('rhodecode:templates/admin/settings/settings.mako',
661 self._get_template_context(c), self.request)
661 self._get_template_context(c), self.request)
662 html = formencode.htmlfill.render(
662 html = formencode.htmlfill.render(
663 data,
663 data,
664 defaults=errors.value,
664 defaults=errors.value,
665 errors=errors.unpack_errors() or {},
665 errors=errors.unpack_errors() or {},
666 prefix_error=False,
666 prefix_error=False,
667 encoding="UTF-8",
667 encoding="UTF-8",
668 force_defaults=False
668 force_defaults=False
669 )
669 )
670 return Response(html)
670 return Response(html)
671
671
672 try:
672 try:
673 session = Session()
673 session = Session()
674 for setting in _LAB_SETTINGS:
674 for setting in _LAB_SETTINGS:
675 setting_name = setting.key[len('rhodecode_'):]
675 setting_name = setting.key[len('rhodecode_'):]
676 sett = SettingsModel().create_or_update_setting(
676 sett = SettingsModel().create_or_update_setting(
677 setting_name, form_result[setting.key], setting.type)
677 setting_name, form_result[setting.key], setting.type)
678 session.add(sett)
678 session.add(sett)
679
679
680 except Exception:
680 except Exception:
681 log.exception('Exception while updating lab settings')
681 log.exception('Exception while updating lab settings')
682 h.flash(_('Error occurred while updating labs settings'),
682 h.flash(_('Error occurred while updating labs settings'),
683 category='error')
683 category='error')
684 else:
684 else:
685 Session().commit()
685 Session().commit()
686 SettingsModel().invalidate_settings_cache()
686 SettingsModel().invalidate_settings_cache()
687 h.flash(_('Updated Labs settings'), category='success')
687 h.flash(_('Updated Labs settings'), category='success')
688 raise HTTPFound(h.route_path('admin_settings_labs'))
688 raise HTTPFound(h.route_path('admin_settings_labs'))
689
689
690 data = render('rhodecode:templates/admin/settings/settings.mako',
690 data = render('rhodecode:templates/admin/settings/settings.mako',
691 self._get_template_context(c), self.request)
691 self._get_template_context(c), self.request)
692 html = formencode.htmlfill.render(
692 html = formencode.htmlfill.render(
693 data,
693 data,
694 defaults=self._form_defaults(),
694 defaults=self._form_defaults(),
695 encoding="UTF-8",
695 encoding="UTF-8",
696 force_defaults=False
696 force_defaults=False
697 )
697 )
698 return Response(html)
698 return Response(html)
699
699
700
700
701 # :param key: name of the setting including the 'rhodecode_' prefix
701 # :param key: name of the setting including the 'rhodecode_' prefix
702 # :param type: the RhodeCodeSetting type to use.
702 # :param type: the RhodeCodeSetting type to use.
703 # :param group: the i18ned group in which we should display this setting
703 # :param group: the i18ned group in which we should display this setting
704 # :param label: the i18ned label we should display for this setting
704 # :param label: the i18ned label we should display for this setting
705 # :param help: the i18ned help we should display for this setting
705 # :param help: the i18ned help we should display for this setting
706 LabSetting = collections.namedtuple(
706 LabSetting = collections.namedtuple(
707 'LabSetting', ('key', 'type', 'group', 'label', 'help'))
707 'LabSetting', ('key', 'type', 'group', 'label', 'help'))
708
708
709
709
710 # This list has to be kept in sync with the form
710 # This list has to be kept in sync with the form
711 # rhodecode.model.forms.LabsSettingsForm.
711 # rhodecode.model.forms.LabsSettingsForm.
712 _LAB_SETTINGS = [
712 _LAB_SETTINGS = [
713
713
714 ]
714 ]
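# For illustration, a hypothetical entry of the shape _LAB_SETTINGS expects, per the
# field comments above; the key, group, label and help values below are invented and
# the list intentionally ships empty in this file:
#
#   _LAB_SETTINGS = [
#       LabSetting(
#           key='rhodecode_hypothetical_feature',   # full key incl. the 'rhodecode_' prefix
#           type='bool',                            # RhodeCodeSetting type to store it as
#           group='Hypothetical group',             # group heading shown in the UI
#           label='Enable hypothetical feature',    # label shown to the admin
#           help='Inline help text shown next to the setting.',
#       ),
#   ]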
@@ -1,807 +1,808 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Utilities library for RhodeCode
20 Utilities library for RhodeCode
21 """
21 """
22
22
23 import datetime
23 import datetime
24 import decorator
24 import decorator
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28 import sys
28 import sys
29 import shutil
29 import shutil
30 import socket
30 import socket
31 import tempfile
31 import tempfile
32 import traceback
32 import traceback
33 import tarfile
33 import tarfile
34 import warnings
34 import warnings
35 from os.path import join as jn
35 from os.path import join as jn
36
36
37 import paste
37 import paste
38 import pkg_resources
38 import pkg_resources
39 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
39 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
40
40
41 from mako import exceptions
41 from mako import exceptions
42
42
43 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
43 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
44 from rhodecode.lib.str_utils import safe_bytes, safe_str
44 from rhodecode.lib.str_utils import safe_bytes, safe_str
45 from rhodecode.lib.vcs.backends.base import Config
45 from rhodecode.lib.vcs.backends.base import Config
46 from rhodecode.lib.vcs.exceptions import VCSError
46 from rhodecode.lib.vcs.exceptions import VCSError
47 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
47 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
48 from rhodecode.lib.ext_json import sjson as json
48 from rhodecode.lib.ext_json import sjson as json
49 from rhodecode.model import meta
49 from rhodecode.model import meta
50 from rhodecode.model.db import (
50 from rhodecode.model.db import (
51 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
51 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
52 from rhodecode.model.meta import Session
52 from rhodecode.model.meta import Session
53
53
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
57 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
58
58
59 # String which contains characters that are not allowed in slug names for
59 # String which contains characters that are not allowed in slug names for
60 # repositories or repository groups. It is properly escaped to use it in
60 # repositories or repository groups. It is properly escaped to use it in
61 # regular expressions.
61 # regular expressions.
62 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
62 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
63
63
64 # Regex that matches forbidden characters in repo/group slugs.
64 # Regex that matches forbidden characters in repo/group slugs.
65 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
65 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
66
66
67 # Regex that matches allowed characters in repo/group slugs.
67 # Regex that matches allowed characters in repo/group slugs.
68 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
68 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
69
69
70 # Regex that matches whole repo/group slugs.
70 # Regex that matches whole repo/group slugs.
71 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
71 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
72
72
73 _license_cache = None
73 _license_cache = None
74
74
75
75
76 def repo_name_slug(value):
76 def repo_name_slug(value):
77 """
77 """
78 Return a slug of the given repository name.
78 Return a slug of the given repository name.
79 This function is called on each repository creation/modification
79 This function is called on each repository creation/modification
80 to prevent bad names from being stored.
80 to prevent bad names from being stored.
81 """
81 """
82
82
83 replacement_char = '-'
83 replacement_char = '-'
84
84
85 slug = strip_tags(value)
85 slug = strip_tags(value)
86 slug = convert_accented_entities(slug)
86 slug = convert_accented_entities(slug)
87 slug = convert_misc_entities(slug)
87 slug = convert_misc_entities(slug)
88
88
89 slug = SLUG_BAD_CHAR_RE.sub('', slug)
89 slug = SLUG_BAD_CHAR_RE.sub('', slug)
90 slug = re.sub(r'[\s]+', '-', slug)
90 slug = re.sub(r'[\s]+', '-', slug)
91 slug = collapse(slug, replacement_char)
91 slug = collapse(slug, replacement_char)
92
92
93 return slug
93 return slug
94
94
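# A brief usage sketch with invented inputs; the exact output depends on the
# webhelpers2 helpers used above, but forbidden characters are dropped and
# whitespace collapses to '-':
#
#   repo_name_slug('My Repo #1')      # -> 'My-Repo-1'
#   repo_name_slug('docs / api v2')   # -> 'docs-api-v2'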
95
95
96 #==============================================================================
96 #==============================================================================
97 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
97 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
98 #==============================================================================
98 #==============================================================================
99 def get_repo_slug(request):
99 def get_repo_slug(request):
100 _repo = ''
100 _repo = ''
101
101
102 if hasattr(request, 'db_repo_name'):
102 if hasattr(request, 'db_repo_name'):
103 # if our request has set a db reference, use it for the name; this
103 # if our request has set a db reference, use it for the name; this
104 # translates the example.com/_<id> into proper repo names
104 # translates the example.com/_<id> into proper repo names
105 _repo = request.db_repo_name
105 _repo = request.db_repo_name
106 elif getattr(request, 'matchdict', None):
106 elif getattr(request, 'matchdict', None):
107 # pyramid
107 # pyramid
108 _repo = request.matchdict.get('repo_name')
108 _repo = request.matchdict.get('repo_name')
109
109
110 if _repo:
110 if _repo:
111 _repo = _repo.rstrip('/')
111 _repo = _repo.rstrip('/')
112 return _repo
112 return _repo
113
113
114
114
115 def get_repo_group_slug(request):
115 def get_repo_group_slug(request):
116 _group = ''
116 _group = ''
117 if hasattr(request, 'db_repo_group'):
117 if hasattr(request, 'db_repo_group'):
118 # if our request has set a db reference, use it for the name; this
118 # if our request has set a db reference, use it for the name; this
119 # translates the example.com/_<id> into proper repo group names
119 # translates the example.com/_<id> into proper repo group names
120 _group = request.db_repo_group.group_name
120 _group = request.db_repo_group.group_name
121 elif getattr(request, 'matchdict', None):
121 elif getattr(request, 'matchdict', None):
122 # pyramid
122 # pyramid
123 _group = request.matchdict.get('repo_group_name')
123 _group = request.matchdict.get('repo_group_name')
124
124
125 if _group:
125 if _group:
126 _group = _group.rstrip('/')
126 _group = _group.rstrip('/')
127 return _group
127 return _group
128
128
129
129
130 def get_user_group_slug(request):
130 def get_user_group_slug(request):
131 _user_group = ''
131 _user_group = ''
132
132
133 if hasattr(request, 'db_user_group'):
133 if hasattr(request, 'db_user_group'):
134 _user_group = request.db_user_group.users_group_name
134 _user_group = request.db_user_group.users_group_name
135 elif getattr(request, 'matchdict', None):
135 elif getattr(request, 'matchdict', None):
136 # pyramid
136 # pyramid
137 _user_group = request.matchdict.get('user_group_id')
137 _user_group = request.matchdict.get('user_group_id')
138 _user_group_name = request.matchdict.get('user_group_name')
138 _user_group_name = request.matchdict.get('user_group_name')
139 try:
139 try:
140 if _user_group:
140 if _user_group:
141 _user_group = UserGroup.get(_user_group)
141 _user_group = UserGroup.get(_user_group)
142 elif _user_group_name:
142 elif _user_group_name:
143 _user_group = UserGroup.get_by_group_name(_user_group_name)
143 _user_group = UserGroup.get_by_group_name(_user_group_name)
144
144
145 if _user_group:
145 if _user_group:
146 _user_group = _user_group.users_group_name
146 _user_group = _user_group.users_group_name
147 except Exception:
147 except Exception:
148 log.exception('Failed to get user group by id and name')
148 log.exception('Failed to get user group by id and name')
149 # catch all failures here
149 # catch all failures here
150 return None
150 return None
151
151
152 return _user_group
152 return _user_group
153
153
154
154
155 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
155 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
156 """
156 """
157 Scans the given path for repos and returns (name, (type, path)) tuples
157 Scans the given path for repos and returns (name, (type, path)) tuples
158
158
159 :param path: path to scan for repositories
159 :param path: path to scan for repositories
160 :param recursive: recursive search; returned names are prefixed with their subdirectories
160 :param recursive: recursive search; returned names are prefixed with their subdirectories
161 """
161 """
162
162
163 # remove ending slash for better results
163 # remove ending slash for better results
164 path = path.rstrip(os.sep)
164 path = path.rstrip(os.sep)
165 log.debug('now scanning in %s location recursive:%s...', path, recursive)
165 log.debug('now scanning in %s location recursive:%s...', path, recursive)
166
166
167 def _get_repos(p):
167 def _get_repos(p):
168 dirpaths = get_dirpaths(p)
168 dirpaths = get_dirpaths(p)
169 if not _is_dir_writable(p):
169 if not _is_dir_writable(p):
170 log.warning('repo path without write access: %s', p)
170 log.warning('repo path without write access: %s', p)
171
171
172 for dirpath in dirpaths:
172 for dirpath in dirpaths:
173 if os.path.isfile(os.path.join(p, dirpath)):
173 if os.path.isfile(os.path.join(p, dirpath)):
174 continue
174 continue
175 cur_path = os.path.join(p, dirpath)
175 cur_path = os.path.join(p, dirpath)
176
176
177 # skip removed repos
177 # skip removed repos
178 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
178 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
179 continue
179 continue
180
180
181 # skip .<something> dirs
181 # skip .<something> dirs
182 if dirpath.startswith('.'):
182 if dirpath.startswith('.'):
183 continue
183 continue
184
184
185 try:
185 try:
186 scm_info = get_scm(cur_path)
186 scm_info = get_scm(cur_path)
187 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
187 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
188 except VCSError:
188 except VCSError:
189 if not recursive:
189 if not recursive:
190 continue
190 continue
191 # check if this dir contains other repos for recursive scan
191 # check if this dir contains other repos for recursive scan
192 rec_path = os.path.join(p, dirpath)
192 rec_path = os.path.join(p, dirpath)
193 if os.path.isdir(rec_path):
193 if os.path.isdir(rec_path):
194 yield from _get_repos(rec_path)
194 yield from _get_repos(rec_path)
195
195
196 return _get_repos(path)
196 return _get_repos(path)
197
197
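# For illustration, a minimal usage sketch; '/srv/repo_store' is an invented location:
#
#   for repo_name, (scm_type, repo_path) in get_filesystem_repos('/srv/repo_store', recursive=True):
#       print(scm_type, repo_name, repo_path)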
198
198
199 def get_dirpaths(p: str) -> list:
199 def get_dirpaths(p: str) -> list:
200 try:
200 try:
201 # OS-independent way of checking if we have at least read-only
201 # OS-independent way of checking if we have at least read-only
202 # access or not.
202 # access or not.
203 dirpaths = os.listdir(p)
203 dirpaths = os.listdir(p)
204 except OSError:
204 except OSError:
205 log.warning('ignoring repo path without read access: %s', p)
205 log.warning('ignoring repo path without read access: %s', p)
206 return []
206 return []
207
207
208 # os.listdir has a quirk: if a unicode path is passed into it, it tries to
208 # os.listdir has a quirk: if a unicode path is passed into it, it tries to
209 # decode paths and suddenly returns unicode objects itself. The items it
209 # decode paths and suddenly returns unicode objects itself. The items it
210 # cannot decode are returned as strings and cause issues.
210 # cannot decode are returned as strings and cause issues.
211 #
211 #
212 # Those paths are ignored here until a solid solution for path handling has
212 # Those paths are ignored here until a solid solution for path handling has
213 # been built.
213 # been built.
214 expected_type = type(p)
214 expected_type = type(p)
215
215
216 def _has_correct_type(item):
216 def _has_correct_type(item):
217 if type(item) is not expected_type:
217 if type(item) is not expected_type:
218 log.error(
218 log.error(
219 "Ignoring path %s since it cannot be decoded into str.",
219 "Ignoring path %s since it cannot be decoded into str.",
220 # Using "repr" to make sure that we see the byte value in case
220 # Using "repr" to make sure that we see the byte value in case
221 # it cannot be decoded.
221 # it cannot be decoded.
222 repr(item))
222 repr(item))
223 return False
223 return False
224 return True
224 return True
225
225
226 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
226 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
227
227
228 return dirpaths
228 return dirpaths
229
229
230
230
231 def _is_dir_writable(path):
231 def _is_dir_writable(path):
232 """
232 """
233 Probe if `path` is writable.
233 Probe if `path` is writable.
234
234
235 Due to trouble on Cygwin / Windows, this is actually probing if it is
235 Due to trouble on Cygwin / Windows, this is actually probing if it is
236 possible to create a file inside of `path`, stat does not produce reliable
236 possible to create a file inside of `path`, stat does not produce reliable
237 results in this case.
237 results in this case.
238 """
238 """
239 try:
239 try:
240 with tempfile.TemporaryFile(dir=path):
240 with tempfile.TemporaryFile(dir=path):
241 pass
241 pass
242 except OSError:
242 except OSError:
243 return False
243 return False
244 return True
244 return True
245
245
246
246
247 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
247 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
248 """
248 """
249 Returns True if the given path is a valid repository, False otherwise.
249 Returns True if the given path is a valid repository, False otherwise.
250 If expect_scm is also given, compare whether the detected scm matches
250 If expect_scm is also given, compare whether the detected scm matches
251 the expected one. If explicit_scm is given, don't try to detect the scm;
251 the expected one. If explicit_scm is given, don't try to detect the scm;
252 just use the given one to check if the repo is valid.
252 just use the given one to check if the repo is valid.
253
253
254 :param repo_name:
254 :param repo_name:
255 :param base_path:
255 :param base_path:
256 :param expect_scm:
256 :param expect_scm:
257 :param explicit_scm:
257 :param explicit_scm:
258 :param config:
258 :param config:
259
259
260 :return True: if given path is a valid repository
260 :return True: if given path is a valid repository
261 """
261 """
262 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
262 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
263 log.debug('Checking if `%s` is a valid path for repository. '
263 log.debug('Checking if `%s` is a valid path for repository. '
264 'Explicit type: %s', repo_name, explicit_scm)
264 'Explicit type: %s', repo_name, explicit_scm)
265
265
266 try:
266 try:
267 if explicit_scm:
267 if explicit_scm:
268 detected_scms = [get_scm_backend(explicit_scm)(
268 detected_scms = [get_scm_backend(explicit_scm)(
269 full_path, config=config).alias]
269 full_path, config=config).alias]
270 else:
270 else:
271 detected_scms = get_scm(full_path)
271 detected_scms = get_scm(full_path)
272
272
273 if expect_scm:
273 if expect_scm:
274 return detected_scms[0] == expect_scm
274 return detected_scms[0] == expect_scm
275 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
275 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
276 return True
276 return True
277 except VCSError:
277 except VCSError:
278 log.debug('path: %s is not a valid repo !', full_path)
278 log.debug('path: %s is not a valid repo !', full_path)
279 return False
279 return False
280
280
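# A short usage sketch; the repo name and base path below are invented:
#
#   if is_valid_repo('my-project', '/srv/repo_store', expect_scm='git'):
#       print('my-project looks like a git repository')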
281
281
282 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
282 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
283 """
283 """
284 Returns True if a given path is a repository group, False otherwise
284 Returns True if a given path is a repository group, False otherwise
285
285
286 :param repo_group_name:
286 :param repo_group_name:
287 :param base_path:
287 :param base_path:
288 """
288 """
289 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
289 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
290 log.debug('Checking if `%s` is a valid path for repository group',
290 log.debug('Checking if `%s` is a valid path for repository group',
291 repo_group_name)
291 repo_group_name)
292
292
293 # check if it's not a repo
293 # check if it's not a repo
294 if is_valid_repo(repo_group_name, base_path):
294 if is_valid_repo(repo_group_name, base_path):
295 log.debug('Repo called %s exists, it is not a valid repo group', repo_group_name)
295 log.debug('Repo called %s exists, it is not a valid repo group', repo_group_name)
296 return False
296 return False
297
297
298 try:
298 try:
299 # we need to check bare git repos at higher level
299 # we need to check bare git repos at higher level
300 # since we might match branches/hooks/info/objects or possible
300 # since we might match branches/hooks/info/objects or possible
301 # other things inside bare git repo
301 # other things inside bare git repo
302 maybe_repo = os.path.dirname(full_path)
302 maybe_repo = os.path.dirname(full_path)
303 if maybe_repo == base_path:
303 if maybe_repo == base_path:
304 # skip root level repo check; we know root location CANNOT BE a repo group
304 # skip root level repo check; we know root location CANNOT BE a repo group
305 return False
305 return False
306
306
307 scm_ = get_scm(maybe_repo)
307 scm_ = get_scm(maybe_repo)
308 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
308 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
309 return False
309 return False
310 except VCSError:
310 except VCSError:
311 pass
311 pass
312
312
313 # check if it's a valid path
313 # check if it's a valid path
314 if skip_path_check or os.path.isdir(full_path):
314 if skip_path_check or os.path.isdir(full_path):
315 log.debug('path: %s is a valid repo group !', full_path)
315 log.debug('path: %s is a valid repo group !', full_path)
316 return True
316 return True
317
317
318 log.debug('path: %s is not a valid repo group !', full_path)
318 log.debug('path: %s is not a valid repo group !', full_path)
319 return False
319 return False
320
320
321
321
322 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
322 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
323 while True:
323 while True:
324 ok = input(prompt)
324 ok = input(prompt)
325 if ok.lower() in ('y', 'ye', 'yes'):
325 if ok.lower() in ('y', 'ye', 'yes'):
326 return True
326 return True
327 if ok.lower() in ('n', 'no', 'nop', 'nope'):
327 if ok.lower() in ('n', 'no', 'nop', 'nope'):
328 return False
328 return False
329 retries = retries - 1
329 retries = retries - 1
330 if retries < 0:
330 if retries < 0:
331 raise OSError
331 raise OSError
332 print(complaint)
332 print(complaint)
333
333
334 # propagated from mercurial documentation
334 # propagated from mercurial documentation
335 ui_sections = [
335 ui_sections = [
336 'alias', 'auth',
336 'alias', 'auth',
337 'decode/encode', 'defaults',
337 'decode/encode', 'defaults',
338 'diff', 'email',
338 'diff', 'email',
339 'extensions', 'format',
339 'extensions', 'format',
340 'merge-patterns', 'merge-tools',
340 'merge-patterns', 'merge-tools',
341 'hooks', 'http_proxy',
341 'hooks', 'http_proxy',
342 'smtp', 'patch',
342 'smtp', 'patch',
343 'paths', 'profiling',
343 'paths', 'profiling',
344 'server', 'trusted',
344 'server', 'trusted',
345 'ui', 'web', ]
345 'ui', 'web', ]
346
346
347
347
348 def config_data_from_db(clear_session=True, repo=None):
348 def config_data_from_db(clear_session=True, repo=None):
349 """
349 """
350 Read the configuration data from the database and return configuration
350 Read the configuration data from the database and return configuration
351 tuples.
351 tuples.
352 """
352 """
353 from rhodecode.model.settings import VcsSettingsModel
353 from rhodecode.model.settings import VcsSettingsModel
354
354
355 config = []
355 config = []
356
356
357 sa = meta.Session()
357 sa = meta.Session()
358 settings_model = VcsSettingsModel(repo=repo, sa=sa)
358 settings_model = VcsSettingsModel(repo=repo, sa=sa)
359
359
360 ui_settings = settings_model.get_ui_settings()
360 ui_settings = settings_model.get_ui_settings()
361
361
362 ui_data = []
362 ui_data = []
363 for setting in ui_settings:
363 for setting in ui_settings:
364 if setting.active:
364 if setting.active:
365 ui_data.append((setting.section, setting.key, setting.value))
365 ui_data.append((setting.section, setting.key, setting.value))
366 config.append((
366 config.append((
367 safe_str(setting.section), safe_str(setting.key),
367 safe_str(setting.section), safe_str(setting.key),
368 safe_str(setting.value)))
368 safe_str(setting.value)))
369 if setting.key == 'push_ssl':
369 if setting.key == 'push_ssl':
370 # force set push_ssl requirement to False, rhodecode
370 # force set push_ssl requirement to False, rhodecode
371 # handles that
371 # handles that
372 config.append((
372 config.append((
373 safe_str(setting.section), safe_str(setting.key), False))
373 safe_str(setting.section), safe_str(setting.key), False))
374 log.debug(
374 log.debug(
375 'settings ui from db@repo[%s]: %s',
375 'settings ui from db@repo[%s]: %s',
376 repo,
376 repo,
377 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
377 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
378 if clear_session:
378 if clear_session:
379 meta.Session.remove()
379 meta.Session.remove()
380
380
381 # TODO: mikhail: probably it makes no sense to re-read hooks information.
381 # TODO: mikhail: probably it makes no sense to re-read hooks information.
382 # It's already there and activated/deactivated
382 # It's already there and activated/deactivated
383 skip_entries = []
383 skip_entries = []
384 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
384 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
385 if 'pull' not in enabled_hook_classes:
385 if 'pull' not in enabled_hook_classes:
386 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
386 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
387 if 'push' not in enabled_hook_classes:
387 if 'push' not in enabled_hook_classes:
388 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
388 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
389 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
389 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
390 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
390 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
391
391
392 config = [entry for entry in config if entry[:2] not in skip_entries]
392 config = [entry for entry in config if entry[:2] not in skip_entries]
393
393
394 return config
394 return config
395
395
396
396
397 def make_db_config(clear_session=True, repo=None):
397 def make_db_config(clear_session=True, repo=None):
398 """
398 """
399 Create a :class:`Config` instance based on the values in the database.
399 Create a :class:`Config` instance based on the values in the database.
400 """
400 """
401 config = Config()
401 config = Config()
402 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
402 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
403 for section, option, value in config_data:
403 for section, option, value in config_data:
404 config.set(section, option, value)
404 config.set(section, option, value)
405 return config
405 return config
406
406
407
407
408 def get_enabled_hook_classes(ui_settings):
408 def get_enabled_hook_classes(ui_settings):
409 """
409 """
410 Return the enabled hook classes.
410 Return the enabled hook classes.
411
411
412 :param ui_settings: List of ui_settings as returned
412 :param ui_settings: List of ui_settings as returned
413 by :meth:`VcsSettingsModel.get_ui_settings`
413 by :meth:`VcsSettingsModel.get_ui_settings`
414
414
415 :return: a list with the enabled hook classes. The order is not guaranteed.
415 :return: a list with the enabled hook classes. The order is not guaranteed.
416 :rtype: list
416 :rtype: list
417 """
417 """
418 enabled_hooks = []
418 enabled_hooks = []
419 active_hook_keys = [
419 active_hook_keys = [
420 key for section, key, value, active in ui_settings
420 key for section, key, value, active in ui_settings
421 if section == 'hooks' and active]
421 if section == 'hooks' and active]
422
422
423 hook_names = {
423 hook_names = {
424 RhodeCodeUi.HOOK_PUSH: 'push',
424 RhodeCodeUi.HOOK_PUSH: 'push',
425 RhodeCodeUi.HOOK_PULL: 'pull',
425 RhodeCodeUi.HOOK_PULL: 'pull',
426 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
426 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
427 }
427 }
428
428
429 for key in active_hook_keys:
429 for key in active_hook_keys:
430 hook = hook_names.get(key)
430 hook = hook_names.get(key)
431 if hook:
431 if hook:
432 enabled_hooks.append(hook)
432 enabled_hooks.append(hook)
433
433
434 return enabled_hooks
434 return enabled_hooks
435
435
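# For illustration, a sketch of the expected shape of ui_settings and the result;
# the hook values are placeholders and each item unpacks as (section, key, value, active):
#
#   ui_settings = [
#       ('hooks', RhodeCodeUi.HOOK_PUSH, 'python:...', True),
#       ('hooks', RhodeCodeUi.HOOK_PULL, 'python:...', True),
#       ('hooks', RhodeCodeUi.HOOK_REPO_SIZE, 'python:...', False),
#   ]
#   get_enabled_hook_classes(ui_settings)   # -> ['push', 'pull'] for this input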
436
436
437 def set_rhodecode_config(config):
437 def set_rhodecode_config(config):
438 """
438 """
439 Updates pyramid config with new settings from database
439 Updates pyramid config with new settings from database
440
440
441 :param config:
441 :param config:
442 """
442 """
443 from rhodecode.model.settings import SettingsModel
443 from rhodecode.model.settings import SettingsModel
444 app_settings = SettingsModel().get_all_settings()
444 app_settings = SettingsModel().get_all_settings()
445
445
446 for k, v in list(app_settings.items()):
446 for k, v in list(app_settings.items()):
447 config[k] = v
447 config[k] = v
448
448
449
449
450 def get_rhodecode_realm():
450 def get_rhodecode_realm():
451 """
451 """
452 Return the rhodecode realm from database.
452 Return the rhodecode realm from database.
453 """
453 """
454 from rhodecode.model.settings import SettingsModel
454 from rhodecode.model.settings import SettingsModel
455 realm = SettingsModel().get_setting_by_name('realm')
455 realm = SettingsModel().get_setting_by_name('realm')
456 return safe_str(realm.app_settings_value)
456 return safe_str(realm.app_settings_value)
457
457
458
458
459 def get_rhodecode_base_path():
459 def get_rhodecode_base_path():
460 """
460 """
461 Returns the base path. The base path is the filesystem path which points
461 Returns the base path. The base path is the filesystem path which points
462 to the repository store.
462 to the repository store.
463 """
463 """
464
464
465 import rhodecode
465 import rhodecode
466 return rhodecode.CONFIG['default_base_path']
466 return rhodecode.CONFIG['default_base_path']
467
467
468
468
469 def map_groups(path):
469 def map_groups(path):
470 """
470 """
471 Given a full path to a repository, create all nested groups that this
471 Given a full path to a repository, create all nested groups that this
472 repo is inside. This function creates parent-child relationships between
472 repo is inside. This function creates parent-child relationships between
473 groups and creates default perms for all new groups.
473 groups and creates default perms for all new groups.
474
474
475 :param path: full path to repository
475 :param path: full path to repository
476 """
476 """
477 from rhodecode.model.repo_group import RepoGroupModel
477 from rhodecode.model.repo_group import RepoGroupModel
478 sa = meta.Session()
478 sa = meta.Session()
479 groups = path.split(Repository.NAME_SEP)
479 groups = path.split(Repository.NAME_SEP)
480 parent = None
480 parent = None
481 group = None
481 group = None
482
482
483 # last element is repo in nested groups structure
483 # last element is repo in nested groups structure
484 groups = groups[:-1]
484 groups = groups[:-1]
485 rgm = RepoGroupModel(sa)
485 rgm = RepoGroupModel(sa)
486 owner = User.get_first_super_admin()
486 owner = User.get_first_super_admin()
487 for lvl, group_name in enumerate(groups):
487 for lvl, group_name in enumerate(groups):
488 group_name = '/'.join(groups[:lvl] + [group_name])
488 group_name = '/'.join(groups[:lvl] + [group_name])
489 group = RepoGroup.get_by_group_name(group_name)
489 group = RepoGroup.get_by_group_name(group_name)
490 desc = '%s group' % group_name
490 desc = '%s group' % group_name
491
491
492 # skip folders that are now removed repos
492 # skip folders that are now removed repos
493 if REMOVED_REPO_PAT.match(group_name):
493 if REMOVED_REPO_PAT.match(group_name):
494 break
494 break
495
495
496 if group is None:
496 if group is None:
497 log.debug('creating group level: %s group_name: %s',
497 log.debug('creating group level: %s group_name: %s',
498 lvl, group_name)
498 lvl, group_name)
499 group = RepoGroup(group_name, parent)
499 group = RepoGroup(group_name, parent)
500 group.group_description = desc
500 group.group_description = desc
501 group.user = owner
501 group.user = owner
502 sa.add(group)
502 sa.add(group)
503 perm_obj = rgm._create_default_perms(group)
503 perm_obj = rgm._create_default_perms(group)
504 sa.add(perm_obj)
504 sa.add(perm_obj)
505 sa.flush()
505 sa.flush()
506
506
507 parent = group
507 parent = group
508 return group
508 return group
509
509
510
510
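For illustration, a small sketch that reproduces only the group-name computation performed by map_groups; the database writes and permission setup are omitted, and Repository.NAME_SEP is assumed to be '/':

NAME_SEP = '/'  # assumption: Repository.NAME_SEP, the separator used above

def nested_group_names(repo_path):
    # everything but the last path element is a (possibly nested) group
    parts = repo_path.split(NAME_SEP)[:-1]
    return [NAME_SEP.join(parts[:i + 1]) for i in range(len(parts))]

print(nested_group_names('projects/backend/my-repo'))
# ['projects', 'projects/backend']
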
511 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
511 def repo2db_mapper(initial_repo_list, remove_obsolete=False, force_hooks_rebuild=False):
512 """
512 """
513 Maps all repos given in initial_repo_list; non-existing repositories
513 Maps all repos given in initial_repo_list; non-existing repositories
514 are created. If remove_obsolete is True, it also checks for db entries
514 are created. If remove_obsolete is True, it also checks for db entries
515 that are not in initial_repo_list and removes them.
515 that are not in initial_repo_list and removes them.
516
516
517 :param initial_repo_list: list of repositories found by scanning methods
517 :param initial_repo_list: list of repositories found by scanning methods
518 :param remove_obsolete: check for obsolete entries in database
518 :param remove_obsolete: check for obsolete entries in database
519 """
519 """
520 from rhodecode.model.repo import RepoModel
520 from rhodecode.model.repo import RepoModel
521 from rhodecode.model.repo_group import RepoGroupModel
521 from rhodecode.model.repo_group import RepoGroupModel
522 from rhodecode.model.settings import SettingsModel
522 from rhodecode.model.settings import SettingsModel
523
523
524 sa = meta.Session()
524 sa = meta.Session()
525 repo_model = RepoModel()
525 repo_model = RepoModel()
526 user = User.get_first_super_admin()
526 user = User.get_first_super_admin()
527 added = []
527 added = []
528
528
529 # creation defaults
529 # creation defaults
530 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
530 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
531 enable_statistics = defs.get('repo_enable_statistics')
531 enable_statistics = defs.get('repo_enable_statistics')
532 enable_locking = defs.get('repo_enable_locking')
532 enable_locking = defs.get('repo_enable_locking')
533 enable_downloads = defs.get('repo_enable_downloads')
533 enable_downloads = defs.get('repo_enable_downloads')
534 private = defs.get('repo_private')
534 private = defs.get('repo_private')
535
535
536 for name, repo in list(initial_repo_list.items()):
536 for name, repo in list(initial_repo_list.items()):
537 group = map_groups(name)
537 group = map_groups(name)
538 str_name = safe_str(name)
538 str_name = safe_str(name)
539 db_repo = repo_model.get_by_repo_name(str_name)
539 db_repo = repo_model.get_by_repo_name(str_name)
540
540 # found repo that is on filesystem not in RhodeCode database
541 # found repo that is on filesystem not in RhodeCode database
541 if not db_repo:
542 if not db_repo:
542 log.info('repository %s not found, creating now', name)
543 log.info('repository `%s` not found in the database, creating now', name)
543 added.append(name)
544 added.append(name)
544 desc = (repo.description
545 desc = (repo.description
545 if repo.description != 'unknown'
546 if repo.description != 'unknown'
546 else '%s repository' % name)
547 else '%s repository' % name)
547
548
548 db_repo = repo_model._create_repo(
549 db_repo = repo_model._create_repo(
549 repo_name=name,
550 repo_name=name,
550 repo_type=repo.alias,
551 repo_type=repo.alias,
551 description=desc,
552 description=desc,
552 repo_group=getattr(group, 'group_id', None),
553 repo_group=getattr(group, 'group_id', None),
553 owner=user,
554 owner=user,
554 enable_locking=enable_locking,
555 enable_locking=enable_locking,
555 enable_downloads=enable_downloads,
556 enable_downloads=enable_downloads,
556 enable_statistics=enable_statistics,
557 enable_statistics=enable_statistics,
557 private=private,
558 private=private,
558 state=Repository.STATE_CREATED
559 state=Repository.STATE_CREATED
559 )
560 )
560 sa.commit()
561 sa.commit()
561 # we just added that repo, so make sure we update the server info
562 # we just added that repo, so make sure we update the server info
562 if db_repo.repo_type == 'git':
563 if db_repo.repo_type == 'git':
563 git_repo = db_repo.scm_instance()
564 git_repo = db_repo.scm_instance()
564 # update repository server-info
565 # update repository server-info
565 log.debug('Running update server info')
566 log.debug('Running update server info')
566 git_repo._update_server_info()
567 git_repo._update_server_info()
567
568
568 db_repo.update_commit_cache()
569 db_repo.update_commit_cache()
569
570
570 config = db_repo._config
571 config = db_repo._config
571 config.set('extensions', 'largefiles', '')
572 config.set('extensions', 'largefiles', '')
572 repo = db_repo.scm_instance(config=config)
573 repo = db_repo.scm_instance(config=config)
573 repo.install_hooks()
574 repo.install_hooks(force=force_hooks_rebuild)
574
575
575 removed = []
576 removed = []
576 if remove_obsolete:
577 if remove_obsolete:
577 # remove from database those repositories that are not in the filesystem
578 # remove from database those repositories that are not in the filesystem
578 for repo in sa.query(Repository).all():
579 for repo in sa.query(Repository).all():
579 if repo.repo_name not in list(initial_repo_list.keys()):
580 if repo.repo_name not in list(initial_repo_list.keys()):
580 log.debug("Removing non-existing repository found in db `%s`",
581 log.debug("Removing non-existing repository found in db `%s`",
581 repo.repo_name)
582 repo.repo_name)
582 try:
583 try:
583 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
584 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
584 sa.commit()
585 sa.commit()
585 removed.append(repo.repo_name)
586 removed.append(repo.repo_name)
586 except Exception:
587 except Exception:
587 # don't hold further removals on error
588 # don't hold further removals on error
588 log.error(traceback.format_exc())
589 log.error(traceback.format_exc())
589 sa.rollback()
590 sa.rollback()
590
591
591 def splitter(full_repo_name):
592 def splitter(full_repo_name):
592 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
593 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
593 gr_name = None
594 gr_name = None
594 if len(_parts) == 2:
595 if len(_parts) == 2:
595 gr_name = _parts[0]
596 gr_name = _parts[0]
596 return gr_name
597 return gr_name
597
598
598 initial_repo_group_list = [splitter(x) for x in
599 initial_repo_group_list = [splitter(x) for x in
599 list(initial_repo_list.keys()) if splitter(x)]
600 list(initial_repo_list.keys()) if splitter(x)]
600
601
601 # remove from the database those repository groups that are not on the
602 # remove from the database those repository groups that are not on the
602 # filesystem; due to parent-child relationships we need to delete them
603 # filesystem; due to parent-child relationships we need to delete them
603 # in a specific order, most nested first
604 # in a specific order, most nested first
604 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
605 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
605 def nested_sort(gr):
606 def nested_sort(gr):
606 return len(gr.split('/'))
607 return len(gr.split('/'))
607 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
608 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
608 if group_name not in initial_repo_group_list:
609 if group_name not in initial_repo_group_list:
609 repo_group = RepoGroup.get_by_group_name(group_name)
610 repo_group = RepoGroup.get_by_group_name(group_name)
610 if (repo_group.children.all() or
611 if (repo_group.children.all() or
611 not RepoGroupModel().check_exist_filesystem(
612 not RepoGroupModel().check_exist_filesystem(
612 group_name=group_name, exc_on_failure=False)):
613 group_name=group_name, exc_on_failure=False)):
613 continue
614 continue
614
615
615 log.info(
616 log.info(
616 'Removing non-existing repository group found in db `%s`',
617 'Removing non-existing repository group found in db `%s`',
617 group_name)
618 group_name)
618 try:
619 try:
619 RepoGroupModel(sa).delete(group_name, fs_remove=False)
620 RepoGroupModel(sa).delete(group_name, fs_remove=False)
620 sa.commit()
621 sa.commit()
621 removed.append(group_name)
622 removed.append(group_name)
622 except Exception:
623 except Exception:
623 # don't hold further removals on error
624 # don't hold further removals on error
624 log.exception(
625 log.exception(
625 'Unable to remove repository group `%s`',
626 'Unable to remove repository group `%s`',
626 group_name)
627 group_name)
627 sa.rollback()
628 sa.rollback()
628 raise
629 raise
629
630
630 return added, removed
631 return added, removed
631
632
632
633
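A hedged usage sketch of the new force_hooks_rebuild flag added in this changeset; the scanned_repos mapping is a placeholder for whatever filesystem scan the caller performs, since repo2db_mapper only needs a dict of repo name to vcs repository instance:

# sketch only: assumes a configured RhodeCode environment;
# `scanned_repos` stands in for the result of a filesystem scan
from rhodecode.lib.utils import repo2db_mapper

scanned_repos = {}  # e.g. {'projects/backend/my-repo': <vcs repo instance>}
added, removed = repo2db_mapper(
    scanned_repos,
    remove_obsolete=False,      # keep db entries even if missing on disk
    force_hooks_rebuild=True,   # force hook re-installation for every scanned repo
                                # (the 4.X -> 5.X upgrade path per this changeset)
)
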
633 def load_rcextensions(root_path):
634 def load_rcextensions(root_path):
634 import rhodecode
635 import rhodecode
635 from rhodecode.config import conf
636 from rhodecode.config import conf
636
637
637 path = os.path.join(root_path)
638 path = os.path.join(root_path)
638 sys.path.append(path)
639 sys.path.append(path)
639
640
640 try:
641 try:
641 rcextensions = __import__('rcextensions')
642 rcextensions = __import__('rcextensions')
642 except ImportError:
643 except ImportError:
643 if os.path.isdir(os.path.join(path, 'rcextensions')):
644 if os.path.isdir(os.path.join(path, 'rcextensions')):
644 log.warning('Unable to load rcextensions from %s', path)
645 log.warning('Unable to load rcextensions from %s', path)
645 rcextensions = None
646 rcextensions = None
646
647
647 if rcextensions:
648 if rcextensions:
648 log.info('Loaded rcextensions from %s...', rcextensions)
649 log.info('Loaded rcextensions from %s...', rcextensions)
649 rhodecode.EXTENSIONS = rcextensions
650 rhodecode.EXTENSIONS = rcextensions
650
651
651 # Additional mappings that are not present in the pygments lexers
652 # Additional mappings that are not present in the pygments lexers
652 conf.LANGUAGES_EXTENSIONS_MAP.update(
653 conf.LANGUAGES_EXTENSIONS_MAP.update(
653 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
654 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
654
655
655
656
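For context, a minimal sketch of an rcextensions module that load_rcextensions would pick up; only the two attributes read in this file are shown, and the EXTRA_MAPPINGS value shape is an assumption (it is not visible in this hunk):

# <root_path>/rcextensions/__init__.py -- minimal sketch
# extra extension->language mappings; the exact value shape mirrors
# conf.LANGUAGES_EXTENSIONS_MAP and is not shown in this hunk
EXTRA_MAPPINGS = {}

# force a specific pygments lexer (by name) for a file extension,
# as consumed by get_custom_lexer() below
EXTRA_LEXERS = {'jinja': 'html+django'}
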
656 def get_custom_lexer(extension):
657 def get_custom_lexer(extension):
657 """
658 """
658 returns a custom lexer if it is defined in rcextensions module, or None
659 returns a custom lexer if it is defined in rcextensions module, or None
659 if there's no custom lexer defined
660 if there's no custom lexer defined
660 """
661 """
661 import rhodecode
662 import rhodecode
662 from pygments import lexers
663 from pygments import lexers
663
664
664 # custom override made by RhodeCode
665 # custom override made by RhodeCode
665 if extension in ['mako']:
666 if extension in ['mako']:
666 return lexers.get_lexer_by_name('html+mako')
667 return lexers.get_lexer_by_name('html+mako')
667
668
668 # check if we didn't define this extension as other lexer
669 # check if we didn't define this extension as other lexer
669 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
670 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
670 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
671 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
671 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
672 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
672 return lexers.get_lexer_by_name(_lexer_name)
673 return lexers.get_lexer_by_name(_lexer_name)
673
674
674
675
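A short usage sketch of the lookup above; the 'jinja' entry in EXTRA_LEXERS is hypothetical:

# assuming rhodecode.EXTENSIONS.EXTRA_LEXERS = {'jinja': 'html+django'}
get_custom_lexer('mako')    # built-in override -> pygments 'html+mako' lexer
get_custom_lexer('jinja')   # resolved via EXTRA_LEXERS -> 'html+django' lexer
get_custom_lexer('py')      # no override defined -> None
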
675 #==============================================================================
676 #==============================================================================
676 # TEST FUNCTIONS AND CREATORS
677 # TEST FUNCTIONS AND CREATORS
677 #==============================================================================
678 #==============================================================================
678 def create_test_index(repo_location, config):
679 def create_test_index(repo_location, config):
679 """
680 """
680 Makes default test index.
681 Makes default test index.
681 """
682 """
682 try:
683 try:
683 import rc_testdata
684 import rc_testdata
684 except ImportError:
685 except ImportError:
685 raise ImportError('Failed to import rc_testdata, '
686 raise ImportError('Failed to import rc_testdata, '
686 'please make sure this package is installed from requirements_test.txt')
687 'please make sure this package is installed from requirements_test.txt')
687 rc_testdata.extract_search_index(
688 rc_testdata.extract_search_index(
688 'vcs_search_index', os.path.dirname(config['search.location']))
689 'vcs_search_index', os.path.dirname(config['search.location']))
689
690
690
691
691 def create_test_directory(test_path):
692 def create_test_directory(test_path):
692 """
693 """
693 Create test directory if it doesn't exist.
694 Create test directory if it doesn't exist.
694 """
695 """
695 if not os.path.isdir(test_path):
696 if not os.path.isdir(test_path):
696 log.debug('Creating testdir %s', test_path)
697 log.debug('Creating testdir %s', test_path)
697 os.makedirs(test_path)
698 os.makedirs(test_path)
698
699
699
700
700 def create_test_database(test_path, config):
701 def create_test_database(test_path, config):
701 """
702 """
702 Makes a fresh database.
703 Makes a fresh database.
703 """
704 """
704 from rhodecode.lib.db_manage import DbManage
705 from rhodecode.lib.db_manage import DbManage
705 from rhodecode.lib.utils2 import get_encryption_key
706 from rhodecode.lib.utils2 import get_encryption_key
706
707
707 # PART ONE create db
708 # PART ONE create db
708 dbconf = config['sqlalchemy.db1.url']
709 dbconf = config['sqlalchemy.db1.url']
709 enc_key = get_encryption_key(config)
710 enc_key = get_encryption_key(config)
710
711
711 log.debug('making test db %s', dbconf)
712 log.debug('making test db %s', dbconf)
712
713
713 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
714 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
714 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
715 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
715 dbmanage.create_tables(override=True)
716 dbmanage.create_tables(override=True)
716 dbmanage.set_db_version()
717 dbmanage.set_db_version()
717 # for tests dynamically set new root paths based on generated content
718 # for tests dynamically set new root paths based on generated content
718 dbmanage.create_settings(dbmanage.config_prompt(test_path))
719 dbmanage.create_settings(dbmanage.config_prompt(test_path))
719 dbmanage.create_default_user()
720 dbmanage.create_default_user()
720 dbmanage.create_test_admin_and_users()
721 dbmanage.create_test_admin_and_users()
721 dbmanage.create_permissions()
722 dbmanage.create_permissions()
722 dbmanage.populate_default_permissions()
723 dbmanage.populate_default_permissions()
723 Session().commit()
724 Session().commit()
724
725
725
726
726 def create_test_repositories(test_path, config):
727 def create_test_repositories(test_path, config):
727 """
728 """
728 Creates test repositories in the temporary directory. Repositories are
729 Creates test repositories in the temporary directory. Repositories are
729 extracted from archives within the rc_testdata package.
730 extracted from archives within the rc_testdata package.
730 """
731 """
731 import rc_testdata
732 import rc_testdata
732 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
733 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
733
734
734 log.debug('making test vcs repositories')
735 log.debug('making test vcs repositories')
735
736
736 idx_path = config['search.location']
737 idx_path = config['search.location']
737 data_path = config['cache_dir']
738 data_path = config['cache_dir']
738
739
739 # clean index and data
740 # clean index and data
740 if idx_path and os.path.exists(idx_path):
741 if idx_path and os.path.exists(idx_path):
741 log.debug('remove %s', idx_path)
742 log.debug('remove %s', idx_path)
742 shutil.rmtree(idx_path)
743 shutil.rmtree(idx_path)
743
744
744 if data_path and os.path.exists(data_path):
745 if data_path and os.path.exists(data_path):
745 log.debug('remove %s', data_path)
746 log.debug('remove %s', data_path)
746 shutil.rmtree(data_path)
747 shutil.rmtree(data_path)
747
748
748 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
749 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
749 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
750 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
750
751
751 # Note: Subversion is in the process of being integrated with the system,
752 # Note: Subversion is in the process of being integrated with the system,
752 # until we have a properly packed version of the test svn repository, this
753 # until we have a properly packed version of the test svn repository, this
753 # tries to copy over the repo from a package "rc_testdata"
754 # tries to copy over the repo from a package "rc_testdata"
754 svn_repo_path = rc_testdata.get_svn_repo_archive()
755 svn_repo_path = rc_testdata.get_svn_repo_archive()
755 with tarfile.open(svn_repo_path) as tar:
756 with tarfile.open(svn_repo_path) as tar:
756 tar.extractall(jn(test_path, SVN_REPO))
757 tar.extractall(jn(test_path, SVN_REPO))
757
758
758
759
759 def password_changed(auth_user, session):
760 def password_changed(auth_user, session):
760 # Never report password change in case of default user or anonymous user.
761 # Never report password change in case of default user or anonymous user.
761 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
762 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
762 return False
763 return False
763
764
764 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
765 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
765 rhodecode_user = session.get('rhodecode_user', {})
766 rhodecode_user = session.get('rhodecode_user', {})
766 session_password_hash = rhodecode_user.get('password', '')
767 session_password_hash = rhodecode_user.get('password', '')
767 return password_hash != session_password_hash
768 return password_hash != session_password_hash
768
769
769
770
770 def read_opensource_licenses():
771 def read_opensource_licenses():
771 global _license_cache
772 global _license_cache
772
773
773 if not _license_cache:
774 if not _license_cache:
774 licenses = pkg_resources.resource_string(
775 licenses = pkg_resources.resource_string(
775 'rhodecode', 'config/licenses.json')
776 'rhodecode', 'config/licenses.json')
776 _license_cache = json.loads(licenses)
777 _license_cache = json.loads(licenses)
777
778
778 return _license_cache
779 return _license_cache
779
780
780
781
781 def generate_platform_uuid():
782 def generate_platform_uuid():
782 """
783 """
783 Generates a platform UUID based on its name
784 Generates a platform UUID based on its name
784 """
785 """
785 import platform
786 import platform
786
787
787 try:
788 try:
788 uuid_list = [platform.platform()]
789 uuid_list = [platform.platform()]
789 return sha256_safe(':'.join(uuid_list))
790 return sha256_safe(':'.join(uuid_list))
790 except Exception as e:
791 except Exception as e:
791 log.error('Failed to generate host uuid: %s', e)
792 log.error('Failed to generate host uuid: %s', e)
792 return 'UNDEFINED'
793 return 'UNDEFINED'
793
794
794
795
795 def send_test_email(recipients, email_body='TEST EMAIL'):
796 def send_test_email(recipients, email_body='TEST EMAIL'):
796 """
797 """
797 Simple code for generating test emails.
798 Simple code for generating test emails.
798 Usage::
799 Usage::
799
800
800 from rhodecode.lib import utils
801 from rhodecode.lib import utils
801 utils.send_test_email()
802 utils.send_test_email()
802 """
803 """
803 from rhodecode.lib.celerylib import tasks, run_task
804 from rhodecode.lib.celerylib import tasks, run_task
804
805
805 email_body = email_body_plaintext = email_body
806 email_body = email_body_plaintext = email_body
806 subject = f'SUBJECT FROM: {socket.gethostname()}'
807 subject = f'SUBJECT FROM: {socket.gethostname()}'
807 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
808 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
@@ -1,28 +1,33 b''
1 ${h.secure_form(h.route_path('admin_settings_mapping_update'), request=request)}
1 ${h.secure_form(h.route_path('admin_settings_mapping_update'), request=request)}
2
2
3 <div class="panel panel-default">
3 <div class="panel panel-default">
4 <div class="panel-heading">
4 <div class="panel-heading">
5 <h3 class="panel-title">${_('Import New Groups or Repositories')}</h3>
5 <h3 class="panel-title">${_('Import New Groups or Repositories')}</h3>
6 </div>
6 </div>
7 <div class="panel-body">
7 <div class="panel-body">
8
9 <p>
10 ${_('This function will scan all data under the current storage path located at')} <code>${c.storage_path}</code>
11 </p>
12
8 <div class="checkbox">
13 <div class="checkbox">
9 ${h.checkbox('destroy',True)}
14 ${h.checkbox('destroy',True)}
10 <label for="destroy">${_('Destroy old data')}</label>
15 <label for="destroy">${_('Destroy old data')}</label>
11 </div>
16 </div>
12 <span class="help-block">${_('In case a repository or a group was deleted from the filesystem and it still exists in the database, check this option to remove obsolete data from the database.')}</span>
17 <span class="help-block">${_('In case a repository or a group was deleted from the filesystem and it still exists in the database, check this option to remove obsolete data from the database.')}</span>
13
18
14 <div class="checkbox">
19 <div class="checkbox">
15 ${h.checkbox('invalidate',True)}
20 ${h.checkbox('invalidate',True)}
16 <label for="invalidate"> ${_('Invalidate cache for all repositories')}</label>
21 <label for="invalidate"> ${_('Invalidate cache for all repositories')}</label>
17 </div>
22 </div>
18 <span class="help-block">${_('Cached data for each repository will be cleared when this option is selected. Use this to reload data and clear cache keys.')}</span>
23 <span class="help-block">${_('Cached data for each repository will be cleared when this option is selected. Use this to reload data and clear cache keys.')}</span>
19
24
20 <div class="buttons">
25 <div class="buttons">
21 ${h.submit('rescan',_('Rescan Filesystem'),class_="btn")}
26 ${h.submit('rescan',_('Rescan Filesystem'),class_="btn")}
22 </div>
27 </div>
23
28
24 </div>
29 </div>
25 </div>
30 </div>
26
31
27
32
28 ${h.end_form()}
33 ${h.end_form()}
@@ -1,1750 +1,1750 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import collections
20 import collections
21 import datetime
21 import datetime
22 import os
22 import os
23 import re
23 import re
24 import pprint
24 import pprint
25 import shutil
25 import shutil
26 import socket
26 import socket
27 import subprocess
27 import subprocess
28 import time
28 import time
29 import uuid
29 import uuid
30 import dateutil.tz
30 import dateutil.tz
31 import logging
31 import logging
32 import functools
32 import functools
33
33
34 import mock
34 import mock
35 import pyramid.testing
35 import pyramid.testing
36 import pytest
36 import pytest
37 import colander
37 import colander
38 import requests
38 import requests
39 import pyramid.paster
39 import pyramid.paster
40
40
41 import rhodecode
41 import rhodecode
42 import rhodecode.lib
42 import rhodecode.lib
43 from rhodecode.model.changeset_status import ChangesetStatusModel
43 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.comment import CommentsModel
44 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.db import (
45 from rhodecode.model.db import (
46 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
46 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
47 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
47 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 from rhodecode.model.meta import Session
48 from rhodecode.model.meta import Session
49 from rhodecode.model.pull_request import PullRequestModel
49 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.repo import RepoModel
50 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo_group import RepoGroupModel
51 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.user import UserModel
52 from rhodecode.model.user import UserModel
53 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.user_group import UserGroupModel
54 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.integration import IntegrationModel
55 from rhodecode.model.integration import IntegrationModel
56 from rhodecode.integrations import integration_type_registry
56 from rhodecode.integrations import integration_type_registry
57 from rhodecode.integrations.types.base import IntegrationTypeBase
57 from rhodecode.integrations.types.base import IntegrationTypeBase
58 from rhodecode.lib.utils import repo2db_mapper
58 from rhodecode.lib.utils import repo2db_mapper
59 from rhodecode.lib.str_utils import safe_bytes
59 from rhodecode.lib.str_utils import safe_bytes
60 from rhodecode.lib.hash_utils import sha1_safe
60 from rhodecode.lib.hash_utils import sha1_safe
61 from rhodecode.lib.vcs.backends import get_backend
61 from rhodecode.lib.vcs.backends import get_backend
62 from rhodecode.lib.vcs.nodes import FileNode
62 from rhodecode.lib.vcs.nodes import FileNode
63 from rhodecode.tests import (
63 from rhodecode.tests import (
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 TEST_USER_REGULAR_PASS)
66 TEST_USER_REGULAR_PASS)
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 from rhodecode.tests.fixture import Fixture
68 from rhodecode.tests.fixture import Fixture
69 from rhodecode.config import utils as config_utils
69 from rhodecode.config import utils as config_utils
70
70
71 log = logging.getLogger(__name__)
71 log = logging.getLogger(__name__)
72
72
73
73
74 def cmp(a, b):
74 def cmp(a, b):
75 # backport cmp from python2 so we can still use it in the custom code in this module
75 # backport cmp from python2 so we can still use it in the custom code in this module
76 return (a > b) - (a < b)
76 return (a > b) - (a < b)
77
77
78
78
79 @pytest.fixture(scope='session', autouse=True)
79 @pytest.fixture(scope='session', autouse=True)
80 def activate_example_rcextensions(request):
80 def activate_example_rcextensions(request):
81 """
81 """
82 Patch in an example rcextensions module which verifies passed in kwargs.
82 Patch in an example rcextensions module which verifies passed in kwargs.
83 """
83 """
84 from rhodecode.config import rcextensions
84 from rhodecode.config import rcextensions
85
85
86 old_extensions = rhodecode.EXTENSIONS
86 old_extensions = rhodecode.EXTENSIONS
87 rhodecode.EXTENSIONS = rcextensions
87 rhodecode.EXTENSIONS = rcextensions
88 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
88 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
89
89
90 @request.addfinalizer
90 @request.addfinalizer
91 def cleanup():
91 def cleanup():
92 rhodecode.EXTENSIONS = old_extensions
92 rhodecode.EXTENSIONS = old_extensions
93
93
94
94
95 @pytest.fixture()
95 @pytest.fixture()
96 def capture_rcextensions():
96 def capture_rcextensions():
97 """
97 """
98 Returns the recorded calls to entry points in rcextensions.
98 Returns the recorded calls to entry points in rcextensions.
99 """
99 """
100 calls = rhodecode.EXTENSIONS.calls
100 calls = rhodecode.EXTENSIONS.calls
101 calls.clear()
101 calls.clear()
102 # Note: At this moment, it is still the empty dict, but that will
102 # Note: At this moment, it is still the empty dict, but that will
103 # be filled during the test run and since it is a reference this
103 # be filled during the test run and since it is a reference this
104 # is enough to make it work.
104 # is enough to make it work.
105 return calls
105 return calls
106
106
107
107
108 @pytest.fixture(scope='session')
108 @pytest.fixture(scope='session')
109 def http_environ_session():
109 def http_environ_session():
110 """
110 """
111 Allow to use "http_environ" in session scope.
111 Allow to use "http_environ" in session scope.
112 """
112 """
113 return plain_http_environ()
113 return plain_http_environ()
114
114
115
115
116 def plain_http_host_stub():
116 def plain_http_host_stub():
117 """
117 """
118 Value of HTTP_HOST in the test run.
118 Value of HTTP_HOST in the test run.
119 """
119 """
120 return 'example.com:80'
120 return 'example.com:80'
121
121
122
122
123 @pytest.fixture()
123 @pytest.fixture()
124 def http_host_stub():
124 def http_host_stub():
125 """
125 """
126 Value of HTTP_HOST in the test run.
126 Value of HTTP_HOST in the test run.
127 """
127 """
128 return plain_http_host_stub()
128 return plain_http_host_stub()
129
129
130
130
131 def plain_http_host_only_stub():
131 def plain_http_host_only_stub():
132 """
132 """
133 Value of HTTP_HOST in the test run.
133 Value of HTTP_HOST in the test run.
134 """
134 """
135 return plain_http_host_stub().split(':')[0]
135 return plain_http_host_stub().split(':')[0]
136
136
137
137
138 @pytest.fixture()
138 @pytest.fixture()
139 def http_host_only_stub():
139 def http_host_only_stub():
140 """
140 """
141 Value of HTTP_HOST in the test run.
141 Value of HTTP_HOST in the test run.
142 """
142 """
143 return plain_http_host_only_stub()
143 return plain_http_host_only_stub()
144
144
145
145
146 def plain_http_environ():
146 def plain_http_environ():
147 """
147 """
148 HTTP extra environ keys.
148 HTTP extra environ keys.
149
149
150 Used by the test application as well as for setting up the pylons
150 Used by the test application as well as for setting up the pylons
151 environment. In the case of the fixture "app" it should be possible
151 environment. In the case of the fixture "app" it should be possible
152 to override this for a specific test case.
152 to override this for a specific test case.
153 """
153 """
154 return {
154 return {
155 'SERVER_NAME': plain_http_host_only_stub(),
155 'SERVER_NAME': plain_http_host_only_stub(),
156 'SERVER_PORT': plain_http_host_stub().split(':')[1],
156 'SERVER_PORT': plain_http_host_stub().split(':')[1],
157 'HTTP_HOST': plain_http_host_stub(),
157 'HTTP_HOST': plain_http_host_stub(),
158 'HTTP_USER_AGENT': 'rc-test-agent',
158 'HTTP_USER_AGENT': 'rc-test-agent',
159 'REQUEST_METHOD': 'GET'
159 'REQUEST_METHOD': 'GET'
160 }
160 }
161
161
162
162
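A hedged sketch of overriding these environ keys for a single test, following the CustomTestApp wiring used by the `app` fixture below; the host name and expected status are assumptions:

# sketch: mirrors the `app` fixture below, with one environ key tweaked
def test_with_custom_host(baseapp):
    environ = plain_http_environ()
    environ['HTTP_HOST'] = 'other.example.com:80'   # illustrative host
    environ['SERVER_NAME'] = 'other.example.com'
    custom_app = CustomTestApp(baseapp, extra_environ=environ)
    custom_app.get('/', status=200)  # expected status is an assumption
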
163 @pytest.fixture()
163 @pytest.fixture()
164 def http_environ():
164 def http_environ():
165 """
165 """
166 HTTP extra environ keys.
166 HTTP extra environ keys.
167
167
168 Used by the test application as well as for setting up the pylons
168 Used by the test application as well as for setting up the pylons
169 environment. In the case of the fixture "app" it should be possible
169 environment. In the case of the fixture "app" it should be possible
170 to override this for a specific test case.
170 to override this for a specific test case.
171 """
171 """
172 return plain_http_environ()
172 return plain_http_environ()
173
173
174
174
175 @pytest.fixture(scope='session')
175 @pytest.fixture(scope='session')
176 def baseapp(ini_config, vcsserver, http_environ_session):
176 def baseapp(ini_config, vcsserver, http_environ_session):
177 from rhodecode.lib.pyramid_utils import get_app_config
177 from rhodecode.lib.pyramid_utils import get_app_config
178 from rhodecode.config.middleware import make_pyramid_app
178 from rhodecode.config.middleware import make_pyramid_app
179
179
180 log.info("Using the RhodeCode configuration: {}".format(ini_config))
180 log.info("Using the RhodeCode configuration: {}".format(ini_config))
181 pyramid.paster.setup_logging(ini_config)
181 pyramid.paster.setup_logging(ini_config)
182
182
183 settings = get_app_config(ini_config)
183 settings = get_app_config(ini_config)
184 app = make_pyramid_app({'__file__': ini_config}, **settings)
184 app = make_pyramid_app({'__file__': ini_config}, **settings)
185
185
186 return app
186 return app
187
187
188
188
189 @pytest.fixture(scope='function')
189 @pytest.fixture(scope='function')
190 def app(request, config_stub, baseapp, http_environ):
190 def app(request, config_stub, baseapp, http_environ):
191 app = CustomTestApp(
191 app = CustomTestApp(
192 baseapp,
192 baseapp,
193 extra_environ=http_environ)
193 extra_environ=http_environ)
194 if request.cls:
194 if request.cls:
195 request.cls.app = app
195 request.cls.app = app
196 return app
196 return app
197
197
198
198
199 @pytest.fixture(scope='session')
199 @pytest.fixture(scope='session')
200 def app_settings(baseapp, ini_config):
200 def app_settings(baseapp, ini_config):
201 """
201 """
202 Settings dictionary used to create the app.
202 Settings dictionary used to create the app.
203
203
204 Parses the ini file and passes the result through the sanitize and apply
204 Parses the ini file and passes the result through the sanitize and apply
205 defaults mechanism in `rhodecode.config.middleware`.
205 defaults mechanism in `rhodecode.config.middleware`.
206 """
206 """
207 return baseapp.config.get_settings()
207 return baseapp.config.get_settings()
208
208
209
209
210 @pytest.fixture(scope='session')
210 @pytest.fixture(scope='session')
211 def db_connection(ini_settings):
211 def db_connection(ini_settings):
212 # Initialize the database connection.
212 # Initialize the database connection.
213 config_utils.initialize_database(ini_settings)
213 config_utils.initialize_database(ini_settings)
214
214
215
215
216 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
216 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
217
217
218
218
219 def _autologin_user(app, *args):
219 def _autologin_user(app, *args):
220 session = login_user_session(app, *args)
220 session = login_user_session(app, *args)
221 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
221 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
222 return LoginData(csrf_token, session['rhodecode_user'])
222 return LoginData(csrf_token, session['rhodecode_user'])
223
223
224
224
225 @pytest.fixture()
225 @pytest.fixture()
226 def autologin_user(app):
226 def autologin_user(app):
227 """
227 """
228 Utility fixture which makes sure that the admin user is logged in
228 Utility fixture which makes sure that the admin user is logged in
229 """
229 """
230 return _autologin_user(app)
230 return _autologin_user(app)
231
231
232
232
233 @pytest.fixture()
233 @pytest.fixture()
234 def autologin_regular_user(app):
234 def autologin_regular_user(app):
235 """
235 """
236 Utility fixture which makes sure that the regular user is logged in
236 Utility fixture which makes sure that the regular user is logged in
237 """
237 """
238 return _autologin_user(
238 return _autologin_user(
239 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
239 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
240
240
241
241
242 @pytest.fixture(scope='function')
242 @pytest.fixture(scope='function')
243 def csrf_token(request, autologin_user):
243 def csrf_token(request, autologin_user):
244 return autologin_user.csrf_token
244 return autologin_user.csrf_token
245
245
246
246
247 @pytest.fixture(scope='function')
247 @pytest.fixture(scope='function')
248 def xhr_header(request):
248 def xhr_header(request):
249 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
249 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
250
250
251
251
252 @pytest.fixture()
252 @pytest.fixture()
253 def real_crypto_backend(monkeypatch):
253 def real_crypto_backend(monkeypatch):
254 """
254 """
255 Switch the production crypto backend on for this test.
255 Switch the production crypto backend on for this test.
256
256
257 During the test run the crypto backend is replaced with a faster
257 During the test run the crypto backend is replaced with a faster
258 implementation based on the MD5 algorithm.
258 implementation based on the MD5 algorithm.
259 """
259 """
260 monkeypatch.setattr(rhodecode, 'is_test', False)
260 monkeypatch.setattr(rhodecode, 'is_test', False)
261
261
262
262
263 @pytest.fixture(scope='class')
263 @pytest.fixture(scope='class')
264 def index_location(request, baseapp):
264 def index_location(request, baseapp):
265 index_location = baseapp.config.get_settings()['search.location']
265 index_location = baseapp.config.get_settings()['search.location']
266 if request.cls:
266 if request.cls:
267 request.cls.index_location = index_location
267 request.cls.index_location = index_location
268 return index_location
268 return index_location
269
269
270
270
271 @pytest.fixture(scope='session', autouse=True)
271 @pytest.fixture(scope='session', autouse=True)
272 def tests_tmp_path(request):
272 def tests_tmp_path(request):
273 """
273 """
274 Create temporary directory to be used during the test session.
274 Create temporary directory to be used during the test session.
275 """
275 """
276 if not os.path.exists(TESTS_TMP_PATH):
276 if not os.path.exists(TESTS_TMP_PATH):
277 os.makedirs(TESTS_TMP_PATH)
277 os.makedirs(TESTS_TMP_PATH)
278
278
279 if not request.config.getoption('--keep-tmp-path'):
279 if not request.config.getoption('--keep-tmp-path'):
280 @request.addfinalizer
280 @request.addfinalizer
281 def remove_tmp_path():
281 def remove_tmp_path():
282 shutil.rmtree(TESTS_TMP_PATH)
282 shutil.rmtree(TESTS_TMP_PATH)
283
283
284 return TESTS_TMP_PATH
284 return TESTS_TMP_PATH
285
285
286
286
287 @pytest.fixture()
287 @pytest.fixture()
288 def test_repo_group(request):
288 def test_repo_group(request):
289 """
289 """
290 Create a temporary repository group, and destroy it after
290 Create a temporary repository group, and destroy it after
291 usage automatically
291 usage automatically
292 """
292 """
293 fixture = Fixture()
293 fixture = Fixture()
294 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
294 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
295 repo_group = fixture.create_repo_group(repogroupid)
295 repo_group = fixture.create_repo_group(repogroupid)
296
296
297 def _cleanup():
297 def _cleanup():
298 fixture.destroy_repo_group(repogroupid)
298 fixture.destroy_repo_group(repogroupid)
299
299
300 request.addfinalizer(_cleanup)
300 request.addfinalizer(_cleanup)
301 return repo_group
301 return repo_group
302
302
303
303
304 @pytest.fixture()
304 @pytest.fixture()
305 def test_user_group(request):
305 def test_user_group(request):
306 """
306 """
307 Create a temporary user group, and destroy it after
307 Create a temporary user group, and destroy it after
308 usage automatically
308 usage automatically
309 """
309 """
310 fixture = Fixture()
310 fixture = Fixture()
311 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
311 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
312 user_group = fixture.create_user_group(usergroupid)
312 user_group = fixture.create_user_group(usergroupid)
313
313
314 def _cleanup():
314 def _cleanup():
315 fixture.destroy_user_group(user_group)
315 fixture.destroy_user_group(user_group)
316
316
317 request.addfinalizer(_cleanup)
317 request.addfinalizer(_cleanup)
318 return user_group
318 return user_group
319
319
320
320
321 @pytest.fixture(scope='session')
321 @pytest.fixture(scope='session')
322 def test_repo(request):
322 def test_repo(request):
323 container = TestRepoContainer()
323 container = TestRepoContainer()
324 request.addfinalizer(container._cleanup)
324 request.addfinalizer(container._cleanup)
325 return container
325 return container
326
326
327
327
328 class TestRepoContainer(object):
328 class TestRepoContainer(object):
329 """
329 """
330 Container for test repositories which are used read only.
330 Container for test repositories which are used read only.
331
331
332 Repositories will be created on demand and re-used during the lifetime
332 Repositories will be created on demand and re-used during the lifetime
333 of this object.
333 of this object.
334
334
335 Usage to get the svn test repository "minimal"::
335 Usage to get the svn test repository "minimal"::
336
336
337 test_repo = TestRepoContainer()
337 test_repo = TestRepoContainer()
338 repo = test_repo('minimal', 'svn')
338 repo = test_repo('minimal', 'svn')
339
339
340 """
340 """
341
341
342 dump_extractors = {
342 dump_extractors = {
343 'git': utils.extract_git_repo_from_dump,
343 'git': utils.extract_git_repo_from_dump,
344 'hg': utils.extract_hg_repo_from_dump,
344 'hg': utils.extract_hg_repo_from_dump,
345 'svn': utils.extract_svn_repo_from_dump,
345 'svn': utils.extract_svn_repo_from_dump,
346 }
346 }
347
347
348 def __init__(self):
348 def __init__(self):
349 self._cleanup_repos = []
349 self._cleanup_repos = []
350 self._fixture = Fixture()
350 self._fixture = Fixture()
351 self._repos = {}
351 self._repos = {}
352
352
353 def __call__(self, dump_name, backend_alias, config=None):
353 def __call__(self, dump_name, backend_alias, config=None):
354 key = (dump_name, backend_alias)
354 key = (dump_name, backend_alias)
355 if key not in self._repos:
355 if key not in self._repos:
356 repo = self._create_repo(dump_name, backend_alias, config)
356 repo = self._create_repo(dump_name, backend_alias, config)
357 self._repos[key] = repo.repo_id
357 self._repos[key] = repo.repo_id
358 return Repository.get(self._repos[key])
358 return Repository.get(self._repos[key])
359
359
360 def _create_repo(self, dump_name, backend_alias, config):
360 def _create_repo(self, dump_name, backend_alias, config):
361 repo_name = '%s-%s' % (backend_alias, dump_name)
361 repo_name = f'{backend_alias}-{dump_name}'
362 backend = get_backend(backend_alias)
362 backend = get_backend(backend_alias)
363 dump_extractor = self.dump_extractors[backend_alias]
363 dump_extractor = self.dump_extractors[backend_alias]
364 repo_path = dump_extractor(dump_name, repo_name)
364 repo_path = dump_extractor(dump_name, repo_name)
365
365
366 vcs_repo = backend(repo_path, config=config)
366 vcs_repo = backend(repo_path, config=config)
367 repo2db_mapper({repo_name: vcs_repo})
367 repo2db_mapper({repo_name: vcs_repo})
368
368
369 repo = RepoModel().get_by_repo_name(repo_name)
369 repo = RepoModel().get_by_repo_name(repo_name)
370 self._cleanup_repos.append(repo_name)
370 self._cleanup_repos.append(repo_name)
371 return repo
371 return repo
372
372
373 def _cleanup(self):
373 def _cleanup(self):
374 for repo_name in reversed(self._cleanup_repos):
374 for repo_name in reversed(self._cleanup_repos):
375 self._fixture.destroy_repo(repo_name)
375 self._fixture.destroy_repo(repo_name)
376
376
377
377
378 def backend_base(request, backend_alias, baseapp, test_repo):
378 def backend_base(request, backend_alias, baseapp, test_repo):
379 if backend_alias not in request.config.getoption('--backends'):
379 if backend_alias not in request.config.getoption('--backends'):
380 pytest.skip("Backend %s not selected." % (backend_alias, ))
380 pytest.skip("Backend %s not selected." % (backend_alias, ))
381
381
382 utils.check_xfail_backends(request.node, backend_alias)
382 utils.check_xfail_backends(request.node, backend_alias)
383 utils.check_skip_backends(request.node, backend_alias)
383 utils.check_skip_backends(request.node, backend_alias)
384
384
385 repo_name = 'vcs_test_%s' % (backend_alias, )
385 repo_name = 'vcs_test_%s' % (backend_alias, )
386 backend = Backend(
386 backend = Backend(
387 alias=backend_alias,
387 alias=backend_alias,
388 repo_name=repo_name,
388 repo_name=repo_name,
389 test_name=request.node.name,
389 test_name=request.node.name,
390 test_repo_container=test_repo)
390 test_repo_container=test_repo)
391 request.addfinalizer(backend.cleanup)
391 request.addfinalizer(backend.cleanup)
392 return backend
392 return backend
393
393
394
394
395 @pytest.fixture()
395 @pytest.fixture()
396 def backend(request, backend_alias, baseapp, test_repo):
396 def backend(request, backend_alias, baseapp, test_repo):
397 """
397 """
398 Parametrized fixture which represents a single backend implementation.
398 Parametrized fixture which represents a single backend implementation.
399
399
400 It respects the option `--backends` to focus the test run on specific
400 It respects the option `--backends` to focus the test run on specific
401 backend implementations.
401 backend implementations.
402
402
403 It also supports `pytest.mark.xfail_backends` to mark tests as failing
403 It also supports `pytest.mark.xfail_backends` to mark tests as failing
404 for specific backends. This is intended as a utility for incremental
404 for specific backends. This is intended as a utility for incremental
405 development of a new backend implementation.
405 development of a new backend implementation.
406 """
406 """
407 return backend_base(request, backend_alias, baseapp, test_repo)
407 return backend_base(request, backend_alias, baseapp, test_repo)
408
408
409
409
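For orientation, a hedged example of how a test might consume these backend fixtures, using only the create_repo and scm_instance calls visible in this module:

# sketch: runs once per backend selected via --backends
def test_commit_count(backend):
    repo = backend.create_repo(number_of_commits=2)
    vcs_repo = repo.scm_instance()
    assert len(vcs_repo.commit_ids) == 2
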
410 @pytest.fixture()
410 @pytest.fixture()
411 def backend_git(request, baseapp, test_repo):
411 def backend_git(request, baseapp, test_repo):
412 return backend_base(request, 'git', baseapp, test_repo)
412 return backend_base(request, 'git', baseapp, test_repo)
413
413
414
414
415 @pytest.fixture()
415 @pytest.fixture()
416 def backend_hg(request, baseapp, test_repo):
416 def backend_hg(request, baseapp, test_repo):
417 return backend_base(request, 'hg', baseapp, test_repo)
417 return backend_base(request, 'hg', baseapp, test_repo)
418
418
419
419
420 @pytest.fixture()
420 @pytest.fixture()
421 def backend_svn(request, baseapp, test_repo):
421 def backend_svn(request, baseapp, test_repo):
422 return backend_base(request, 'svn', baseapp, test_repo)
422 return backend_base(request, 'svn', baseapp, test_repo)
423
423
424
424
425 @pytest.fixture()
425 @pytest.fixture()
426 def backend_random(backend_git):
426 def backend_random(backend_git):
427 """
427 """
428 Use this to express that your tests need "a backend".
428 Use this to express that your tests need "a backend".
429
429
430 A few of our tests need a backend, so that we can run the code. This
430 A few of our tests need a backend, so that we can run the code. This
431 fixture is intended to be used for such cases. It will pick one of the
431 fixture is intended to be used for such cases. It will pick one of the
432 backends and run the tests.
432 backends and run the tests.
433
433
434 The fixture `backend` would run the test multiple times for each
434 The fixture `backend` would run the test multiple times for each
435 available backend which is a pure waste of time if the test is
435 available backend which is a pure waste of time if the test is
436 independent of the backend type.
436 independent of the backend type.
437 """
437 """
438 # TODO: johbo: Change this to pick a random backend
438 # TODO: johbo: Change this to pick a random backend
439 return backend_git
439 return backend_git
440
440
441
441
442 @pytest.fixture()
442 @pytest.fixture()
443 def backend_stub(backend_git):
443 def backend_stub(backend_git):
444 """
444 """
445 Use this to express that your tests need a backend stub
445 Use this to express that your tests need a backend stub
446
446
447 TODO: mikhail: Implement a real stub logic instead of returning
447 TODO: mikhail: Implement a real stub logic instead of returning
448 a git backend
448 a git backend
449 """
449 """
450 return backend_git
450 return backend_git
451
451
452
452
453 @pytest.fixture()
453 @pytest.fixture()
454 def repo_stub(backend_stub):
454 def repo_stub(backend_stub):
455 """
455 """
456 Use this to express that your tests need a repository stub
456 Use this to express that your tests need a repository stub
457 """
457 """
458 return backend_stub.create_repo()
458 return backend_stub.create_repo()
459
459
460
460
461 class Backend(object):
461 class Backend(object):
462 """
462 """
463 Represents the test configuration for one supported backend
463 Represents the test configuration for one supported backend
464
464
465 Provides easy access to different test repositories based on
465 Provides easy access to different test repositories based on
466 `__getitem__`. Such repositories will only be created once per test
466 `__getitem__`. Such repositories will only be created once per test
467 session.
467 session.
468 """
468 """
469
469
470 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
470 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
471 _master_repo = None
471 _master_repo = None
472 _master_repo_path = ''
472 _master_repo_path = ''
473 _commit_ids = {}
473 _commit_ids = {}
474
474
475 def __init__(self, alias, repo_name, test_name, test_repo_container):
475 def __init__(self, alias, repo_name, test_name, test_repo_container):
476 self.alias = alias
476 self.alias = alias
477 self.repo_name = repo_name
477 self.repo_name = repo_name
478 self._cleanup_repos = []
478 self._cleanup_repos = []
479 self._test_name = test_name
479 self._test_name = test_name
480 self._test_repo_container = test_repo_container
480 self._test_repo_container = test_repo_container
481 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
481 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
482 # Fixture will survive in the end.
482 # Fixture will survive in the end.
483 self._fixture = Fixture()
483 self._fixture = Fixture()
484
484
485 def __getitem__(self, key):
485 def __getitem__(self, key):
486 return self._test_repo_container(key, self.alias)
486 return self._test_repo_container(key, self.alias)
487
487
488 def create_test_repo(self, key, config=None):
488 def create_test_repo(self, key, config=None):
489 return self._test_repo_container(key, self.alias, config)
489 return self._test_repo_container(key, self.alias, config)
490
490
491 @property
491 @property
492 def repo_id(self):
492 def repo_id(self):
493 # just fake some repo_id
493 # just fake some repo_id
494 return self.repo.repo_id
494 return self.repo.repo_id
495
495
496 @property
496 @property
497 def repo(self):
497 def repo(self):
498 """
498 """
499 Returns the "current" repository. This is the vcs_test repo or the
499 Returns the "current" repository. This is the vcs_test repo or the
500 last repo which has been created with `create_repo`.
500 last repo which has been created with `create_repo`.
501 """
501 """
502 from rhodecode.model.db import Repository
502 from rhodecode.model.db import Repository
503 return Repository.get_by_repo_name(self.repo_name)
503 return Repository.get_by_repo_name(self.repo_name)
504
504
505 @property
505 @property
506 def default_branch_name(self):
506 def default_branch_name(self):
507 VcsRepository = get_backend(self.alias)
507 VcsRepository = get_backend(self.alias)
508 return VcsRepository.DEFAULT_BRANCH_NAME
508 return VcsRepository.DEFAULT_BRANCH_NAME
509
509
510 @property
510 @property
511 def default_head_id(self):
511 def default_head_id(self):
512 """
512 """
513 Returns the default head id of the underlying backend.
513 Returns the default head id of the underlying backend.
514
514
515 This will be the default branch name in case the backend does have a
515 This will be the default branch name in case the backend does have a
516 default branch. In the other cases it will point to a valid head
516 default branch. In the other cases it will point to a valid head
517 which can serve as the base to create a new commit on top of it.
517 which can serve as the base to create a new commit on top of it.
518 """
518 """
519 vcsrepo = self.repo.scm_instance()
519 vcsrepo = self.repo.scm_instance()
520 head_id = (
520 head_id = (
521 vcsrepo.DEFAULT_BRANCH_NAME or
521 vcsrepo.DEFAULT_BRANCH_NAME or
522 vcsrepo.commit_ids[-1])
522 vcsrepo.commit_ids[-1])
523 return head_id
523 return head_id
524
524
525 @property
525 @property
526 def commit_ids(self):
526 def commit_ids(self):
527 """
527 """
528 Returns the list of commits for the last created repository
528 Returns the list of commits for the last created repository
529 """
529 """
530 return self._commit_ids
530 return self._commit_ids
531
531
532 def create_master_repo(self, commits):
532 def create_master_repo(self, commits):
533 """
533 """
534 Create a repository and remember it as a template.
534 Create a repository and remember it as a template.
535
535
536 This makes it easy to create derived repositories to construct
536 This makes it easy to create derived repositories to construct
537 more complex scenarios for diff, compare and pull requests.
537 more complex scenarios for diff, compare and pull requests.
538
538
539 Returns a commit map which maps from commit message to raw_id.
539 Returns a commit map which maps from commit message to raw_id.
540 """
540 """
541 self._master_repo = self.create_repo(commits=commits)
541 self._master_repo = self.create_repo(commits=commits)
542 self._master_repo_path = self._master_repo.repo_full_path
542 self._master_repo_path = self._master_repo.repo_full_path
543
543
544 return self._commit_ids
544 return self._commit_ids
545
545
546 def create_repo(
546 def create_repo(
547 self, commits=None, number_of_commits=0, heads=None,
547 self, commits=None, number_of_commits=0, heads=None,
548 name_suffix='', bare=False, **kwargs):
548 name_suffix='', bare=False, **kwargs):
549 """
549 """
550 Create a repository and record it for later cleanup.
550 Create a repository and record it for later cleanup.
551
551
552 :param commits: Optional. A sequence of dict instances.
552 :param commits: Optional. A sequence of dict instances.
553 Will add a commit per entry to the new repository.
553 Will add a commit per entry to the new repository.
554 :param number_of_commits: Optional. If set to a number, this number of
554 :param number_of_commits: Optional. If set to a number, this number of
555 commits will be added to the new repository.
555 commits will be added to the new repository.
556 :param heads: Optional. Can be set to a sequence of commit
556 :param heads: Optional. Can be set to a sequence of commit
557 names which shall be pulled in from the master repository.
557 names which shall be pulled in from the master repository.
558 :param name_suffix: adds a special suffix to the generated repo name
558 :param name_suffix: adds a special suffix to the generated repo name
559 :param bare: create the repository as bare (no checkout)
559 :param bare: create the repository as bare (no checkout)
560 """
560 """
561 self.repo_name = self._next_repo_name() + name_suffix
561 self.repo_name = self._next_repo_name() + name_suffix
562 repo = self._fixture.create_repo(
562 repo = self._fixture.create_repo(
563 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
563 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
564 self._cleanup_repos.append(repo.repo_name)
564 self._cleanup_repos.append(repo.repo_name)
565
565
566 commits = commits or [
566 commits = commits or [
567 {'message': f'Commit {x} of {self.repo_name}'}
567 {'message': f'Commit {x} of {self.repo_name}'}
568 for x in range(number_of_commits)]
568 for x in range(number_of_commits)]
569 vcs_repo = repo.scm_instance()
569 vcs_repo = repo.scm_instance()
570 vcs_repo.count()
570 vcs_repo.count()
571 self._add_commits_to_repo(vcs_repo, commits)
571 self._add_commits_to_repo(vcs_repo, commits)
572 if heads:
572 if heads:
573 self.pull_heads(repo, heads)
573 self.pull_heads(repo, heads)
574
574
575 return repo
575 return repo
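# A rough usage sketch (illustrative), assuming the `backend` test fixture:
# either pass explicit commit dicts or just request generated commits.
#
#     repo = backend.create_repo(number_of_commits=2, name_suffix='_base')
#     bare_repo = backend.create_repo(number_of_commits=1, bare=True)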
576
576
577 def pull_heads(self, repo, heads, do_fetch=False):
577 def pull_heads(self, repo, heads, do_fetch=False):
578 """
578 """
579 Make sure that repo contains all commits mentioned in `heads`
579 Make sure that repo contains all commits mentioned in `heads`
580 """
580 """
581 vcsrepo = repo.scm_instance()
581 vcsrepo = repo.scm_instance()
582 vcsrepo.config.clear_section('hooks')
582 vcsrepo.config.clear_section('hooks')
583 commit_ids = [self._commit_ids[h] for h in heads]
583 commit_ids = [self._commit_ids[h] for h in heads]
584 if do_fetch:
584 if do_fetch:
585 vcsrepo.fetch(self._master_repo_path, commit_ids=commit_ids)
585 vcsrepo.fetch(self._master_repo_path, commit_ids=commit_ids)
586 vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)
586 vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)
587
587
588 def create_fork(self):
588 def create_fork(self):
589 repo_to_fork = self.repo_name
589 repo_to_fork = self.repo_name
590 self.repo_name = self._next_repo_name()
590 self.repo_name = self._next_repo_name()
591 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
591 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
592 self._cleanup_repos.append(self.repo_name)
592 self._cleanup_repos.append(self.repo_name)
593 return repo
593 return repo
594
594
595 def new_repo_name(self, suffix=''):
595 def new_repo_name(self, suffix=''):
596 self.repo_name = self._next_repo_name() + suffix
596 self.repo_name = self._next_repo_name() + suffix
597 self._cleanup_repos.append(self.repo_name)
597 self._cleanup_repos.append(self.repo_name)
598 return self.repo_name
598 return self.repo_name
599
599
600 def _next_repo_name(self):
600 def _next_repo_name(self):
601 return "%s_%s" % (
601 return "%s_%s" % (
602 self.invalid_repo_name.sub('_', self._test_name), len(self._cleanup_repos))
602 self.invalid_repo_name.sub('_', self._test_name), len(self._cleanup_repos))
603
603
604 def ensure_file(self, filename, content=b'Test content\n'):
604 def ensure_file(self, filename, content=b'Test content\n'):
605 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
605 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
606 commits = [
606 commits = [
607 {'added': [
607 {'added': [
608 FileNode(filename, content=content),
608 FileNode(filename, content=content),
609 ]},
609 ]},
610 ]
610 ]
611 self._add_commits_to_repo(self.repo.scm_instance(), commits)
611 self._add_commits_to_repo(self.repo.scm_instance(), commits)
612
612
613 def enable_downloads(self):
613 def enable_downloads(self):
614 repo = self.repo
614 repo = self.repo
615 repo.enable_downloads = True
615 repo.enable_downloads = True
616 Session().add(repo)
616 Session().add(repo)
617 Session().commit()
617 Session().commit()
618
618
619 def cleanup(self):
619 def cleanup(self):
620 for repo_name in reversed(self._cleanup_repos):
620 for repo_name in reversed(self._cleanup_repos):
621 self._fixture.destroy_repo(repo_name)
621 self._fixture.destroy_repo(repo_name)
622
622
623 def _add_commits_to_repo(self, repo, commits):
623 def _add_commits_to_repo(self, repo, commits):
624 commit_ids = _add_commits_to_repo(repo, commits)
624 commit_ids = _add_commits_to_repo(repo, commits)
625 if not commit_ids:
625 if not commit_ids:
626 return
626 return
627 self._commit_ids = commit_ids
627 self._commit_ids = commit_ids
628
628
629 # Create refs for Git so they can be fetched from the remote repository
629 # Create refs for Git so they can be fetched from the remote repository
630 if self.alias == 'git':
630 if self.alias == 'git':
631 refs = {}
631 refs = {}
632 for message in self._commit_ids:
632 for message in self._commit_ids:
633 cleanup_message = message.replace(' ', '')
633 cleanup_message = message.replace(' ', '')
634 ref_name = f'refs/test-refs/{cleanup_message}'
634 ref_name = f'refs/test-refs/{cleanup_message}'
635 refs[ref_name] = self._commit_ids[message]
635 refs[ref_name] = self._commit_ids[message]
636 self._create_refs(repo, refs)
636 self._create_refs(repo, refs)
637
637
638 def _create_refs(self, repo, refs):
638 def _create_refs(self, repo, refs):
639 for ref_name, ref_val in refs.items():
639 for ref_name, ref_val in refs.items():
640 repo.set_refs(ref_name, ref_val)
640 repo.set_refs(ref_name, ref_val)
641
641
642
642
643 class VcsBackend(object):
643 class VcsBackend(object):
644 """
644 """
645 Represents the test configuration for one supported vcs backend.
645 Represents the test configuration for one supported vcs backend.
646 """
646 """
647
647
648 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
648 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
649
649
650 def __init__(self, alias, repo_path, test_name, test_repo_container):
650 def __init__(self, alias, repo_path, test_name, test_repo_container):
651 self.alias = alias
651 self.alias = alias
652 self._repo_path = repo_path
652 self._repo_path = repo_path
653 self._cleanup_repos = []
653 self._cleanup_repos = []
654 self._test_name = test_name
654 self._test_name = test_name
655 self._test_repo_container = test_repo_container
655 self._test_repo_container = test_repo_container
656
656
657 def __getitem__(self, key):
657 def __getitem__(self, key):
658 return self._test_repo_container(key, self.alias).scm_instance()
658 return self._test_repo_container(key, self.alias).scm_instance()
659
659
660 def __repr__(self):
660 def __repr__(self):
661 return f'{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})'
661 return f'{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})'
662
662
663 @property
663 @property
664 def repo(self):
664 def repo(self):
665 """
665 """
666 Returns the "current" repository. This is the vcs_test repo or the last
666 Returns the "current" repository. This is the vcs_test repo or the last
667 repo which has been created.
667 repo which has been created.
668 """
668 """
669 Repository = get_backend(self.alias)
669 Repository = get_backend(self.alias)
670 return Repository(self._repo_path)
670 return Repository(self._repo_path)
671
671
672 @property
672 @property
673 def backend(self):
673 def backend(self):
674 """
674 """
675 Returns the backend implementation class.
675 Returns the backend implementation class.
676 """
676 """
677 return get_backend(self.alias)
677 return get_backend(self.alias)
678
678
679 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
679 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
680 bare=False):
680 bare=False):
681 repo_name = self._next_repo_name()
681 repo_name = self._next_repo_name()
682 self._repo_path = get_new_dir(repo_name)
682 self._repo_path = get_new_dir(repo_name)
683 repo_class = get_backend(self.alias)
683 repo_class = get_backend(self.alias)
684 src_url = None
684 src_url = None
685 if _clone_repo:
685 if _clone_repo:
686 src_url = _clone_repo.path
686 src_url = _clone_repo.path
687 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
687 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
688 self._cleanup_repos.append(repo)
688 self._cleanup_repos.append(repo)
689
689
690 commits = commits or [
690 commits = commits or [
691 {'message': 'Commit %s of %s' % (x, repo_name)}
691 {'message': 'Commit %s of %s' % (x, repo_name)}
692 for x in range(number_of_commits)]
692 for x in range(number_of_commits)]
693 _add_commits_to_repo(repo, commits)
693 _add_commits_to_repo(repo, commits)
694 return repo
694 return repo
695
695
696 def clone_repo(self, repo):
696 def clone_repo(self, repo):
697 return self.create_repo(_clone_repo=repo)
697 return self.create_repo(_clone_repo=repo)
698
698
699 def cleanup(self):
699 def cleanup(self):
700 for repo in self._cleanup_repos:
700 for repo in self._cleanup_repos:
701 shutil.rmtree(repo.path)
701 shutil.rmtree(repo.path)
702
702
703 def new_repo_path(self):
703 def new_repo_path(self):
704 repo_name = self._next_repo_name()
704 repo_name = self._next_repo_name()
705 self._repo_path = get_new_dir(repo_name)
705 self._repo_path = get_new_dir(repo_name)
706 return self._repo_path
706 return self._repo_path
707
707
708 def _next_repo_name(self):
708 def _next_repo_name(self):
709
709
710 return "{}_{}".format(
710 return "{}_{}".format(
711 self.invalid_repo_name.sub('_', self._test_name),
711 self.invalid_repo_name.sub('_', self._test_name),
712 len(self._cleanup_repos)
712 len(self._cleanup_repos)
713 )
713 )
714
714
715 def add_file(self, repo, filename, content='Test content\n'):
715 def add_file(self, repo, filename, content='Test content\n'):
716 imc = repo.in_memory_commit
716 imc = repo.in_memory_commit
717 imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content)))
717 imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content)))
718 imc.commit(
718 imc.commit(
719 message='Automatic commit from vcsbackend fixture',
719 message='Automatic commit from vcsbackend fixture',
720 author='Automatic <automatic@rhodecode.com>')
720 author='Automatic <automatic@rhodecode.com>')
721
721
722 def ensure_file(self, filename, content='Test content\n'):
722 def ensure_file(self, filename, content='Test content\n'):
723 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
723 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
724 self.add_file(self.repo, filename, content)
724 self.add_file(self.repo, filename, content)
725
725
726
726
727 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend:
727 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend:
728 if backend_alias not in request.config.getoption('--backends'):
728 if backend_alias not in request.config.getoption('--backends'):
729 pytest.skip("Backend %s not selected." % (backend_alias, ))
729 pytest.skip("Backend %s not selected." % (backend_alias, ))
730
730
731 utils.check_xfail_backends(request.node, backend_alias)
731 utils.check_xfail_backends(request.node, backend_alias)
732 utils.check_skip_backends(request.node, backend_alias)
732 utils.check_skip_backends(request.node, backend_alias)
733
733
734 repo_name = f'vcs_test_{backend_alias}'
734 repo_name = f'vcs_test_{backend_alias}'
735 repo_path = os.path.join(tests_tmp_path, repo_name)
735 repo_path = os.path.join(tests_tmp_path, repo_name)
736 backend = VcsBackend(
736 backend = VcsBackend(
737 alias=backend_alias,
737 alias=backend_alias,
738 repo_path=repo_path,
738 repo_path=repo_path,
739 test_name=request.node.name,
739 test_name=request.node.name,
740 test_repo_container=test_repo)
740 test_repo_container=test_repo)
741 request.addfinalizer(backend.cleanup)
741 request.addfinalizer(backend.cleanup)
742 return backend
742 return backend
743
743
744
744
745 @pytest.fixture()
745 @pytest.fixture()
746 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
746 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
747 """
747 """
748 Parametrized fixture which represents a single vcs backend implementation.
748 Parametrized fixture which represents a single vcs backend implementation.
749
749
750 See the fixture `backend` for more details. This one implements the same
750 See the fixture `backend` for more details. This one implements the same
751 concept, but on the vcs level, so it does not provide model instances etc.
751 concept, but on the vcs level, so it does not provide model instances etc.
752
752
753 Parameters are generated dynamically, see :func:`pytest_generate_tests`
753 Parameters are generated dynamically, see :func:`pytest_generate_tests`
754 for how this works.
754 for how this works.
755 """
755 """
756 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
756 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
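# A rough usage sketch (illustrative) of this fixture in a test:
#
#     def test_initial_commit(vcsbackend):
#         repo = vcsbackend.create_repo(number_of_commits=1)
#         assert len(repo.commit_ids) == 1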
757
757
758
758
759 @pytest.fixture()
759 @pytest.fixture()
760 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
760 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
761 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
761 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
762
762
763
763
764 @pytest.fixture()
764 @pytest.fixture()
765 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
765 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
766 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
766 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
767
767
768
768
769 @pytest.fixture()
769 @pytest.fixture()
770 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
770 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
771 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
771 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
772
772
773
773
774 @pytest.fixture()
774 @pytest.fixture()
775 def vcsbackend_stub(vcsbackend_git):
775 def vcsbackend_stub(vcsbackend_git):
776 """
776 """
777 Use this to express that your test just needs a stub of a vcsbackend.
777 Use this to express that your test just needs a stub of a vcsbackend.
778
778
779 The plan is to eventually implement an in-memory stub to speed up tests.
779 The plan is to eventually implement an in-memory stub to speed up tests.
780 """
780 """
781 return vcsbackend_git
781 return vcsbackend_git
782
782
783
783
784 def _add_commits_to_repo(vcs_repo, commits):
784 def _add_commits_to_repo(vcs_repo, commits):
785 commit_ids = {}
785 commit_ids = {}
786 if not commits:
786 if not commits:
787 return commit_ids
787 return commit_ids
788
788
789 imc = vcs_repo.in_memory_commit
789 imc = vcs_repo.in_memory_commit
790
790
791 for idx, commit in enumerate(commits):
791 for idx, commit in enumerate(commits):
792 message = str(commit.get('message', f'Commit {idx}'))
792 message = str(commit.get('message', f'Commit {idx}'))
793
793
794 for node in commit.get('added', []):
794 for node in commit.get('added', []):
795 imc.add(FileNode(safe_bytes(node.path), content=node.content))
795 imc.add(FileNode(safe_bytes(node.path), content=node.content))
796 for node in commit.get('changed', []):
796 for node in commit.get('changed', []):
797 imc.change(FileNode(safe_bytes(node.path), content=node.content))
797 imc.change(FileNode(safe_bytes(node.path), content=node.content))
798 for node in commit.get('removed', []):
798 for node in commit.get('removed', []):
799 imc.remove(FileNode(safe_bytes(node.path)))
799 imc.remove(FileNode(safe_bytes(node.path)))
800
800
801 parents = [
801 parents = [
802 vcs_repo.get_commit(commit_id=commit_ids[p])
802 vcs_repo.get_commit(commit_id=commit_ids[p])
803 for p in commit.get('parents', [])]
803 for p in commit.get('parents', [])]
804
804
805 operations = ('added', 'changed', 'removed')
805 operations = ('added', 'changed', 'removed')
806 if not any((commit.get(o) for o in operations)):
806 if not any((commit.get(o) for o in operations)):
807 imc.add(FileNode(b'file_%b' % safe_bytes(str(idx)), content=safe_bytes(message)))
807 imc.add(FileNode(b'file_%b' % safe_bytes(str(idx)), content=safe_bytes(message)))
808
808
809 commit = imc.commit(
809 commit = imc.commit(
810 message=message,
810 message=message,
811 author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
811 author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
812 date=commit.get('date'),
812 date=commit.get('date'),
813 branch=commit.get('branch'),
813 branch=commit.get('branch'),
814 parents=parents)
814 parents=parents)
815
815
816 commit_ids[commit.message] = commit.raw_id
816 commit_ids[commit.message] = commit.raw_id
817
817
818 return commit_ids
818 return commit_ids
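# A rough sketch (illustrative) of the `commits` specification consumed above;
# supported keys are 'message', 'added', 'changed', 'removed', 'parents',
# 'author', 'date' and 'branch', all optional:
#
#     commits = [
#         {'message': 'Add readme',
#          'added': [FileNode('README.rst', content=b'docs')]},
#         {'message': 'Update readme',
#          'changed': [FileNode('README.rst', content=b'more docs')]},
#     ]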
819
819
820
820
821 @pytest.fixture()
821 @pytest.fixture()
822 def reposerver(request):
822 def reposerver(request):
823 """
823 """
824 Allows serving a backend repository
824 Allows serving a backend repository
825 """
825 """
826
826
827 repo_server = RepoServer()
827 repo_server = RepoServer()
828 request.addfinalizer(repo_server.cleanup)
828 request.addfinalizer(repo_server.cleanup)
829 return repo_server
829 return repo_server
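# A rough usage sketch (illustrative); only Subversion repositories can be
# served so far:
#
#     def test_svn_server(reposerver, vcsbackend_svn):
#         repo = vcsbackend_svn.create_repo(number_of_commits=1)
#         reposerver.serve(repo)
#         # reposerver.url is now 'svn://localhost'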
830
830
831
831
832 class RepoServer(object):
832 class RepoServer(object):
833 """
833 """
834 Utility to serve a local repository for the duration of a test case.
834 Utility to serve a local repository for the duration of a test case.
835
835
836 Supports only Subversion so far.
836 Supports only Subversion so far.
837 """
837 """
838
838
839 url = None
839 url = None
840
840
841 def __init__(self):
841 def __init__(self):
842 self._cleanup_servers = []
842 self._cleanup_servers = []
843
843
844 def serve(self, vcsrepo):
844 def serve(self, vcsrepo):
845 if vcsrepo.alias != 'svn':
845 if vcsrepo.alias != 'svn':
846 raise TypeError("Backend %s not supported" % vcsrepo.alias)
846 raise TypeError("Backend %s not supported" % vcsrepo.alias)
847
847
848 proc = subprocess.Popen(
848 proc = subprocess.Popen(
849 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
849 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
850 '--root', vcsrepo.path])
850 '--root', vcsrepo.path])
851 self._cleanup_servers.append(proc)
851 self._cleanup_servers.append(proc)
852 self.url = 'svn://localhost'
852 self.url = 'svn://localhost'
853
853
854 def cleanup(self):
854 def cleanup(self):
855 for proc in self._cleanup_servers:
855 for proc in self._cleanup_servers:
856 proc.terminate()
856 proc.terminate()
857
857
858
858
859 @pytest.fixture()
859 @pytest.fixture()
860 def pr_util(backend, request, config_stub):
860 def pr_util(backend, request, config_stub):
861 """
861 """
862 Utility for tests of models and for functional tests around pull requests.
862 Utility for tests of models and for functional tests around pull requests.
863
863
864 It gives an instance of :class:`PRTestUtility` which provides various
864 It gives an instance of :class:`PRTestUtility` which provides various
865 utility methods around one pull request.
865 utility methods around one pull request.
866
866
867 This fixture uses `backend` and inherits its parameterization.
867 This fixture uses `backend` and inherits its parameterization.
868 """
868 """
869
869
870 util = PRTestUtility(backend)
870 util = PRTestUtility(backend)
871 request.addfinalizer(util.cleanup)
871 request.addfinalizer(util.cleanup)
872
872
873 return util
873 return util
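# A rough usage sketch (illustrative):
#
#     def test_close_pull_request(pr_util):
#         pull_request = pr_util.create_pull_request(mergeable=True)
#         pr_util.approve()
#         pr_util.close()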
874
874
875
875
876 class PRTestUtility(object):
876 class PRTestUtility(object):
877
877
878 pull_request = None
878 pull_request = None
879 pull_request_id = None
879 pull_request_id = None
880 mergeable_patcher = None
880 mergeable_patcher = None
881 mergeable_mock = None
881 mergeable_mock = None
882 notification_patcher = None
882 notification_patcher = None
883 commit_ids: dict
883 commit_ids: dict
884
884
885 def __init__(self, backend):
885 def __init__(self, backend):
886 self.backend = backend
886 self.backend = backend
887
887
888 def create_pull_request(
888 def create_pull_request(
889 self, commits=None, target_head=None, source_head=None,
889 self, commits=None, target_head=None, source_head=None,
890 revisions=None, approved=False, author=None, mergeable=False,
890 revisions=None, approved=False, author=None, mergeable=False,
891 enable_notifications=True, name_suffix='', reviewers=None, observers=None,
891 enable_notifications=True, name_suffix='', reviewers=None, observers=None,
892 title="Test", description="Description"):
892 title="Test", description="Description"):
893 self.set_mergeable(mergeable)
893 self.set_mergeable(mergeable)
894 if not enable_notifications:
894 if not enable_notifications:
895 # mock notification side effect
895 # mock notification side effect
896 self.notification_patcher = mock.patch(
896 self.notification_patcher = mock.patch(
897 'rhodecode.model.notification.NotificationModel.create')
897 'rhodecode.model.notification.NotificationModel.create')
898 self.notification_patcher.start()
898 self.notification_patcher.start()
899
899
900 if not self.pull_request:
900 if not self.pull_request:
901 if not commits:
901 if not commits:
902 commits = [
902 commits = [
903 {'message': 'c1'},
903 {'message': 'c1'},
904 {'message': 'c2'},
904 {'message': 'c2'},
905 {'message': 'c3'},
905 {'message': 'c3'},
906 ]
906 ]
907 target_head = 'c1'
907 target_head = 'c1'
908 source_head = 'c2'
908 source_head = 'c2'
909 revisions = ['c2']
909 revisions = ['c2']
910
910
911 self.commit_ids = self.backend.create_master_repo(commits)
911 self.commit_ids = self.backend.create_master_repo(commits)
912 self.target_repository = self.backend.create_repo(
912 self.target_repository = self.backend.create_repo(
913 heads=[target_head], name_suffix=name_suffix)
913 heads=[target_head], name_suffix=name_suffix)
914 self.source_repository = self.backend.create_repo(
914 self.source_repository = self.backend.create_repo(
915 heads=[source_head], name_suffix=name_suffix)
915 heads=[source_head], name_suffix=name_suffix)
916 self.author = author or UserModel().get_by_username(
916 self.author = author or UserModel().get_by_username(
917 TEST_USER_ADMIN_LOGIN)
917 TEST_USER_ADMIN_LOGIN)
918
918
919 model = PullRequestModel()
919 model = PullRequestModel()
920 self.create_parameters = {
920 self.create_parameters = {
921 'created_by': self.author,
921 'created_by': self.author,
922 'source_repo': self.source_repository.repo_name,
922 'source_repo': self.source_repository.repo_name,
923 'source_ref': self._default_branch_reference(source_head),
923 'source_ref': self._default_branch_reference(source_head),
924 'target_repo': self.target_repository.repo_name,
924 'target_repo': self.target_repository.repo_name,
925 'target_ref': self._default_branch_reference(target_head),
925 'target_ref': self._default_branch_reference(target_head),
926 'revisions': [self.commit_ids[r] for r in revisions],
926 'revisions': [self.commit_ids[r] for r in revisions],
927 'reviewers': reviewers or self._get_reviewers(),
927 'reviewers': reviewers or self._get_reviewers(),
928 'observers': observers or self._get_observers(),
928 'observers': observers or self._get_observers(),
929 'title': title,
929 'title': title,
930 'description': description,
930 'description': description,
931 }
931 }
932 self.pull_request = model.create(**self.create_parameters)
932 self.pull_request = model.create(**self.create_parameters)
933 assert model.get_versions(self.pull_request) == []
933 assert model.get_versions(self.pull_request) == []
934
934
935 self.pull_request_id = self.pull_request.pull_request_id
935 self.pull_request_id = self.pull_request.pull_request_id
936
936
937 if approved:
937 if approved:
938 self.approve()
938 self.approve()
939
939
940 Session().add(self.pull_request)
940 Session().add(self.pull_request)
941 Session().commit()
941 Session().commit()
942
942
943 return self.pull_request
943 return self.pull_request
944
944
945 def approve(self):
945 def approve(self):
946 self.create_status_votes(
946 self.create_status_votes(
947 ChangesetStatus.STATUS_APPROVED,
947 ChangesetStatus.STATUS_APPROVED,
948 *self.pull_request.reviewers)
948 *self.pull_request.reviewers)
949
949
950 def close(self):
950 def close(self):
951 PullRequestModel().close_pull_request(self.pull_request, self.author)
951 PullRequestModel().close_pull_request(self.pull_request, self.author)
952
952
953 def _default_branch_reference(self, commit_message, branch: str = None) -> str:
953 def _default_branch_reference(self, commit_message, branch: str = None) -> str:
954 default_branch = branch or self.backend.default_branch_name
954 default_branch = branch or self.backend.default_branch_name
955 message = self.commit_ids[commit_message]
955 message = self.commit_ids[commit_message]
956 reference = f'branch:{default_branch}:{message}'
956 reference = f'branch:{default_branch}:{message}'
957
957
958 return reference
958 return reference
959
959
960 def _get_reviewers(self):
960 def _get_reviewers(self):
961 role = PullRequestReviewers.ROLE_REVIEWER
961 role = PullRequestReviewers.ROLE_REVIEWER
962 return [
962 return [
963 (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
963 (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
964 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
964 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
965 ]
965 ]
966
966
967 def _get_observers(self):
967 def _get_observers(self):
968 return [
968 return [
969
969
970 ]
970 ]
971
971
972 def update_source_repository(self, head=None, do_fetch=False):
972 def update_source_repository(self, head=None, do_fetch=False):
973 heads = [head or 'c3']
973 heads = [head or 'c3']
974 self.backend.pull_heads(self.source_repository, heads=heads, do_fetch=do_fetch)
974 self.backend.pull_heads(self.source_repository, heads=heads, do_fetch=do_fetch)
975
975
976 def update_target_repository(self, head=None, do_fetch=False):
976 def update_target_repository(self, head=None, do_fetch=False):
977 heads = [head or 'c3']
977 heads = [head or 'c3']
978 self.backend.pull_heads(self.target_repository, heads=heads, do_fetch=do_fetch)
978 self.backend.pull_heads(self.target_repository, heads=heads, do_fetch=do_fetch)
979
979
980 def set_pr_target_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
980 def set_pr_target_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
981 full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
981 full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
982 self.pull_request.target_ref = full_ref
982 self.pull_request.target_ref = full_ref
983 return full_ref
983 return full_ref
984
984
985 def set_pr_source_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
985 def set_pr_source_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
986 full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
986 full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
987 self.pull_request.source_ref = full_ref
987 self.pull_request.source_ref = full_ref
988 return full_ref
988 return full_ref
989
989
990 def add_one_commit(self, head=None):
990 def add_one_commit(self, head=None):
991 self.update_source_repository(head=head)
991 self.update_source_repository(head=head)
992 old_commit_ids = set(self.pull_request.revisions)
992 old_commit_ids = set(self.pull_request.revisions)
993 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
993 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
994 commit_ids = set(self.pull_request.revisions)
994 commit_ids = set(self.pull_request.revisions)
995 new_commit_ids = commit_ids - old_commit_ids
995 new_commit_ids = commit_ids - old_commit_ids
996 assert len(new_commit_ids) == 1
996 assert len(new_commit_ids) == 1
997 return new_commit_ids.pop()
997 return new_commit_ids.pop()
998
998
999 def remove_one_commit(self):
999 def remove_one_commit(self):
1000 assert len(self.pull_request.revisions) == 2
1000 assert len(self.pull_request.revisions) == 2
1001 source_vcs = self.source_repository.scm_instance()
1001 source_vcs = self.source_repository.scm_instance()
1002 removed_commit_id = source_vcs.commit_ids[-1]
1002 removed_commit_id = source_vcs.commit_ids[-1]
1003
1003
1004 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1004 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1005 # remove the if once that's sorted out.
1005 # remove the if once that's sorted out.
1006 if self.backend.alias == "git":
1006 if self.backend.alias == "git":
1007 kwargs = {'branch_name': self.backend.default_branch_name}
1007 kwargs = {'branch_name': self.backend.default_branch_name}
1008 else:
1008 else:
1009 kwargs = {}
1009 kwargs = {}
1010 source_vcs.strip(removed_commit_id, **kwargs)
1010 source_vcs.strip(removed_commit_id, **kwargs)
1011
1011
1012 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
1012 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
1013 assert len(self.pull_request.revisions) == 1
1013 assert len(self.pull_request.revisions) == 1
1014 return removed_commit_id
1014 return removed_commit_id
1015
1015
1016 def create_comment(self, linked_to=None):
1016 def create_comment(self, linked_to=None):
1017 comment = CommentsModel().create(
1017 comment = CommentsModel().create(
1018 text="Test comment",
1018 text="Test comment",
1019 repo=self.target_repository.repo_name,
1019 repo=self.target_repository.repo_name,
1020 user=self.author,
1020 user=self.author,
1021 pull_request=self.pull_request)
1021 pull_request=self.pull_request)
1022 assert comment.pull_request_version_id is None
1022 assert comment.pull_request_version_id is None
1023
1023
1024 if linked_to:
1024 if linked_to:
1025 PullRequestModel()._link_comments_to_version(linked_to)
1025 PullRequestModel()._link_comments_to_version(linked_to)
1026
1026
1027 return comment
1027 return comment
1028
1028
1029 def create_inline_comment(
1029 def create_inline_comment(
1030 self, linked_to=None, line_no='n1', file_path='file_1'):
1030 self, linked_to=None, line_no='n1', file_path='file_1'):
1031 comment = CommentsModel().create(
1031 comment = CommentsModel().create(
1032 text="Test comment",
1032 text="Test comment",
1033 repo=self.target_repository.repo_name,
1033 repo=self.target_repository.repo_name,
1034 user=self.author,
1034 user=self.author,
1035 line_no=line_no,
1035 line_no=line_no,
1036 f_path=file_path,
1036 f_path=file_path,
1037 pull_request=self.pull_request)
1037 pull_request=self.pull_request)
1038 assert comment.pull_request_version_id is None
1038 assert comment.pull_request_version_id is None
1039
1039
1040 if linked_to:
1040 if linked_to:
1041 PullRequestModel()._link_comments_to_version(linked_to)
1041 PullRequestModel()._link_comments_to_version(linked_to)
1042
1042
1043 return comment
1043 return comment
1044
1044
1045 def create_version_of_pull_request(self):
1045 def create_version_of_pull_request(self):
1046 pull_request = self.create_pull_request()
1046 pull_request = self.create_pull_request()
1047 version = PullRequestModel()._create_version_from_snapshot(
1047 version = PullRequestModel()._create_version_from_snapshot(
1048 pull_request)
1048 pull_request)
1049 return version
1049 return version
1050
1050
1051 def create_status_votes(self, status, *reviewers):
1051 def create_status_votes(self, status, *reviewers):
1052 for reviewer in reviewers:
1052 for reviewer in reviewers:
1053 ChangesetStatusModel().set_status(
1053 ChangesetStatusModel().set_status(
1054 repo=self.pull_request.target_repo,
1054 repo=self.pull_request.target_repo,
1055 status=status,
1055 status=status,
1056 user=reviewer.user_id,
1056 user=reviewer.user_id,
1057 pull_request=self.pull_request)
1057 pull_request=self.pull_request)
1058
1058
1059 def set_mergeable(self, value):
1059 def set_mergeable(self, value):
1060 if not self.mergeable_patcher:
1060 if not self.mergeable_patcher:
1061 self.mergeable_patcher = mock.patch.object(
1061 self.mergeable_patcher = mock.patch.object(
1062 VcsSettingsModel, 'get_general_settings')
1062 VcsSettingsModel, 'get_general_settings')
1063 self.mergeable_mock = self.mergeable_patcher.start()
1063 self.mergeable_mock = self.mergeable_patcher.start()
1064 self.mergeable_mock.return_value = {
1064 self.mergeable_mock.return_value = {
1065 'rhodecode_pr_merge_enabled': value}
1065 'rhodecode_pr_merge_enabled': value}
1066
1066
1067 def cleanup(self):
1067 def cleanup(self):
1068 # In case the source repository is already cleaned up, the pull
1068 # In case the source repository is already cleaned up, the pull
1069 # request will already be deleted.
1069 # request will already be deleted.
1070 pull_request = PullRequest().get(self.pull_request_id)
1070 pull_request = PullRequest().get(self.pull_request_id)
1071 if pull_request:
1071 if pull_request:
1072 PullRequestModel().delete(pull_request, pull_request.author)
1072 PullRequestModel().delete(pull_request, pull_request.author)
1073 Session().commit()
1073 Session().commit()
1074
1074
1075 if self.notification_patcher:
1075 if self.notification_patcher:
1076 self.notification_patcher.stop()
1076 self.notification_patcher.stop()
1077
1077
1078 if self.mergeable_patcher:
1078 if self.mergeable_patcher:
1079 self.mergeable_patcher.stop()
1079 self.mergeable_patcher.stop()
1080
1080
1081
1081
1082 @pytest.fixture()
1082 @pytest.fixture()
1083 def user_admin(baseapp):
1083 def user_admin(baseapp):
1084 """
1084 """
1085 Provides the default admin test user as an instance of `db.User`.
1085 Provides the default admin test user as an instance of `db.User`.
1086 """
1086 """
1087 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1087 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1088 return user
1088 return user
1089
1089
1090
1090
1091 @pytest.fixture()
1091 @pytest.fixture()
1092 def user_regular(baseapp):
1092 def user_regular(baseapp):
1093 """
1093 """
1094 Provides the default regular test user as an instance of `db.User`.
1094 Provides the default regular test user as an instance of `db.User`.
1095 """
1095 """
1096 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1096 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1097 return user
1097 return user
1098
1098
1099
1099
1100 @pytest.fixture()
1100 @pytest.fixture()
1101 def user_util(request, db_connection):
1101 def user_util(request, db_connection):
1102 """
1102 """
1103 Provides a wired instance of `UserUtility` with integrated cleanup.
1103 Provides a wired instance of `UserUtility` with integrated cleanup.
1104 """
1104 """
1105 utility = UserUtility(test_name=request.node.name)
1105 utility = UserUtility(test_name=request.node.name)
1106 request.addfinalizer(utility.cleanup)
1106 request.addfinalizer(utility.cleanup)
1107 return utility
1107 return utility
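# A rough usage sketch (illustrative); the created objects are destroyed
# automatically by the finalizer. The permission name is an example value:
#
#     def test_repo_group_read_access(user_util):
#         user = user_util.create_user()
#         repo_group = user_util.create_repo_group()
#         user_util.grant_user_permission_to_repo_group(
#             repo_group, user, 'group.read')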
1108
1108
1109
1109
1110 # TODO: johbo: Split this up into utilities per domain or something similar
1110 # TODO: johbo: Split this up into utilities per domain or something similar
1111 class UserUtility(object):
1111 class UserUtility(object):
1112
1112
1113 def __init__(self, test_name="test"):
1113 def __init__(self, test_name="test"):
1114 self._test_name = self._sanitize_name(test_name)
1114 self._test_name = self._sanitize_name(test_name)
1115 self.fixture = Fixture()
1115 self.fixture = Fixture()
1116 self.repo_group_ids = []
1116 self.repo_group_ids = []
1117 self.repos_ids = []
1117 self.repos_ids = []
1118 self.user_ids = []
1118 self.user_ids = []
1119 self.user_group_ids = []
1119 self.user_group_ids = []
1120 self.user_repo_permission_ids = []
1120 self.user_repo_permission_ids = []
1121 self.user_group_repo_permission_ids = []
1121 self.user_group_repo_permission_ids = []
1122 self.user_repo_group_permission_ids = []
1122 self.user_repo_group_permission_ids = []
1123 self.user_group_repo_group_permission_ids = []
1123 self.user_group_repo_group_permission_ids = []
1124 self.user_user_group_permission_ids = []
1124 self.user_user_group_permission_ids = []
1125 self.user_group_user_group_permission_ids = []
1125 self.user_group_user_group_permission_ids = []
1126 self.user_permissions = []
1126 self.user_permissions = []
1127
1127
1128 def _sanitize_name(self, name):
1128 def _sanitize_name(self, name):
1129 for char in ['[', ']']:
1129 for char in ['[', ']']:
1130 name = name.replace(char, '_')
1130 name = name.replace(char, '_')
1131 return name
1131 return name
1132
1132
1133 def create_repo_group(
1133 def create_repo_group(
1134 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1134 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1135 group_name = "{prefix}_repogroup_{count}".format(
1135 group_name = "{prefix}_repogroup_{count}".format(
1136 prefix=self._test_name,
1136 prefix=self._test_name,
1137 count=len(self.repo_group_ids))
1137 count=len(self.repo_group_ids))
1138 repo_group = self.fixture.create_repo_group(
1138 repo_group = self.fixture.create_repo_group(
1139 group_name, cur_user=owner)
1139 group_name, cur_user=owner)
1140 if auto_cleanup:
1140 if auto_cleanup:
1141 self.repo_group_ids.append(repo_group.group_id)
1141 self.repo_group_ids.append(repo_group.group_id)
1142 return repo_group
1142 return repo_group
1143
1143
1144 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1144 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1145 auto_cleanup=True, repo_type='hg', bare=False):
1145 auto_cleanup=True, repo_type='hg', bare=False):
1146 repo_name = "{prefix}_repository_{count}".format(
1146 repo_name = "{prefix}_repository_{count}".format(
1147 prefix=self._test_name,
1147 prefix=self._test_name,
1148 count=len(self.repos_ids))
1148 count=len(self.repos_ids))
1149
1149
1150 repository = self.fixture.create_repo(
1150 repository = self.fixture.create_repo(
1151 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1151 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1152 if auto_cleanup:
1152 if auto_cleanup:
1153 self.repos_ids.append(repository.repo_id)
1153 self.repos_ids.append(repository.repo_id)
1154 return repository
1154 return repository
1155
1155
1156 def create_user(self, auto_cleanup=True, **kwargs):
1156 def create_user(self, auto_cleanup=True, **kwargs):
1157 user_name = "{prefix}_user_{count}".format(
1157 user_name = "{prefix}_user_{count}".format(
1158 prefix=self._test_name,
1158 prefix=self._test_name,
1159 count=len(self.user_ids))
1159 count=len(self.user_ids))
1160 user = self.fixture.create_user(user_name, **kwargs)
1160 user = self.fixture.create_user(user_name, **kwargs)
1161 if auto_cleanup:
1161 if auto_cleanup:
1162 self.user_ids.append(user.user_id)
1162 self.user_ids.append(user.user_id)
1163 return user
1163 return user
1164
1164
1165 def create_additional_user_email(self, user, email):
1165 def create_additional_user_email(self, user, email):
1166 uem = self.fixture.create_additional_user_email(user=user, email=email)
1166 uem = self.fixture.create_additional_user_email(user=user, email=email)
1167 return uem
1167 return uem
1168
1168
1169 def create_user_with_group(self):
1169 def create_user_with_group(self):
1170 user = self.create_user()
1170 user = self.create_user()
1171 user_group = self.create_user_group(members=[user])
1171 user_group = self.create_user_group(members=[user])
1172 return user, user_group
1172 return user, user_group
1173
1173
1174 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1174 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1175 auto_cleanup=True, **kwargs):
1175 auto_cleanup=True, **kwargs):
1176 group_name = "{prefix}_usergroup_{count}".format(
1176 group_name = "{prefix}_usergroup_{count}".format(
1177 prefix=self._test_name,
1177 prefix=self._test_name,
1178 count=len(self.user_group_ids))
1178 count=len(self.user_group_ids))
1179 user_group = self.fixture.create_user_group(
1179 user_group = self.fixture.create_user_group(
1180 group_name, cur_user=owner, **kwargs)
1180 group_name, cur_user=owner, **kwargs)
1181
1181
1182 if auto_cleanup:
1182 if auto_cleanup:
1183 self.user_group_ids.append(user_group.users_group_id)
1183 self.user_group_ids.append(user_group.users_group_id)
1184 if members:
1184 if members:
1185 for user in members:
1185 for user in members:
1186 UserGroupModel().add_user_to_group(user_group, user)
1186 UserGroupModel().add_user_to_group(user_group, user)
1187 return user_group
1187 return user_group
1188
1188
1189 def grant_user_permission(self, user_name, permission_name):
1189 def grant_user_permission(self, user_name, permission_name):
1190 self.inherit_default_user_permissions(user_name, False)
1190 self.inherit_default_user_permissions(user_name, False)
1191 self.user_permissions.append((user_name, permission_name))
1191 self.user_permissions.append((user_name, permission_name))
1192
1192
1193 def grant_user_permission_to_repo_group(
1193 def grant_user_permission_to_repo_group(
1194 self, repo_group, user, permission_name):
1194 self, repo_group, user, permission_name):
1195 permission = RepoGroupModel().grant_user_permission(
1195 permission = RepoGroupModel().grant_user_permission(
1196 repo_group, user, permission_name)
1196 repo_group, user, permission_name)
1197 self.user_repo_group_permission_ids.append(
1197 self.user_repo_group_permission_ids.append(
1198 (repo_group.group_id, user.user_id))
1198 (repo_group.group_id, user.user_id))
1199 return permission
1199 return permission
1200
1200
1201 def grant_user_group_permission_to_repo_group(
1201 def grant_user_group_permission_to_repo_group(
1202 self, repo_group, user_group, permission_name):
1202 self, repo_group, user_group, permission_name):
1203 permission = RepoGroupModel().grant_user_group_permission(
1203 permission = RepoGroupModel().grant_user_group_permission(
1204 repo_group, user_group, permission_name)
1204 repo_group, user_group, permission_name)
1205 self.user_group_repo_group_permission_ids.append(
1205 self.user_group_repo_group_permission_ids.append(
1206 (repo_group.group_id, user_group.users_group_id))
1206 (repo_group.group_id, user_group.users_group_id))
1207 return permission
1207 return permission
1208
1208
1209 def grant_user_permission_to_repo(
1209 def grant_user_permission_to_repo(
1210 self, repo, user, permission_name):
1210 self, repo, user, permission_name):
1211 permission = RepoModel().grant_user_permission(
1211 permission = RepoModel().grant_user_permission(
1212 repo, user, permission_name)
1212 repo, user, permission_name)
1213 self.user_repo_permission_ids.append(
1213 self.user_repo_permission_ids.append(
1214 (repo.repo_id, user.user_id))
1214 (repo.repo_id, user.user_id))
1215 return permission
1215 return permission
1216
1216
1217 def grant_user_group_permission_to_repo(
1217 def grant_user_group_permission_to_repo(
1218 self, repo, user_group, permission_name):
1218 self, repo, user_group, permission_name):
1219 permission = RepoModel().grant_user_group_permission(
1219 permission = RepoModel().grant_user_group_permission(
1220 repo, user_group, permission_name)
1220 repo, user_group, permission_name)
1221 self.user_group_repo_permission_ids.append(
1221 self.user_group_repo_permission_ids.append(
1222 (repo.repo_id, user_group.users_group_id))
1222 (repo.repo_id, user_group.users_group_id))
1223 return permission
1223 return permission
1224
1224
1225 def grant_user_permission_to_user_group(
1225 def grant_user_permission_to_user_group(
1226 self, target_user_group, user, permission_name):
1226 self, target_user_group, user, permission_name):
1227 permission = UserGroupModel().grant_user_permission(
1227 permission = UserGroupModel().grant_user_permission(
1228 target_user_group, user, permission_name)
1228 target_user_group, user, permission_name)
1229 self.user_user_group_permission_ids.append(
1229 self.user_user_group_permission_ids.append(
1230 (target_user_group.users_group_id, user.user_id))
1230 (target_user_group.users_group_id, user.user_id))
1231 return permission
1231 return permission
1232
1232
1233 def grant_user_group_permission_to_user_group(
1233 def grant_user_group_permission_to_user_group(
1234 self, target_user_group, user_group, permission_name):
1234 self, target_user_group, user_group, permission_name):
1235 permission = UserGroupModel().grant_user_group_permission(
1235 permission = UserGroupModel().grant_user_group_permission(
1236 target_user_group, user_group, permission_name)
1236 target_user_group, user_group, permission_name)
1237 self.user_group_user_group_permission_ids.append(
1237 self.user_group_user_group_permission_ids.append(
1238 (target_user_group.users_group_id, user_group.users_group_id))
1238 (target_user_group.users_group_id, user_group.users_group_id))
1239 return permission
1239 return permission
1240
1240
1241 def revoke_user_permission(self, user_name, permission_name):
1241 def revoke_user_permission(self, user_name, permission_name):
1242 self.inherit_default_user_permissions(user_name, True)
1242 self.inherit_default_user_permissions(user_name, True)
1243 UserModel().revoke_perm(user_name, permission_name)
1243 UserModel().revoke_perm(user_name, permission_name)
1244
1244
1245 def inherit_default_user_permissions(self, user_name, value):
1245 def inherit_default_user_permissions(self, user_name, value):
1246 user = UserModel().get_by_username(user_name)
1246 user = UserModel().get_by_username(user_name)
1247 user.inherit_default_permissions = value
1247 user.inherit_default_permissions = value
1248 Session().add(user)
1248 Session().add(user)
1249 Session().commit()
1249 Session().commit()
1250
1250
1251 def cleanup(self):
1251 def cleanup(self):
1252 self._cleanup_permissions()
1252 self._cleanup_permissions()
1253 self._cleanup_repos()
1253 self._cleanup_repos()
1254 self._cleanup_repo_groups()
1254 self._cleanup_repo_groups()
1255 self._cleanup_user_groups()
1255 self._cleanup_user_groups()
1256 self._cleanup_users()
1256 self._cleanup_users()
1257
1257
1258 def _cleanup_permissions(self):
1258 def _cleanup_permissions(self):
1259 if self.user_permissions:
1259 if self.user_permissions:
1260 for user_name, permission_name in self.user_permissions:
1260 for user_name, permission_name in self.user_permissions:
1261 self.revoke_user_permission(user_name, permission_name)
1261 self.revoke_user_permission(user_name, permission_name)
1262
1262
1263 for permission in self.user_repo_permission_ids:
1263 for permission in self.user_repo_permission_ids:
1264 RepoModel().revoke_user_permission(*permission)
1264 RepoModel().revoke_user_permission(*permission)
1265
1265
1266 for permission in self.user_group_repo_permission_ids:
1266 for permission in self.user_group_repo_permission_ids:
1267 RepoModel().revoke_user_group_permission(*permission)
1267 RepoModel().revoke_user_group_permission(*permission)
1268
1268
1269 for permission in self.user_repo_group_permission_ids:
1269 for permission in self.user_repo_group_permission_ids:
1270 RepoGroupModel().revoke_user_permission(*permission)
1270 RepoGroupModel().revoke_user_permission(*permission)
1271
1271
1272 for permission in self.user_group_repo_group_permission_ids:
1272 for permission in self.user_group_repo_group_permission_ids:
1273 RepoGroupModel().revoke_user_group_permission(*permission)
1273 RepoGroupModel().revoke_user_group_permission(*permission)
1274
1274
1275 for permission in self.user_user_group_permission_ids:
1275 for permission in self.user_user_group_permission_ids:
1276 UserGroupModel().revoke_user_permission(*permission)
1276 UserGroupModel().revoke_user_permission(*permission)
1277
1277
1278 for permission in self.user_group_user_group_permission_ids:
1278 for permission in self.user_group_user_group_permission_ids:
1279 UserGroupModel().revoke_user_group_permission(*permission)
1279 UserGroupModel().revoke_user_group_permission(*permission)
1280
1280
1281 def _cleanup_repo_groups(self):
1281 def _cleanup_repo_groups(self):
1282 def _repo_group_compare(first_group_id, second_group_id):
1282 def _repo_group_compare(first_group_id, second_group_id):
1283 """
1283 """
1284 Gives higher priority to the groups with the most complex paths
1284 Gives higher priority to the groups with the most complex paths
1285 """
1285 """
1286 first_group = RepoGroup.get(first_group_id)
1286 first_group = RepoGroup.get(first_group_id)
1287 second_group = RepoGroup.get(second_group_id)
1287 second_group = RepoGroup.get(second_group_id)
1288 first_group_parts = (
1288 first_group_parts = (
1289 len(first_group.group_name.split('/')) if first_group else 0)
1289 len(first_group.group_name.split('/')) if first_group else 0)
1290 second_group_parts = (
1290 second_group_parts = (
1291 len(second_group.group_name.split('/')) if second_group else 0)
1291 len(second_group.group_name.split('/')) if second_group else 0)
1292 return cmp(second_group_parts, first_group_parts)
1292 return cmp(second_group_parts, first_group_parts)
1293
1293
1294 sorted_repo_group_ids = sorted(
1294 sorted_repo_group_ids = sorted(
1295 self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
1295 self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
1296 for repo_group_id in sorted_repo_group_ids:
1296 for repo_group_id in sorted_repo_group_ids:
1297 self.fixture.destroy_repo_group(repo_group_id)
1297 self.fixture.destroy_repo_group(repo_group_id)
1298
1298
1299 def _cleanup_repos(self):
1299 def _cleanup_repos(self):
1300 sorted_repos_ids = sorted(self.repos_ids)
1300 sorted_repos_ids = sorted(self.repos_ids)
1301 for repo_id in sorted_repos_ids:
1301 for repo_id in sorted_repos_ids:
1302 self.fixture.destroy_repo(repo_id)
1302 self.fixture.destroy_repo(repo_id)
1303
1303
1304 def _cleanup_user_groups(self):
1304 def _cleanup_user_groups(self):
1305 def _user_group_compare(first_group_id, second_group_id):
1305 def _user_group_compare(first_group_id, second_group_id):
1306 """
1306 """
1307 Gives higher priority to the groups with the most complex paths
1307 Gives higher priority to the groups with the most complex paths
1308 """
1308 """
1309 first_group = UserGroup.get(first_group_id)
1309 first_group = UserGroup.get(first_group_id)
1310 second_group = UserGroup.get(second_group_id)
1310 second_group = UserGroup.get(second_group_id)
1311 first_group_parts = (
1311 first_group_parts = (
1312 len(first_group.users_group_name.split('/'))
1312 len(first_group.users_group_name.split('/'))
1313 if first_group else 0)
1313 if first_group else 0)
1314 second_group_parts = (
1314 second_group_parts = (
1315 len(second_group.users_group_name.split('/'))
1315 len(second_group.users_group_name.split('/'))
1316 if second_group else 0)
1316 if second_group else 0)
1317 return cmp(second_group_parts, first_group_parts)
1317 return cmp(second_group_parts, first_group_parts)
1318
1318
1319 sorted_user_group_ids = sorted(
1319 sorted_user_group_ids = sorted(
1320 self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
1320 self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
1321 for user_group_id in sorted_user_group_ids:
1321 for user_group_id in sorted_user_group_ids:
1322 self.fixture.destroy_user_group(user_group_id)
1322 self.fixture.destroy_user_group(user_group_id)
1323
1323
1324 def _cleanup_users(self):
1324 def _cleanup_users(self):
1325 for user_id in self.user_ids:
1325 for user_id in self.user_ids:
1326 self.fixture.destroy_user(user_id)
1326 self.fixture.destroy_user(user_id)
1327
1327
1328
1328
1329 @pytest.fixture(scope='session')
1329 @pytest.fixture(scope='session')
1330 def testrun():
1330 def testrun():
1331 return {
1331 return {
1332 'uuid': uuid.uuid4(),
1332 'uuid': uuid.uuid4(),
1333 'start': datetime.datetime.utcnow().isoformat(),
1333 'start': datetime.datetime.utcnow().isoformat(),
1334 'timestamp': int(time.time()),
1334 'timestamp': int(time.time()),
1335 }
1335 }
1336
1336
1337
1337
1338 class AppenlightClient(object):
1338 class AppenlightClient(object):
1339
1339
1340 url_template = '{url}?protocol_version=0.5'
1340 url_template = '{url}?protocol_version=0.5'
1341
1341
1342 def __init__(
1342 def __init__(
1343 self, url, api_key, add_server=True, add_timestamp=True,
1343 self, url, api_key, add_server=True, add_timestamp=True,
1344 namespace=None, request=None, testrun=None):
1344 namespace=None, request=None, testrun=None):
1345 self.url = self.url_template.format(url=url)
1345 self.url = self.url_template.format(url=url)
1346 self.api_key = api_key
1346 self.api_key = api_key
1347 self.add_server = add_server
1347 self.add_server = add_server
1348 self.add_timestamp = add_timestamp
1348 self.add_timestamp = add_timestamp
1349 self.namespace = namespace
1349 self.namespace = namespace
1350 self.request = request
1350 self.request = request
1351 self.server = socket.getfqdn(socket.gethostname())
1351 self.server = socket.getfqdn(socket.gethostname())
1352 self.tags_before = {}
1352 self.tags_before = {}
1353 self.tags_after = {}
1353 self.tags_after = {}
1354 self.stats = []
1354 self.stats = []
1355 self.testrun = testrun or {}
1355 self.testrun = testrun or {}
1356
1356
1357 def tag_before(self, tag, value):
1357 def tag_before(self, tag, value):
1358 self.tags_before[tag] = value
1358 self.tags_before[tag] = value
1359
1359
1360 def tag_after(self, tag, value):
1360 def tag_after(self, tag, value):
1361 self.tags_after[tag] = value
1361 self.tags_after[tag] = value
1362
1362
1363 def collect(self, data):
1363 def collect(self, data):
1364 if self.add_server:
1364 if self.add_server:
1365 data.setdefault('server', self.server)
1365 data.setdefault('server', self.server)
1366 if self.add_timestamp:
1366 if self.add_timestamp:
1367 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1367 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1368 if self.namespace:
1368 if self.namespace:
1369 data.setdefault('namespace', self.namespace)
1369 data.setdefault('namespace', self.namespace)
1370 if self.request:
1370 if self.request:
1371 data.setdefault('request', self.request)
1371 data.setdefault('request', self.request)
1372 self.stats.append(data)
1372 self.stats.append(data)
1373
1373
1374 def send_stats(self):
1374 def send_stats(self):
1375 tags = [
1375 tags = [
1376 ('testrun', self.request),
1376 ('testrun', self.request),
1377 ('testrun.start', self.testrun['start']),
1377 ('testrun.start', self.testrun['start']),
1378 ('testrun.timestamp', self.testrun['timestamp']),
1378 ('testrun.timestamp', self.testrun['timestamp']),
1379 ('test', self.namespace),
1379 ('test', self.namespace),
1380 ]
1380 ]
1381 for key, value in self.tags_before.items():
1381 for key, value in self.tags_before.items():
1382 tags.append((key + '.before', value))
1382 tags.append((key + '.before', value))
1383 try:
1383 try:
1384 delta = self.tags_after[key] - value
1384 delta = self.tags_after[key] - value
1385 tags.append((key + '.delta', delta))
1385 tags.append((key + '.delta', delta))
1386 except Exception:
1386 except Exception:
1387 pass
1387 pass
1388 for key, value in self.tags_after.items():
1388 for key, value in self.tags_after.items():
1389 tags.append((key + '.after', value))
1389 tags.append((key + '.after', value))
1390 self.collect({
1390 self.collect({
1391 'message': "Collected tags",
1391 'message': "Collected tags",
1392 'tags': tags,
1392 'tags': tags,
1393 })
1393 })
1394
1394
1395 response = requests.post(
1395 response = requests.post(
1396 self.url,
1396 self.url,
1397 headers={
1397 headers={
1398 'X-appenlight-api-key': self.api_key},
1398 'X-appenlight-api-key': self.api_key},
1399 json=self.stats,
1399 json=self.stats,
1400 )
1400 )
1401
1401
1402 if response.status_code != 200:
1402 if response.status_code != 200:
1403 pprint.pprint(self.stats)
1403 pprint.pprint(self.stats)
1404 print(response.headers)
1404 print(response.headers)
1405 print(response.text)
1405 print(response.text)
1406 raise Exception('Sending to appenlight failed')
1406 raise Exception('Sending to appenlight failed')
1407
1407
1408
1408
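A minimal usage sketch of the AppenlightClient defined above, assuming it runs in the same conftest module; the endpoint URL, API key and tag names are hypothetical, and send_stats() performs a real HTTP POST, so it only succeeds against a reachable Appenlight instance:

import datetime
import time

client = AppenlightClient(
    url='https://appenlight.example.com/api/general_metrics',  # hypothetical endpoint
    api_key='stub-api-key',                                    # hypothetical key
    namespace='test_vcs_operations',
    request='testrun-0001',
    testrun={
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    },
)
client.tag_before('repo_count', 10)  # emitted as repo_count.before
client.tag_after('repo_count', 12)   # emitted as repo_count.after plus repo_count.delta
client.send_stats()                  # posts the collected stats; raises if the response is not 200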
1409 @pytest.fixture()
1409 @pytest.fixture()
1410 def gist_util(request, db_connection):
1410 def gist_util(request, db_connection):
1411 """
1411 """
1412 Provides a wired instance of `GistUtility` with integrated cleanup.
1412 Provides a wired instance of `GistUtility` with integrated cleanup.
1413 """
1413 """
1414 utility = GistUtility()
1414 utility = GistUtility()
1415 request.addfinalizer(utility.cleanup)
1415 request.addfinalizer(utility.cleanup)
1416 return utility
1416 return utility
1417
1417
1418
1418
1419 class GistUtility(object):
1419 class GistUtility(object):
1420 def __init__(self):
1420 def __init__(self):
1421 self.fixture = Fixture()
1421 self.fixture = Fixture()
1422 self.gist_ids = []
1422 self.gist_ids = []
1423
1423
1424 def create_gist(self, **kwargs):
1424 def create_gist(self, **kwargs):
1425 gist = self.fixture.create_gist(**kwargs)
1425 gist = self.fixture.create_gist(**kwargs)
1426 self.gist_ids.append(gist.gist_id)
1426 self.gist_ids.append(gist.gist_id)
1427 return gist
1427 return gist
1428
1428
1429 def cleanup(self):
1429 def cleanup(self):
1430 for id_ in self.gist_ids:
1430 for id_ in self.gist_ids:
1431 self.fixture.destroy_gists(str(id_))
1431 self.fixture.destroy_gists(str(id_))
1432
1432
1433
1433
1434 @pytest.fixture()
1434 @pytest.fixture()
1435 def enabled_backends(request):
1435 def enabled_backends(request):
1436 backends = request.config.option.backends
1436 backends = request.config.option.backends
1437 return backends[:]
1437 return backends[:]
1438
1438
1439
1439
1440 @pytest.fixture()
1440 @pytest.fixture()
1441 def settings_util(request, db_connection):
1441 def settings_util(request, db_connection):
1442 """
1442 """
1443 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1443 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1444 """
1444 """
1445 utility = SettingsUtility()
1445 utility = SettingsUtility()
1446 request.addfinalizer(utility.cleanup)
1446 request.addfinalizer(utility.cleanup)
1447 return utility
1447 return utility
1448
1448
1449
1449
1450 class SettingsUtility(object):
1450 class SettingsUtility(object):
1451 def __init__(self):
1451 def __init__(self):
1452 self.rhodecode_ui_ids = []
1452 self.rhodecode_ui_ids = []
1453 self.rhodecode_setting_ids = []
1453 self.rhodecode_setting_ids = []
1454 self.repo_rhodecode_ui_ids = []
1454 self.repo_rhodecode_ui_ids = []
1455 self.repo_rhodecode_setting_ids = []
1455 self.repo_rhodecode_setting_ids = []
1456
1456
1457 def create_repo_rhodecode_ui(
1457 def create_repo_rhodecode_ui(
1458 self, repo, section, value, key=None, active=True, cleanup=True):
1458 self, repo, section, value, key=None, active=True, cleanup=True):
1459 key = key or sha1_safe(f'{section}{value}{repo.repo_id}')
1459 key = key or sha1_safe(f'{section}{value}{repo.repo_id}')
1460
1460
1461 setting = RepoRhodeCodeUi()
1461 setting = RepoRhodeCodeUi()
1462 setting.repository_id = repo.repo_id
1462 setting.repository_id = repo.repo_id
1463 setting.ui_section = section
1463 setting.ui_section = section
1464 setting.ui_value = value
1464 setting.ui_value = value
1465 setting.ui_key = key
1465 setting.ui_key = key
1466 setting.ui_active = active
1466 setting.ui_active = active
1467 Session().add(setting)
1467 Session().add(setting)
1468 Session().commit()
1468 Session().commit()
1469
1469
1470 if cleanup:
1470 if cleanup:
1471 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1471 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1472 return setting
1472 return setting
1473
1473
1474 def create_rhodecode_ui(
1474 def create_rhodecode_ui(
1475 self, section, value, key=None, active=True, cleanup=True):
1475 self, section, value, key=None, active=True, cleanup=True):
1476 key = key or sha1_safe(f'{section}{value}')
1476 key = key or sha1_safe(f'{section}{value}')
1477
1477
1478 setting = RhodeCodeUi()
1478 setting = RhodeCodeUi()
1479 setting.ui_section = section
1479 setting.ui_section = section
1480 setting.ui_value = value
1480 setting.ui_value = value
1481 setting.ui_key = key
1481 setting.ui_key = key
1482 setting.ui_active = active
1482 setting.ui_active = active
1483 Session().add(setting)
1483 Session().add(setting)
1484 Session().commit()
1484 Session().commit()
1485
1485
1486 if cleanup:
1486 if cleanup:
1487 self.rhodecode_ui_ids.append(setting.ui_id)
1487 self.rhodecode_ui_ids.append(setting.ui_id)
1488 return setting
1488 return setting
1489
1489
1490 def create_repo_rhodecode_setting(
1490 def create_repo_rhodecode_setting(
1491 self, repo, name, value, type_, cleanup=True):
1491 self, repo, name, value, type_, cleanup=True):
1492 setting = RepoRhodeCodeSetting(
1492 setting = RepoRhodeCodeSetting(
1493 repo.repo_id, key=name, val=value, type=type_)
1493 repo.repo_id, key=name, val=value, type=type_)
1494 Session().add(setting)
1494 Session().add(setting)
1495 Session().commit()
1495 Session().commit()
1496
1496
1497 if cleanup:
1497 if cleanup:
1498 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1498 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1499 return setting
1499 return setting
1500
1500
1501 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1501 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1502 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1502 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1503 Session().add(setting)
1503 Session().add(setting)
1504 Session().commit()
1504 Session().commit()
1505
1505
1506 if cleanup:
1506 if cleanup:
1507 self.rhodecode_setting_ids.append(setting.app_settings_id)
1507 self.rhodecode_setting_ids.append(setting.app_settings_id)
1508
1508
1509 return setting
1509 return setting
1510
1510
1511 def cleanup(self):
1511 def cleanup(self):
1512 for id_ in self.rhodecode_ui_ids:
1512 for id_ in self.rhodecode_ui_ids:
1513 setting = RhodeCodeUi.get(id_)
1513 setting = RhodeCodeUi.get(id_)
1514 Session().delete(setting)
1514 Session().delete(setting)
1515
1515
1516 for id_ in self.rhodecode_setting_ids:
1516 for id_ in self.rhodecode_setting_ids:
1517 setting = RhodeCodeSetting.get(id_)
1517 setting = RhodeCodeSetting.get(id_)
1518 Session().delete(setting)
1518 Session().delete(setting)
1519
1519
1520 for id_ in self.repo_rhodecode_ui_ids:
1520 for id_ in self.repo_rhodecode_ui_ids:
1521 setting = RepoRhodeCodeUi.get(id_)
1521 setting = RepoRhodeCodeUi.get(id_)
1522 Session().delete(setting)
1522 Session().delete(setting)
1523
1523
1524 for id_ in self.repo_rhodecode_setting_ids:
1524 for id_ in self.repo_rhodecode_setting_ids:
1525 setting = RepoRhodeCodeSetting.get(id_)
1525 setting = RepoRhodeCodeSetting.get(id_)
1526 Session().delete(setting)
1526 Session().delete(setting)
1527
1527
1528 Session().commit()
1528 Session().commit()
1529
1529
1530
1530
1531 @pytest.fixture()
1531 @pytest.fixture()
1532 def no_notifications(request):
1532 def no_notifications(request):
1533 notification_patcher = mock.patch(
1533 notification_patcher = mock.patch(
1534 'rhodecode.model.notification.NotificationModel.create')
1534 'rhodecode.model.notification.NotificationModel.create')
1535 notification_patcher.start()
1535 notification_patcher.start()
1536 request.addfinalizer(notification_patcher.stop)
1536 request.addfinalizer(notification_patcher.stop)
1537
1537
1538
1538
1539 @pytest.fixture(scope='session')
1539 @pytest.fixture(scope='session')
1540 def repeat(request):
1540 def repeat(request):
1541 """
1541 """
1542 The number of repetitions is based on this fixture.
1542 The number of repetitions is based on this fixture.
1543
1543
1544 Slower calls may divide it by 10 or 100. It is chosen so that the
1544 Slower calls may divide it by 10 or 100. It is chosen so that the
1545 tests are not too slow in our default test suite.
1545 tests are not too slow in our default test suite.
1546 """
1546 """
1547 return request.config.getoption('--repeat')
1547 return request.config.getoption('--repeat')
1548
1548
1549
1549
1550 @pytest.fixture()
1550 @pytest.fixture()
1551 def rhodecode_fixtures():
1551 def rhodecode_fixtures():
1552 return Fixture()
1552 return Fixture()
1553
1553
1554
1554
1555 @pytest.fixture()
1555 @pytest.fixture()
1556 def context_stub():
1556 def context_stub():
1557 """
1557 """
1558 Stub context object.
1558 Stub context object.
1559 """
1559 """
1560 context = pyramid.testing.DummyResource()
1560 context = pyramid.testing.DummyResource()
1561 return context
1561 return context
1562
1562
1563
1563
1564 @pytest.fixture()
1564 @pytest.fixture()
1565 def request_stub():
1565 def request_stub():
1566 """
1566 """
1567 Stub request object.
1567 Stub request object.
1568 """
1568 """
1569 from rhodecode.lib.base import bootstrap_request
1569 from rhodecode.lib.base import bootstrap_request
1570 request = bootstrap_request(scheme='https')
1570 request = bootstrap_request(scheme='https')
1571 return request
1571 return request
1572
1572
1573
1573
1574 @pytest.fixture()
1574 @pytest.fixture()
1575 def config_stub(request, request_stub):
1575 def config_stub(request, request_stub):
1576 """
1576 """
1577 Set up pyramid.testing and return the Configurator.
1577 Set up pyramid.testing and return the Configurator.
1578 """
1578 """
1579 from rhodecode.lib.base import bootstrap_config
1579 from rhodecode.lib.base import bootstrap_config
1580 config = bootstrap_config(request=request_stub)
1580 config = bootstrap_config(request=request_stub)
1581
1581
1582 @request.addfinalizer
1582 @request.addfinalizer
1583 def cleanup():
1583 def cleanup():
1584 pyramid.testing.tearDown()
1584 pyramid.testing.tearDown()
1585
1585
1586 return config
1586 return config
1587
1587
1588
1588
1589 @pytest.fixture()
1589 @pytest.fixture()
1590 def StubIntegrationType():
1590 def StubIntegrationType():
1591 class _StubIntegrationType(IntegrationTypeBase):
1591 class _StubIntegrationType(IntegrationTypeBase):
1592 """ Test integration type class """
1592 """ Test integration type class """
1593
1593
1594 key = 'test'
1594 key = 'test'
1595 display_name = 'Test integration type'
1595 display_name = 'Test integration type'
1596 description = 'A test integration type for testing'
1596 description = 'A test integration type for testing'
1597
1597
1598 @classmethod
1598 @classmethod
1599 def icon(cls):
1599 def icon(cls):
1600 return 'test_icon_html_image'
1600 return 'test_icon_html_image'
1601
1601
1602 def __init__(self, settings):
1602 def __init__(self, settings):
1603 super(_StubIntegrationType, self).__init__(settings)
1603 super(_StubIntegrationType, self).__init__(settings)
1604 self.sent_events = [] # for testing
1604 self.sent_events = [] # for testing
1605
1605
1606 def send_event(self, event):
1606 def send_event(self, event):
1607 self.sent_events.append(event)
1607 self.sent_events.append(event)
1608
1608
1609 def settings_schema(self):
1609 def settings_schema(self):
1610 class SettingsSchema(colander.Schema):
1610 class SettingsSchema(colander.Schema):
1611 test_string_field = colander.SchemaNode(
1611 test_string_field = colander.SchemaNode(
1612 colander.String(),
1612 colander.String(),
1613 missing=colander.required,
1613 missing=colander.required,
1614 title='test string field',
1614 title='test string field',
1615 )
1615 )
1616 test_int_field = colander.SchemaNode(
1616 test_int_field = colander.SchemaNode(
1617 colander.Int(),
1617 colander.Int(),
1618 title='some integer setting',
1618 title='some integer setting',
1619 )
1619 )
1620 return SettingsSchema()
1620 return SettingsSchema()
1621
1621
1622
1622
1623 integration_type_registry.register_integration_type(_StubIntegrationType)
1623 integration_type_registry.register_integration_type(_StubIntegrationType)
1624 return _StubIntegrationType
1624 return _StubIntegrationType
1625
1625
1626
1626
1627 @pytest.fixture()
1627 @pytest.fixture()
1628 def stub_integration_settings():
1628 def stub_integration_settings():
1629 return {
1629 return {
1630 'test_string_field': 'some data',
1630 'test_string_field': 'some data',
1631 'test_int_field': 100,
1631 'test_int_field': 100,
1632 }
1632 }
1633
1633
1634
1634
1635 @pytest.fixture()
1635 @pytest.fixture()
1636 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1636 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1637 stub_integration_settings):
1637 stub_integration_settings):
1638 integration = IntegrationModel().create(
1638 integration = IntegrationModel().create(
1639 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1639 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1640 name='test repo integration',
1640 name='test repo integration',
1641 repo=repo_stub, repo_group=None, child_repos_only=None)
1641 repo=repo_stub, repo_group=None, child_repos_only=None)
1642
1642
1643 @request.addfinalizer
1643 @request.addfinalizer
1644 def cleanup():
1644 def cleanup():
1645 IntegrationModel().delete(integration)
1645 IntegrationModel().delete(integration)
1646
1646
1647 return integration
1647 return integration
1648
1648
1649
1649
1650 @pytest.fixture()
1650 @pytest.fixture()
1651 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1651 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1652 stub_integration_settings):
1652 stub_integration_settings):
1653 integration = IntegrationModel().create(
1653 integration = IntegrationModel().create(
1654 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1654 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1655 name='test repogroup integration',
1655 name='test repogroup integration',
1656 repo=None, repo_group=test_repo_group, child_repos_only=True)
1656 repo=None, repo_group=test_repo_group, child_repos_only=True)
1657
1657
1658 @request.addfinalizer
1658 @request.addfinalizer
1659 def cleanup():
1659 def cleanup():
1660 IntegrationModel().delete(integration)
1660 IntegrationModel().delete(integration)
1661
1661
1662 return integration
1662 return integration
1663
1663
1664
1664
1665 @pytest.fixture()
1665 @pytest.fixture()
1666 def repogroup_recursive_integration_stub(request, test_repo_group,
1666 def repogroup_recursive_integration_stub(request, test_repo_group,
1667 StubIntegrationType, stub_integration_settings):
1667 StubIntegrationType, stub_integration_settings):
1668 integration = IntegrationModel().create(
1668 integration = IntegrationModel().create(
1669 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1669 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1670 name='test recursive repogroup integration',
1670 name='test recursive repogroup integration',
1671 repo=None, repo_group=test_repo_group, child_repos_only=False)
1671 repo=None, repo_group=test_repo_group, child_repos_only=False)
1672
1672
1673 @request.addfinalizer
1673 @request.addfinalizer
1674 def cleanup():
1674 def cleanup():
1675 IntegrationModel().delete(integration)
1675 IntegrationModel().delete(integration)
1676
1676
1677 return integration
1677 return integration
1678
1678
1679
1679
1680 @pytest.fixture()
1680 @pytest.fixture()
1681 def global_integration_stub(request, StubIntegrationType,
1681 def global_integration_stub(request, StubIntegrationType,
1682 stub_integration_settings):
1682 stub_integration_settings):
1683 integration = IntegrationModel().create(
1683 integration = IntegrationModel().create(
1684 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1684 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1685 name='test global integration',
1685 name='test global integration',
1686 repo=None, repo_group=None, child_repos_only=None)
1686 repo=None, repo_group=None, child_repos_only=None)
1687
1687
1688 @request.addfinalizer
1688 @request.addfinalizer
1689 def cleanup():
1689 def cleanup():
1690 IntegrationModel().delete(integration)
1690 IntegrationModel().delete(integration)
1691
1691
1692 return integration
1692 return integration
1693
1693
1694
1694
1695 @pytest.fixture()
1695 @pytest.fixture()
1696 def root_repos_integration_stub(request, StubIntegrationType,
1696 def root_repos_integration_stub(request, StubIntegrationType,
1697 stub_integration_settings):
1697 stub_integration_settings):
1698 integration = IntegrationModel().create(
1698 integration = IntegrationModel().create(
1699 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1699 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1700 name='test global integration',
1700 name='test global integration',
1701 repo=None, repo_group=None, child_repos_only=True)
1701 repo=None, repo_group=None, child_repos_only=True)
1702
1702
1703 @request.addfinalizer
1703 @request.addfinalizer
1704 def cleanup():
1704 def cleanup():
1705 IntegrationModel().delete(integration)
1705 IntegrationModel().delete(integration)
1706
1706
1707 return integration
1707 return integration
1708
1708
1709
1709
1710 @pytest.fixture()
1710 @pytest.fixture()
1711 def local_dt_to_utc():
1711 def local_dt_to_utc():
1712 def _factory(dt):
1712 def _factory(dt):
1713 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1713 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1714 dateutil.tz.tzutc()).replace(tzinfo=None)
1714 dateutil.tz.tzutc()).replace(tzinfo=None)
1715 return _factory
1715 return _factory
1716
1716
1717
1717
1718 @pytest.fixture()
1718 @pytest.fixture()
1719 def disable_anonymous_user(request, baseapp):
1719 def disable_anonymous_user(request, baseapp):
1720 set_anonymous_access(False)
1720 set_anonymous_access(False)
1721
1721
1722 @request.addfinalizer
1722 @request.addfinalizer
1723 def cleanup():
1723 def cleanup():
1724 set_anonymous_access(True)
1724 set_anonymous_access(True)
1725
1725
1726
1726
1727 @pytest.fixture(scope='module')
1727 @pytest.fixture(scope='module')
1728 def rc_fixture(request):
1728 def rc_fixture(request):
1729 return Fixture()
1729 return Fixture()
1730
1730
1731
1731
1732 @pytest.fixture()
1732 @pytest.fixture()
1733 def repo_groups(request):
1733 def repo_groups(request):
1734 fixture = Fixture()
1734 fixture = Fixture()
1735
1735
1736 session = Session()
1736 session = Session()
1737 zombie_group = fixture.create_repo_group('zombie')
1737 zombie_group = fixture.create_repo_group('zombie')
1738 parent_group = fixture.create_repo_group('parent')
1738 parent_group = fixture.create_repo_group('parent')
1739 child_group = fixture.create_repo_group('parent/child')
1739 child_group = fixture.create_repo_group('parent/child')
1740 groups_in_db = session.query(RepoGroup).all()
1740 groups_in_db = session.query(RepoGroup).all()
1741 assert len(groups_in_db) == 3
1741 assert len(groups_in_db) == 3
1742 assert child_group.group_parent_id == parent_group.group_id
1742 assert child_group.group_parent_id == parent_group.group_id
1743
1743
1744 @request.addfinalizer
1744 @request.addfinalizer
1745 def cleanup():
1745 def cleanup():
1746 fixture.destroy_repo_group(zombie_group)
1746 fixture.destroy_repo_group(zombie_group)
1747 fixture.destroy_repo_group(child_group)
1747 fixture.destroy_repo_group(child_group)
1748 fixture.destroy_repo_group(parent_group)
1748 fixture.destroy_repo_group(parent_group)
1749
1749
1750 return zombie_group, parent_group, child_group
1750 return zombie_group, parent_group, child_group
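A short sketch of how the cleanup-aware fixtures in this file are typically consumed from a test module; the test names and argument values below are illustrative, assuming this conftest is on the pytest collection path:

import datetime

def test_custom_ui_setting_is_active(settings_util):
    # the RhodeCodeUi row is removed again by the finalizer registered in the fixture
    setting = settings_util.create_rhodecode_ui('hooks', 'python:my_stub_hook')
    assert setting.ui_active
    assert setting.ui_id in settings_util.rhodecode_ui_ids

def test_local_dt_to_utc_returns_naive_datetime(local_dt_to_utc):
    # the factory converts a local datetime to UTC and strips tzinfo
    naive_utc = local_dt_to_utc(datetime.datetime(2023, 1, 1, 12, 0, 0))
    assert naive_utc.tzinfo is None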
@@ -1,491 +1,489 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import multiprocessing
20 import multiprocessing
21 import os
21 import os
22
22
23 import mock
23 import mock
24 import py
24 import py
25 import pytest
25 import pytest
26
26
27 from rhodecode.lib import caching_query
27 from rhodecode.lib import caching_query
28 from rhodecode.lib import utils
28 from rhodecode.lib import utils
29 from rhodecode.lib.str_utils import safe_bytes
29 from rhodecode.lib.str_utils import safe_bytes
30 from rhodecode.model import settings
30 from rhodecode.model import settings
31 from rhodecode.model import db
31 from rhodecode.model import db
32 from rhodecode.model import meta
32 from rhodecode.model import meta
33 from rhodecode.model.repo import RepoModel
33 from rhodecode.model.repo import RepoModel
34 from rhodecode.model.repo_group import RepoGroupModel
34 from rhodecode.model.repo_group import RepoGroupModel
35 from rhodecode.model.settings import UiSetting, SettingsModel
35 from rhodecode.model.settings import UiSetting, SettingsModel
36 from rhodecode.tests.fixture import Fixture
36 from rhodecode.tests.fixture import Fixture
37 from rhodecode_tools.lib.hash_utils import md5_safe
37 from rhodecode_tools.lib.hash_utils import md5_safe
38 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.ext_json import json
39
39
40 fixture = Fixture()
40 fixture = Fixture()
41
41
42
42
43 def extract_hooks(config):
43 def extract_hooks(config):
44 """Return a dictionary with the hook entries of the given config."""
44 """Return a dictionary with the hook entries of the given config."""
45 hooks = {}
45 hooks = {}
46 config_items = config.serialize()
46 config_items = config.serialize()
47 for section, name, value in config_items:
47 for section, name, value in config_items:
48 if section != 'hooks':
48 if section != 'hooks':
49 continue
49 continue
50 hooks[name] = value
50 hooks[name] = value
51
51
52 return hooks
52 return hooks
53
53
54
54
55 def disable_hooks(request, hooks):
55 def disable_hooks(request, hooks):
56 """Disables the given hooks from the UI settings."""
56 """Disables the given hooks from the UI settings."""
57 session = meta.Session()
57 session = meta.Session()
58
58
59 model = SettingsModel()
59 model = SettingsModel()
60 for hook_key in hooks:
60 for hook_key in hooks:
61 sett = model.get_ui_by_key(hook_key)
61 sett = model.get_ui_by_key(hook_key)
62 sett.ui_active = False
62 sett.ui_active = False
63 session.add(sett)
63 session.add(sett)
64
64
65 # Invalidate cache
65 # Invalidate cache
66 ui_settings = session.query(db.RhodeCodeUi).options(
66 ui_settings = session.query(db.RhodeCodeUi).options(
67 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
67 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
68
68
69 meta.cache.invalidate(
69 meta.cache.invalidate(
70 ui_settings, {},
70 ui_settings, {},
71 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
71 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
72
72
73 ui_settings = session.query(db.RhodeCodeUi).options(
73 ui_settings = session.query(db.RhodeCodeUi).options(
74 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
74 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
75
75
76 meta.cache.invalidate(
76 meta.cache.invalidate(
77 ui_settings, {},
77 ui_settings, {},
78 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
78 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
79
79
80 @request.addfinalizer
80 @request.addfinalizer
81 def rollback():
81 def rollback():
82 session.rollback()
82 session.rollback()
83
83
84
84
85 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
85 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
86 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
86 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
87 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
87 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
88 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
88 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
89 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
89 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
90 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
90 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
91 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
91 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
92
92
93 HG_HOOKS = frozenset(
93 HG_HOOKS = frozenset(
94 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
94 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
95 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
95 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
96
96
97
97
98 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
98 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
99 ([], HG_HOOKS),
99 ([], HG_HOOKS),
100 (HG_HOOKS, []),
100 (HG_HOOKS, []),
101
101
102 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
102 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
103
103
104 # When a pull/push hook is disabled, its pre-pull/push counterpart should
104 # When a pull/push hook is disabled, its pre-pull/push counterpart should
105 # be disabled too.
105 # be disabled too.
106 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
106 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
107 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
107 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
108 HOOK_PUSH_KEY]),
108 HOOK_PUSH_KEY]),
109 ])
109 ])
110 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
110 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
111 expected_hooks):
111 expected_hooks):
112 disable_hooks(request, disabled_hooks)
112 disable_hooks(request, disabled_hooks)
113
113
114 config = utils.make_db_config()
114 config = utils.make_db_config()
115 hooks = extract_hooks(config)
115 hooks = extract_hooks(config)
116
116
117 assert set(hooks.keys()).intersection(HG_HOOKS) == set(expected_hooks)
117 assert set(hooks.keys()).intersection(HG_HOOKS) == set(expected_hooks)
118
118
119
119
120 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
120 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
121 ([], ['pull', 'push']),
121 ([], ['pull', 'push']),
122 ([HOOK_PUSH], ['pull']),
122 ([HOOK_PUSH], ['pull']),
123 ([HOOK_PULL], ['push']),
123 ([HOOK_PULL], ['push']),
124 ([HOOK_PULL, HOOK_PUSH], []),
124 ([HOOK_PULL, HOOK_PUSH], []),
125 ])
125 ])
126 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
126 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
127 hook_keys = (HOOK_PUSH, HOOK_PULL)
127 hook_keys = (HOOK_PUSH, HOOK_PULL)
128 ui_settings = [
128 ui_settings = [
129 ('hooks', key, 'some value', key not in disabled_hooks)
129 ('hooks', key, 'some value', key not in disabled_hooks)
130 for key in hook_keys]
130 for key in hook_keys]
131
131
132 result = utils.get_enabled_hook_classes(ui_settings)
132 result = utils.get_enabled_hook_classes(ui_settings)
133 assert sorted(result) == expected_hooks
133 assert sorted(result) == expected_hooks
134
134
135
135
136 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
136 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
137 _stub_git_repo(tmpdir.ensure('repo', dir=True))
137 _stub_git_repo(tmpdir.ensure('repo', dir=True))
138 repos = list(utils.get_filesystem_repos(str(tmpdir)))
138 repos = list(utils.get_filesystem_repos(str(tmpdir)))
139 assert repos == [('repo', ('git', tmpdir.join('repo')))]
139 assert repos == [('repo', ('git', tmpdir.join('repo')))]
140
140
141
141
142 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
142 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
143 tmpdir.ensure('not-a-repo', dir=True)
143 tmpdir.ensure('not-a-repo', dir=True)
144 repos = list(utils.get_filesystem_repos(str(tmpdir)))
144 repos = list(utils.get_filesystem_repos(str(tmpdir)))
145 assert repos == []
145 assert repos == []
146
146
147
147
148 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
148 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
149 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
149 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
150 repos = list(utils.get_filesystem_repos(str(tmpdir)))
150 repos = list(utils.get_filesystem_repos(str(tmpdir)))
151 assert repos == []
151 assert repos == []
152
152
153
153
154 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
154 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
155 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
155 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
156 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
156 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
157 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
157 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
158
158
159
159
160 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
160 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
161 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
161 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
162 repos = list(utils.get_filesystem_repos(str(tmpdir)))
162 repos = list(utils.get_filesystem_repos(str(tmpdir)))
163 assert repos == []
163 assert repos == []
164
164
165
165
166 def test_get_filesystem_repos_skips_files(tmpdir):
166 def test_get_filesystem_repos_skips_files(tmpdir):
167 tmpdir.ensure('test-file')
167 tmpdir.ensure('test-file')
168 repos = list(utils.get_filesystem_repos(str(tmpdir)))
168 repos = list(utils.get_filesystem_repos(str(tmpdir)))
169 assert repos == []
169 assert repos == []
170
170
171
171
172 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
172 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
173 removed_repo_name = 'rm__00000000_000000_000000__.stub'
173 removed_repo_name = 'rm__00000000_000000_000000__.stub'
174 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
174 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
175 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
175 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
176 repos = list(utils.get_filesystem_repos(str(tmpdir)))
176 repos = list(utils.get_filesystem_repos(str(tmpdir)))
177 assert repos == []
177 assert repos == []
178
178
179
179
180 def _stub_git_repo(repo_path):
180 def _stub_git_repo(repo_path):
181 """
181 """
182 Make `repo_path` look like a Git repository.
182 Make `repo_path` look like a Git repository.
183 """
183 """
184 repo_path.ensure('.git', dir=True)
184 repo_path.ensure('.git', dir=True)
185
185
186
186
187 def test_get_dirpaths_returns_all_paths_on_str(tmpdir):
187 def test_get_dirpaths_returns_all_paths_on_str(tmpdir):
188 tmpdir.ensure('test-file')
188 tmpdir.ensure('test-file')
189 tmpdir.ensure('test-file-1')
189 tmpdir.ensure('test-file-1')
190 tmp_path = str(tmpdir)
190 tmp_path = str(tmpdir)
191 dirpaths = utils.get_dirpaths(tmp_path)
191 dirpaths = utils.get_dirpaths(tmp_path)
192 assert sorted(dirpaths) == ['test-file', 'test-file-1']
192 assert sorted(dirpaths) == ['test-file', 'test-file-1']
193
193
194
194
195 def test_get_dirpaths_returns_all_paths_on_bytes(tmpdir):
195 def test_get_dirpaths_returns_all_paths_on_bytes(tmpdir):
196 tmpdir.ensure('test-file-bytes')
196 tmpdir.ensure('test-file-bytes')
197 tmp_path = str(tmpdir)
197 tmp_path = str(tmpdir)
198 dirpaths = utils.get_dirpaths(safe_bytes(tmp_path))
198 dirpaths = utils.get_dirpaths(safe_bytes(tmp_path))
199 assert sorted(dirpaths) == [b'test-file-bytes']
199 assert sorted(dirpaths) == [b'test-file-bytes']
200
200
201
201
202 def test_get_dirpaths_returns_all_paths_bytes(
202 def test_get_dirpaths_returns_all_paths_bytes(
203 tmpdir, platform_encodes_filenames):
203 tmpdir, platform_encodes_filenames):
204 if platform_encodes_filenames:
204 if platform_encodes_filenames:
205 pytest.skip("This platform seems to encode filenames.")
205 pytest.skip("This platform seems to encode filenames.")
206 tmpdir.ensure('repo-a-umlaut-\xe4')
206 tmpdir.ensure('repo-a-umlaut-\xe4')
207 dirpaths = utils.get_dirpaths(str(tmpdir))
207 dirpaths = utils.get_dirpaths(str(tmpdir))
208 assert dirpaths == ['repo-a-umlaut-\xe4']
208 assert dirpaths == ['repo-a-umlaut-\xe4']
209
209
210
210
211 def test_get_dirpaths_skips_paths_it_cannot_decode(
211 def test_get_dirpaths_skips_paths_it_cannot_decode(
212 tmpdir, platform_encodes_filenames):
212 tmpdir, platform_encodes_filenames):
213 if platform_encodes_filenames:
213 if platform_encodes_filenames:
214 pytest.skip("This platform seems to encode filenames.")
214 pytest.skip("This platform seems to encode filenames.")
215 path_with_latin1 = 'repo-a-umlaut-\xe4'
215 path_with_latin1 = 'repo-a-umlaut-\xe4'
216 tmp_path = str(tmpdir.ensure(path_with_latin1))
216 tmp_path = str(tmpdir.ensure(path_with_latin1))
217 dirpaths = utils.get_dirpaths(tmp_path)
217 dirpaths = utils.get_dirpaths(tmp_path)
218 assert dirpaths == []
218 assert dirpaths == []
219
219
220
220
221 @pytest.fixture(scope='session')
221 @pytest.fixture(scope='session')
222 def platform_encodes_filenames():
222 def platform_encodes_filenames():
223 """
223 """
224 Boolean indicator if the current platform changes filename encodings.
224 Boolean indicator if the current platform changes filename encodings.
225 """
225 """
226 path_with_latin1 = 'repo-a-umlaut-\xe4'
226 path_with_latin1 = 'repo-a-umlaut-\xe4'
227 tmpdir = py.path.local.mkdtemp()
227 tmpdir = py.path.local.mkdtemp()
228 tmpdir.ensure(path_with_latin1)
228 tmpdir.ensure(path_with_latin1)
229 read_path = tmpdir.listdir()[0].basename
229 read_path = tmpdir.listdir()[0].basename
230 tmpdir.remove()
230 tmpdir.remove()
231 return path_with_latin1 != read_path
231 return path_with_latin1 != read_path
232
232
233
233
234
235
236 def test_repo2db_mapper_groups(repo_groups):
234 def test_repo2db_mapper_groups(repo_groups):
237 session = meta.Session()
235 session = meta.Session()
238 zombie_group, parent_group, child_group = repo_groups
236 zombie_group, parent_group, child_group = repo_groups
239 zombie_path = os.path.join(
237 zombie_path = os.path.join(
240 RepoGroupModel().repos_path, zombie_group.full_path)
238 RepoGroupModel().repos_path, zombie_group.full_path)
241 os.rmdir(zombie_path)
239 os.rmdir(zombie_path)
242
240
243 # Avoid removing test repos when calling repo2db_mapper
241 # Avoid removing test repos when calling repo2db_mapper
244 repo_list = {
242 repo_list = {
245 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
243 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
246 }
244 }
247 utils.repo2db_mapper(repo_list, remove_obsolete=True)
245 utils.repo2db_mapper(repo_list, remove_obsolete=True)
248
246
249 groups_in_db = session.query(db.RepoGroup).all()
247 groups_in_db = session.query(db.RepoGroup).all()
250 assert child_group in groups_in_db
248 assert child_group in groups_in_db
251 assert parent_group in groups_in_db
249 assert parent_group in groups_in_db
252 assert zombie_path not in groups_in_db
250 assert zombie_path not in groups_in_db
253
251
254
252
255 def test_repo2db_mapper_enables_largefiles(backend):
253 def test_repo2db_mapper_enables_largefiles(backend):
256 repo = backend.create_repo()
254 repo = backend.create_repo()
257 repo_list = {repo.repo_name: 'test'}
255 repo_list = {repo.repo_name: 'test'}
258 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
256 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
259 utils.repo2db_mapper(repo_list, remove_obsolete=False)
257 utils.repo2db_mapper(repo_list, remove_obsolete=False)
260 _, kwargs = scm_mock.call_args
258 _, kwargs = scm_mock.call_args
261 assert kwargs['config'].get('extensions', 'largefiles') == ''
259 assert kwargs['config'].get('extensions', 'largefiles') == ''
262
260
263
261
264 @pytest.mark.backends("git", "svn")
262 @pytest.mark.backends("git", "svn")
265 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
263 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
266 repo = backend.create_repo()
264 repo = backend.create_repo()
267 repo_list = {repo.repo_name: 'test'}
265 repo_list = {repo.repo_name: 'test'}
268 utils.repo2db_mapper(repo_list, remove_obsolete=False)
266 utils.repo2db_mapper(repo_list, remove_obsolete=False)
269
267
270
268
271 @pytest.mark.backends("git", "svn")
269 @pytest.mark.backends("git", "svn")
272 def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
270 def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
273 repo = backend.create_repo()
271 repo = backend.create_repo()
274 RepoModel().delete(repo, fs_remove=False)
272 RepoModel().delete(repo, fs_remove=False)
275 meta.Session().commit()
273 meta.Session().commit()
276 repo_list = {repo.repo_name: repo.scm_instance()}
274 repo_list = {repo.repo_name: repo.scm_instance()}
277 utils.repo2db_mapper(repo_list, remove_obsolete=False)
275 utils.repo2db_mapper(repo_list, remove_obsolete=False)
278
276
279
277
280 class TestPasswordChanged(object):
278 class TestPasswordChanged(object):
281
279
282 def setup_method(self):
280 def setup_method(self):
283 self.session = {
281 self.session = {
284 'rhodecode_user': {
282 'rhodecode_user': {
285 'password': '0cc175b9c0f1b6a831c399e269772661'
283 'password': '0cc175b9c0f1b6a831c399e269772661'
286 }
284 }
287 }
285 }
288 self.auth_user = mock.Mock()
286 self.auth_user = mock.Mock()
289 self.auth_user.username = 'test'
287 self.auth_user.username = 'test'
290 self.auth_user.password = 'abc123'
288 self.auth_user.password = 'abc123'
291
289
292 def test_returns_false_for_default_user(self):
290 def test_returns_false_for_default_user(self):
293 self.auth_user.username = db.User.DEFAULT_USER
291 self.auth_user.username = db.User.DEFAULT_USER
294 result = utils.password_changed(self.auth_user, self.session)
292 result = utils.password_changed(self.auth_user, self.session)
295 assert result is False
293 assert result is False
296
294
297 def test_returns_false_if_password_was_not_changed(self):
295 def test_returns_false_if_password_was_not_changed(self):
298 self.session['rhodecode_user']['password'] = md5_safe(
296 self.session['rhodecode_user']['password'] = md5_safe(
299 self.auth_user.password)
297 self.auth_user.password)
300 result = utils.password_changed(self.auth_user, self.session)
298 result = utils.password_changed(self.auth_user, self.session)
301 assert result is False
299 assert result is False
302
300
303 def test_returns_true_if_password_was_changed(self):
301 def test_returns_true_if_password_was_changed(self):
304 result = utils.password_changed(self.auth_user, self.session)
302 result = utils.password_changed(self.auth_user, self.session)
305 assert result is True
303 assert result is True
306
304
307 def test_returns_true_if_auth_user_password_is_empty(self):
305 def test_returns_true_if_auth_user_password_is_empty(self):
308 self.auth_user.password = None
306 self.auth_user.password = None
309 result = utils.password_changed(self.auth_user, self.session)
307 result = utils.password_changed(self.auth_user, self.session)
310 assert result is True
308 assert result is True
311
309
312 def test_returns_true_if_session_password_is_empty(self):
310 def test_returns_true_if_session_password_is_empty(self):
313 self.session['rhodecode_user'].pop('password')
311 self.session['rhodecode_user'].pop('password')
314 result = utils.password_changed(self.auth_user, self.session)
312 result = utils.password_changed(self.auth_user, self.session)
315 assert result is True
313 assert result is True
316
314
317
315
318 class TestReadOpenSourceLicenses(object):
316 class TestReadOpenSourceLicenses(object):
319 def test_success(self):
317 def test_success(self):
320 utils._license_cache = None
318 utils._license_cache = None
321 json_data = '''
319 json_data = '''
322 {
320 {
323 "python2.7-pytest-2.7.1": {"UNKNOWN": null},
321 "python2.7-pytest-2.7.1": {"UNKNOWN": null},
324 "python2.7-Markdown-2.6.2": {
322 "python2.7-Markdown-2.6.2": {
325 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
323 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
326 }
324 }
327 }
325 }
328 '''
326 '''
329 resource_string_patch = mock.patch.object(
327 resource_string_patch = mock.patch.object(
330 utils.pkg_resources, 'resource_string', return_value=json_data)
328 utils.pkg_resources, 'resource_string', return_value=json_data)
331 with resource_string_patch:
329 with resource_string_patch:
332 result = utils.read_opensource_licenses()
330 result = utils.read_opensource_licenses()
333 assert result == json.loads(json_data)
331 assert result == json.loads(json_data)
334
332
335 def test_caching(self):
333 def test_caching(self):
336 utils._license_cache = {
334 utils._license_cache = {
337 "python2.7-pytest-2.7.1": {
335 "python2.7-pytest-2.7.1": {
338 "UNKNOWN": None
336 "UNKNOWN": None
339 },
337 },
340 "python2.7-Markdown-2.6.2": {
338 "python2.7-Markdown-2.6.2": {
341 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
339 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
342 }
340 }
343 }
341 }
344 resource_patch = mock.patch.object(
342 resource_patch = mock.patch.object(
345 utils.pkg_resources, 'resource_string', side_effect=Exception)
343 utils.pkg_resources, 'resource_string', side_effect=Exception)
346 json_patch = mock.patch.object(
344 json_patch = mock.patch.object(
347 utils.json, 'loads', side_effect=Exception)
345 utils.json, 'loads', side_effect=Exception)
348
346
349 with resource_patch as resource_mock, json_patch as json_mock:
347 with resource_patch as resource_mock, json_patch as json_mock:
350 result = utils.read_opensource_licenses()
348 result = utils.read_opensource_licenses()
351
349
352 assert resource_mock.call_count == 0
350 assert resource_mock.call_count == 0
353 assert json_mock.call_count == 0
351 assert json_mock.call_count == 0
354 assert result == utils._license_cache
352 assert result == utils._license_cache
355
353
356 def test_licenses_file_contains_no_unknown_licenses(self):
354 def test_licenses_file_contains_no_unknown_licenses(self):
357 utils._license_cache = None
355 utils._license_cache = None
358 result = utils.read_opensource_licenses()
356 result = utils.read_opensource_licenses()
359
357
360 for license_data in result:
358 for license_data in result:
361 if isinstance(license_data["license"], list):
359 if isinstance(license_data["license"], list):
362 for lic_data in license_data["license"]:
360 for lic_data in license_data["license"]:
363 assert 'UNKNOWN' not in lic_data["fullName"]
361 assert 'UNKNOWN' not in lic_data["fullName"]
364 else:
362 else:
365 full_name = license_data.get("fullName") or license_data
363 full_name = license_data.get("fullName") or license_data
366 assert 'UNKNOWN' not in full_name
364 assert 'UNKNOWN' not in full_name
367
365
368
366
369 class TestMakeDbConfig(object):
367 class TestMakeDbConfig(object):
370 def test_data_from_config_data_from_db_returned(self):
368 def test_data_from_config_data_from_db_returned(self):
371 test_data = [
369 test_data = [
372 ('section1', 'option1', 'value1'),
370 ('section1', 'option1', 'value1'),
373 ('section2', 'option2', 'value2'),
371 ('section2', 'option2', 'value2'),
374 ('section3', 'option3', 'value3'),
372 ('section3', 'option3', 'value3'),
375 ]
373 ]
376 with mock.patch.object(utils, 'config_data_from_db') as config_mock:
374 with mock.patch.object(utils, 'config_data_from_db') as config_mock:
377 config_mock.return_value = test_data
375 config_mock.return_value = test_data
378 kwargs = {'clear_session': False, 'repo': 'test_repo'}
376 kwargs = {'clear_session': False, 'repo': 'test_repo'}
379 result = utils.make_db_config(**kwargs)
377 result = utils.make_db_config(**kwargs)
380 config_mock.assert_called_once_with(**kwargs)
378 config_mock.assert_called_once_with(**kwargs)
381 for section, option, expected_value in test_data:
379 for section, option, expected_value in test_data:
382 value = result.get(section, option)
380 value = result.get(section, option)
383 assert value == expected_value
381 assert value == expected_value
384
382
385
383
386 class TestConfigDataFromDb(object):
384 class TestConfigDataFromDb(object):
387 def test_config_data_from_db_returns_active_settings(self):
385 def test_config_data_from_db_returns_active_settings(self):
388 test_data = [
386 test_data = [
389 UiSetting('section1', 'option1', 'value1', True),
387 UiSetting('section1', 'option1', 'value1', True),
390 UiSetting('section2', 'option2', 'value2', True),
388 UiSetting('section2', 'option2', 'value2', True),
391 UiSetting('section3', 'option3', 'value3', False),
389 UiSetting('section3', 'option3', 'value3', False),
392 ]
390 ]
393 repo_name = 'test_repo'
391 repo_name = 'test_repo'
394
392
395 model_patch = mock.patch.object(settings, 'VcsSettingsModel')
393 model_patch = mock.patch.object(settings, 'VcsSettingsModel')
396 hooks_patch = mock.patch.object(
394 hooks_patch = mock.patch.object(
397 utils, 'get_enabled_hook_classes',
395 utils, 'get_enabled_hook_classes',
398 return_value=['pull', 'push', 'repo_size'])
396 return_value=['pull', 'push', 'repo_size'])
399 with model_patch as model_mock, hooks_patch:
397 with model_patch as model_mock, hooks_patch:
400 instance_mock = mock.Mock()
398 instance_mock = mock.Mock()
401 model_mock.return_value = instance_mock
399 model_mock.return_value = instance_mock
402 instance_mock.get_ui_settings.return_value = test_data
400 instance_mock.get_ui_settings.return_value = test_data
403 result = utils.config_data_from_db(
401 result = utils.config_data_from_db(
404 clear_session=False, repo=repo_name)
402 clear_session=False, repo=repo_name)
405
403
406 self._assert_repo_name_passed(model_mock, repo_name)
404 self._assert_repo_name_passed(model_mock, repo_name)
407
405
408 expected_result = [
406 expected_result = [
409 ('section1', 'option1', 'value1'),
407 ('section1', 'option1', 'value1'),
410 ('section2', 'option2', 'value2'),
408 ('section2', 'option2', 'value2'),
411 ]
409 ]
412 assert result == expected_result
410 assert result == expected_result
413
411
414 def _assert_repo_name_passed(self, model_mock, repo_name):
412 def _assert_repo_name_passed(self, model_mock, repo_name):
415 assert model_mock.call_count == 1
413 assert model_mock.call_count == 1
416 call_args, call_kwargs = model_mock.call_args
414 call_args, call_kwargs = model_mock.call_args
417 assert call_kwargs['repo'] == repo_name
415 assert call_kwargs['repo'] == repo_name
418
416
419
417
420 class TestIsDirWritable(object):
418 class TestIsDirWritable(object):
421 def test_returns_false_when_not_writable(self):
419 def test_returns_false_when_not_writable(self):
422 with mock.patch('builtins.open', side_effect=OSError):
420 with mock.patch('builtins.open', side_effect=OSError):
423 assert not utils._is_dir_writable('/stub-path')
421 assert not utils._is_dir_writable('/stub-path')
424
422
425 def test_returns_true_when_writable(self, tmpdir):
423 def test_returns_true_when_writable(self, tmpdir):
426 assert utils._is_dir_writable(str(tmpdir))
424 assert utils._is_dir_writable(str(tmpdir))
427
425
428 def test_is_safe_against_race_conditions(self, tmpdir):
426 def test_is_safe_against_race_conditions(self, tmpdir):
429 workers = multiprocessing.Pool()
427 workers = multiprocessing.Pool()
430 directories = [str(tmpdir)] * 10
428 directories = [str(tmpdir)] * 10
431 workers.map(utils._is_dir_writable, directories)
429 workers.map(utils._is_dir_writable, directories)
432
430
433
431
434 class TestGetEnabledHooks(object):
432 class TestGetEnabledHooks(object):
435 def test_only_active_hooks_are_enabled(self):
433 def test_only_active_hooks_are_enabled(self):
436 ui_settings = [
434 ui_settings = [
437 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
435 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
438 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
436 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
439 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False)
437 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False)
440 ]
438 ]
441 result = utils.get_enabled_hook_classes(ui_settings)
439 result = utils.get_enabled_hook_classes(ui_settings)
442 assert result == ['push', 'repo_size']
440 assert result == ['push', 'repo_size']
443
441
444 def test_all_hooks_are_enabled(self):
442 def test_all_hooks_are_enabled(self):
445 ui_settings = [
443 ui_settings = [
446 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
444 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
447 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
445 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
448 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True)
446 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True)
449 ]
447 ]
450 result = utils.get_enabled_hook_classes(ui_settings)
448 result = utils.get_enabled_hook_classes(ui_settings)
451 assert result == ['push', 'repo_size', 'pull']
449 assert result == ['push', 'repo_size', 'pull']
452
450
453 def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
451 def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
454 ui_settings = []
452 ui_settings = []
455 result = utils.get_enabled_hook_classes(ui_settings)
453 result = utils.get_enabled_hook_classes(ui_settings)
456 assert result == []
454 assert result == []
457
455
458
456
459 def test_obfuscate_url_pw():
457 def test_obfuscate_url_pw():
460 from rhodecode.lib.utils2 import obfuscate_url_pw
458 from rhodecode.lib.utils2 import obfuscate_url_pw
461 engine = u'/home/repos/malmö'
459 engine = u'/home/repos/malmö'
462 assert obfuscate_url_pw(engine)
460 assert obfuscate_url_pw(engine)
463
461
464
462
465 @pytest.mark.parametrize("test_ua, expected", [
463 @pytest.mark.parametrize("test_ua, expected", [
466 ("", ""),
464 ("", ""),
467 ('"quoted"', 'quoted'),
465 ('"quoted"', 'quoted'),
468 ('internal-merge', 'internal-merge'),
466 ('internal-merge', 'internal-merge'),
469 ('hg/internal-merge', 'hg/internal-merge'),
467 ('hg/internal-merge', 'hg/internal-merge'),
470 ('git/internal-merge', 'git/internal-merge'),
468 ('git/internal-merge', 'git/internal-merge'),
471
469
472 # git
470 # git
473 ('git/2.10.1 (Apple Git-78)', 'git/2.10.1'),
471 ('git/2.10.1 (Apple Git-78)', 'git/2.10.1'),
474 ('GiT/2.37.2.windows.2', 'git/2.37.2'),
472 ('GiT/2.37.2.windows.2', 'git/2.37.2'),
475 ('git/2.35.1 (Microsoft Windows NT 10.0.19044.0; Win32NT x64) CLR/4.0.30319 VS16/16.0.0', 'git/2.35.1'),
473 ('git/2.35.1 (Microsoft Windows NT 10.0.19044.0; Win32NT x64) CLR/4.0.30319 VS16/16.0.0', 'git/2.35.1'),
476 ('ssh-user-agent', 'ssh-user-agent'),
474 ('ssh-user-agent', 'ssh-user-agent'),
477 ('git/ssh-user-agent', 'git/ssh-user-agent'),
475 ('git/ssh-user-agent', 'git/ssh-user-agent'),
478
476
479
477
480 # hg
478 # hg
481 ('mercurial/proto-1.0 (Mercurial 4.2)', 'mercurial/4.2'),
479 ('mercurial/proto-1.0 (Mercurial 4.2)', 'mercurial/4.2'),
482 ('mercurial/proto-1.0', ''),
480 ('mercurial/proto-1.0', ''),
483 ('mercurial/proto-1.0 (Mercurial 3.9.2)', 'mercurial/3.9.2'),
481 ('mercurial/proto-1.0 (Mercurial 3.9.2)', 'mercurial/3.9.2'),
484 ('mercurial/ssh-user-agent', 'mercurial/ssh-user-agent'),
482 ('mercurial/ssh-user-agent', 'mercurial/ssh-user-agent'),
485 ('mercurial/proto-1.0 (Mercurial 5.8rc0)', 'mercurial/5.8rc0'),
483 ('mercurial/proto-1.0 (Mercurial 5.8rc0)', 'mercurial/5.8rc0'),
486
484
487
485
488 ])
486 ])
489 def test_user_agent_normalizer(test_ua, expected):
487 def test_user_agent_normalizer(test_ua, expected):
490 from rhodecode.lib.utils2 import user_agent_normalizer
488 from rhodecode.lib.utils2 import user_agent_normalizer
491 assert user_agent_normalizer(test_ua, safe=False) == expected
489 assert user_agent_normalizer(test_ua, safe=False) == expected
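For readers skimming the parametrized cases above, the mapping they encode can be illustrated with a small stand-in normalizer; this is only a sketch that reproduces the listed cases, not RhodeCode's actual user_agent_normalizer implementation:

import re

def normalize_ua_sketch(ua: str) -> str:
    ua = ua.strip().strip('"').lower()
    # mercurial advertises 'mercurial/proto-1.0 (Mercurial X.Y)'; keep the real version
    m = re.match(r'mercurial/proto-[\d.]+(?:\s+\(mercurial\s+([^)]+)\))?', ua)
    if m:
        return f'mercurial/{m.group(1)}' if m.group(1) else ''
    # git advertises 'git/X.Y.Z (platform details)'; keep only the numeric version
    m = re.match(r'git/(\d+(?:\.\d+){0,2})', ua)
    if m:
        return f'git/{m.group(1)}'
    # anything else (ssh agents, internal-merge markers, plain strings) passes through
    return ua

assert normalize_ua_sketch('git/2.10.1 (Apple Git-78)') == 'git/2.10.1'
assert normalize_ua_sketch('mercurial/proto-1.0 (Mercurial 4.2)') == 'mercurial/4.2'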