feat(remap and rescan): added a more reliable remap and removal option, and split the logic into separate add (rescan) and cleanup actions
super-admin - r5619:c9e499e7 default
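The change splits the old rescan_repos(remove_obsolete=...) behaviour into two API methods: rescan_repos now only adds repositories found in storage (returning 'added' and 'errors'), while the new cleanup_repos removes database entries whose repositories are gone from storage (returning 'removed' and 'errors'). A minimal sketch of exercising both methods, assuming ``apikey`` and ``app`` stand in for the ``self.apikey`` / ``self.app`` values provided by the ``testuser_api`` and ``app`` fixtures used in the diffs below:

.. code-block:: python

    from rhodecode.api.tests.utils import build_data, api_call, assert_ok

    # rescan_repos: map repositories found in storage into the database
    id_, params = build_data(apikey, 'rescan_repos')
    response = api_call(app, params)
    assert_ok(id_, {'added': [], 'errors': []}, given=response.body)

    # cleanup_repos: drop database entries whose repositories are gone from storage
    id_, params = build_data(apikey, 'cleanup_repos')
    response = api_call(app, params)
    assert_ok(id_, {'removed': [], 'errors': []}, given=response.body)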
@@ -0,0 +1,44 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import mock
20 import pytest
21
22 from rhodecode.model.scm import ScmModel
23 from rhodecode.api.tests.utils import (
24 build_data, api_call, assert_ok, assert_error, crash)
25
26
27 @pytest.mark.usefixtures("testuser_api", "app")
28 class TestCleanupRepos(object):
29 def test_api_cleanup_repos(self):
30 id_, params = build_data(self.apikey, 'cleanup_repos')
31 response = api_call(self.app, params)
32
33 expected = {'removed': [], 'errors': []}
34 assert_ok(id_, expected, given=response.body)
35
36 def test_api_cleanup_repos_error(self):
37
38 id_, params = build_data(self.apikey, 'cleanup_repos', )
39
40 with mock.patch('rhodecode.lib.utils.repo2db_cleanup', side_effect=crash):
41 response = api_call(self.app, params)
42
43 expected = 'Error occurred during repo storage cleanup action'
44 assert_error(id_, expected, given=response.body)
@@ -1,42 +1,42 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import mock
19 import mock
20 import pytest
20 import pytest
21
21
22 from rhodecode.model.scm import ScmModel
23 from rhodecode.api.tests.utils import (
22 from rhodecode.api.tests.utils import (
24 build_data, api_call, assert_ok, assert_error, crash)
23 build_data, api_call, assert_ok, assert_error, crash)
25
24
26
25
27 @pytest.mark.usefixtures("testuser_api", "app")
26 @pytest.mark.usefixtures("testuser_api", "app")
28 class TestRescanRepos(object):
27 class TestRescanRepos(object):
29 def test_api_rescan_repos(self):
28 def test_api_rescan_repos(self):
30 id_, params = build_data(self.apikey, 'rescan_repos')
29 id_, params = build_data(self.apikey, 'rescan_repos')
31 response = api_call(self.app, params)
30 response = api_call(self.app, params)
32
31
33 expected = {'added': [], 'removed': []}
32 expected = {'added': [], 'errors': []}
34 assert_ok(id_, expected, given=response.body)
33 assert_ok(id_, expected, given=response.body)
35
34
36 @mock.patch.object(ScmModel, 'repo_scan', crash)
35 def test_api_rescan_repos_error(self):
37 def test_api_rescann_error(self):
38 id_, params = build_data(self.apikey, 'rescan_repos', )
36 id_, params = build_data(self.apikey, 'rescan_repos', )
39 response = api_call(self.app, params)
37
38 with mock.patch('rhodecode.lib.utils.repo2db_mapper', side_effect=crash):
39 response = api_call(self.app, params)
40
40
41 expected = 'Error occurred during rescan repositories action'
41 expected = 'Error occurred during rescan repositories action'
42 assert_error(id_, expected, given=response.body)
42 assert_error(id_, expected, given=response.body)
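A note on the error-path tests above: they patch 'rhodecode.lib.utils.repo2db_mapper' and 'rhodecode.lib.utils.repo2db_cleanup' at their source module. That only takes effect because the view functions in the next diff import the helper inside the function body (see the "# re-import for testing patches" comments), so each call re-resolves the name on rhodecode.lib.utils and picks up the patched attribute. A minimal, self-contained sketch of that pattern, using hypothetical module and helper names:

.. code-block:: python

    import sys
    import types
    import mock  # the diffs use the standalone mock package; unittest.mock behaves the same

    # hypothetical stand-in for rhodecode.lib.utils and its repo2db_cleanup helper
    fake_utils = types.ModuleType('fake_utils')
    fake_utils.repo2db_cleanup = lambda: ([], [])
    sys.modules['fake_utils'] = fake_utils

    def cleanup_view():
        # late import, mirroring the "re-import for testing patches" comment
        from fake_utils import repo2db_cleanup
        return repo2db_cleanup()

    with mock.patch('fake_utils.repo2db_cleanup', side_effect=RuntimeError('crash')):
        try:
            cleanup_view()  # the late import resolves to the patched callable
        except RuntimeError:
            print('patched helper raised, as the error-path tests expect')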
@@ -1,479 +1,463 b''
1 # Copyright (C) 2011-2024 RhodeCode GmbH
1 # Copyright (C) 2011-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import logging
19 import logging
20 import itertools
20 import itertools
21 import base64
22
21
23 from rhodecode.api import (
22 from rhodecode.api import (
24 jsonrpc_method, JSONRPCError, JSONRPCForbidden, find_methods)
23 jsonrpc_method, JSONRPCError, JSONRPCForbidden, find_methods)
25
24
26 from rhodecode.api.utils import (
25 from rhodecode.api.utils import (
27 Optional, OAttr, has_superadmin_permission, get_user_or_error)
26 Optional, OAttr, has_superadmin_permission, get_user_or_error)
28 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path
27 from rhodecode.lib.utils import get_rhodecode_repo_store_path
29 from rhodecode.lib import system_info
28 from rhodecode.lib import system_info
30 from rhodecode.lib import user_sessions
29 from rhodecode.lib import user_sessions
31 from rhodecode.lib import exc_tracking
30 from rhodecode.lib import exc_tracking
32 from rhodecode.lib.ext_json import json
31 from rhodecode.lib.ext_json import json
33 from rhodecode.lib.utils2 import safe_int
32 from rhodecode.lib.utils2 import safe_int
34 from rhodecode.model.db import UserIpMap
33 from rhodecode.model.db import UserIpMap
35 from rhodecode.model.scm import ScmModel
34 from rhodecode.model.scm import ScmModel
36 from rhodecode.apps.file_store import utils as store_utils
35
37 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
38 FileOverSizeException
39
36
40 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
41
38
42
39
43 @jsonrpc_method()
40 @jsonrpc_method()
44 def get_server_info(request, apiuser):
41 def get_server_info(request, apiuser):
45 """
42 """
46 Returns the |RCE| server information.
43 Returns the |RCE| server information.
47
44
48 This includes the running version of |RCE| and all installed
45 This includes the running version of |RCE| and all installed
49 packages. This command takes the following options:
46 packages. This command takes the following options:
50
47
51 :param apiuser: This is filled automatically from the |authtoken|.
48 :param apiuser: This is filled automatically from the |authtoken|.
52 :type apiuser: AuthUser
49 :type apiuser: AuthUser
53
50
54 Example output:
51 Example output:
55
52
56 .. code-block:: bash
53 .. code-block:: bash
57
54
58 id : <id_given_in_input>
55 id : <id_given_in_input>
59 result : {
56 result : {
60 'modules': [<module name>,...]
57 'modules': [<module name>,...]
61 'py_version': <python version>,
58 'py_version': <python version>,
62 'platform': <platform type>,
59 'platform': <platform type>,
63 'rhodecode_version': <rhodecode version>
60 'rhodecode_version': <rhodecode version>
64 }
61 }
65 error : null
62 error : null
66 """
63 """
67
64
68 if not has_superadmin_permission(apiuser):
65 if not has_superadmin_permission(apiuser):
69 raise JSONRPCForbidden()
66 raise JSONRPCForbidden()
70
67
71 server_info = ScmModel().get_server_info(request.environ)
68 server_info = ScmModel().get_server_info(request.environ)
72 # rhodecode-index requires those
69 # rhodecode-index requires those
73
70
74 server_info['index_storage'] = server_info['search']['value']['location']
71 server_info['index_storage'] = server_info['search']['value']['location']
75 server_info['storage'] = server_info['storage']['value']['path']
72 server_info['storage'] = server_info['storage']['value']['path']
76
73
77 return server_info
74 return server_info
78
75
79
76
80 @jsonrpc_method()
77 @jsonrpc_method()
81 def get_repo_store(request, apiuser):
78 def get_repo_store(request, apiuser):
82 """
79 """
83 Returns the |RCE| repository storage information.
80 Returns the |RCE| repository storage information.
84
81
85 :param apiuser: This is filled automatically from the |authtoken|.
82 :param apiuser: This is filled automatically from the |authtoken|.
86 :type apiuser: AuthUser
83 :type apiuser: AuthUser
87
84
88 Example output:
85 Example output:
89
86
90 .. code-block:: bash
87 .. code-block:: bash
91
88
92 id : <id_given_in_input>
89 id : <id_given_in_input>
93 result : {
90 result : {
94 'modules': [<module name>,...]
91 'modules': [<module name>,...]
95 'py_version': <python version>,
92 'py_version': <python version>,
96 'platform': <platform type>,
93 'platform': <platform type>,
97 'rhodecode_version': <rhodecode version>
94 'rhodecode_version': <rhodecode version>
98 }
95 }
99 error : null
96 error : null
100 """
97 """
101
98
102 if not has_superadmin_permission(apiuser):
99 if not has_superadmin_permission(apiuser):
103 raise JSONRPCForbidden()
100 raise JSONRPCForbidden()
104
101
105 path = get_rhodecode_repo_store_path()
102 path = get_rhodecode_repo_store_path()
106 return {"path": path}
103 return {"path": path}
107
104
108
105
109 @jsonrpc_method()
106 @jsonrpc_method()
110 def get_ip(request, apiuser, userid=Optional(OAttr('apiuser'))):
107 def get_ip(request, apiuser, userid=Optional(OAttr('apiuser'))):
111 """
108 """
112 Displays the IP Address as seen from the |RCE| server.
109 Displays the IP Address as seen from the |RCE| server.
113
110
114 * This command displays the IP Address, as well as all the defined IP
111 * This command displays the IP Address, as well as all the defined IP
115 addresses for the specified user. If the ``userid`` is not set, the
112 addresses for the specified user. If the ``userid`` is not set, the
116 data returned is for the user calling the method.
113 data returned is for the user calling the method.
117
114
118 This command can only be run using an |authtoken| with admin rights to
115 This command can only be run using an |authtoken| with admin rights to
119 the specified repository.
116 the specified repository.
120
117
121 This command takes the following options:
118 This command takes the following options:
122
119
123 :param apiuser: This is filled automatically from |authtoken|.
120 :param apiuser: This is filled automatically from |authtoken|.
124 :type apiuser: AuthUser
121 :type apiuser: AuthUser
125 :param userid: Sets the userid for which associated IP Address data
122 :param userid: Sets the userid for which associated IP Address data
126 is returned.
123 is returned.
127 :type userid: Optional(str or int)
124 :type userid: Optional(str or int)
128
125
129 Example output:
126 Example output:
130
127
131 .. code-block:: bash
128 .. code-block:: bash
132
129
133 id : <id_given_in_input>
130 id : <id_given_in_input>
134 result : {
131 result : {
135 "server_ip_addr": "<ip_from_clien>",
132 "server_ip_addr": "<ip_from_clien>",
136 "user_ips": [
133 "user_ips": [
137 {
134 {
138 "ip_addr": "<ip_with_mask>",
135 "ip_addr": "<ip_with_mask>",
139 "ip_range": ["<start_ip>", "<end_ip>"],
136 "ip_range": ["<start_ip>", "<end_ip>"],
140 },
137 },
141 ...
138 ...
142 ]
139 ]
143 }
140 }
144
141
145 """
142 """
146 if not has_superadmin_permission(apiuser):
143 if not has_superadmin_permission(apiuser):
147 raise JSONRPCForbidden()
144 raise JSONRPCForbidden()
148
145
149 userid = Optional.extract(userid, evaluate_locals=locals())
146 userid = Optional.extract(userid, evaluate_locals=locals())
150 userid = getattr(userid, 'user_id', userid)
147 userid = getattr(userid, 'user_id', userid)
151
148
152 user = get_user_or_error(userid)
149 user = get_user_or_error(userid)
153 ips = UserIpMap.query().filter(UserIpMap.user == user).all()
150 ips = UserIpMap.query().filter(UserIpMap.user == user).all()
154 return {
151 return {
155 'server_ip_addr': request.rpc_ip_addr,
152 'server_ip_addr': request.rpc_ip_addr,
156 'user_ips': ips
153 'user_ips': ips
157 }
154 }
158
155
159
156
160 @jsonrpc_method()
157 @jsonrpc_method()
161 def rescan_repos(request, apiuser, remove_obsolete=Optional(False)):
158 def rescan_repos(request, apiuser):
162 """
159 """
163 Triggers a rescan of the specified repositories.
160 Triggers a rescan of the specified repositories.
164
161 It returns a list of added repositories and any errors encountered during the scan.
165 * If the ``remove_obsolete`` option is set, it also deletes repositories
166 that are found in the database but not on the file system, so called
167 "clean zombies".
168
162
169 This command can only be run using an |authtoken| with admin rights to
163 This command can only be run using an |authtoken| with admin rights to
170 the specified repository.
164 the specified repository.
171
165
172 This command takes the following options:
166 This command takes the following options:
173
167
174 :param apiuser: This is filled automatically from the |authtoken|.
168 :param apiuser: This is filled automatically from the |authtoken|.
175 :type apiuser: AuthUser
169 :type apiuser: AuthUser
176 :param remove_obsolete: Deletes repositories from the database that
177 are not found on the filesystem.
178 :type remove_obsolete: Optional(``True`` | ``False``)
179
170
180 Example output:
171 Example output:
181
172
182 .. code-block:: bash
173 .. code-block:: bash
183
174
184 id : <id_given_in_input>
175 id : <id_given_in_input>
185 result : {
176 result : {
186 'added': [<added repository name>,...]
177 'added': [<added repository name>,...]
187 'removed': [<removed repository name>,...]
178 'errors': [<error_list>,...]
188 }
179 }
189 error : null
180 error : null
190
181
191 Example error output:
182 Example error output:
192
183
193 .. code-block:: bash
184 .. code-block:: bash
194
185
195 id : <id_given_in_input>
186 id : <id_given_in_input>
196 result : null
187 result : null
197 error : {
188 error : {
198 'Error occurred during rescan repositories action'
189 'Error occurred during rescan repositories action'
199 }
190 }
200
191
201 """
192 """
193 from rhodecode.lib.utils import repo2db_mapper # re-import for testing patches
194
202 if not has_superadmin_permission(apiuser):
195 if not has_superadmin_permission(apiuser):
203 raise JSONRPCForbidden()
196 raise JSONRPCForbidden()
204
197
205 try:
198 try:
206 rm_obsolete = Optional.extract(remove_obsolete)
199 added, errors = repo2db_mapper(ScmModel().repo_scan(), force_hooks_rebuild=True)
207 added, removed = repo2db_mapper(ScmModel().repo_scan(),
200 return {'added': added, 'errors': errors}
208 remove_obsolete=rm_obsolete, force_hooks_rebuild=True)
209 return {'added': added, 'removed': removed}
210 except Exception:
201 except Exception:
211 log.exception('Failed to run repo rescann')
202 log.exception('Failed to run repo rescan')
212 raise JSONRPCError(
203 raise JSONRPCError(
213 'Error occurred during rescan repositories action'
204 'Error occurred during rescan repositories action'
214 )
205 )
215
206
216 @jsonrpc_method()
207 @jsonrpc_method()
217 def cleanup_repos(request, apiuser, remove_obsolete=Optional(False)):
208 def cleanup_repos(request, apiuser):
218 """
209 """
219 Triggers a rescan of the specified repositories.
210 Triggers a cleanup of repositories or repository groups that no longer exist on the filesystem.
220
221 * If the ``remove_obsolete`` option is set, it also deletes repositories
222 that are found in the database but not on the file system, so called
223 "clean zombies".
224
211
225 This command can only be run using an |authtoken| with admin rights to
212 This command can only be run using an |authtoken| with admin rights to
226 the specified repository.
213 the specified repository.
227
214
228 This command takes the following options:
215 This command takes the following options:
229
216
230 :param apiuser: This is filled automatically from the |authtoken|.
217 :param apiuser: This is filled automatically from the |authtoken|.
231 :type apiuser: AuthUser
218 :type apiuser: AuthUser
232 :param remove_obsolete: Deletes repositories from the database that
233 are not found on the filesystem.
234 :type remove_obsolete: Optional(``True`` | ``False``)
235
219
236 Example output:
220 Example output:
237
221
238 .. code-block:: bash
222 .. code-block:: bash
239
223
240 id : <id_given_in_input>
224 id : <id_given_in_input>
241 result : {
225 result : {
242 'added': [<added repository name>,...]
226 'removed': [<removed repository name or repository group name>,...]
243 'removed': [<removed repository name>,...]
227 'errors': [<error list of failures to remove>,...]
244 }
228 }
245 error : null
229 error : null
246
230
247 Example error output:
231 Example error output:
248
232
249 .. code-block:: bash
233 .. code-block:: bash
250
234
251 id : <id_given_in_input>
235 id : <id_given_in_input>
252 result : null
236 result : null
253 error : {
237 error : {
254 'Error occurred during rescan repositories action'
238 'Error occurred during repo storage cleanup action'
255 }
239 }
256
240
257 """
241 """
242 from rhodecode.lib.utils import repo2db_cleanup # re-import for testing patches
243
258 if not has_superadmin_permission(apiuser):
244 if not has_superadmin_permission(apiuser):
259 raise JSONRPCForbidden()
245 raise JSONRPCForbidden()
260
246
261 try:
247 try:
262 rm_obsolete = Optional.extract(remove_obsolete)
248 removed, errors = repo2db_cleanup()
263 added, removed = repo2db_mapper(ScmModel().repo_scan(),
249 return {'removed': removed, 'errors': errors}
264 remove_obsolete=rm_obsolete, force_hooks_rebuild=True)
265 return {'added': added, 'removed': removed}
266 except Exception:
250 except Exception:
267 log.exception('Failed to run repo rescann')
251 log.exception('Failed to run repo storage cleanup')
268 raise JSONRPCError(
252 raise JSONRPCError(
269 'Error occurred during rescan repositories action'
253 'Error occurred during repo storage cleanup action'
270 )
254 )
271
255
272
256
273 @jsonrpc_method()
257 @jsonrpc_method()
274 def cleanup_sessions(request, apiuser, older_then=Optional(60)):
258 def cleanup_sessions(request, apiuser, older_then=Optional(60)):
275 """
259 """
276 Triggers a session cleanup action.
260 Triggers a session cleanup action.
277
261
278 If the ``older_then`` option is set, only sessions that hasn't been
262 If the ``older_then`` option is set, only sessions that hasn't been
279 accessed in the given number of days will be removed.
263 accessed in the given number of days will be removed.
280
264
281 This command can only be run using an |authtoken| with admin rights to
265 This command can only be run using an |authtoken| with admin rights to
282 the specified repository.
266 the specified repository.
283
267
284 This command takes the following options:
268 This command takes the following options:
285
269
286 :param apiuser: This is filled automatically from the |authtoken|.
270 :param apiuser: This is filled automatically from the |authtoken|.
287 :type apiuser: AuthUser
271 :type apiuser: AuthUser
288 :param older_then: Deletes session that hasn't been accessed
272 :param older_then: Deletes session that hasn't been accessed
289 in given number of days.
273 in given number of days.
290 :type older_then: Optional(int)
274 :type older_then: Optional(int)
291
275
292 Example output:
276 Example output:
293
277
294 .. code-block:: bash
278 .. code-block:: bash
295
279
296 id : <id_given_in_input>
280 id : <id_given_in_input>
297 result: {
281 result: {
298 "backend": "<type of backend>",
282 "backend": "<type of backend>",
299 "sessions_removed": <number_of_removed_sessions>
283 "sessions_removed": <number_of_removed_sessions>
300 }
284 }
301 error : null
285 error : null
302
286
303 Example error output:
287 Example error output:
304
288
305 .. code-block:: bash
289 .. code-block:: bash
306
290
307 id : <id_given_in_input>
291 id : <id_given_in_input>
308 result : null
292 result : null
309 error : {
293 error : {
310 'Error occurred during session cleanup'
294 'Error occurred during session cleanup'
311 }
295 }
312
296
313 """
297 """
314 if not has_superadmin_permission(apiuser):
298 if not has_superadmin_permission(apiuser):
315 raise JSONRPCForbidden()
299 raise JSONRPCForbidden()
316
300
317 older_then = safe_int(Optional.extract(older_then)) or 60
301 older_then = safe_int(Optional.extract(older_then)) or 60
318 older_than_seconds = 60 * 60 * 24 * older_then
302 older_than_seconds = 60 * 60 * 24 * older_then
319
303
320 config = system_info.rhodecode_config().get_value()['value']['config']
304 config = system_info.rhodecode_config().get_value()['value']['config']
321 session_model = user_sessions.get_session_handler(
305 session_model = user_sessions.get_session_handler(
322 config.get('beaker.session.type', 'memory'))(config)
306 config.get('beaker.session.type', 'memory'))(config)
323
307
324 backend = session_model.SESSION_TYPE
308 backend = session_model.SESSION_TYPE
325 try:
309 try:
326 cleaned = session_model.clean_sessions(
310 cleaned = session_model.clean_sessions(
327 older_than_seconds=older_than_seconds)
311 older_than_seconds=older_than_seconds)
328 return {'sessions_removed': cleaned, 'backend': backend}
312 return {'sessions_removed': cleaned, 'backend': backend}
329 except user_sessions.CleanupCommand as msg:
313 except user_sessions.CleanupCommand as msg:
330 return {'cleanup_command': str(msg), 'backend': backend}
314 return {'cleanup_command': str(msg), 'backend': backend}
331 except Exception as e:
315 except Exception as e:
332 log.exception('Failed session cleanup')
316 log.exception('Failed session cleanup')
333 raise JSONRPCError(
317 raise JSONRPCError(
334 'Error occurred during session cleanup'
318 'Error occurred during session cleanup'
335 )
319 )
336
320
337
321
338 @jsonrpc_method()
322 @jsonrpc_method()
339 def get_method(request, apiuser, pattern=Optional('*')):
323 def get_method(request, apiuser, pattern=Optional('*')):
340 """
324 """
341 Returns list of all available API methods. By default match pattern
325 Returns list of all available API methods. By default match pattern
342 os "*" but any other pattern can be specified. eg *comment* will return
326 os "*" but any other pattern can be specified. eg *comment* will return
343 all methods with comment inside them. If just single method is matched
327 all methods with comment inside them. If just single method is matched
344 returned data will also include method specification
328 returned data will also include method specification
345
329
346 This command can only be run using an |authtoken| with admin rights to
330 This command can only be run using an |authtoken| with admin rights to
347 the specified repository.
331 the specified repository.
348
332
349 This command takes the following options:
333 This command takes the following options:
350
334
351 :param apiuser: This is filled automatically from the |authtoken|.
335 :param apiuser: This is filled automatically from the |authtoken|.
352 :type apiuser: AuthUser
336 :type apiuser: AuthUser
353 :param pattern: pattern to match method names against
337 :param pattern: pattern to match method names against
354 :type pattern: Optional("*")
338 :type pattern: Optional("*")
355
339
356 Example output:
340 Example output:
357
341
358 .. code-block:: bash
342 .. code-block:: bash
359
343
360 id : <id_given_in_input>
344 id : <id_given_in_input>
361 "result": [
345 "result": [
362 "changeset_comment",
346 "changeset_comment",
363 "comment_pull_request",
347 "comment_pull_request",
364 "comment_commit"
348 "comment_commit"
365 ]
349 ]
366 error : null
350 error : null
367
351
368 .. code-block:: bash
352 .. code-block:: bash
369
353
370 id : <id_given_in_input>
354 id : <id_given_in_input>
371 "result": [
355 "result": [
372 "comment_commit",
356 "comment_commit",
373 {
357 {
374 "apiuser": "<RequiredType>",
358 "apiuser": "<RequiredType>",
375 "comment_type": "<Optional:u'note'>",
359 "comment_type": "<Optional:u'note'>",
376 "commit_id": "<RequiredType>",
360 "commit_id": "<RequiredType>",
377 "message": "<RequiredType>",
361 "message": "<RequiredType>",
378 "repoid": "<RequiredType>",
362 "repoid": "<RequiredType>",
379 "request": "<RequiredType>",
363 "request": "<RequiredType>",
380 "resolves_comment_id": "<Optional:None>",
364 "resolves_comment_id": "<Optional:None>",
381 "status": "<Optional:None>",
365 "status": "<Optional:None>",
382 "userid": "<Optional:<OptionalAttr:apiuser>>"
366 "userid": "<Optional:<OptionalAttr:apiuser>>"
383 }
367 }
384 ]
368 ]
385 error : null
369 error : null
386 """
370 """
387 from rhodecode.config import patches
371 from rhodecode.config import patches
388 inspect = patches.inspect_getargspec()
372 inspect = patches.inspect_getargspec()
389
373
390 if not has_superadmin_permission(apiuser):
374 if not has_superadmin_permission(apiuser):
391 raise JSONRPCForbidden()
375 raise JSONRPCForbidden()
392
376
393 pattern = Optional.extract(pattern)
377 pattern = Optional.extract(pattern)
394
378
395 matches = find_methods(request.registry.jsonrpc_methods, pattern)
379 matches = find_methods(request.registry.jsonrpc_methods, pattern)
396
380
397 args_desc = []
381 args_desc = []
398 matches_keys = list(matches.keys())
382 matches_keys = list(matches.keys())
399 if len(matches_keys) == 1:
383 if len(matches_keys) == 1:
400 func = matches[matches_keys[0]]
384 func = matches[matches_keys[0]]
401
385
402 argspec = inspect.getargspec(func)
386 argspec = inspect.getargspec(func)
403 arglist = argspec[0]
387 arglist = argspec[0]
404 defaults = list(map(repr, argspec[3] or []))
388 defaults = list(map(repr, argspec[3] or []))
405
389
406 default_empty = '<RequiredType>'
390 default_empty = '<RequiredType>'
407
391
408 # kw arguments required by this method
392 # kw arguments required by this method
409 func_kwargs = dict(itertools.zip_longest(
393 func_kwargs = dict(itertools.zip_longest(
410 reversed(arglist), reversed(defaults), fillvalue=default_empty))
394 reversed(arglist), reversed(defaults), fillvalue=default_empty))
411 args_desc.append(func_kwargs)
395 args_desc.append(func_kwargs)
412
396
413 return matches_keys + args_desc
397 return matches_keys + args_desc
414
398
415
399
416 @jsonrpc_method()
400 @jsonrpc_method()
417 def store_exception(request, apiuser, exc_data_json, prefix=Optional('rhodecode')):
401 def store_exception(request, apiuser, exc_data_json, prefix=Optional('rhodecode')):
418 """
402 """
419 Stores sent exception inside the built-in exception tracker in |RCE| server.
403 Stores sent exception inside the built-in exception tracker in |RCE| server.
420
404
421 This command can only be run using an |authtoken| with admin rights to
405 This command can only be run using an |authtoken| with admin rights to
422 the specified repository.
406 the specified repository.
423
407
424 This command takes the following options:
408 This command takes the following options:
425
409
426 :param apiuser: This is filled automatically from the |authtoken|.
410 :param apiuser: This is filled automatically from the |authtoken|.
427 :type apiuser: AuthUser
411 :type apiuser: AuthUser
428
412
429 :param exc_data_json: JSON data with exception e.g
413 :param exc_data_json: JSON data with exception e.g
430 {"exc_traceback": "Value `1` is not allowed", "exc_type_name": "ValueError"}
414 {"exc_traceback": "Value `1` is not allowed", "exc_type_name": "ValueError"}
431 :type exc_data_json: JSON data
415 :type exc_data_json: JSON data
432
416
433 :param prefix: prefix for error type, e.g 'rhodecode', 'vcsserver', 'rhodecode-tools'
417 :param prefix: prefix for error type, e.g 'rhodecode', 'vcsserver', 'rhodecode-tools'
434 :type prefix: Optional("rhodecode")
418 :type prefix: Optional("rhodecode")
435
419
436 Example output:
420 Example output:
437
421
438 .. code-block:: bash
422 .. code-block:: bash
439
423
440 id : <id_given_in_input>
424 id : <id_given_in_input>
441 "result": {
425 "result": {
442 "exc_id": 139718459226384,
426 "exc_id": 139718459226384,
443 "exc_url": "http://localhost:8080/_admin/settings/exceptions/139718459226384"
427 "exc_url": "http://localhost:8080/_admin/settings/exceptions/139718459226384"
444 }
428 }
445 error : null
429 error : null
446 """
430 """
447 if not has_superadmin_permission(apiuser):
431 if not has_superadmin_permission(apiuser):
448 raise JSONRPCForbidden()
432 raise JSONRPCForbidden()
449
433
450 prefix = Optional.extract(prefix)
434 prefix = Optional.extract(prefix)
451 exc_id = exc_tracking.generate_id()
435 exc_id = exc_tracking.generate_id()
452
436
453 try:
437 try:
454 exc_data = json.loads(exc_data_json)
438 exc_data = json.loads(exc_data_json)
455 except Exception:
439 except Exception:
456 log.error('Failed to parse JSON: %r', exc_data_json)
440 log.error('Failed to parse JSON: %r', exc_data_json)
457 raise JSONRPCError('Failed to parse JSON data from exc_data_json field. '
441 raise JSONRPCError('Failed to parse JSON data from exc_data_json field. '
458 'Please make sure it contains a valid JSON.')
442 'Please make sure it contains a valid JSON.')
459
443
460 try:
444 try:
461 exc_traceback = exc_data['exc_traceback']
445 exc_traceback = exc_data['exc_traceback']
462 exc_type_name = exc_data['exc_type_name']
446 exc_type_name = exc_data['exc_type_name']
463 exc_value = ''
447 exc_value = ''
464 except KeyError as err:
448 except KeyError as err:
465 raise JSONRPCError(
449 raise JSONRPCError(
466 f'Missing exc_traceback, or exc_type_name '
450 f'Missing exc_traceback, or exc_type_name '
467 f'in exc_data_json field. Missing: {err}')
451 f'in exc_data_json field. Missing: {err}')
468
452
469 class ExcType:
453 class ExcType:
470 __name__ = exc_type_name
454 __name__ = exc_type_name
471
455
472 exc_info = (ExcType(), exc_value, exc_traceback)
456 exc_info = (ExcType(), exc_value, exc_traceback)
473
457
474 exc_tracking._store_exception(
458 exc_tracking._store_exception(
475 exc_id=exc_id, exc_info=exc_info, prefix=prefix)
459 exc_id=exc_id, exc_info=exc_info, prefix=prefix)
476
460
477 exc_url = request.route_url(
461 exc_url = request.route_url(
478 'admin_settings_exception_tracker_show', exception_id=exc_id)
462 'admin_settings_exception_tracker_show', exception_id=exc_id)
479 return {'exc_id': exc_id, 'exc_url': exc_url}
463 return {'exc_id': exc_id, 'exc_url': exc_url}
@@ -1,1124 +1,1133 b''
1 # Copyright (C) 2016-2024 RhodeCode GmbH
1 # Copyright (C) 2016-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 from rhodecode.apps._base import ADMIN_PREFIX
20 from rhodecode.apps._base import ADMIN_PREFIX
21 from rhodecode.apps._base.navigation import includeme as nav_includeme
21 from rhodecode.apps._base.navigation import includeme as nav_includeme
22 from rhodecode.apps.admin.views.main_views import AdminMainView
22 from rhodecode.apps.admin.views.main_views import AdminMainView
23
23
24
24
25 def admin_routes(config):
25 def admin_routes(config):
26 """
26 """
27 Admin prefixed routes
27 Admin prefixed routes
28 """
28 """
29 from rhodecode.apps.admin.views.audit_logs import AdminAuditLogsView
29 from rhodecode.apps.admin.views.audit_logs import AdminAuditLogsView
30 from rhodecode.apps.admin.views.artifacts import AdminArtifactsView
30 from rhodecode.apps.admin.views.artifacts import AdminArtifactsView
31 from rhodecode.apps.admin.views.automation import AdminAutomationView
31 from rhodecode.apps.admin.views.automation import AdminAutomationView
32 from rhodecode.apps.admin.views.scheduler import AdminSchedulerView
32 from rhodecode.apps.admin.views.scheduler import AdminSchedulerView
33 from rhodecode.apps.admin.views.defaults import AdminDefaultSettingsView
33 from rhodecode.apps.admin.views.defaults import AdminDefaultSettingsView
34 from rhodecode.apps.admin.views.exception_tracker import ExceptionsTrackerView
34 from rhodecode.apps.admin.views.exception_tracker import ExceptionsTrackerView
35 from rhodecode.apps.admin.views.open_source_licenses import OpenSourceLicensesAdminSettingsView
35 from rhodecode.apps.admin.views.open_source_licenses import OpenSourceLicensesAdminSettingsView
36 from rhodecode.apps.admin.views.permissions import AdminPermissionsView
36 from rhodecode.apps.admin.views.permissions import AdminPermissionsView
37 from rhodecode.apps.admin.views.process_management import AdminProcessManagementView
37 from rhodecode.apps.admin.views.process_management import AdminProcessManagementView
38 from rhodecode.apps.admin.views.repo_groups import AdminRepoGroupsView
38 from rhodecode.apps.admin.views.repo_groups import AdminRepoGroupsView
39 from rhodecode.apps.admin.views.repositories import AdminReposView
39 from rhodecode.apps.admin.views.repositories import AdminReposView
40 from rhodecode.apps.admin.views.sessions import AdminSessionSettingsView
40 from rhodecode.apps.admin.views.sessions import AdminSessionSettingsView
41 from rhodecode.apps.admin.views.settings import AdminSettingsView
41 from rhodecode.apps.admin.views.settings import AdminSettingsView
42 from rhodecode.apps.admin.views.svn_config import AdminSvnConfigView
42 from rhodecode.apps.admin.views.svn_config import AdminSvnConfigView
43 from rhodecode.apps.admin.views.system_info import AdminSystemInfoSettingsView
43 from rhodecode.apps.admin.views.system_info import AdminSystemInfoSettingsView
44 from rhodecode.apps.admin.views.user_groups import AdminUserGroupsView
44 from rhodecode.apps.admin.views.user_groups import AdminUserGroupsView
45 from rhodecode.apps.admin.views.users import AdminUsersView, UsersView
45 from rhodecode.apps.admin.views.users import AdminUsersView, UsersView
46 from rhodecode.apps.admin.views.security import AdminSecurityView
46 from rhodecode.apps.admin.views.security import AdminSecurityView
47
47
48 # Security EE feature
48 # Security EE feature
49
49
50 config.add_route(
50 config.add_route(
51 'admin_security',
51 'admin_security',
52 pattern='/security')
52 pattern='/security')
53 config.add_view(
53 config.add_view(
54 AdminSecurityView,
54 AdminSecurityView,
55 attr='security',
55 attr='security',
56 route_name='admin_security', request_method='GET',
56 route_name='admin_security', request_method='GET',
57 renderer='rhodecode:templates/admin/security/security.mako')
57 renderer='rhodecode:templates/admin/security/security.mako')
58
58
59 config.add_route(
59 config.add_route(
60 name='admin_security_update',
60 name='admin_security_update',
61 pattern='/security/update')
61 pattern='/security/update')
62 config.add_view(
62 config.add_view(
63 AdminSecurityView,
63 AdminSecurityView,
64 attr='security_update',
64 attr='security_update',
65 route_name='admin_security_update', request_method='POST',
65 route_name='admin_security_update', request_method='POST',
66 renderer='rhodecode:templates/admin/security/security.mako')
66 renderer='rhodecode:templates/admin/security/security.mako')
67
67
68 config.add_route(
68 config.add_route(
69 name='admin_security_modify_allowed_vcs_client_versions',
69 name='admin_security_modify_allowed_vcs_client_versions',
70 pattern=ADMIN_PREFIX + '/security/modify/allowed_vcs_client_versions')
70 pattern=ADMIN_PREFIX + '/security/modify/allowed_vcs_client_versions')
71 config.add_view(
71 config.add_view(
72 AdminSecurityView,
72 AdminSecurityView,
73 attr='vcs_whitelisted_client_versions_edit',
73 attr='vcs_whitelisted_client_versions_edit',
74 route_name='admin_security_modify_allowed_vcs_client_versions', request_method=('GET', 'POST'),
74 route_name='admin_security_modify_allowed_vcs_client_versions', request_method=('GET', 'POST'),
75 renderer='rhodecode:templates/admin/security/edit_allowed_vcs_client_versions.mako')
75 renderer='rhodecode:templates/admin/security/edit_allowed_vcs_client_versions.mako')
76
76
77
77
78 config.add_route(
78 config.add_route(
79 name='admin_audit_logs',
79 name='admin_audit_logs',
80 pattern='/audit_logs')
80 pattern='/audit_logs')
81 config.add_view(
81 config.add_view(
82 AdminAuditLogsView,
82 AdminAuditLogsView,
83 attr='admin_audit_logs',
83 attr='admin_audit_logs',
84 route_name='admin_audit_logs', request_method='GET',
84 route_name='admin_audit_logs', request_method='GET',
85 renderer='rhodecode:templates/admin/admin_audit_logs.mako')
85 renderer='rhodecode:templates/admin/admin_audit_logs.mako')
86
86
87 config.add_route(
87 config.add_route(
88 name='admin_audit_log_entry',
88 name='admin_audit_log_entry',
89 pattern='/audit_logs/{audit_log_id}')
89 pattern='/audit_logs/{audit_log_id}')
90 config.add_view(
90 config.add_view(
91 AdminAuditLogsView,
91 AdminAuditLogsView,
92 attr='admin_audit_log_entry',
92 attr='admin_audit_log_entry',
93 route_name='admin_audit_log_entry', request_method='GET',
93 route_name='admin_audit_log_entry', request_method='GET',
94 renderer='rhodecode:templates/admin/admin_audit_log_entry.mako')
94 renderer='rhodecode:templates/admin/admin_audit_log_entry.mako')
95
95
96 # Artifacts EE feature
96 # Artifacts EE feature
97 config.add_route(
97 config.add_route(
98 'admin_artifacts',
98 'admin_artifacts',
99 pattern=ADMIN_PREFIX + '/artifacts')
99 pattern=ADMIN_PREFIX + '/artifacts')
100 config.add_route(
100 config.add_route(
101 'admin_artifacts_show_all',
101 'admin_artifacts_show_all',
102 pattern=ADMIN_PREFIX + '/artifacts')
102 pattern=ADMIN_PREFIX + '/artifacts')
103 config.add_view(
103 config.add_view(
104 AdminArtifactsView,
104 AdminArtifactsView,
105 attr='artifacts',
105 attr='artifacts',
106 route_name='admin_artifacts', request_method='GET',
106 route_name='admin_artifacts', request_method='GET',
107 renderer='rhodecode:templates/admin/artifacts/artifacts.mako')
107 renderer='rhodecode:templates/admin/artifacts/artifacts.mako')
108 config.add_view(
108 config.add_view(
109 AdminArtifactsView,
109 AdminArtifactsView,
110 attr='artifacts',
110 attr='artifacts',
111 route_name='admin_artifacts_show_all', request_method='GET',
111 route_name='admin_artifacts_show_all', request_method='GET',
112 renderer='rhodecode:templates/admin/artifacts/artifacts.mako')
112 renderer='rhodecode:templates/admin/artifacts/artifacts.mako')
113
113
114 # EE views
114 # EE views
115 config.add_route(
115 config.add_route(
116 name='admin_artifacts_show_info',
116 name='admin_artifacts_show_info',
117 pattern=ADMIN_PREFIX + '/artifacts/{uid}')
117 pattern=ADMIN_PREFIX + '/artifacts/{uid}')
118 config.add_route(
118 config.add_route(
119 name='admin_artifacts_delete',
119 name='admin_artifacts_delete',
120 pattern=ADMIN_PREFIX + '/artifacts/{uid}/delete')
120 pattern=ADMIN_PREFIX + '/artifacts/{uid}/delete')
121 config.add_route(
121 config.add_route(
122 name='admin_artifacts_update',
122 name='admin_artifacts_update',
123 pattern=ADMIN_PREFIX + '/artifacts/{uid}/update')
123 pattern=ADMIN_PREFIX + '/artifacts/{uid}/update')
124
124
125 # Automation EE feature
125 # Automation EE feature
126 config.add_route(
126 config.add_route(
127 'admin_automation',
127 'admin_automation',
128 pattern=ADMIN_PREFIX + '/automation')
128 pattern=ADMIN_PREFIX + '/automation')
129 config.add_view(
129 config.add_view(
130 AdminAutomationView,
130 AdminAutomationView,
131 attr='automation',
131 attr='automation',
132 route_name='admin_automation', request_method='GET',
132 route_name='admin_automation', request_method='GET',
133 renderer='rhodecode:templates/admin/automation/automation.mako')
133 renderer='rhodecode:templates/admin/automation/automation.mako')
134
134
135 # Scheduler EE feature
135 # Scheduler EE feature
136 config.add_route(
136 config.add_route(
137 'admin_scheduler',
137 'admin_scheduler',
138 pattern=ADMIN_PREFIX + '/scheduler')
138 pattern=ADMIN_PREFIX + '/scheduler')
139 config.add_view(
139 config.add_view(
140 AdminSchedulerView,
140 AdminSchedulerView,
141 attr='scheduler',
141 attr='scheduler',
142 route_name='admin_scheduler', request_method='GET',
142 route_name='admin_scheduler', request_method='GET',
143 renderer='rhodecode:templates/admin/scheduler/scheduler.mako')
143 renderer='rhodecode:templates/admin/scheduler/scheduler.mako')
144
144
145 config.add_route(
145 config.add_route(
146 name='admin_settings_open_source',
146 name='admin_settings_open_source',
147 pattern='/settings/open_source')
147 pattern='/settings/open_source')
148 config.add_view(
148 config.add_view(
149 OpenSourceLicensesAdminSettingsView,
149 OpenSourceLicensesAdminSettingsView,
150 attr='open_source_licenses',
150 attr='open_source_licenses',
151 route_name='admin_settings_open_source', request_method='GET',
151 route_name='admin_settings_open_source', request_method='GET',
152 renderer='rhodecode:templates/admin/settings/settings.mako')
152 renderer='rhodecode:templates/admin/settings/settings.mako')
153
153
154 config.add_route(
154 config.add_route(
155 name='admin_settings_vcs_svn_generate_cfg',
155 name='admin_settings_vcs_svn_generate_cfg',
156 pattern='/settings/vcs/svn_generate_cfg')
156 pattern='/settings/vcs/svn_generate_cfg')
157 config.add_view(
157 config.add_view(
158 AdminSvnConfigView,
158 AdminSvnConfigView,
159 attr='vcs_svn_generate_config',
159 attr='vcs_svn_generate_config',
160 route_name='admin_settings_vcs_svn_generate_cfg',
160 route_name='admin_settings_vcs_svn_generate_cfg',
161 request_method='POST', renderer='json')
161 request_method='POST', renderer='json')
162
162
163 config.add_route(
163 config.add_route(
164 name='admin_settings_system',
164 name='admin_settings_system',
165 pattern='/settings/system')
165 pattern='/settings/system')
166 config.add_view(
166 config.add_view(
167 AdminSystemInfoSettingsView,
167 AdminSystemInfoSettingsView,
168 attr='settings_system_info',
168 attr='settings_system_info',
169 route_name='admin_settings_system', request_method='GET',
169 route_name='admin_settings_system', request_method='GET',
170 renderer='rhodecode:templates/admin/settings/settings.mako')
170 renderer='rhodecode:templates/admin/settings/settings.mako')
171
171
172 config.add_route(
172 config.add_route(
173 name='admin_settings_system_update',
173 name='admin_settings_system_update',
174 pattern='/settings/system/updates')
174 pattern='/settings/system/updates')
175 config.add_view(
175 config.add_view(
176 AdminSystemInfoSettingsView,
176 AdminSystemInfoSettingsView,
177 attr='settings_system_info_check_update',
177 attr='settings_system_info_check_update',
178 route_name='admin_settings_system_update', request_method='GET',
178 route_name='admin_settings_system_update', request_method='GET',
179 renderer='rhodecode:templates/admin/settings/settings_system_update.mako')
179 renderer='rhodecode:templates/admin/settings/settings_system_update.mako')
180
180
181 config.add_route(
181 config.add_route(
182 name='admin_settings_exception_tracker',
182 name='admin_settings_exception_tracker',
183 pattern='/settings/exceptions')
183 pattern='/settings/exceptions')
184 config.add_view(
184 config.add_view(
185 ExceptionsTrackerView,
185 ExceptionsTrackerView,
186 attr='browse_exceptions',
186 attr='browse_exceptions',
187 route_name='admin_settings_exception_tracker', request_method='GET',
187 route_name='admin_settings_exception_tracker', request_method='GET',
188 renderer='rhodecode:templates/admin/settings/settings.mako')
188 renderer='rhodecode:templates/admin/settings/settings.mako')
189
189
190 config.add_route(
190 config.add_route(
191 name='admin_settings_exception_tracker_delete_all',
191 name='admin_settings_exception_tracker_delete_all',
192 pattern='/settings/exceptions_delete_all')
192 pattern='/settings/exceptions_delete_all')
193 config.add_view(
193 config.add_view(
194 ExceptionsTrackerView,
194 ExceptionsTrackerView,
195 attr='exception_delete_all',
195 attr='exception_delete_all',
196 route_name='admin_settings_exception_tracker_delete_all', request_method='POST',
196 route_name='admin_settings_exception_tracker_delete_all', request_method='POST',
197 renderer='rhodecode:templates/admin/settings/settings.mako')
197 renderer='rhodecode:templates/admin/settings/settings.mako')
198
198
199 config.add_route(
199 config.add_route(
200 name='admin_settings_exception_tracker_show',
200 name='admin_settings_exception_tracker_show',
201 pattern='/settings/exceptions/{exception_id}')
201 pattern='/settings/exceptions/{exception_id}')
202 config.add_view(
202 config.add_view(
203 ExceptionsTrackerView,
203 ExceptionsTrackerView,
204 attr='exception_show',
204 attr='exception_show',
205 route_name='admin_settings_exception_tracker_show', request_method='GET',
205 route_name='admin_settings_exception_tracker_show', request_method='GET',
206 renderer='rhodecode:templates/admin/settings/settings.mako')
206 renderer='rhodecode:templates/admin/settings/settings.mako')
207
207
208 config.add_route(
208 config.add_route(
209 name='admin_settings_exception_tracker_delete',
209 name='admin_settings_exception_tracker_delete',
210 pattern='/settings/exceptions/{exception_id}/delete')
210 pattern='/settings/exceptions/{exception_id}/delete')
211 config.add_view(
211 config.add_view(
212 ExceptionsTrackerView,
212 ExceptionsTrackerView,
213 attr='exception_delete',
213 attr='exception_delete',
214 route_name='admin_settings_exception_tracker_delete', request_method='POST',
214 route_name='admin_settings_exception_tracker_delete', request_method='POST',
215 renderer='rhodecode:templates/admin/settings/settings.mako')
215 renderer='rhodecode:templates/admin/settings/settings.mako')
216
216
217 config.add_route(
217 config.add_route(
218 name='admin_settings_sessions',
218 name='admin_settings_sessions',
219 pattern='/settings/sessions')
219 pattern='/settings/sessions')
220 config.add_view(
220 config.add_view(
221 AdminSessionSettingsView,
221 AdminSessionSettingsView,
222 attr='settings_sessions',
222 attr='settings_sessions',
223 route_name='admin_settings_sessions', request_method='GET',
223 route_name='admin_settings_sessions', request_method='GET',
224 renderer='rhodecode:templates/admin/settings/settings.mako')
224 renderer='rhodecode:templates/admin/settings/settings.mako')
225
225
226 config.add_route(
226 config.add_route(
227 name='admin_settings_sessions_cleanup',
227 name='admin_settings_sessions_cleanup',
228 pattern='/settings/sessions/cleanup')
228 pattern='/settings/sessions/cleanup')
229 config.add_view(
229 config.add_view(
230 AdminSessionSettingsView,
230 AdminSessionSettingsView,
231 attr='settings_sessions_cleanup',
231 attr='settings_sessions_cleanup',
232 route_name='admin_settings_sessions_cleanup', request_method='POST')
232 route_name='admin_settings_sessions_cleanup', request_method='POST')
233
233
234 config.add_route(
234 config.add_route(
235 name='admin_settings_process_management',
235 name='admin_settings_process_management',
236 pattern='/settings/process_management')
236 pattern='/settings/process_management')
237 config.add_view(
237 config.add_view(
238 AdminProcessManagementView,
238 AdminProcessManagementView,
239 attr='process_management',
239 attr='process_management',
240 route_name='admin_settings_process_management', request_method='GET',
240 route_name='admin_settings_process_management', request_method='GET',
241 renderer='rhodecode:templates/admin/settings/settings.mako')
241 renderer='rhodecode:templates/admin/settings/settings.mako')
242
242
243 config.add_route(
243 config.add_route(
244 name='admin_settings_process_management_data',
244 name='admin_settings_process_management_data',
245 pattern='/settings/process_management/data')
245 pattern='/settings/process_management/data')
246 config.add_view(
246 config.add_view(
247 AdminProcessManagementView,
247 AdminProcessManagementView,
248 attr='process_management_data',
248 attr='process_management_data',
249 route_name='admin_settings_process_management_data', request_method='GET',
249 route_name='admin_settings_process_management_data', request_method='GET',
250 renderer='rhodecode:templates/admin/settings/settings_process_management_data.mako')
250 renderer='rhodecode:templates/admin/settings/settings_process_management_data.mako')
251
251
252 config.add_route(
252 config.add_route(
253 name='admin_settings_process_management_signal',
253 name='admin_settings_process_management_signal',
254 pattern='/settings/process_management/signal')
254 pattern='/settings/process_management/signal')
255 config.add_view(
255 config.add_view(
256 AdminProcessManagementView,
256 AdminProcessManagementView,
257 attr='process_management_signal',
257 attr='process_management_signal',
258 route_name='admin_settings_process_management_signal',
258 route_name='admin_settings_process_management_signal',
259 request_method='POST', renderer='json_ext')
259 request_method='POST', renderer='json_ext')
260
260
261 config.add_route(
261 config.add_route(
262 name='admin_settings_process_management_master_signal',
262 name='admin_settings_process_management_master_signal',
263 pattern='/settings/process_management/master_signal')
263 pattern='/settings/process_management/master_signal')
264 config.add_view(
264 config.add_view(
265 AdminProcessManagementView,
265 AdminProcessManagementView,
266 attr='process_management_master_signal',
266 attr='process_management_master_signal',
267 route_name='admin_settings_process_management_master_signal',
267 route_name='admin_settings_process_management_master_signal',
268 request_method='POST', renderer='json_ext')
268 request_method='POST', renderer='json_ext')
269
269
270 # default settings
270 # default settings
271 config.add_route(
271 config.add_route(
272 name='admin_defaults_repositories',
272 name='admin_defaults_repositories',
273 pattern='/defaults/repositories')
273 pattern='/defaults/repositories')
274 config.add_view(
274 config.add_view(
275 AdminDefaultSettingsView,
275 AdminDefaultSettingsView,
276 attr='defaults_repository_show',
276 attr='defaults_repository_show',
277 route_name='admin_defaults_repositories', request_method='GET',
277 route_name='admin_defaults_repositories', request_method='GET',
278 renderer='rhodecode:templates/admin/defaults/defaults.mako')
278 renderer='rhodecode:templates/admin/defaults/defaults.mako')
279
279
280 config.add_route(
280 config.add_route(
281 name='admin_defaults_repositories_update',
281 name='admin_defaults_repositories_update',
282 pattern='/defaults/repositories/update')
282 pattern='/defaults/repositories/update')
283 config.add_view(
283 config.add_view(
284 AdminDefaultSettingsView,
284 AdminDefaultSettingsView,
285 attr='defaults_repository_update',
285 attr='defaults_repository_update',
286 route_name='admin_defaults_repositories_update', request_method='POST',
286 route_name='admin_defaults_repositories_update', request_method='POST',
287 renderer='rhodecode:templates/admin/defaults/defaults.mako')
287 renderer='rhodecode:templates/admin/defaults/defaults.mako')
288
288
289 # admin settings
289 # admin settings
290
290
291 config.add_route(
291 config.add_route(
292 name='admin_settings',
292 name='admin_settings',
293 pattern='/settings')
293 pattern='/settings')
294 config.add_view(
294 config.add_view(
295 AdminSettingsView,
295 AdminSettingsView,
296 attr='settings_global',
296 attr='settings_global',
297 route_name='admin_settings', request_method='GET',
297 route_name='admin_settings', request_method='GET',
298 renderer='rhodecode:templates/admin/settings/settings.mako')
298 renderer='rhodecode:templates/admin/settings/settings.mako')
299
299
300 config.add_route(
300 config.add_route(
301 name='admin_settings_update',
301 name='admin_settings_update',
302 pattern='/settings/update')
302 pattern='/settings/update')
303 config.add_view(
303 config.add_view(
304 AdminSettingsView,
304 AdminSettingsView,
305 attr='settings_global_update',
305 attr='settings_global_update',
306 route_name='admin_settings_update', request_method='POST',
306 route_name='admin_settings_update', request_method='POST',
307 renderer='rhodecode:templates/admin/settings/settings.mako')
307 renderer='rhodecode:templates/admin/settings/settings.mako')
308
308
309 config.add_route(
309 config.add_route(
310 name='admin_settings_global',
310 name='admin_settings_global',
311 pattern='/settings/global')
311 pattern='/settings/global')
312 config.add_view(
312 config.add_view(
313 AdminSettingsView,
313 AdminSettingsView,
314 attr='settings_global',
314 attr='settings_global',
315 route_name='admin_settings_global', request_method='GET',
315 route_name='admin_settings_global', request_method='GET',
316 renderer='rhodecode:templates/admin/settings/settings.mako')
316 renderer='rhodecode:templates/admin/settings/settings.mako')
317
317
318 config.add_route(
318 config.add_route(
319 name='admin_settings_global_update',
319 name='admin_settings_global_update',
320 pattern='/settings/global/update')
320 pattern='/settings/global/update')
321 config.add_view(
321 config.add_view(
322 AdminSettingsView,
322 AdminSettingsView,
323 attr='settings_global_update',
323 attr='settings_global_update',
324 route_name='admin_settings_global_update', request_method='POST',
324 route_name='admin_settings_global_update', request_method='POST',
325 renderer='rhodecode:templates/admin/settings/settings.mako')
325 renderer='rhodecode:templates/admin/settings/settings.mako')
326
326
327 config.add_route(
327 config.add_route(
328 name='admin_settings_vcs',
328 name='admin_settings_vcs',
329 pattern='/settings/vcs')
329 pattern='/settings/vcs')
330 config.add_view(
330 config.add_view(
331 AdminSettingsView,
331 AdminSettingsView,
332 attr='settings_vcs',
332 attr='settings_vcs',
333 route_name='admin_settings_vcs', request_method='GET',
333 route_name='admin_settings_vcs', request_method='GET',
334 renderer='rhodecode:templates/admin/settings/settings.mako')
334 renderer='rhodecode:templates/admin/settings/settings.mako')
335
335
336 config.add_route(
336 config.add_route(
337 name='admin_settings_vcs_update',
337 name='admin_settings_vcs_update',
338 pattern='/settings/vcs/update')
338 pattern='/settings/vcs/update')
339 config.add_view(
339 config.add_view(
340 AdminSettingsView,
340 AdminSettingsView,
341 attr='settings_vcs_update',
341 attr='settings_vcs_update',
342 route_name='admin_settings_vcs_update', request_method='POST',
342 route_name='admin_settings_vcs_update', request_method='POST',
343 renderer='rhodecode:templates/admin/settings/settings.mako')
343 renderer='rhodecode:templates/admin/settings/settings.mako')
344
344
345 config.add_route(
345 config.add_route(
346 name='admin_settings_vcs_svn_pattern_delete',
346 name='admin_settings_vcs_svn_pattern_delete',
347 pattern='/settings/vcs/svn_pattern_delete')
347 pattern='/settings/vcs/svn_pattern_delete')
348 config.add_view(
348 config.add_view(
349 AdminSettingsView,
349 AdminSettingsView,
350 attr='settings_vcs_delete_svn_pattern',
350 attr='settings_vcs_delete_svn_pattern',
351 route_name='admin_settings_vcs_svn_pattern_delete', request_method='POST',
351 route_name='admin_settings_vcs_svn_pattern_delete', request_method='POST',
352 renderer='json_ext', xhr=True)
352 renderer='json_ext', xhr=True)
353
353
354 config.add_route(
354 config.add_route(
355 name='admin_settings_mapping',
355 name='admin_settings_mapping',
356 pattern='/settings/mapping')
356 pattern='/settings/mapping')
357 config.add_view(
357 config.add_view(
358 AdminSettingsView,
358 AdminSettingsView,
359 attr='settings_mapping',
359 attr='settings_mapping',
360 route_name='admin_settings_mapping', request_method='GET',
360 route_name='admin_settings_mapping', request_method='GET',
361 renderer='rhodecode:templates/admin/settings/settings.mako')
361 renderer='rhodecode:templates/admin/settings/settings.mako')
362
362
363 config.add_route(
363 config.add_route(
364 name='admin_settings_mapping_update',
364 name='admin_settings_mapping_create',
365 pattern='/settings/mapping/update')
365 pattern='/settings/mapping/create')
366 config.add_view(
366 config.add_view(
367 AdminSettingsView,
367 AdminSettingsView,
368 attr='settings_mapping_update',
368 attr='settings_mapping_create',
369 route_name='admin_settings_mapping_update', request_method='POST',
369 route_name='admin_settings_mapping_create', request_method='POST',
370 renderer='rhodecode:templates/admin/settings/settings.mako')
371
372 config.add_route(
373 name='admin_settings_mapping_cleanup',
374 pattern='/settings/mapping/cleanup')
375 config.add_view(
376 AdminSettingsView,
377 attr='settings_mapping_cleanup',
378 route_name='admin_settings_mapping_cleanup', request_method='POST',
370 renderer='rhodecode:templates/admin/settings/settings.mako')
379 renderer='rhodecode:templates/admin/settings/settings.mako')
371
380
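The hunk above splits the former single mapping "update" endpoint into two POST endpoints: /settings/mapping/create for an add-only rescan and /settings/mapping/cleanup for a removal-only pass. The view methods they point at via attr=... are outside this diff, so what follows is only a minimal, hypothetical sketch of how such a pair could be wired in Pyramid; the class name, route names, and return values are illustrative placeholders, not RhodeCode's actual implementation.

from pyramid.config import Configurator


class MappingSettingsSketch:
    # Hypothetical stand-in for the real view class referenced above.
    def __init__(self, context, request):
        self.request = request

    def settings_mapping_create(self):
        # add-only rescan: would register repos found on disk but missing
        # from the database; placeholder result only
        return {'status': 'created'}

    def settings_mapping_cleanup(self):
        # removal-only pass: would drop database entries whose repos are
        # gone from the filesystem; placeholder result only
        return {'status': 'cleaned up'}


def make_sketch_app():
    # same add_route/add_view(attr=...) wiring style as the diff above,
    # but with a JSON renderer so the sketch is self-contained
    config = Configurator()
    config.add_route('sketch_mapping_create', '/settings/mapping/create')
    config.add_view(MappingSettingsSketch, attr='settings_mapping_create',
                    route_name='sketch_mapping_create',
                    request_method='POST', renderer='json')
    config.add_route('sketch_mapping_cleanup', '/settings/mapping/cleanup')
    config.add_view(MappingSettingsSketch, attr='settings_mapping_cleanup',
                    route_name='sketch_mapping_cleanup',
                    request_method='POST', renderer='json')
    return config.make_wsgi_app()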
372 config.add_route(
381 config.add_route(
373 name='admin_settings_visual',
382 name='admin_settings_visual',
374 pattern='/settings/visual')
383 pattern='/settings/visual')
375 config.add_view(
384 config.add_view(
376 AdminSettingsView,
385 AdminSettingsView,
377 attr='settings_visual',
386 attr='settings_visual',
378 route_name='admin_settings_visual', request_method='GET',
387 route_name='admin_settings_visual', request_method='GET',
379 renderer='rhodecode:templates/admin/settings/settings.mako')
388 renderer='rhodecode:templates/admin/settings/settings.mako')
380
389
381 config.add_route(
390 config.add_route(
382 name='admin_settings_visual_update',
391 name='admin_settings_visual_update',
383 pattern='/settings/visual/update')
392 pattern='/settings/visual/update')
384 config.add_view(
393 config.add_view(
385 AdminSettingsView,
394 AdminSettingsView,
386 attr='settings_visual_update',
395 attr='settings_visual_update',
387 route_name='admin_settings_visual_update', request_method='POST',
396 route_name='admin_settings_visual_update', request_method='POST',
388 renderer='rhodecode:templates/admin/settings/settings.mako')
397 renderer='rhodecode:templates/admin/settings/settings.mako')
389
398
390 config.add_route(
399 config.add_route(
391 name='admin_settings_issuetracker',
400 name='admin_settings_issuetracker',
392 pattern='/settings/issue-tracker')
401 pattern='/settings/issue-tracker')
393 config.add_view(
402 config.add_view(
394 AdminSettingsView,
403 AdminSettingsView,
395 attr='settings_issuetracker',
404 attr='settings_issuetracker',
396 route_name='admin_settings_issuetracker', request_method='GET',
405 route_name='admin_settings_issuetracker', request_method='GET',
397 renderer='rhodecode:templates/admin/settings/settings.mako')
406 renderer='rhodecode:templates/admin/settings/settings.mako')
398
407
399 config.add_route(
408 config.add_route(
400 name='admin_settings_issuetracker_update',
409 name='admin_settings_issuetracker_update',
401 pattern='/settings/issue-tracker/update')
410 pattern='/settings/issue-tracker/update')
402 config.add_view(
411 config.add_view(
403 AdminSettingsView,
412 AdminSettingsView,
404 attr='settings_issuetracker_update',
413 attr='settings_issuetracker_update',
405 route_name='admin_settings_issuetracker_update', request_method='POST',
414 route_name='admin_settings_issuetracker_update', request_method='POST',
406 renderer='rhodecode:templates/admin/settings/settings.mako')
415 renderer='rhodecode:templates/admin/settings/settings.mako')
407
416
408 config.add_route(
417 config.add_route(
409 name='admin_settings_issuetracker_test',
418 name='admin_settings_issuetracker_test',
410 pattern='/settings/issue-tracker/test')
419 pattern='/settings/issue-tracker/test')
411 config.add_view(
420 config.add_view(
412 AdminSettingsView,
421 AdminSettingsView,
413 attr='settings_issuetracker_test',
422 attr='settings_issuetracker_test',
414 route_name='admin_settings_issuetracker_test', request_method='POST',
423 route_name='admin_settings_issuetracker_test', request_method='POST',
415 renderer='string', xhr=True)
424 renderer='string', xhr=True)
416
425
417 config.add_route(
426 config.add_route(
418 name='admin_settings_issuetracker_delete',
427 name='admin_settings_issuetracker_delete',
419 pattern='/settings/issue-tracker/delete')
428 pattern='/settings/issue-tracker/delete')
420 config.add_view(
429 config.add_view(
421 AdminSettingsView,
430 AdminSettingsView,
422 attr='settings_issuetracker_delete',
431 attr='settings_issuetracker_delete',
423 route_name='admin_settings_issuetracker_delete', request_method='POST',
432 route_name='admin_settings_issuetracker_delete', request_method='POST',
424 renderer='json_ext', xhr=True)
433 renderer='json_ext', xhr=True)
425
434
426 config.add_route(
435 config.add_route(
427 name='admin_settings_email',
436 name='admin_settings_email',
428 pattern='/settings/email')
437 pattern='/settings/email')
429 config.add_view(
438 config.add_view(
430 AdminSettingsView,
439 AdminSettingsView,
431 attr='settings_email',
440 attr='settings_email',
432 route_name='admin_settings_email', request_method='GET',
441 route_name='admin_settings_email', request_method='GET',
433 renderer='rhodecode:templates/admin/settings/settings.mako')
442 renderer='rhodecode:templates/admin/settings/settings.mako')
434
443
435 config.add_route(
444 config.add_route(
436 name='admin_settings_email_update',
445 name='admin_settings_email_update',
437 pattern='/settings/email/update')
446 pattern='/settings/email/update')
438 config.add_view(
447 config.add_view(
439 AdminSettingsView,
448 AdminSettingsView,
440 attr='settings_email_update',
449 attr='settings_email_update',
441 route_name='admin_settings_email_update', request_method='POST',
450 route_name='admin_settings_email_update', request_method='POST',
442 renderer='rhodecode:templates/admin/settings/settings.mako')
451 renderer='rhodecode:templates/admin/settings/settings.mako')
443
452
444 config.add_route(
453 config.add_route(
445 name='admin_settings_hooks',
454 name='admin_settings_hooks',
446 pattern='/settings/hooks')
455 pattern='/settings/hooks')
447 config.add_view(
456 config.add_view(
448 AdminSettingsView,
457 AdminSettingsView,
449 attr='settings_hooks',
458 attr='settings_hooks',
450 route_name='admin_settings_hooks', request_method='GET',
459 route_name='admin_settings_hooks', request_method='GET',
451 renderer='rhodecode:templates/admin/settings/settings.mako')
460 renderer='rhodecode:templates/admin/settings/settings.mako')
452
461
453 config.add_route(
462 config.add_route(
454 name='admin_settings_hooks_update',
463 name='admin_settings_hooks_update',
455 pattern='/settings/hooks/update')
464 pattern='/settings/hooks/update')
456 config.add_view(
465 config.add_view(
457 AdminSettingsView,
466 AdminSettingsView,
458 attr='settings_hooks_update',
467 attr='settings_hooks_update',
459 route_name='admin_settings_hooks_update', request_method='POST',
468 route_name='admin_settings_hooks_update', request_method='POST',
460 renderer='rhodecode:templates/admin/settings/settings.mako')
469 renderer='rhodecode:templates/admin/settings/settings.mako')
461
470
462 config.add_route(
471 config.add_route(
463 name='admin_settings_hooks_delete',
472 name='admin_settings_hooks_delete',
464 pattern='/settings/hooks/delete')
473 pattern='/settings/hooks/delete')
465 config.add_view(
474 config.add_view(
466 AdminSettingsView,
475 AdminSettingsView,
467 attr='settings_hooks_update',
476 attr='settings_hooks_update',
468 route_name='admin_settings_hooks_delete', request_method='POST',
477 route_name='admin_settings_hooks_delete', request_method='POST',
469 renderer='rhodecode:templates/admin/settings/settings.mako')
478 renderer='rhodecode:templates/admin/settings/settings.mako')
470
479
471 config.add_route(
480 config.add_route(
472 name='admin_settings_search',
481 name='admin_settings_search',
473 pattern='/settings/search')
482 pattern='/settings/search')
474 config.add_view(
483 config.add_view(
475 AdminSettingsView,
484 AdminSettingsView,
476 attr='settings_search',
485 attr='settings_search',
477 route_name='admin_settings_search', request_method='GET',
486 route_name='admin_settings_search', request_method='GET',
478 renderer='rhodecode:templates/admin/settings/settings.mako')
487 renderer='rhodecode:templates/admin/settings/settings.mako')
479
488
480 config.add_route(
489 config.add_route(
481 name='admin_settings_labs',
490 name='admin_settings_labs',
482 pattern='/settings/labs')
491 pattern='/settings/labs')
483 config.add_view(
492 config.add_view(
484 AdminSettingsView,
493 AdminSettingsView,
485 attr='settings_labs',
494 attr='settings_labs',
486 route_name='admin_settings_labs', request_method='GET',
495 route_name='admin_settings_labs', request_method='GET',
487 renderer='rhodecode:templates/admin/settings/settings.mako')
496 renderer='rhodecode:templates/admin/settings/settings.mako')
488
497
489 config.add_route(
498 config.add_route(
490 name='admin_settings_labs_update',
499 name='admin_settings_labs_update',
491 pattern='/settings/labs/update')
500 pattern='/settings/labs/update')
492 config.add_view(
501 config.add_view(
493 AdminSettingsView,
502 AdminSettingsView,
494 attr='settings_labs_update',
503 attr='settings_labs_update',
495 route_name='admin_settings_labs_update', request_method='POST',
504 route_name='admin_settings_labs_update', request_method='POST',
496 renderer='rhodecode:templates/admin/settings/settings.mako')
505 renderer='rhodecode:templates/admin/settings/settings.mako')
497
506
498 # global permissions
507 # global permissions
499
508
500 config.add_route(
509 config.add_route(
501 name='admin_permissions_application',
510 name='admin_permissions_application',
502 pattern='/permissions/application')
511 pattern='/permissions/application')
503 config.add_view(
512 config.add_view(
504 AdminPermissionsView,
513 AdminPermissionsView,
505 attr='permissions_application',
514 attr='permissions_application',
506 route_name='admin_permissions_application', request_method='GET',
515 route_name='admin_permissions_application', request_method='GET',
507 renderer='rhodecode:templates/admin/permissions/permissions.mako')
516 renderer='rhodecode:templates/admin/permissions/permissions.mako')
508
517
509 config.add_route(
518 config.add_route(
510 name='admin_permissions_application_update',
519 name='admin_permissions_application_update',
511 pattern='/permissions/application/update')
520 pattern='/permissions/application/update')
512 config.add_view(
521 config.add_view(
513 AdminPermissionsView,
522 AdminPermissionsView,
514 attr='permissions_application_update',
523 attr='permissions_application_update',
515 route_name='admin_permissions_application_update', request_method='POST',
524 route_name='admin_permissions_application_update', request_method='POST',
516 renderer='rhodecode:templates/admin/permissions/permissions.mako')
525 renderer='rhodecode:templates/admin/permissions/permissions.mako')
517
526
518 config.add_route(
527 config.add_route(
519 name='admin_permissions_global',
528 name='admin_permissions_global',
520 pattern='/permissions/global')
529 pattern='/permissions/global')
521 config.add_view(
530 config.add_view(
522 AdminPermissionsView,
531 AdminPermissionsView,
523 attr='permissions_global',
532 attr='permissions_global',
524 route_name='admin_permissions_global', request_method='GET',
533 route_name='admin_permissions_global', request_method='GET',
525 renderer='rhodecode:templates/admin/permissions/permissions.mako')
534 renderer='rhodecode:templates/admin/permissions/permissions.mako')
526
535
527 config.add_route(
536 config.add_route(
528 name='admin_permissions_global_update',
537 name='admin_permissions_global_update',
529 pattern='/permissions/global/update')
538 pattern='/permissions/global/update')
530 config.add_view(
539 config.add_view(
531 AdminPermissionsView,
540 AdminPermissionsView,
532 attr='permissions_global_update',
541 attr='permissions_global_update',
533 route_name='admin_permissions_global_update', request_method='POST',
542 route_name='admin_permissions_global_update', request_method='POST',
534 renderer='rhodecode:templates/admin/permissions/permissions.mako')
543 renderer='rhodecode:templates/admin/permissions/permissions.mako')
535
544
536 config.add_route(
545 config.add_route(
537 name='admin_permissions_object',
546 name='admin_permissions_object',
538 pattern='/permissions/object')
547 pattern='/permissions/object')
539 config.add_view(
548 config.add_view(
540 AdminPermissionsView,
549 AdminPermissionsView,
541 attr='permissions_objects',
550 attr='permissions_objects',
542 route_name='admin_permissions_object', request_method='GET',
551 route_name='admin_permissions_object', request_method='GET',
543 renderer='rhodecode:templates/admin/permissions/permissions.mako')
552 renderer='rhodecode:templates/admin/permissions/permissions.mako')
544
553
545 config.add_route(
554 config.add_route(
546 name='admin_permissions_object_update',
555 name='admin_permissions_object_update',
547 pattern='/permissions/object/update')
556 pattern='/permissions/object/update')
548 config.add_view(
557 config.add_view(
549 AdminPermissionsView,
558 AdminPermissionsView,
550 attr='permissions_objects_update',
559 attr='permissions_objects_update',
551 route_name='admin_permissions_object_update', request_method='POST',
560 route_name='admin_permissions_object_update', request_method='POST',
552 renderer='rhodecode:templates/admin/permissions/permissions.mako')
561 renderer='rhodecode:templates/admin/permissions/permissions.mako')
553
562
554 # Branch perms EE feature
563 # Branch perms EE feature
555 config.add_route(
564 config.add_route(
556 name='admin_permissions_branch',
565 name='admin_permissions_branch',
557 pattern='/permissions/branch')
566 pattern='/permissions/branch')
558 config.add_view(
567 config.add_view(
559 AdminPermissionsView,
568 AdminPermissionsView,
560 attr='permissions_branch',
569 attr='permissions_branch',
561 route_name='admin_permissions_branch', request_method='GET',
570 route_name='admin_permissions_branch', request_method='GET',
562 renderer='rhodecode:templates/admin/permissions/permissions.mako')
571 renderer='rhodecode:templates/admin/permissions/permissions.mako')
563
572
564 config.add_route(
573 config.add_route(
565 name='admin_permissions_ips',
574 name='admin_permissions_ips',
566 pattern='/permissions/ips')
575 pattern='/permissions/ips')
567 config.add_view(
576 config.add_view(
568 AdminPermissionsView,
577 AdminPermissionsView,
569 attr='permissions_ips',
578 attr='permissions_ips',
570 route_name='admin_permissions_ips', request_method='GET',
579 route_name='admin_permissions_ips', request_method='GET',
571 renderer='rhodecode:templates/admin/permissions/permissions.mako')
580 renderer='rhodecode:templates/admin/permissions/permissions.mako')
572
581
573 config.add_route(
582 config.add_route(
574 name='admin_permissions_overview',
583 name='admin_permissions_overview',
575 pattern='/permissions/overview')
584 pattern='/permissions/overview')
576 config.add_view(
585 config.add_view(
577 AdminPermissionsView,
586 AdminPermissionsView,
578 attr='permissions_overview',
587 attr='permissions_overview',
579 route_name='admin_permissions_overview', request_method='GET',
588 route_name='admin_permissions_overview', request_method='GET',
580 renderer='rhodecode:templates/admin/permissions/permissions.mako')
589 renderer='rhodecode:templates/admin/permissions/permissions.mako')
581
590
582 config.add_route(
591 config.add_route(
583 name='admin_permissions_auth_token_access',
592 name='admin_permissions_auth_token_access',
584 pattern='/permissions/auth_token_access')
593 pattern='/permissions/auth_token_access')
585 config.add_view(
594 config.add_view(
586 AdminPermissionsView,
595 AdminPermissionsView,
587 attr='auth_token_access',
596 attr='auth_token_access',
588 route_name='admin_permissions_auth_token_access', request_method='GET',
597 route_name='admin_permissions_auth_token_access', request_method='GET',
589 renderer='rhodecode:templates/admin/permissions/permissions.mako')
598 renderer='rhodecode:templates/admin/permissions/permissions.mako')
590
599
591 config.add_route(
600 config.add_route(
592 name='admin_permissions_ssh_keys',
601 name='admin_permissions_ssh_keys',
593 pattern='/permissions/ssh_keys')
602 pattern='/permissions/ssh_keys')
594 config.add_view(
603 config.add_view(
595 AdminPermissionsView,
604 AdminPermissionsView,
596 attr='ssh_keys',
605 attr='ssh_keys',
597 route_name='admin_permissions_ssh_keys', request_method='GET',
606 route_name='admin_permissions_ssh_keys', request_method='GET',
598 renderer='rhodecode:templates/admin/permissions/permissions.mako')
607 renderer='rhodecode:templates/admin/permissions/permissions.mako')
599
608
600 config.add_route(
609 config.add_route(
601 name='admin_permissions_ssh_keys_data',
610 name='admin_permissions_ssh_keys_data',
602 pattern='/permissions/ssh_keys/data')
611 pattern='/permissions/ssh_keys/data')
603 config.add_view(
612 config.add_view(
604 AdminPermissionsView,
613 AdminPermissionsView,
605 attr='ssh_keys_data',
614 attr='ssh_keys_data',
606 route_name='admin_permissions_ssh_keys_data', request_method='GET',
615 route_name='admin_permissions_ssh_keys_data', request_method='GET',
607 renderer='json_ext', xhr=True)
616 renderer='json_ext', xhr=True)
608
617
609 config.add_route(
618 config.add_route(
610 name='admin_permissions_ssh_keys_update',
619 name='admin_permissions_ssh_keys_update',
611 pattern='/permissions/ssh_keys/update')
620 pattern='/permissions/ssh_keys/update')
612 config.add_view(
621 config.add_view(
613 AdminPermissionsView,
622 AdminPermissionsView,
614 attr='ssh_keys_update',
623 attr='ssh_keys_update',
615 route_name='admin_permissions_ssh_keys_update', request_method='POST',
624 route_name='admin_permissions_ssh_keys_update', request_method='POST',
616 renderer='rhodecode:templates/admin/permissions/permissions.mako')
625 renderer='rhodecode:templates/admin/permissions/permissions.mako')
617
626
618 # users admin
627 # users admin
619 config.add_route(
628 config.add_route(
620 name='users',
629 name='users',
621 pattern='/users')
630 pattern='/users')
622 config.add_view(
631 config.add_view(
623 AdminUsersView,
632 AdminUsersView,
624 attr='users_list',
633 attr='users_list',
625 route_name='users', request_method='GET',
634 route_name='users', request_method='GET',
626 renderer='rhodecode:templates/admin/users/users.mako')
635 renderer='rhodecode:templates/admin/users/users.mako')
627
636
628 config.add_route(
637 config.add_route(
629 name='users_data',
638 name='users_data',
630 pattern='/users_data')
639 pattern='/users_data')
631 config.add_view(
640 config.add_view(
632 AdminUsersView,
641 AdminUsersView,
633 attr='users_list_data',
642 attr='users_list_data',
634 # renderer defined below
643 # renderer defined below
635 route_name='users_data', request_method='GET',
644 route_name='users_data', request_method='GET',
636 renderer='json_ext', xhr=True)
645 renderer='json_ext', xhr=True)
637
646
638 config.add_route(
647 config.add_route(
639 name='users_create',
648 name='users_create',
640 pattern='/users/create')
649 pattern='/users/create')
641 config.add_view(
650 config.add_view(
642 AdminUsersView,
651 AdminUsersView,
643 attr='users_create',
652 attr='users_create',
644 route_name='users_create', request_method='POST',
653 route_name='users_create', request_method='POST',
645 renderer='rhodecode:templates/admin/users/user_add.mako')
654 renderer='rhodecode:templates/admin/users/user_add.mako')
646
655
647 config.add_route(
656 config.add_route(
648 name='users_new',
657 name='users_new',
649 pattern='/users/new')
658 pattern='/users/new')
650 config.add_view(
659 config.add_view(
651 AdminUsersView,
660 AdminUsersView,
652 attr='users_new',
661 attr='users_new',
653 route_name='users_new', request_method='GET',
662 route_name='users_new', request_method='GET',
654 renderer='rhodecode:templates/admin/users/user_add.mako')
663 renderer='rhodecode:templates/admin/users/user_add.mako')
655
664
656 # user management
665 # user management
657 config.add_route(
666 config.add_route(
658 name='user_edit',
667 name='user_edit',
659 pattern=r'/users/{user_id:\d+}/edit',
668 pattern=r'/users/{user_id:\d+}/edit',
660 user_route=True)
669 user_route=True)
661 config.add_view(
670 config.add_view(
662 UsersView,
671 UsersView,
663 attr='user_edit',
672 attr='user_edit',
664 route_name='user_edit', request_method='GET',
673 route_name='user_edit', request_method='GET',
665 renderer='rhodecode:templates/admin/users/user_edit.mako')
674 renderer='rhodecode:templates/admin/users/user_edit.mako')
666
675
667 config.add_route(
676 config.add_route(
668 name='user_edit_advanced',
677 name='user_edit_advanced',
669 pattern=r'/users/{user_id:\d+}/edit/advanced',
678 pattern=r'/users/{user_id:\d+}/edit/advanced',
670 user_route=True)
679 user_route=True)
671 config.add_view(
680 config.add_view(
672 UsersView,
681 UsersView,
673 attr='user_edit_advanced',
682 attr='user_edit_advanced',
674 route_name='user_edit_advanced', request_method='GET',
683 route_name='user_edit_advanced', request_method='GET',
675 renderer='rhodecode:templates/admin/users/user_edit.mako')
684 renderer='rhodecode:templates/admin/users/user_edit.mako')
676
685
677 config.add_route(
686 config.add_route(
678 name='user_edit_global_perms',
687 name='user_edit_global_perms',
679 pattern=r'/users/{user_id:\d+}/edit/global_permissions',
688 pattern=r'/users/{user_id:\d+}/edit/global_permissions',
680 user_route=True)
689 user_route=True)
681 config.add_view(
690 config.add_view(
682 UsersView,
691 UsersView,
683 attr='user_edit_global_perms',
692 attr='user_edit_global_perms',
684 route_name='user_edit_global_perms', request_method='GET',
693 route_name='user_edit_global_perms', request_method='GET',
685 renderer='rhodecode:templates/admin/users/user_edit.mako')
694 renderer='rhodecode:templates/admin/users/user_edit.mako')
686
695
687 config.add_route(
696 config.add_route(
688 name='user_edit_global_perms_update',
697 name='user_edit_global_perms_update',
689 pattern=r'/users/{user_id:\d+}/edit/global_permissions/update',
698 pattern=r'/users/{user_id:\d+}/edit/global_permissions/update',
690 user_route=True)
699 user_route=True)
691 config.add_view(
700 config.add_view(
692 UsersView,
701 UsersView,
693 attr='user_edit_global_perms_update',
702 attr='user_edit_global_perms_update',
694 route_name='user_edit_global_perms_update', request_method='POST',
703 route_name='user_edit_global_perms_update', request_method='POST',
695 renderer='rhodecode:templates/admin/users/user_edit.mako')
704 renderer='rhodecode:templates/admin/users/user_edit.mako')
696
705
697 config.add_route(
706 config.add_route(
698 name='user_update',
707 name='user_update',
699 pattern=r'/users/{user_id:\d+}/update',
708 pattern=r'/users/{user_id:\d+}/update',
700 user_route=True)
709 user_route=True)
701 config.add_view(
710 config.add_view(
702 UsersView,
711 UsersView,
703 attr='user_update',
712 attr='user_update',
704 route_name='user_update', request_method='POST',
713 route_name='user_update', request_method='POST',
705 renderer='rhodecode:templates/admin/users/user_edit.mako')
714 renderer='rhodecode:templates/admin/users/user_edit.mako')
706
715
707 config.add_route(
716 config.add_route(
708 name='user_delete',
717 name='user_delete',
709 pattern=r'/users/{user_id:\d+}/delete',
718 pattern=r'/users/{user_id:\d+}/delete',
710 user_route=True)
719 user_route=True)
711 config.add_view(
720 config.add_view(
712 UsersView,
721 UsersView,
713 attr='user_delete',
722 attr='user_delete',
714 route_name='user_delete', request_method='POST',
723 route_name='user_delete', request_method='POST',
715 renderer='rhodecode:templates/admin/users/user_edit.mako')
724 renderer='rhodecode:templates/admin/users/user_edit.mako')
716
725
717 config.add_route(
726 config.add_route(
718 name='user_enable_force_password_reset',
727 name='user_enable_force_password_reset',
719 pattern=r'/users/{user_id:\d+}/password_reset_enable',
728 pattern=r'/users/{user_id:\d+}/password_reset_enable',
720 user_route=True)
729 user_route=True)
721 config.add_view(
730 config.add_view(
722 UsersView,
731 UsersView,
723 attr='user_enable_force_password_reset',
732 attr='user_enable_force_password_reset',
724 route_name='user_enable_force_password_reset', request_method='POST',
733 route_name='user_enable_force_password_reset', request_method='POST',
725 renderer='rhodecode:templates/admin/users/user_edit.mako')
734 renderer='rhodecode:templates/admin/users/user_edit.mako')
726
735
727 config.add_route(
736 config.add_route(
728 name='user_disable_force_password_reset',
737 name='user_disable_force_password_reset',
729 pattern=r'/users/{user_id:\d+}/password_reset_disable',
738 pattern=r'/users/{user_id:\d+}/password_reset_disable',
730 user_route=True)
739 user_route=True)
731 config.add_view(
740 config.add_view(
732 UsersView,
741 UsersView,
733 attr='user_disable_force_password_reset',
742 attr='user_disable_force_password_reset',
734 route_name='user_disable_force_password_reset', request_method='POST',
743 route_name='user_disable_force_password_reset', request_method='POST',
735 renderer='rhodecode:templates/admin/users/user_edit.mako')
744 renderer='rhodecode:templates/admin/users/user_edit.mako')
736
745
737 config.add_route(
746 config.add_route(
738 name='user_create_personal_repo_group',
747 name='user_create_personal_repo_group',
739 pattern=r'/users/{user_id:\d+}/create_repo_group',
748 pattern=r'/users/{user_id:\d+}/create_repo_group',
740 user_route=True)
749 user_route=True)
741 config.add_view(
750 config.add_view(
742 UsersView,
751 UsersView,
743 attr='user_create_personal_repo_group',
752 attr='user_create_personal_repo_group',
744 route_name='user_create_personal_repo_group', request_method='POST',
753 route_name='user_create_personal_repo_group', request_method='POST',
745 renderer='rhodecode:templates/admin/users/user_edit.mako')
754 renderer='rhodecode:templates/admin/users/user_edit.mako')
746
755
747 # user notice
756 # user notice
748 config.add_route(
757 config.add_route(
749 name='user_notice_dismiss',
758 name='user_notice_dismiss',
750 pattern=r'/users/{user_id:\d+}/notice_dismiss',
759 pattern=r'/users/{user_id:\d+}/notice_dismiss',
751 user_route=True)
760 user_route=True)
752 config.add_view(
761 config.add_view(
753 UsersView,
762 UsersView,
754 attr='user_notice_dismiss',
763 attr='user_notice_dismiss',
755 route_name='user_notice_dismiss', request_method='POST',
764 route_name='user_notice_dismiss', request_method='POST',
756 renderer='json_ext', xhr=True)
765 renderer='json_ext', xhr=True)
757
766
758 # user auth tokens
767 # user auth tokens
759 config.add_route(
768 config.add_route(
760 name='edit_user_auth_tokens',
769 name='edit_user_auth_tokens',
761 pattern=r'/users/{user_id:\d+}/edit/auth_tokens',
770 pattern=r'/users/{user_id:\d+}/edit/auth_tokens',
762 user_route=True)
771 user_route=True)
763 config.add_view(
772 config.add_view(
764 UsersView,
773 UsersView,
765 attr='auth_tokens',
774 attr='auth_tokens',
766 route_name='edit_user_auth_tokens', request_method='GET',
775 route_name='edit_user_auth_tokens', request_method='GET',
767 renderer='rhodecode:templates/admin/users/user_edit.mako')
776 renderer='rhodecode:templates/admin/users/user_edit.mako')
768
777
769 config.add_route(
778 config.add_route(
770 name='edit_user_auth_tokens_view',
779 name='edit_user_auth_tokens_view',
771 pattern=r'/users/{user_id:\d+}/edit/auth_tokens/view',
780 pattern=r'/users/{user_id:\d+}/edit/auth_tokens/view',
772 user_route=True)
781 user_route=True)
773 config.add_view(
782 config.add_view(
774 UsersView,
783 UsersView,
775 attr='auth_tokens_view',
784 attr='auth_tokens_view',
776 route_name='edit_user_auth_tokens_view', request_method='POST',
785 route_name='edit_user_auth_tokens_view', request_method='POST',
777 renderer='json_ext', xhr=True)
786 renderer='json_ext', xhr=True)
778
787
779 config.add_route(
788 config.add_route(
780 name='edit_user_auth_tokens_add',
789 name='edit_user_auth_tokens_add',
781 pattern=r'/users/{user_id:\d+}/edit/auth_tokens/new',
790 pattern=r'/users/{user_id:\d+}/edit/auth_tokens/new',
782 user_route=True)
791 user_route=True)
783 config.add_view(
792 config.add_view(
784 UsersView,
793 UsersView,
785 attr='auth_tokens_add',
794 attr='auth_tokens_add',
786 route_name='edit_user_auth_tokens_add', request_method='POST')
795 route_name='edit_user_auth_tokens_add', request_method='POST')
787
796
788 config.add_route(
797 config.add_route(
789 name='edit_user_auth_tokens_delete',
798 name='edit_user_auth_tokens_delete',
790 pattern=r'/users/{user_id:\d+}/edit/auth_tokens/delete',
799 pattern=r'/users/{user_id:\d+}/edit/auth_tokens/delete',
791 user_route=True)
800 user_route=True)
792 config.add_view(
801 config.add_view(
793 UsersView,
802 UsersView,
794 attr='auth_tokens_delete',
803 attr='auth_tokens_delete',
795 route_name='edit_user_auth_tokens_delete', request_method='POST')
804 route_name='edit_user_auth_tokens_delete', request_method='POST')
796
805
797 # user ssh keys
806 # user ssh keys
798 config.add_route(
807 config.add_route(
799 name='edit_user_ssh_keys',
808 name='edit_user_ssh_keys',
800 pattern=r'/users/{user_id:\d+}/edit/ssh_keys',
809 pattern=r'/users/{user_id:\d+}/edit/ssh_keys',
801 user_route=True)
810 user_route=True)
802 config.add_view(
811 config.add_view(
803 UsersView,
812 UsersView,
804 attr='ssh_keys',
813 attr='ssh_keys',
805 route_name='edit_user_ssh_keys', request_method='GET',
814 route_name='edit_user_ssh_keys', request_method='GET',
806 renderer='rhodecode:templates/admin/users/user_edit.mako')
815 renderer='rhodecode:templates/admin/users/user_edit.mako')
807
816
808 config.add_route(
817 config.add_route(
809 name='edit_user_ssh_keys_generate_keypair',
818 name='edit_user_ssh_keys_generate_keypair',
810 pattern=r'/users/{user_id:\d+}/edit/ssh_keys/generate',
819 pattern=r'/users/{user_id:\d+}/edit/ssh_keys/generate',
811 user_route=True)
820 user_route=True)
812 config.add_view(
821 config.add_view(
813 UsersView,
822 UsersView,
814 attr='ssh_keys_generate_keypair',
823 attr='ssh_keys_generate_keypair',
815 route_name='edit_user_ssh_keys_generate_keypair', request_method='GET',
824 route_name='edit_user_ssh_keys_generate_keypair', request_method='GET',
816 renderer='rhodecode:templates/admin/users/user_edit.mako')
825 renderer='rhodecode:templates/admin/users/user_edit.mako')
817
826
818 config.add_route(
827 config.add_route(
819 name='edit_user_ssh_keys_add',
828 name='edit_user_ssh_keys_add',
820 pattern=r'/users/{user_id:\d+}/edit/ssh_keys/new',
829 pattern=r'/users/{user_id:\d+}/edit/ssh_keys/new',
821 user_route=True)
830 user_route=True)
822 config.add_view(
831 config.add_view(
823 UsersView,
832 UsersView,
824 attr='ssh_keys_add',
833 attr='ssh_keys_add',
825 route_name='edit_user_ssh_keys_add', request_method='POST')
834 route_name='edit_user_ssh_keys_add', request_method='POST')
826
835
827 config.add_route(
836 config.add_route(
828 name='edit_user_ssh_keys_delete',
837 name='edit_user_ssh_keys_delete',
829 pattern=r'/users/{user_id:\d+}/edit/ssh_keys/delete',
838 pattern=r'/users/{user_id:\d+}/edit/ssh_keys/delete',
830 user_route=True)
839 user_route=True)
831 config.add_view(
840 config.add_view(
832 UsersView,
841 UsersView,
833 attr='ssh_keys_delete',
842 attr='ssh_keys_delete',
834 route_name='edit_user_ssh_keys_delete', request_method='POST')
843 route_name='edit_user_ssh_keys_delete', request_method='POST')
835
844
836 # user emails
845 # user emails
837 config.add_route(
846 config.add_route(
838 name='edit_user_emails',
847 name='edit_user_emails',
839 pattern=r'/users/{user_id:\d+}/edit/emails',
848 pattern=r'/users/{user_id:\d+}/edit/emails',
840 user_route=True)
849 user_route=True)
841 config.add_view(
850 config.add_view(
842 UsersView,
851 UsersView,
843 attr='emails',
852 attr='emails',
844 route_name='edit_user_emails', request_method='GET',
853 route_name='edit_user_emails', request_method='GET',
845 renderer='rhodecode:templates/admin/users/user_edit.mako')
854 renderer='rhodecode:templates/admin/users/user_edit.mako')
846
855
847 config.add_route(
856 config.add_route(
848 name='edit_user_emails_add',
857 name='edit_user_emails_add',
849 pattern=r'/users/{user_id:\d+}/edit/emails/new',
858 pattern=r'/users/{user_id:\d+}/edit/emails/new',
850 user_route=True)
859 user_route=True)
851 config.add_view(
860 config.add_view(
852 UsersView,
861 UsersView,
853 attr='emails_add',
862 attr='emails_add',
854 route_name='edit_user_emails_add', request_method='POST')
863 route_name='edit_user_emails_add', request_method='POST')
855
864
856 config.add_route(
865 config.add_route(
857 name='edit_user_emails_delete',
866 name='edit_user_emails_delete',
858 pattern=r'/users/{user_id:\d+}/edit/emails/delete',
867 pattern=r'/users/{user_id:\d+}/edit/emails/delete',
859 user_route=True)
868 user_route=True)
860 config.add_view(
869 config.add_view(
861 UsersView,
870 UsersView,
862 attr='emails_delete',
871 attr='emails_delete',
863 route_name='edit_user_emails_delete', request_method='POST')
872 route_name='edit_user_emails_delete', request_method='POST')
864
873
865 # user IPs
874 # user IPs
866 config.add_route(
875 config.add_route(
867 name='edit_user_ips',
876 name='edit_user_ips',
868 pattern=r'/users/{user_id:\d+}/edit/ips',
877 pattern=r'/users/{user_id:\d+}/edit/ips',
869 user_route=True)
878 user_route=True)
870 config.add_view(
879 config.add_view(
871 UsersView,
880 UsersView,
872 attr='ips',
881 attr='ips',
873 route_name='edit_user_ips', request_method='GET',
882 route_name='edit_user_ips', request_method='GET',
874 renderer='rhodecode:templates/admin/users/user_edit.mako')
883 renderer='rhodecode:templates/admin/users/user_edit.mako')
875
884
876 config.add_route(
885 config.add_route(
877 name='edit_user_ips_add',
886 name='edit_user_ips_add',
878 pattern=r'/users/{user_id:\d+}/edit/ips/new',
887 pattern=r'/users/{user_id:\d+}/edit/ips/new',
879 user_route_with_default=True) # enabled for default user too
888 user_route_with_default=True) # enabled for default user too
880 config.add_view(
889 config.add_view(
881 UsersView,
890 UsersView,
882 attr='ips_add',
891 attr='ips_add',
883 route_name='edit_user_ips_add', request_method='POST')
892 route_name='edit_user_ips_add', request_method='POST')
884
893
885 config.add_route(
894 config.add_route(
886 name='edit_user_ips_delete',
895 name='edit_user_ips_delete',
887 pattern=r'/users/{user_id:\d+}/edit/ips/delete',
896 pattern=r'/users/{user_id:\d+}/edit/ips/delete',
888 user_route_with_default=True) # enabled for default user too
897 user_route_with_default=True) # enabled for default user too
889 config.add_view(
898 config.add_view(
890 UsersView,
899 UsersView,
891 attr='ips_delete',
900 attr='ips_delete',
892 route_name='edit_user_ips_delete', request_method='POST')
901 route_name='edit_user_ips_delete', request_method='POST')
893
902
894 # user perms
903 # user perms
895 config.add_route(
904 config.add_route(
896 name='edit_user_perms_summary',
905 name='edit_user_perms_summary',
897 pattern=r'/users/{user_id:\d+}/edit/permissions_summary',
906 pattern=r'/users/{user_id:\d+}/edit/permissions_summary',
898 user_route=True)
907 user_route=True)
899 config.add_view(
908 config.add_view(
900 UsersView,
909 UsersView,
901 attr='user_perms_summary',
910 attr='user_perms_summary',
902 route_name='edit_user_perms_summary', request_method='GET',
911 route_name='edit_user_perms_summary', request_method='GET',
903 renderer='rhodecode:templates/admin/users/user_edit.mako')
912 renderer='rhodecode:templates/admin/users/user_edit.mako')
904
913
905 config.add_route(
914 config.add_route(
906 name='edit_user_perms_summary_json',
915 name='edit_user_perms_summary_json',
907 pattern=r'/users/{user_id:\d+}/edit/permissions_summary/json',
916 pattern=r'/users/{user_id:\d+}/edit/permissions_summary/json',
908 user_route=True)
917 user_route=True)
909 config.add_view(
918 config.add_view(
910 UsersView,
919 UsersView,
911 attr='user_perms_summary_json',
920 attr='user_perms_summary_json',
912 route_name='edit_user_perms_summary_json', request_method='GET',
921 route_name='edit_user_perms_summary_json', request_method='GET',
913 renderer='json_ext')
922 renderer='json_ext')
914
923
915 # user user groups management
924 # user user groups management
916 config.add_route(
925 config.add_route(
917 name='edit_user_groups_management',
926 name='edit_user_groups_management',
918 pattern=r'/users/{user_id:\d+}/edit/groups_management',
927 pattern=r'/users/{user_id:\d+}/edit/groups_management',
919 user_route=True)
928 user_route=True)
920 config.add_view(
929 config.add_view(
921 UsersView,
930 UsersView,
922 attr='groups_management',
931 attr='groups_management',
923 route_name='edit_user_groups_management', request_method='GET',
932 route_name='edit_user_groups_management', request_method='GET',
924 renderer='rhodecode:templates/admin/users/user_edit.mako')
933 renderer='rhodecode:templates/admin/users/user_edit.mako')
925
934
926 config.add_route(
935 config.add_route(
927 name='edit_user_groups_management_updates',
936 name='edit_user_groups_management_updates',
928 pattern=r'/users/{user_id:\d+}/edit/edit_user_groups_management/updates',
937 pattern=r'/users/{user_id:\d+}/edit/edit_user_groups_management/updates',
929 user_route=True)
938 user_route=True)
930 config.add_view(
939 config.add_view(
931 UsersView,
940 UsersView,
932 attr='groups_management_updates',
941 attr='groups_management_updates',
933 route_name='edit_user_groups_management_updates', request_method='POST')
942 route_name='edit_user_groups_management_updates', request_method='POST')
934
943
935 # user audit logs
944 # user audit logs
936 config.add_route(
945 config.add_route(
937 name='edit_user_audit_logs',
946 name='edit_user_audit_logs',
938 pattern=r'/users/{user_id:\d+}/edit/audit', user_route=True)
947 pattern=r'/users/{user_id:\d+}/edit/audit', user_route=True)
939 config.add_view(
948 config.add_view(
940 UsersView,
949 UsersView,
941 attr='user_audit_logs',
950 attr='user_audit_logs',
942 route_name='edit_user_audit_logs', request_method='GET',
951 route_name='edit_user_audit_logs', request_method='GET',
943 renderer='rhodecode:templates/admin/users/user_edit.mako')
952 renderer='rhodecode:templates/admin/users/user_edit.mako')
944
953
945 config.add_route(
954 config.add_route(
946 name='edit_user_audit_logs_download',
955 name='edit_user_audit_logs_download',
947 pattern=r'/users/{user_id:\d+}/edit/audit/download', user_route=True)
956 pattern=r'/users/{user_id:\d+}/edit/audit/download', user_route=True)
948 config.add_view(
957 config.add_view(
949 UsersView,
958 UsersView,
950 attr='user_audit_logs_download',
959 attr='user_audit_logs_download',
951 route_name='edit_user_audit_logs_download', request_method='GET',
960 route_name='edit_user_audit_logs_download', request_method='GET',
952 renderer='string')
961 renderer='string')
953
962
954 # user caches
963 # user caches
955 config.add_route(
964 config.add_route(
956 name='edit_user_caches',
965 name='edit_user_caches',
957 pattern=r'/users/{user_id:\d+}/edit/caches',
966 pattern=r'/users/{user_id:\d+}/edit/caches',
958 user_route=True)
967 user_route=True)
959 config.add_view(
968 config.add_view(
960 UsersView,
969 UsersView,
961 attr='user_caches',
970 attr='user_caches',
962 route_name='edit_user_caches', request_method='GET',
971 route_name='edit_user_caches', request_method='GET',
963 renderer='rhodecode:templates/admin/users/user_edit.mako')
972 renderer='rhodecode:templates/admin/users/user_edit.mako')
964
973
965 config.add_route(
974 config.add_route(
966 name='edit_user_caches_update',
975 name='edit_user_caches_update',
967 pattern=r'/users/{user_id:\d+}/edit/caches/update',
976 pattern=r'/users/{user_id:\d+}/edit/caches/update',
968 user_route=True)
977 user_route=True)
969 config.add_view(
978 config.add_view(
970 UsersView,
979 UsersView,
971 attr='user_caches_update',
980 attr='user_caches_update',
972 route_name='edit_user_caches_update', request_method='POST')
981 route_name='edit_user_caches_update', request_method='POST')
973
982
974 # user-groups admin
983 # user-groups admin
975 config.add_route(
984 config.add_route(
976 name='user_groups',
985 name='user_groups',
977 pattern='/user_groups')
986 pattern='/user_groups')
978 config.add_view(
987 config.add_view(
979 AdminUserGroupsView,
988 AdminUserGroupsView,
980 attr='user_groups_list',
989 attr='user_groups_list',
981 route_name='user_groups', request_method='GET',
990 route_name='user_groups', request_method='GET',
982 renderer='rhodecode:templates/admin/user_groups/user_groups.mako')
991 renderer='rhodecode:templates/admin/user_groups/user_groups.mako')
983
992
984 config.add_route(
993 config.add_route(
985 name='user_groups_data',
994 name='user_groups_data',
986 pattern='/user_groups_data')
995 pattern='/user_groups_data')
987 config.add_view(
996 config.add_view(
988 AdminUserGroupsView,
997 AdminUserGroupsView,
989 attr='user_groups_list_data',
998 attr='user_groups_list_data',
990 route_name='user_groups_data', request_method='GET',
999 route_name='user_groups_data', request_method='GET',
991 renderer='json_ext', xhr=True)
1000 renderer='json_ext', xhr=True)
992
1001
993 config.add_route(
1002 config.add_route(
994 name='user_groups_new',
1003 name='user_groups_new',
995 pattern='/user_groups/new')
1004 pattern='/user_groups/new')
996 config.add_view(
1005 config.add_view(
997 AdminUserGroupsView,
1006 AdminUserGroupsView,
998 attr='user_groups_new',
1007 attr='user_groups_new',
999 route_name='user_groups_new', request_method='GET',
1008 route_name='user_groups_new', request_method='GET',
1000 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
1009 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
1001
1010
1002 config.add_route(
1011 config.add_route(
1003 name='user_groups_create',
1012 name='user_groups_create',
1004 pattern='/user_groups/create')
1013 pattern='/user_groups/create')
1005 config.add_view(
1014 config.add_view(
1006 AdminUserGroupsView,
1015 AdminUserGroupsView,
1007 attr='user_groups_create',
1016 attr='user_groups_create',
1008 route_name='user_groups_create', request_method='POST',
1017 route_name='user_groups_create', request_method='POST',
1009 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
1018 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
1010
1019
1011 # repos admin
1020 # repos admin
1012 config.add_route(
1021 config.add_route(
1013 name='repos',
1022 name='repos',
1014 pattern='/repos')
1023 pattern='/repos')
1015 config.add_view(
1024 config.add_view(
1016 AdminReposView,
1025 AdminReposView,
1017 attr='repository_list',
1026 attr='repository_list',
1018 route_name='repos', request_method='GET',
1027 route_name='repos', request_method='GET',
1019 renderer='rhodecode:templates/admin/repos/repos.mako')
1028 renderer='rhodecode:templates/admin/repos/repos.mako')
1020
1029
1021 config.add_route(
1030 config.add_route(
1022 name='repos_data',
1031 name='repos_data',
1023 pattern='/repos_data')
1032 pattern='/repos_data')
1024 config.add_view(
1033 config.add_view(
1025 AdminReposView,
1034 AdminReposView,
1026 attr='repository_list_data',
1035 attr='repository_list_data',
1027 route_name='repos_data', request_method='GET',
1036 route_name='repos_data', request_method='GET',
1028 renderer='json_ext', xhr=True)
1037 renderer='json_ext', xhr=True)
1029
1038
1030 config.add_route(
1039 config.add_route(
1031 name='repo_new',
1040 name='repo_new',
1032 pattern='/repos/new')
1041 pattern='/repos/new')
1033 config.add_view(
1042 config.add_view(
1034 AdminReposView,
1043 AdminReposView,
1035 attr='repository_new',
1044 attr='repository_new',
1036 route_name='repo_new', request_method='GET',
1045 route_name='repo_new', request_method='GET',
1037 renderer='rhodecode:templates/admin/repos/repo_add.mako')
1046 renderer='rhodecode:templates/admin/repos/repo_add.mako')
1038
1047
1039 config.add_route(
1048 config.add_route(
1040 name='repo_create',
1049 name='repo_create',
1041 pattern='/repos/create')
1050 pattern='/repos/create')
1042 config.add_view(
1051 config.add_view(
1043 AdminReposView,
1052 AdminReposView,
1044 attr='repository_create',
1053 attr='repository_create',
1045 route_name='repo_create', request_method='POST',
1054 route_name='repo_create', request_method='POST',
1046 renderer='rhodecode:templates/admin/repos/repos.mako')
1055 renderer='rhodecode:templates/admin/repos/repos.mako')
1047
1056
1048 # repo groups admin
1057 # repo groups admin
1049 config.add_route(
1058 config.add_route(
1050 name='repo_groups',
1059 name='repo_groups',
1051 pattern='/repo_groups')
1060 pattern='/repo_groups')
1052 config.add_view(
1061 config.add_view(
1053 AdminRepoGroupsView,
1062 AdminRepoGroupsView,
1054 attr='repo_group_list',
1063 attr='repo_group_list',
1055 route_name='repo_groups', request_method='GET',
1064 route_name='repo_groups', request_method='GET',
1056 renderer='rhodecode:templates/admin/repo_groups/repo_groups.mako')
1065 renderer='rhodecode:templates/admin/repo_groups/repo_groups.mako')
1057
1066
1058 config.add_route(
1067 config.add_route(
1059 name='repo_groups_data',
1068 name='repo_groups_data',
1060 pattern='/repo_groups_data')
1069 pattern='/repo_groups_data')
1061 config.add_view(
1070 config.add_view(
1062 AdminRepoGroupsView,
1071 AdminRepoGroupsView,
1063 attr='repo_group_list_data',
1072 attr='repo_group_list_data',
1064 route_name='repo_groups_data', request_method='GET',
1073 route_name='repo_groups_data', request_method='GET',
1065 renderer='json_ext', xhr=True)
1074 renderer='json_ext', xhr=True)
1066
1075
1067 config.add_route(
1076 config.add_route(
1068 name='repo_group_new',
1077 name='repo_group_new',
1069 pattern='/repo_group/new')
1078 pattern='/repo_group/new')
1070 config.add_view(
1079 config.add_view(
1071 AdminRepoGroupsView,
1080 AdminRepoGroupsView,
1072 attr='repo_group_new',
1081 attr='repo_group_new',
1073 route_name='repo_group_new', request_method='GET',
1082 route_name='repo_group_new', request_method='GET',
1074 renderer='rhodecode:templates/admin/repo_groups/repo_group_add.mako')
1083 renderer='rhodecode:templates/admin/repo_groups/repo_group_add.mako')
1075
1084
1076 config.add_route(
1085 config.add_route(
1077 name='repo_group_create',
1086 name='repo_group_create',
1078 pattern='/repo_group/create')
1087 pattern='/repo_group/create')
1079 config.add_view(
1088 config.add_view(
1080 AdminRepoGroupsView,
1089 AdminRepoGroupsView,
1081 attr='repo_group_create',
1090 attr='repo_group_create',
1082 route_name='repo_group_create', request_method='POST',
1091 route_name='repo_group_create', request_method='POST',
1083 renderer='rhodecode:templates/admin/repo_groups/repo_group_add.mako')
1092 renderer='rhodecode:templates/admin/repo_groups/repo_group_add.mako')
1084
1093
1085
1094
1086 def includeme(config):
1095 def includeme(config):
1087 # Create admin navigation registry and add it to the pyramid registry.
1096 # Create admin navigation registry and add it to the pyramid registry.
1088 nav_includeme(config)
1097 nav_includeme(config)
1089
1098
1090 # main admin routes
1099 # main admin routes
1091 config.add_route(
1100 config.add_route(
1092 name='admin_home', pattern=ADMIN_PREFIX)
1101 name='admin_home', pattern=ADMIN_PREFIX)
1093 config.add_view(
1102 config.add_view(
1094 AdminMainView,
1103 AdminMainView,
1095 attr='admin_main',
1104 attr='admin_main',
1096 route_name='admin_home', request_method='GET',
1105 route_name='admin_home', request_method='GET',
1097 renderer='rhodecode:templates/admin/main.mako')
1106 renderer='rhodecode:templates/admin/main.mako')
1098
1107
1099 # pr global redirect
1108 # pr global redirect
1100 config.add_route(
1109 config.add_route(
1101 name='pull_requests_global_0', # backward compat
1110 name='pull_requests_global_0', # backward compat
1102 pattern=ADMIN_PREFIX + r'/pull_requests/{pull_request_id:\d+}')
1111 pattern=ADMIN_PREFIX + r'/pull_requests/{pull_request_id:\d+}')
1103 config.add_view(
1112 config.add_view(
1104 AdminMainView,
1113 AdminMainView,
1105 attr='pull_requests',
1114 attr='pull_requests',
1106 route_name='pull_requests_global_0', request_method='GET')
1115 route_name='pull_requests_global_0', request_method='GET')
1107
1116
1108 config.add_route(
1117 config.add_route(
1109 name='pull_requests_global_1', # backward compat
1118 name='pull_requests_global_1', # backward compat
1110 pattern=ADMIN_PREFIX + r'/pull-requests/{pull_request_id:\d+}')
1119 pattern=ADMIN_PREFIX + r'/pull-requests/{pull_request_id:\d+}')
1111 config.add_view(
1120 config.add_view(
1112 AdminMainView,
1121 AdminMainView,
1113 attr='pull_requests',
1122 attr='pull_requests',
1114 route_name='pull_requests_global_1', request_method='GET')
1123 route_name='pull_requests_global_1', request_method='GET')
1115
1124
1116 config.add_route(
1125 config.add_route(
1117 name='pull_requests_global',
1126 name='pull_requests_global',
1118 pattern=ADMIN_PREFIX + r'/pull-request/{pull_request_id:\d+}')
1127 pattern=ADMIN_PREFIX + r'/pull-request/{pull_request_id:\d+}')
1119 config.add_view(
1128 config.add_view(
1120 AdminMainView,
1129 AdminMainView,
1121 attr='pull_requests',
1130 attr='pull_requests',
1122 route_name='pull_requests_global', request_method='GET')
1131 route_name='pull_requests_global', request_method='GET')
1123
1132
1124 config.include(admin_routes, route_prefix=ADMIN_PREFIX)
1133 config.include(admin_routes, route_prefix=ADMIN_PREFIX)
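Everything registered in admin_routes above only becomes reachable once the final config.include(admin_routes, route_prefix=ADMIN_PREFIX) call mounts it below the admin prefix. Below is a minimal, self-contained sketch of Pyramid's route_prefix mechanism; the '/_admin' prefix value and the throwaway lambda view are assumptions made for illustration, not values taken from this changeset.

from wsgiref.simple_server import make_server
from pyramid.config import Configurator
from pyramid.response import Response


def demo_admin_routes(config):
    # every route registered inside this includeme-style callable is
    # automatically prefixed, e.g. '/settings' becomes '/_admin/settings'
    config.add_route('demo_admin_settings', '/settings')
    config.add_view(lambda request: Response('settings page'),
                    route_name='demo_admin_settings', request_method='GET')


if __name__ == '__main__':
    config = Configurator()
    config.include(demo_admin_routes, route_prefix='/_admin')  # assumed prefix
    app = config.make_wsgi_app()
    # GET http://127.0.0.1:6543/_admin/settings resolves to the lambda view
    make_server('127.0.0.1', 6543, app).serve_forever()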
@@ -1,496 +1,498 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import urllib.request
19 import urllib.request
20 import urllib.parse
20 import urllib.parse
21 import urllib.error
21 import urllib.error
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.apps._base import ADMIN_PREFIX
26 from rhodecode.apps._base import ADMIN_PREFIX
27 from rhodecode.lib import auth
27 from rhodecode.lib import auth
28 from rhodecode.lib.utils2 import safe_str
28 from rhodecode.lib.utils2 import safe_str
29 from rhodecode.lib import helpers as h
29 from rhodecode.lib import helpers as h
30 from rhodecode.model.db import (
30 from rhodecode.model.db import (
31 Repository, RepoGroup, UserRepoToPerm, User, Permission)
31 Repository, RepoGroup, UserRepoToPerm, User, Permission)
32 from rhodecode.model.meta import Session
32 from rhodecode.model.meta import Session
33 from rhodecode.model.repo import RepoModel
33 from rhodecode.model.repo import RepoModel
34 from rhodecode.model.repo_group import RepoGroupModel
34 from rhodecode.model.repo_group import RepoGroupModel
35 from rhodecode.model.user import UserModel
35 from rhodecode.model.user import UserModel
36 from rhodecode.tests import (
36 from rhodecode.tests import (
37 login_user_session, assert_session_flash, TEST_USER_ADMIN_LOGIN,
37 login_user_session, assert_session_flash, TEST_USER_ADMIN_LOGIN,
38 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
38 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
39 from rhodecode.tests.fixtures.rc_fixture import Fixture, error_function
39 from rhodecode.tests.fixtures.rc_fixture import Fixture, error_function
40 from rhodecode.tests.utils import repo_on_filesystem
40 from rhodecode.tests.utils import repo_on_filesystem
41 from rhodecode.tests.routes import route_path
41 from rhodecode.tests.routes import route_path
42
42
43 fixture = Fixture()
43 fixture = Fixture()
44
44
45
45
46 def _get_permission_for_user(user, repo):
46 def _get_permission_for_user(user, repo):
47 perm = UserRepoToPerm.query()\
47 perm = UserRepoToPerm.query()\
48 .filter(UserRepoToPerm.repository ==
48 .filter(UserRepoToPerm.repository ==
49 Repository.get_by_repo_name(repo))\
49 Repository.get_by_repo_name(repo))\
50 .filter(UserRepoToPerm.user == User.get_by_username(user))\
50 .filter(UserRepoToPerm.user == User.get_by_username(user))\
51 .all()
51 .all()
52 return perm
52 return perm
53
53
54
54
55 @pytest.mark.usefixtures("app")
55 @pytest.mark.usefixtures("app")
56 class TestAdminRepos(object):
56 class TestAdminRepos(object):
57
57
58 def test_repo_list(self, autologin_user, user_util, xhr_header):
58 def test_repo_list(self, autologin_user, user_util, xhr_header):
59 repo = user_util.create_repo()
59 repo = user_util.create_repo()
60 repo_name = repo.repo_name
60 repo_name = repo.repo_name
61 response = self.app.get(
61 response = self.app.get(
62 route_path('repos_data'), status=200,
62 route_path('repos_data'), status=200,
63 extra_environ=xhr_header)
63 extra_environ=xhr_header)
64
64
65 response.mustcontain(repo_name)
65 response.mustcontain(repo_name)
66
66
67 def test_create_page_restricted_to_single_backend(self, autologin_user, backend):
67 def test_create_page_restricted_to_single_backend(self, autologin_user, backend):
68 with mock.patch('rhodecode.BACKENDS', {'git': 'git'}):
68 with mock.patch('rhodecode.BACKENDS', {'git': 'git'}):
69 response = self.app.get(route_path('repo_new'), status=200)
69 response = self.app.get(route_path('repo_new'), status=200)
70 assert_response = response.assert_response()
70 assert_response = response.assert_response()
71 element = assert_response.get_element('[name=repo_type]')
71 element = assert_response.get_element('[name=repo_type]')
72 assert element.get('value') == 'git'
72 assert element.get('value') == 'git'
73
73
74 def test_create_page_non_restricted_backends(self, autologin_user, backend):
74 def test_create_page_non_restricted_backends(self, autologin_user, backend):
75 response = self.app.get(route_path('repo_new'), status=200)
75 response = self.app.get(route_path('repo_new'), status=200)
76 assert_response = response.assert_response()
76 assert_response = response.assert_response()
77 assert ['hg', 'git', 'svn'] == [x.get('value') for x in assert_response.get_elements('[name=repo_type]')]
77 assert ['hg', 'git', 'svn'] == [x.get('value') for x in assert_response.get_elements('[name=repo_type]')]
78
78
79 @pytest.mark.parametrize(
79 @pytest.mark.parametrize(
80 "suffix", ['', 'xxa'], ids=['', 'non-ascii'])
80 "suffix", ['', 'xxa'], ids=['', 'non-ascii'])
81 def test_create(self, autologin_user, backend, suffix, csrf_token):
81 def test_create(self, autologin_user, backend, suffix, csrf_token):
82 repo_name_unicode = backend.new_repo_name(suffix=suffix)
82 repo_name_unicode = backend.new_repo_name(suffix=suffix)
83 repo_name = repo_name_unicode
83 repo_name = repo_name_unicode
84
84
85 description_unicode = 'description for newly created repo' + suffix
85 description_unicode = 'description for newly created repo' + suffix
86 description = description_unicode
86 description = description_unicode
87
87
88 response = self.app.post(
88 response = self.app.post(
89 route_path('repo_create'),
89 route_path('repo_create'),
90 fixture._get_repo_create_params(
90 fixture._get_repo_create_params(
91 repo_private=False,
91 repo_private=False,
92 repo_name=repo_name,
92 repo_name=repo_name,
93 repo_type=backend.alias,
93 repo_type=backend.alias,
94 repo_description=description,
94 repo_description=description,
95 csrf_token=csrf_token),
95 csrf_token=csrf_token),
96 status=302)
96 status=302)
97
97
98 self.assert_repository_is_created_correctly(
98 self.assert_repository_is_created_correctly(
99 repo_name, description, backend)
99 repo_name, description, backend)
100
100
101 def test_create_numeric_name(self, autologin_user, backend, csrf_token):
101 def test_create_numeric_name(self, autologin_user, backend, csrf_token):
102 numeric_repo = '1234'
102 numeric_repo = '1234'
103 repo_name = numeric_repo
103 repo_name = numeric_repo
104 description = 'description for newly created repo' + numeric_repo
104 description = 'description for newly created repo' + numeric_repo
105 self.app.post(
105 self.app.post(
106 route_path('repo_create'),
106 route_path('repo_create'),
107 fixture._get_repo_create_params(
107 fixture._get_repo_create_params(
108 repo_private=False,
108 repo_private=False,
109 repo_name=repo_name,
109 repo_name=repo_name,
110 repo_type=backend.alias,
110 repo_type=backend.alias,
111 repo_description=description,
111 repo_description=description,
112 csrf_token=csrf_token))
112 csrf_token=csrf_token))
113
113 try:
114 self.assert_repository_is_created_correctly(
114 self.assert_repository_is_created_correctly(repo_name, description, backend)
115 repo_name, description, backend)
115 finally:
116 RepoModel().delete(numeric_repo)
117 Session().commit()
116
118
117 @pytest.mark.parametrize("suffix", ['', '_Δ…Δ‡Δ™'], ids=['', 'non-ascii'])
119 @pytest.mark.parametrize("suffix", ['', '_Δ…Δ‡Δ™'], ids=['', 'non-ascii'])
118 def test_create_in_group(
120 def test_create_in_group(
119 self, autologin_user, backend, suffix, csrf_token):
121 self, autologin_user, backend, suffix, csrf_token):
120 # create GROUP
122 # create GROUP
121 group_name = f'sometest_{backend.alias}'
123 group_name = f'sometest_{backend.alias}'
122 gr = RepoGroupModel().create(group_name=group_name,
124 gr = RepoGroupModel().create(group_name=group_name,
123 group_description='test',
125 group_description='test',
124 owner=TEST_USER_ADMIN_LOGIN)
126 owner=TEST_USER_ADMIN_LOGIN)
125 Session().commit()
127 Session().commit()
126
128
127 repo_name = f'ingroup{suffix}'
129 repo_name = f'ingroup{suffix}'
128 repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
130 repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
129 description = 'description for newly created repo'
131 description = 'description for newly created repo'
130
132
131 self.app.post(
133 self.app.post(
132 route_path('repo_create'),
134 route_path('repo_create'),
133 fixture._get_repo_create_params(
135 fixture._get_repo_create_params(
134 repo_private=False,
136 repo_private=False,
135 repo_name=safe_str(repo_name),
137 repo_name=safe_str(repo_name),
136 repo_type=backend.alias,
138 repo_type=backend.alias,
137 repo_description=description,
139 repo_description=description,
138 repo_group=gr.group_id,
140 repo_group=gr.group_id,
139 csrf_token=csrf_token))
141 csrf_token=csrf_token))
140
142
141 # TODO: johbo: Cleanup work to fixture
143 # TODO: johbo: Cleanup work to fixture
142 try:
144 try:
143 self.assert_repository_is_created_correctly(
145 self.assert_repository_is_created_correctly(
144 repo_name_full, description, backend)
146 repo_name_full, description, backend)
145
147
146 new_repo = RepoModel().get_by_repo_name(repo_name_full)
148 new_repo = RepoModel().get_by_repo_name(repo_name_full)
147 inherited_perms = UserRepoToPerm.query().filter(
149 inherited_perms = UserRepoToPerm.query().filter(
148 UserRepoToPerm.repository_id == new_repo.repo_id).all()
150 UserRepoToPerm.repository_id == new_repo.repo_id).all()
149 assert len(inherited_perms) == 1
151 assert len(inherited_perms) == 1
150 finally:
152 finally:
151 RepoModel().delete(repo_name_full)
153 RepoModel().delete(repo_name_full)
152 RepoGroupModel().delete(group_name)
154 RepoGroupModel().delete(group_name)
153 Session().commit()
155 Session().commit()
154
156
155 def test_create_in_group_numeric_name(
157 def test_create_in_group_numeric_name(
156 self, autologin_user, backend, csrf_token):
158 self, autologin_user, backend, csrf_token):
157 # create GROUP
159 # create GROUP
158 group_name = 'sometest_%s' % backend.alias
160 group_name = 'sometest_%s' % backend.alias
159 gr = RepoGroupModel().create(group_name=group_name,
161 gr = RepoGroupModel().create(group_name=group_name,
160 group_description='test',
162 group_description='test',
161 owner=TEST_USER_ADMIN_LOGIN)
163 owner=TEST_USER_ADMIN_LOGIN)
162 Session().commit()
164 Session().commit()
163
165
164 repo_name = '12345'
166 repo_name = '12345'
165 repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
167 repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
166 description = 'description for newly created repo'
168 description = 'description for newly created repo'
167 self.app.post(
169 self.app.post(
168 route_path('repo_create'),
170 route_path('repo_create'),
169 fixture._get_repo_create_params(
171 fixture._get_repo_create_params(
170 repo_private=False,
172 repo_private=False,
171 repo_name=repo_name,
173 repo_name=repo_name,
172 repo_type=backend.alias,
174 repo_type=backend.alias,
173 repo_description=description,
175 repo_description=description,
174 repo_group=gr.group_id,
176 repo_group=gr.group_id,
175 csrf_token=csrf_token))
177 csrf_token=csrf_token))
176
178
177 # TODO: johbo: Cleanup work to fixture
179 # TODO: johbo: Cleanup work to fixture
178 try:
180 try:
179 self.assert_repository_is_created_correctly(
181 self.assert_repository_is_created_correctly(
180 repo_name_full, description, backend)
182 repo_name_full, description, backend)
181
183
182 new_repo = RepoModel().get_by_repo_name(repo_name_full)
184 new_repo = RepoModel().get_by_repo_name(repo_name_full)
183 inherited_perms = UserRepoToPerm.query()\
185 inherited_perms = UserRepoToPerm.query()\
184 .filter(UserRepoToPerm.repository_id == new_repo.repo_id).all()
186 .filter(UserRepoToPerm.repository_id == new_repo.repo_id).all()
185 assert len(inherited_perms) == 1
187 assert len(inherited_perms) == 1
186 finally:
188 finally:
187 RepoModel().delete(repo_name_full)
189 RepoModel().delete(repo_name_full)
188 RepoGroupModel().delete(group_name)
190 RepoGroupModel().delete(group_name)
189 Session().commit()
191 Session().commit()
190
192
191 def test_create_in_group_without_needed_permissions(self, backend):
193 def test_create_in_group_without_needed_permissions(self, backend):
192 session = login_user_session(
194 session = login_user_session(
193 self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
195 self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
194 csrf_token = auth.get_csrf_token(session)
196 csrf_token = auth.get_csrf_token(session)
195 # revoke
197 # revoke
196 user_model = UserModel()
198 user_model = UserModel()
197 # disable fork and create on default user
199 # disable fork and create on default user
198 user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository')
200 user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository')
199 user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none')
201 user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none')
200 user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository')
202 user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository')
201 user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none')
203 user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none')
202
204
203 # disable on regular user
205 # disable on regular user
204 user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository')
206 user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository')
205 user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none')
207 user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none')
206 user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository')
208 user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository')
207 user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none')
209 user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none')
208 Session().commit()
210 Session().commit()
209
211
210 # create GROUP
212 # create GROUP
211 group_name = 'reg_sometest_%s' % backend.alias
213 group_name = 'reg_sometest_%s' % backend.alias
212 gr = RepoGroupModel().create(group_name=group_name,
214 gr = RepoGroupModel().create(group_name=group_name,
213 group_description='test',
215 group_description='test',
214 owner=TEST_USER_ADMIN_LOGIN)
216 owner=TEST_USER_ADMIN_LOGIN)
215 Session().commit()
217 Session().commit()
216 repo_group_id = gr.group_id
218 repo_group_id = gr.group_id
217
219
218 group_name_allowed = 'reg_sometest_allowed_%s' % backend.alias
220 group_name_allowed = 'reg_sometest_allowed_%s' % backend.alias
219 gr_allowed = RepoGroupModel().create(
221 gr_allowed = RepoGroupModel().create(
220 group_name=group_name_allowed,
222 group_name=group_name_allowed,
221 group_description='test',
223 group_description='test',
222 owner=TEST_USER_REGULAR_LOGIN)
224 owner=TEST_USER_REGULAR_LOGIN)
223 allowed_repo_group_id = gr_allowed.group_id
225 allowed_repo_group_id = gr_allowed.group_id
224 Session().commit()
226 Session().commit()
225
227
226 repo_name = 'ingroup'
228 repo_name = 'ingroup'
227 description = 'description for newly created repo'
229 description = 'description for newly created repo'
228 response = self.app.post(
230 response = self.app.post(
229 route_path('repo_create'),
231 route_path('repo_create'),
230 fixture._get_repo_create_params(
232 fixture._get_repo_create_params(
231 repo_private=False,
233 repo_private=False,
232 repo_name=repo_name,
234 repo_name=repo_name,
233 repo_type=backend.alias,
235 repo_type=backend.alias,
234 repo_description=description,
236 repo_description=description,
235 repo_group=repo_group_id,
237 repo_group=repo_group_id,
236 csrf_token=csrf_token))
238 csrf_token=csrf_token))
237
239
238 response.mustcontain('Invalid value')
240 response.mustcontain('Invalid value')
239
241
240 # user is allowed to create in this group
242 # user is allowed to create in this group
241 repo_name = 'ingroup'
243 repo_name = 'ingroup'
242 repo_name_full = RepoGroup.url_sep().join(
244 repo_name_full = RepoGroup.url_sep().join(
243 [group_name_allowed, repo_name])
245 [group_name_allowed, repo_name])
244 description = 'description for newly created repo'
246 description = 'description for newly created repo'
245 response = self.app.post(
247 response = self.app.post(
246 route_path('repo_create'),
248 route_path('repo_create'),
247 fixture._get_repo_create_params(
249 fixture._get_repo_create_params(
248 repo_private=False,
250 repo_private=False,
249 repo_name=repo_name,
251 repo_name=repo_name,
250 repo_type=backend.alias,
252 repo_type=backend.alias,
251 repo_description=description,
253 repo_description=description,
252 repo_group=allowed_repo_group_id,
254 repo_group=allowed_repo_group_id,
253 csrf_token=csrf_token))
255 csrf_token=csrf_token))
254
256
255 # TODO: johbo: Cleanup in pytest fixture
257 # TODO: johbo: Cleanup in pytest fixture
256 try:
258 try:
257 self.assert_repository_is_created_correctly(
259 self.assert_repository_is_created_correctly(
258 repo_name_full, description, backend)
260 repo_name_full, description, backend)
259
261
260 new_repo = RepoModel().get_by_repo_name(repo_name_full)
262 new_repo = RepoModel().get_by_repo_name(repo_name_full)
261 inherited_perms = UserRepoToPerm.query().filter(
263 inherited_perms = UserRepoToPerm.query().filter(
262 UserRepoToPerm.repository_id == new_repo.repo_id).all()
264 UserRepoToPerm.repository_id == new_repo.repo_id).all()
263 assert len(inherited_perms) == 1
265 assert len(inherited_perms) == 1
264
266
265 assert repo_on_filesystem(repo_name_full)
267 assert repo_on_filesystem(repo_name_full)
266 finally:
268 finally:
267 RepoModel().delete(repo_name_full)
269 RepoModel().delete(repo_name_full)
268 RepoGroupModel().delete(group_name)
270 RepoGroupModel().delete(group_name)
269 RepoGroupModel().delete(group_name_allowed)
271 RepoGroupModel().delete(group_name_allowed)
270 Session().commit()
272 Session().commit()
271
273
272 def test_create_in_group_inherit_permissions(self, autologin_user, backend,
274 def test_create_in_group_inherit_permissions(self, autologin_user, backend,
273 csrf_token):
275 csrf_token):
274 # create GROUP
276 # create GROUP
275 group_name = 'sometest_%s' % backend.alias
277 group_name = 'sometest_%s' % backend.alias
276 gr = RepoGroupModel().create(group_name=group_name,
278 gr = RepoGroupModel().create(group_name=group_name,
277 group_description='test',
279 group_description='test',
278 owner=TEST_USER_ADMIN_LOGIN)
280 owner=TEST_USER_ADMIN_LOGIN)
279 perm = Permission.get_by_key('repository.write')
281 perm = Permission.get_by_key('repository.write')
280 RepoGroupModel().grant_user_permission(
282 RepoGroupModel().grant_user_permission(
281 gr, TEST_USER_REGULAR_LOGIN, perm)
283 gr, TEST_USER_REGULAR_LOGIN, perm)
282
284
283 # add repo permissions
285 # add repo permissions
284 Session().commit()
286 Session().commit()
285 repo_group_id = gr.group_id
287 repo_group_id = gr.group_id
286 repo_name = 'ingroup_inherited_%s' % backend.alias
288 repo_name = 'ingroup_inherited_%s' % backend.alias
287 repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
289 repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
288 description = 'description for newly created repo'
290 description = 'description for newly created repo'
289 self.app.post(
291 self.app.post(
290 route_path('repo_create'),
292 route_path('repo_create'),
291 fixture._get_repo_create_params(
293 fixture._get_repo_create_params(
292 repo_private=False,
294 repo_private=False,
293 repo_name=repo_name,
295 repo_name=repo_name,
294 repo_type=backend.alias,
296 repo_type=backend.alias,
295 repo_description=description,
297 repo_description=description,
296 repo_group=repo_group_id,
298 repo_group=repo_group_id,
297 repo_copy_permissions=True,
299 repo_copy_permissions=True,
298 csrf_token=csrf_token))
300 csrf_token=csrf_token))
299
301
300 # TODO: johbo: Cleanup to pytest fixture
302 # TODO: johbo: Cleanup to pytest fixture
301 try:
303 try:
302 self.assert_repository_is_created_correctly(
304 self.assert_repository_is_created_correctly(
303 repo_name_full, description, backend)
305 repo_name_full, description, backend)
304 except Exception:
306 except Exception:
305 RepoGroupModel().delete(group_name)
307 RepoGroupModel().delete(group_name)
306 Session().commit()
308 Session().commit()
307 raise
309 raise
308
310
309 # check if inherited permissions are applied
311 # check if inherited permissions are applied
310 new_repo = RepoModel().get_by_repo_name(repo_name_full)
312 new_repo = RepoModel().get_by_repo_name(repo_name_full)
311 inherited_perms = UserRepoToPerm.query().filter(
313 inherited_perms = UserRepoToPerm.query().filter(
312 UserRepoToPerm.repository_id == new_repo.repo_id).all()
314 UserRepoToPerm.repository_id == new_repo.repo_id).all()
313 assert len(inherited_perms) == 2
315 assert len(inherited_perms) == 2
314
316
315 assert TEST_USER_REGULAR_LOGIN in [
317 assert TEST_USER_REGULAR_LOGIN in [
316 x.user.username for x in inherited_perms]
318 x.user.username for x in inherited_perms]
317 assert 'repository.write' in [
319 assert 'repository.write' in [
318 x.permission.permission_name for x in inherited_perms]
320 x.permission.permission_name for x in inherited_perms]
319
321
320 RepoModel().delete(repo_name_full)
322 RepoModel().delete(repo_name_full)
321 RepoGroupModel().delete(group_name)
323 RepoGroupModel().delete(group_name)
322 Session().commit()
324 Session().commit()
323
325
324 @pytest.mark.xfail_backends(
326 @pytest.mark.xfail_backends(
325 "git", "hg", reason="Missing reposerver support")
327 "git", "hg", reason="Missing reposerver support")
326 def test_create_with_clone_uri(self, autologin_user, backend, reposerver,
328 def test_create_with_clone_uri(self, autologin_user, backend, reposerver,
327 csrf_token):
329 csrf_token):
328 source_repo = backend.create_repo(number_of_commits=2)
330 source_repo = backend.create_repo(number_of_commits=2)
329 source_repo_name = source_repo.repo_name
331 source_repo_name = source_repo.repo_name
330 reposerver.serve(source_repo.scm_instance())
332 reposerver.serve(source_repo.scm_instance())
331
333
332 repo_name = backend.new_repo_name()
334 repo_name = backend.new_repo_name()
333 response = self.app.post(
335 response = self.app.post(
334 route_path('repo_create'),
336 route_path('repo_create'),
335 fixture._get_repo_create_params(
337 fixture._get_repo_create_params(
336 repo_private=False,
338 repo_private=False,
337 repo_name=repo_name,
339 repo_name=repo_name,
338 repo_type=backend.alias,
340 repo_type=backend.alias,
339 repo_description='',
341 repo_description='',
340 clone_uri=reposerver.url,
342 clone_uri=reposerver.url,
341 csrf_token=csrf_token),
343 csrf_token=csrf_token),
342 status=302)
344 status=302)
343
345
344 # Should be redirected to the creating page
346 # Should be redirected to the creating page
345 response.mustcontain('repo_creating')
347 response.mustcontain('repo_creating')
346
348
347 # Expecting that both repositories have same history
349 # Expecting that both repositories have same history
348 source_repo = RepoModel().get_by_repo_name(source_repo_name)
350 source_repo = RepoModel().get_by_repo_name(source_repo_name)
349 source_vcs = source_repo.scm_instance()
351 source_vcs = source_repo.scm_instance()
350 repo = RepoModel().get_by_repo_name(repo_name)
352 repo = RepoModel().get_by_repo_name(repo_name)
351 repo_vcs = repo.scm_instance()
353 repo_vcs = repo.scm_instance()
352 assert source_vcs[0].message == repo_vcs[0].message
354 assert source_vcs[0].message == repo_vcs[0].message
353 assert source_vcs.count() == repo_vcs.count()
355 assert source_vcs.count() == repo_vcs.count()
354 assert source_vcs.commit_ids == repo_vcs.commit_ids
356 assert source_vcs.commit_ids == repo_vcs.commit_ids
355
357
356 @pytest.mark.xfail_backends("svn", reason="Depends on import support")
358 @pytest.mark.xfail_backends("svn", reason="Depends on import support")
357 def test_create_remote_repo_wrong_clone_uri(self, autologin_user, backend,
359 def test_create_remote_repo_wrong_clone_uri(self, autologin_user, backend,
358 csrf_token):
360 csrf_token):
359 repo_name = backend.new_repo_name()
361 repo_name = backend.new_repo_name()
360 description = 'description for newly created repo'
362 description = 'description for newly created repo'
361 response = self.app.post(
363 response = self.app.post(
362 route_path('repo_create'),
364 route_path('repo_create'),
363 fixture._get_repo_create_params(
365 fixture._get_repo_create_params(
364 repo_private=False,
366 repo_private=False,
365 repo_name=repo_name,
367 repo_name=repo_name,
366 repo_type=backend.alias,
368 repo_type=backend.alias,
367 repo_description=description,
369 repo_description=description,
368 clone_uri='http://repo.invalid/repo',
370 clone_uri='http://repo.invalid/repo',
369 csrf_token=csrf_token))
371 csrf_token=csrf_token))
370 response.mustcontain('invalid clone url')
372 response.mustcontain('invalid clone url')
371
373
372 @pytest.mark.xfail_backends("svn", reason="Depends on import support")
374 @pytest.mark.xfail_backends("svn", reason="Depends on import support")
373 def test_create_remote_repo_wrong_clone_uri_hg_svn(
375 def test_create_remote_repo_wrong_clone_uri_hg_svn(
374 self, autologin_user, backend, csrf_token):
376 self, autologin_user, backend, csrf_token):
375 repo_name = backend.new_repo_name()
377 repo_name = backend.new_repo_name()
376 description = 'description for newly created repo'
378 description = 'description for newly created repo'
377 response = self.app.post(
379 response = self.app.post(
378 route_path('repo_create'),
380 route_path('repo_create'),
379 fixture._get_repo_create_params(
381 fixture._get_repo_create_params(
380 repo_private=False,
382 repo_private=False,
381 repo_name=repo_name,
383 repo_name=repo_name,
382 repo_type=backend.alias,
384 repo_type=backend.alias,
383 repo_description=description,
385 repo_description=description,
384 clone_uri='svn+http://svn.invalid/repo',
386 clone_uri='svn+http://svn.invalid/repo',
385 csrf_token=csrf_token))
387 csrf_token=csrf_token))
386 response.mustcontain('invalid clone url')
388 response.mustcontain('invalid clone url')
387
389
388 def test_create_with_git_suffix(
390 def test_create_with_git_suffix(
389 self, autologin_user, backend, csrf_token):
391 self, autologin_user, backend, csrf_token):
390 repo_name = backend.new_repo_name() + ".git"
392 repo_name = backend.new_repo_name() + ".git"
391 description = 'description for newly created repo'
393 description = 'description for newly created repo'
392 response = self.app.post(
394 response = self.app.post(
393 route_path('repo_create'),
395 route_path('repo_create'),
394 fixture._get_repo_create_params(
396 fixture._get_repo_create_params(
395 repo_private=False,
397 repo_private=False,
396 repo_name=repo_name,
398 repo_name=repo_name,
397 repo_type=backend.alias,
399 repo_type=backend.alias,
398 repo_description=description,
400 repo_description=description,
399 csrf_token=csrf_token))
401 csrf_token=csrf_token))
400 response.mustcontain('Repository name cannot end with .git')
402 response.mustcontain('Repository name cannot end with .git')
401
403
402 def test_default_user_cannot_access_private_repo_in_a_group(
404 def test_default_user_cannot_access_private_repo_in_a_group(
403 self, autologin_user, user_util, backend):
405 self, autologin_user, user_util, backend):
404
406
405 group = user_util.create_repo_group()
407 group = user_util.create_repo_group()
406
408
407 repo = backend.create_repo(
409 repo = backend.create_repo(
408 repo_private=True, repo_group=group, repo_copy_permissions=True)
410 repo_private=True, repo_group=group, repo_copy_permissions=True)
409
411
410 permissions = _get_permission_for_user(
412 permissions = _get_permission_for_user(
411 user='default', repo=repo.repo_name)
413 user='default', repo=repo.repo_name)
412 assert len(permissions) == 1
414 assert len(permissions) == 1
413 assert permissions[0].permission.permission_name == 'repository.none'
415 assert permissions[0].permission.permission_name == 'repository.none'
414 assert permissions[0].repository.private is True
416 assert permissions[0].repository.private is True
415
417
416 def test_create_on_top_level_without_permissions(self, backend):
418 def test_create_on_top_level_without_permissions(self, backend):
417 session = login_user_session(
419 session = login_user_session(
418 self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
420 self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
419 csrf_token = auth.get_csrf_token(session)
421 csrf_token = auth.get_csrf_token(session)
420
422
421 # revoke
423 # revoke
422 user_model = UserModel()
424 user_model = UserModel()
423 # disable fork and create on default user
425 # disable fork and create on default user
424 user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository')
426 user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository')
425 user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none')
427 user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none')
426 user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository')
428 user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository')
427 user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none')
429 user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none')
428
430
429 # disable on regular user
431 # disable on regular user
430 user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository')
432 user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository')
431 user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none')
433 user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none')
432 user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository')
434 user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository')
433 user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none')
435 user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none')
434 Session().commit()
436 Session().commit()
435
437
436 repo_name = backend.new_repo_name()
438 repo_name = backend.new_repo_name()
437 description = 'description for newly created repo'
439 description = 'description for newly created repo'
438 response = self.app.post(
440 response = self.app.post(
439 route_path('repo_create'),
441 route_path('repo_create'),
440 fixture._get_repo_create_params(
442 fixture._get_repo_create_params(
441 repo_private=False,
443 repo_private=False,
442 repo_name=repo_name,
444 repo_name=repo_name,
443 repo_type=backend.alias,
445 repo_type=backend.alias,
444 repo_description=description,
446 repo_description=description,
445 csrf_token=csrf_token))
447 csrf_token=csrf_token))
446
448
447 response.mustcontain(
449 response.mustcontain(
448 "You do not have the permission to store repositories in "
450 "You do not have the permission to store repositories in "
449 "the root location.")
451 "the root location.")
450
452
451 @mock.patch.object(RepoModel, '_create_filesystem_repo', error_function)
453 @mock.patch.object(RepoModel, '_create_filesystem_repo', error_function)
452 def test_create_repo_when_filesystem_op_fails(
454 def test_create_repo_when_filesystem_op_fails(
453 self, autologin_user, backend, csrf_token):
455 self, autologin_user, backend, csrf_token):
454 repo_name = backend.new_repo_name()
456 repo_name = backend.new_repo_name()
455 description = 'description for newly created repo'
457 description = 'description for newly created repo'
456
458
457 response = self.app.post(
459 response = self.app.post(
458 route_path('repo_create'),
460 route_path('repo_create'),
459 fixture._get_repo_create_params(
461 fixture._get_repo_create_params(
460 repo_private=False,
462 repo_private=False,
461 repo_name=repo_name,
463 repo_name=repo_name,
462 repo_type=backend.alias,
464 repo_type=backend.alias,
463 repo_description=description,
465 repo_description=description,
464 csrf_token=csrf_token))
466 csrf_token=csrf_token))
465
467
466 assert_session_flash(
468 assert_session_flash(
467 response, 'Error creating repository %s' % repo_name)
469 response, 'Error creating repository %s' % repo_name)
468 # repo must not be in db
470 # repo must not be in db
469 assert backend.repo is None
471 assert backend.repo is None
470 # repo must not be in filesystem !
472 # repo must not be in filesystem !
471 assert not repo_on_filesystem(repo_name)
473 assert not repo_on_filesystem(repo_name)
472
474
473 def assert_repository_is_created_correctly(self, repo_name, description, backend):
475 def assert_repository_is_created_correctly(self, repo_name, description, backend):
474 url_quoted_repo_name = urllib.parse.quote(repo_name)
476 url_quoted_repo_name = urllib.parse.quote(repo_name)
475
477
476 # run the check page that triggers the flash message
478 # run the check page that triggers the flash message
477 response = self.app.get(
479 response = self.app.get(
478 route_path('repo_creating_check', repo_name=repo_name))
480 route_path('repo_creating_check', repo_name=repo_name))
479 assert response.json == {'result': True}
481 assert response.json == {'result': True}
480
482
481 flash_msg = 'Created repository <a href="/{}">{}</a>'.format(url_quoted_repo_name, repo_name)
483 flash_msg = 'Created repository <a href="/{}">{}</a>'.format(url_quoted_repo_name, repo_name)
482 assert_session_flash(response, flash_msg)
484 assert_session_flash(response, flash_msg)
483
485
484 # test if the repo was created in the database
486 # test if the repo was created in the database
485 new_repo = RepoModel().get_by_repo_name(repo_name)
487 new_repo = RepoModel().get_by_repo_name(repo_name)
486
488
487 assert new_repo.repo_name == repo_name
489 assert new_repo.repo_name == repo_name
488 assert new_repo.description == description
490 assert new_repo.description == description
489
491
490 # test if the repository is visible in the list ?
492 # test if the repository is visible in the list ?
491 response = self.app.get(
493 response = self.app.get(
492 h.route_path('repo_summary', repo_name=repo_name))
494 h.route_path('repo_summary', repo_name=repo_name))
493 response.mustcontain(repo_name)
495 response.mustcontain(repo_name)
494 response.mustcontain(backend.alias)
496 response.mustcontain(backend.alias)
495
497
496 assert repo_on_filesystem(repo_name)
498 assert repo_on_filesystem(repo_name)
@@ -1,716 +1,733 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 import logging
20 import logging
21 import collections
21 import collections
22
22
23 import datetime
23 import datetime
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26
26
27 import rhodecode
27 import rhodecode
28
28
29 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
29 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31 from pyramid.response import Response
31 from pyramid.response import Response
32
32
33 from rhodecode.apps._base import BaseAppView
33 from rhodecode.apps._base import BaseAppView
34 from rhodecode.apps._base.navigation import navigation_list
34 from rhodecode.apps._base.navigation import navigation_list
35 from rhodecode.apps.svn_support import config_keys
35 from rhodecode.apps.svn_support import config_keys
36 from rhodecode.lib import helpers as h
36 from rhodecode.lib import helpers as h
37 from rhodecode.lib.auth import (
37 from rhodecode.lib.auth import (
38 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
38 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
39 from rhodecode.lib.celerylib import tasks, run_task
39 from rhodecode.lib.celerylib import tasks, run_task
40 from rhodecode.lib.str_utils import safe_str
40 from rhodecode.lib.str_utils import safe_str
41 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path
41 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path, repo2db_cleanup
42 from rhodecode.lib.utils2 import str2bool, AttributeDict
42 from rhodecode.lib.utils2 import str2bool, AttributeDict
43 from rhodecode.lib.index import searcher_from_config
43 from rhodecode.lib.index import searcher_from_config
44
44
45 from rhodecode.model.db import RhodeCodeUi, Repository
45 from rhodecode.model.db import RhodeCodeUi, Repository
46 from rhodecode.model.forms import (ApplicationSettingsForm,
46 from rhodecode.model.forms import (ApplicationSettingsForm,
47 ApplicationUiSettingsForm, ApplicationVisualisationForm,
47 ApplicationUiSettingsForm, ApplicationVisualisationForm,
48 LabsSettingsForm, IssueTrackerPatternsForm)
48 LabsSettingsForm, IssueTrackerPatternsForm)
49 from rhodecode.model.permission import PermissionModel
49 from rhodecode.model.permission import PermissionModel
50 from rhodecode.model.repo_group import RepoGroupModel
50 from rhodecode.model.repo_group import RepoGroupModel
51
51
52 from rhodecode.model.scm import ScmModel
52 from rhodecode.model.scm import ScmModel
53 from rhodecode.model.notification import EmailNotificationModel
53 from rhodecode.model.notification import EmailNotificationModel
54 from rhodecode.model.meta import Session
54 from rhodecode.model.meta import Session
55 from rhodecode.model.settings import (
55 from rhodecode.model.settings import (
56 IssueTrackerSettingsModel, VcsSettingsModel, SettingNotFound,
56 IssueTrackerSettingsModel, VcsSettingsModel, SettingNotFound,
57 SettingsModel)
57 SettingsModel)
58
58
59
59
60 log = logging.getLogger(__name__)
60 log = logging.getLogger(__name__)
61
61
62
62
63 class AdminSettingsView(BaseAppView):
63 class AdminSettingsView(BaseAppView):
64
64
65 def load_default_context(self):
65 def load_default_context(self):
66 c = self._get_local_tmpl_context()
66 c = self._get_local_tmpl_context()
67 c.labs_active = str2bool(
67 c.labs_active = str2bool(
68 rhodecode.CONFIG.get('labs_settings_active', 'true'))
68 rhodecode.CONFIG.get('labs_settings_active', 'true'))
69 c.navlist = navigation_list(self.request)
69 c.navlist = navigation_list(self.request)
70 return c
70 return c
71
71
72 @classmethod
72 @classmethod
73 def _get_ui_settings(cls):
73 def _get_ui_settings(cls):
74 ret = RhodeCodeUi.query().all()
74 ret = RhodeCodeUi.query().all()
75
75
76 if not ret:
76 if not ret:
77 raise Exception('Could not get application ui settings !')
77 raise Exception('Could not get application ui settings !')
78 settings = {
78 settings = {
79 # legacy param that needs to be kept
79 # legacy param that needs to be kept
80 'web_push_ssl': False,
80 'web_push_ssl': False,
81 'extensions_hgsubversion': False
81 'extensions_hgsubversion': False
82 }
82 }
83 for each in ret:
83 for each in ret:
84 k = each.ui_key
84 k = each.ui_key
85 v = each.ui_value
85 v = each.ui_value
86 section = each.ui_section
86 section = each.ui_section
87
87
88 # skip some options if they are defined
88 # skip some options if they are defined
89 if f"{section}_{k}" in ['web_push_ssl', 'extensions_hgsubversion']:
89 if f"{section}_{k}" in ['web_push_ssl', 'extensions_hgsubversion']:
90 continue
90 continue
91
91
92 if k == '/':
92 if k == '/':
93 k = 'root_path'
93 k = 'root_path'
94
94
95 if k in ['publish', 'enabled']:
95 if k in ['publish', 'enabled']:
96 v = str2bool(v)
96 v = str2bool(v)
97
97
98 if k.find('.') != -1:
98 if k.find('.') != -1:
99 k = k.replace('.', '_')
99 k = k.replace('.', '_')
100
100
101 if each.ui_section in ['hooks', 'extensions']:
101 if each.ui_section in ['hooks', 'extensions']:
102 v = each.ui_active
102 v = each.ui_active
103
103
104 settings[section + '_' + k] = v
104 settings[section + '_' + k] = v
105
105
106 return settings
106 return settings
107
107
108 @classmethod
108 @classmethod
109 def _form_defaults(cls):
109 def _form_defaults(cls):
110 defaults = SettingsModel().get_all_settings()
110 defaults = SettingsModel().get_all_settings()
111 defaults.update(cls._get_ui_settings())
111 defaults.update(cls._get_ui_settings())
112
112
113 defaults.update({
113 defaults.update({
114 'new_svn_branch': '',
114 'new_svn_branch': '',
115 'new_svn_tag': '',
115 'new_svn_tag': '',
116 })
116 })
117 return defaults
117 return defaults
118
118
119 @LoginRequired()
119 @LoginRequired()
120 @HasPermissionAllDecorator('hg.admin')
120 @HasPermissionAllDecorator('hg.admin')
121 def settings_vcs(self):
121 def settings_vcs(self):
122 c = self.load_default_context()
122 c = self.load_default_context()
123 c.active = 'vcs'
123 c.active = 'vcs'
124 model = VcsSettingsModel()
124 model = VcsSettingsModel()
125 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
125 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
126 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
126 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
127 c.svn_generate_config = rhodecode.ConfigGet().get_bool(config_keys.generate_config)
127 c.svn_generate_config = rhodecode.ConfigGet().get_bool(config_keys.generate_config)
128 c.svn_config_path = rhodecode.ConfigGet().get_str(config_keys.config_file_path)
128 c.svn_config_path = rhodecode.ConfigGet().get_str(config_keys.config_file_path)
129 defaults = self._form_defaults()
129 defaults = self._form_defaults()
130
130
131 data = render('rhodecode:templates/admin/settings/settings.mako',
131 data = render('rhodecode:templates/admin/settings/settings.mako',
132 self._get_template_context(c), self.request)
132 self._get_template_context(c), self.request)
133 html = formencode.htmlfill.render(
133 html = formencode.htmlfill.render(
134 data,
134 data,
135 defaults=defaults,
135 defaults=defaults,
136 encoding="UTF-8",
136 encoding="UTF-8",
137 force_defaults=False
137 force_defaults=False
138 )
138 )
139 return Response(html)
139 return Response(html)
140
140
141 @LoginRequired()
141 @LoginRequired()
142 @HasPermissionAllDecorator('hg.admin')
142 @HasPermissionAllDecorator('hg.admin')
143 @CSRFRequired()
143 @CSRFRequired()
144 def settings_vcs_update(self):
144 def settings_vcs_update(self):
145 _ = self.request.translate
145 _ = self.request.translate
146 c = self.load_default_context()
146 c = self.load_default_context()
147 c.active = 'vcs'
147 c.active = 'vcs'
148
148
149 model = VcsSettingsModel()
149 model = VcsSettingsModel()
150 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
150 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
151 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
151 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
152
152
153 c.svn_generate_config = rhodecode.ConfigGet().get_bool(config_keys.generate_config)
153 c.svn_generate_config = rhodecode.ConfigGet().get_bool(config_keys.generate_config)
154 c.svn_config_path = rhodecode.ConfigGet().get_str(config_keys.config_file_path)
154 c.svn_config_path = rhodecode.ConfigGet().get_str(config_keys.config_file_path)
155 application_form = ApplicationUiSettingsForm(self.request.translate)()
155 application_form = ApplicationUiSettingsForm(self.request.translate)()
156
156
157 try:
157 try:
158 form_result = application_form.to_python(dict(self.request.POST))
158 form_result = application_form.to_python(dict(self.request.POST))
159 except formencode.Invalid as errors:
159 except formencode.Invalid as errors:
160 h.flash(
160 h.flash(
161 _("Some form inputs contain invalid data."),
161 _("Some form inputs contain invalid data."),
162 category='error')
162 category='error')
163 data = render('rhodecode:templates/admin/settings/settings.mako',
163 data = render('rhodecode:templates/admin/settings/settings.mako',
164 self._get_template_context(c), self.request)
164 self._get_template_context(c), self.request)
165 html = formencode.htmlfill.render(
165 html = formencode.htmlfill.render(
166 data,
166 data,
167 defaults=errors.value,
167 defaults=errors.value,
168 errors=errors.unpack_errors() or {},
168 errors=errors.unpack_errors() or {},
169 prefix_error=False,
169 prefix_error=False,
170 encoding="UTF-8",
170 encoding="UTF-8",
171 force_defaults=False
171 force_defaults=False
172 )
172 )
173 return Response(html)
173 return Response(html)
174
174
175 try:
175 try:
176 model.update_global_hook_settings(form_result)
176 model.update_global_hook_settings(form_result)
177
177
178 model.create_or_update_global_svn_settings(form_result)
178 model.create_or_update_global_svn_settings(form_result)
179 model.create_or_update_global_hg_settings(form_result)
179 model.create_or_update_global_hg_settings(form_result)
180 model.create_or_update_global_git_settings(form_result)
180 model.create_or_update_global_git_settings(form_result)
181 model.create_or_update_global_pr_settings(form_result)
181 model.create_or_update_global_pr_settings(form_result)
182 except Exception:
182 except Exception:
183 log.exception("Exception while updating settings")
183 log.exception("Exception while updating settings")
184 h.flash(_('Error occurred during updating '
184 h.flash(_('Error occurred during updating '
185 'application settings'), category='error')
185 'application settings'), category='error')
186 else:
186 else:
187 Session().commit()
187 Session().commit()
188 h.flash(_('Updated VCS settings'), category='success')
188 h.flash(_('Updated VCS settings'), category='success')
189 raise HTTPFound(h.route_path('admin_settings_vcs'))
189 raise HTTPFound(h.route_path('admin_settings_vcs'))
190
190
191 data = render('rhodecode:templates/admin/settings/settings.mako',
191 data = render('rhodecode:templates/admin/settings/settings.mako',
192 self._get_template_context(c), self.request)
192 self._get_template_context(c), self.request)
193 html = formencode.htmlfill.render(
193 html = formencode.htmlfill.render(
194 data,
194 data,
195 defaults=self._form_defaults(),
195 defaults=self._form_defaults(),
196 encoding="UTF-8",
196 encoding="UTF-8",
197 force_defaults=False
197 force_defaults=False
198 )
198 )
199 return Response(html)
199 return Response(html)
200
200
201 @LoginRequired()
201 @LoginRequired()
202 @HasPermissionAllDecorator('hg.admin')
202 @HasPermissionAllDecorator('hg.admin')
203 @CSRFRequired()
203 @CSRFRequired()
204 def settings_vcs_delete_svn_pattern(self):
204 def settings_vcs_delete_svn_pattern(self):
205 delete_pattern_id = self.request.POST.get('delete_svn_pattern')
205 delete_pattern_id = self.request.POST.get('delete_svn_pattern')
206 model = VcsSettingsModel()
206 model = VcsSettingsModel()
207 try:
207 try:
208 model.delete_global_svn_pattern(delete_pattern_id)
208 model.delete_global_svn_pattern(delete_pattern_id)
209 except SettingNotFound:
209 except SettingNotFound:
210 log.exception(
210 log.exception(
211 'Failed to delete svn_pattern with id %s', delete_pattern_id)
211 'Failed to delete svn_pattern with id %s', delete_pattern_id)
212 raise HTTPNotFound()
212 raise HTTPNotFound()
213
213
214 Session().commit()
214 Session().commit()
215 return True
215 return True
216
216
217 @LoginRequired()
217 @LoginRequired()
218 @HasPermissionAllDecorator('hg.admin')
218 @HasPermissionAllDecorator('hg.admin')
219 def settings_mapping(self):
219 def settings_mapping(self):
220 c = self.load_default_context()
220 c = self.load_default_context()
221 c.active = 'mapping'
221 c.active = 'mapping'
222 c.storage_path = get_rhodecode_repo_store_path()
222 c.storage_path = get_rhodecode_repo_store_path()
223 data = render('rhodecode:templates/admin/settings/settings.mako',
223 data = render('rhodecode:templates/admin/settings/settings.mako',
224 self._get_template_context(c), self.request)
224 self._get_template_context(c), self.request)
225 html = formencode.htmlfill.render(
225 html = formencode.htmlfill.render(
226 data,
226 data,
227 defaults=self._form_defaults(),
227 defaults=self._form_defaults(),
228 encoding="UTF-8",
228 encoding="UTF-8",
229 force_defaults=False
229 force_defaults=False
230 )
230 )
231 return Response(html)
231 return Response(html)
232
232
233 @LoginRequired()
233 @LoginRequired()
234 @HasPermissionAllDecorator('hg.admin')
234 @HasPermissionAllDecorator('hg.admin')
235 @CSRFRequired()
235 @CSRFRequired()
236 def settings_mapping_update(self):
236 def settings_mapping_create(self):
237 _ = self.request.translate
237 _ = self.request.translate
238 c = self.load_default_context()
238 c = self.load_default_context()
239 c.active = 'mapping'
239 c.active = 'mapping'
240 rm_obsolete = self.request.POST.get('destroy', False)
241 invalidate_cache = self.request.POST.get('invalidate', False)
240 invalidate_cache = self.request.POST.get('invalidate', False)
242 log.debug('rescanning repo location with destroy obsolete=%s', rm_obsolete)
241 log.debug('rescanning repo location')
243
242
244 if invalidate_cache:
243 if invalidate_cache:
245 log.debug('invalidating all repositories cache')
244 log.debug('invalidating all repositories cache')
246 for repo in Repository.get_all():
245 for repo in Repository.get_all():
247 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
246 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
248
247
249 filesystem_repos = ScmModel().repo_scan()
248 filesystem_repos = ScmModel().repo_scan()
250 added, removed = repo2db_mapper(filesystem_repos, rm_obsolete, force_hooks_rebuild=True)
249 added, errors = repo2db_mapper(filesystem_repos, force_hooks_rebuild=True)
251 PermissionModel().trigger_permission_flush()
250 PermissionModel().trigger_permission_flush()
252
251
253 def _repr(rm_repo):
252 def _repr(rm_repo):
254 return ', '.join(map(safe_str, rm_repo)) or '-'
253 return ', '.join(map(safe_str, rm_repo)) or '-'
255
254
256 h.flash(_('Repositories successfully '
255 if errors:
257 'rescanned added: %s ; removed: %s') %
256 h.flash(_('Errors during scan: {}').format(_repr(errors)), category='error')
258 (_repr(added), _repr(removed)),
257
259 category='success')
258 h.flash(_('Repositories successfully scanned: Added: {}').format(_repr(added)), category='success')
259 raise HTTPFound(h.route_path('admin_settings_mapping'))
260
261 @LoginRequired()
262 @HasPermissionAllDecorator('hg.admin')
263 @CSRFRequired()
264 def settings_mapping_cleanup(self):
265 _ = self.request.translate
266 c = self.load_default_context()
267 c.active = 'mapping'
268 log.debug('running repo storage cleanup')
269
270 removed, errors = repo2db_cleanup()
271 PermissionModel().trigger_permission_flush()
272
273 def _repr(rm_repo):
274 return ', '.join(map(safe_str, rm_repo)) or '-'
275
276 h.flash(_('Repository storage cleanup finished. Errors: {}, Removed: {}').format(_repr(errors), _repr(removed)), category='success')
260 raise HTTPFound(h.route_path('admin_settings_mapping'))
277 raise HTTPFound(h.route_path('admin_settings_mapping'))
261
278
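With the remap-and-rescan logic now split, the "add missing repos" and "remove obsolete repos" paths can be driven independently of the view layer. The following is a minimal sketch only, assuming an initialized RhodeCode environment and the call signatures visible in the diff above (repo2db_mapper(filesystem_repos, force_hooks_rebuild=True) returning (added, errors) and repo2db_cleanup() returning (removed, errors)); the helper name run_storage_sync is illustrative, not part of the codebase.

# Illustrative sketch only -- assumes an initialized RhodeCode environment
# and the helper signatures used in the views above.
from rhodecode.lib.utils import repo2db_mapper, repo2db_cleanup
from rhodecode.model.scm import ScmModel

def run_storage_sync(do_cleanup=False):
    # scan the repo store on disk and register anything missing from the DB
    filesystem_repos = ScmModel().repo_scan()
    added, scan_errors = repo2db_mapper(filesystem_repos, force_hooks_rebuild=True)

    removed, cleanup_errors = [], []
    if do_cleanup:
        # drop DB entries whose repositories no longer exist on disk
        removed, cleanup_errors = repo2db_cleanup()

    return {
        'added': added,
        'removed': removed,
        'errors': list(scan_errors) + list(cleanup_errors),
    }

Keeping the two operations separate mirrors the split endpoints: a scan can be run routinely, while the destructive cleanup is an explicit, opt-in action.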
262 @LoginRequired()
279 @LoginRequired()
263 @HasPermissionAllDecorator('hg.admin')
280 @HasPermissionAllDecorator('hg.admin')
264 def settings_global(self):
281 def settings_global(self):
265 c = self.load_default_context()
282 c = self.load_default_context()
266 c.active = 'global'
283 c.active = 'global'
267 c.personal_repo_group_default_pattern = RepoGroupModel()\
284 c.personal_repo_group_default_pattern = RepoGroupModel()\
268 .get_personal_group_name_pattern()
285 .get_personal_group_name_pattern()
269
286
270 data = render('rhodecode:templates/admin/settings/settings.mako',
287 data = render('rhodecode:templates/admin/settings/settings.mako',
271 self._get_template_context(c), self.request)
288 self._get_template_context(c), self.request)
272 html = formencode.htmlfill.render(
289 html = formencode.htmlfill.render(
273 data,
290 data,
274 defaults=self._form_defaults(),
291 defaults=self._form_defaults(),
275 encoding="UTF-8",
292 encoding="UTF-8",
276 force_defaults=False
293 force_defaults=False
277 )
294 )
278 return Response(html)
295 return Response(html)
279
296
280 @LoginRequired()
297 @LoginRequired()
281 @HasPermissionAllDecorator('hg.admin')
298 @HasPermissionAllDecorator('hg.admin')
282 @CSRFRequired()
299 @CSRFRequired()
283 def settings_global_update(self):
300 def settings_global_update(self):
284 _ = self.request.translate
301 _ = self.request.translate
285 c = self.load_default_context()
302 c = self.load_default_context()
286 c.active = 'global'
303 c.active = 'global'
287 c.personal_repo_group_default_pattern = RepoGroupModel()\
304 c.personal_repo_group_default_pattern = RepoGroupModel()\
288 .get_personal_group_name_pattern()
305 .get_personal_group_name_pattern()
289 application_form = ApplicationSettingsForm(self.request.translate)()
306 application_form = ApplicationSettingsForm(self.request.translate)()
290 try:
307 try:
291 form_result = application_form.to_python(dict(self.request.POST))
308 form_result = application_form.to_python(dict(self.request.POST))
292 except formencode.Invalid as errors:
309 except formencode.Invalid as errors:
293 h.flash(
310 h.flash(
294 _("Some form inputs contain invalid data."),
311 _("Some form inputs contain invalid data."),
295 category='error')
312 category='error')
296 data = render('rhodecode:templates/admin/settings/settings.mako',
313 data = render('rhodecode:templates/admin/settings/settings.mako',
297 self._get_template_context(c), self.request)
314 self._get_template_context(c), self.request)
298 html = formencode.htmlfill.render(
315 html = formencode.htmlfill.render(
299 data,
316 data,
300 defaults=errors.value,
317 defaults=errors.value,
301 errors=errors.unpack_errors() or {},
318 errors=errors.unpack_errors() or {},
302 prefix_error=False,
319 prefix_error=False,
303 encoding="UTF-8",
320 encoding="UTF-8",
304 force_defaults=False
321 force_defaults=False
305 )
322 )
306 return Response(html)
323 return Response(html)
307
324
308 settings = [
325 settings = [
309 ('title', 'rhodecode_title', 'unicode'),
326 ('title', 'rhodecode_title', 'unicode'),
310 ('realm', 'rhodecode_realm', 'unicode'),
327 ('realm', 'rhodecode_realm', 'unicode'),
311 ('pre_code', 'rhodecode_pre_code', 'unicode'),
328 ('pre_code', 'rhodecode_pre_code', 'unicode'),
312 ('post_code', 'rhodecode_post_code', 'unicode'),
329 ('post_code', 'rhodecode_post_code', 'unicode'),
313 ('captcha_public_key', 'rhodecode_captcha_public_key', 'unicode'),
330 ('captcha_public_key', 'rhodecode_captcha_public_key', 'unicode'),
314 ('captcha_private_key', 'rhodecode_captcha_private_key', 'unicode'),
331 ('captcha_private_key', 'rhodecode_captcha_private_key', 'unicode'),
315 ('create_personal_repo_group', 'rhodecode_create_personal_repo_group', 'bool'),
332 ('create_personal_repo_group', 'rhodecode_create_personal_repo_group', 'bool'),
316 ('personal_repo_group_pattern', 'rhodecode_personal_repo_group_pattern', 'unicode'),
333 ('personal_repo_group_pattern', 'rhodecode_personal_repo_group_pattern', 'unicode'),
317 ]
334 ]
318
335
319 try:
336 try:
320 for setting, form_key, type_ in settings:
337 for setting, form_key, type_ in settings:
321 sett = SettingsModel().create_or_update_setting(
338 sett = SettingsModel().create_or_update_setting(
322 setting, form_result[form_key], type_)
339 setting, form_result[form_key], type_)
323 Session().add(sett)
340 Session().add(sett)
324
341
325 Session().commit()
342 Session().commit()
326 SettingsModel().invalidate_settings_cache()
343 SettingsModel().invalidate_settings_cache()
327 h.flash(_('Updated application settings'), category='success')
344 h.flash(_('Updated application settings'), category='success')
328 except Exception:
345 except Exception:
329 log.exception("Exception while updating application settings")
346 log.exception("Exception while updating application settings")
330 h.flash(
347 h.flash(
331 _('Error occurred during updating application settings'),
348 _('Error occurred during updating application settings'),
332 category='error')
349 category='error')
333
350
334 raise HTTPFound(h.route_path('admin_settings_global'))
351 raise HTTPFound(h.route_path('admin_settings_global'))
335
352
336 @LoginRequired()
353 @LoginRequired()
337 @HasPermissionAllDecorator('hg.admin')
354 @HasPermissionAllDecorator('hg.admin')
338 def settings_visual(self):
355 def settings_visual(self):
339 c = self.load_default_context()
356 c = self.load_default_context()
340 c.active = 'visual'
357 c.active = 'visual'
341
358
342 data = render('rhodecode:templates/admin/settings/settings.mako',
359 data = render('rhodecode:templates/admin/settings/settings.mako',
343 self._get_template_context(c), self.request)
360 self._get_template_context(c), self.request)
344 html = formencode.htmlfill.render(
361 html = formencode.htmlfill.render(
345 data,
362 data,
346 defaults=self._form_defaults(),
363 defaults=self._form_defaults(),
347 encoding="UTF-8",
364 encoding="UTF-8",
348 force_defaults=False
365 force_defaults=False
349 )
366 )
350 return Response(html)
367 return Response(html)
351
368
352 @LoginRequired()
369 @LoginRequired()
353 @HasPermissionAllDecorator('hg.admin')
370 @HasPermissionAllDecorator('hg.admin')
354 @CSRFRequired()
371 @CSRFRequired()
355 def settings_visual_update(self):
372 def settings_visual_update(self):
356 _ = self.request.translate
373 _ = self.request.translate
357 c = self.load_default_context()
374 c = self.load_default_context()
358 c.active = 'visual'
375 c.active = 'visual'
359 application_form = ApplicationVisualisationForm(self.request.translate)()
376 application_form = ApplicationVisualisationForm(self.request.translate)()
360 try:
377 try:
361 form_result = application_form.to_python(dict(self.request.POST))
378 form_result = application_form.to_python(dict(self.request.POST))
362 except formencode.Invalid as errors:
379 except formencode.Invalid as errors:
363 h.flash(
380 h.flash(
364 _("Some form inputs contain invalid data."),
381 _("Some form inputs contain invalid data."),
365 category='error')
382 category='error')
366 data = render('rhodecode:templates/admin/settings/settings.mako',
383 data = render('rhodecode:templates/admin/settings/settings.mako',
367 self._get_template_context(c), self.request)
384 self._get_template_context(c), self.request)
368 html = formencode.htmlfill.render(
385 html = formencode.htmlfill.render(
369 data,
386 data,
370 defaults=errors.value,
387 defaults=errors.value,
371 errors=errors.unpack_errors() or {},
388 errors=errors.unpack_errors() or {},
372 prefix_error=False,
389 prefix_error=False,
373 encoding="UTF-8",
390 encoding="UTF-8",
374 force_defaults=False
391 force_defaults=False
375 )
392 )
376 return Response(html)
393 return Response(html)
377
394
378 try:
395 try:
379 settings = [
396 settings = [
380 ('show_public_icon', 'rhodecode_show_public_icon', 'bool'),
397 ('show_public_icon', 'rhodecode_show_public_icon', 'bool'),
381 ('show_private_icon', 'rhodecode_show_private_icon', 'bool'),
398 ('show_private_icon', 'rhodecode_show_private_icon', 'bool'),
382 ('stylify_metatags', 'rhodecode_stylify_metatags', 'bool'),
399 ('stylify_metatags', 'rhodecode_stylify_metatags', 'bool'),
383 ('repository_fields', 'rhodecode_repository_fields', 'bool'),
400 ('repository_fields', 'rhodecode_repository_fields', 'bool'),
384 ('dashboard_items', 'rhodecode_dashboard_items', 'int'),
401 ('dashboard_items', 'rhodecode_dashboard_items', 'int'),
385 ('admin_grid_items', 'rhodecode_admin_grid_items', 'int'),
402 ('admin_grid_items', 'rhodecode_admin_grid_items', 'int'),
386 ('show_version', 'rhodecode_show_version', 'bool'),
403 ('show_version', 'rhodecode_show_version', 'bool'),
387 ('use_gravatar', 'rhodecode_use_gravatar', 'bool'),
404 ('use_gravatar', 'rhodecode_use_gravatar', 'bool'),
388 ('markup_renderer', 'rhodecode_markup_renderer', 'unicode'),
405 ('markup_renderer', 'rhodecode_markup_renderer', 'unicode'),
389 ('gravatar_url', 'rhodecode_gravatar_url', 'unicode'),
406 ('gravatar_url', 'rhodecode_gravatar_url', 'unicode'),
390 ('clone_uri_tmpl', 'rhodecode_clone_uri_tmpl', 'unicode'),
407 ('clone_uri_tmpl', 'rhodecode_clone_uri_tmpl', 'unicode'),
391 ('clone_uri_id_tmpl', 'rhodecode_clone_uri_id_tmpl', 'unicode'),
408 ('clone_uri_id_tmpl', 'rhodecode_clone_uri_id_tmpl', 'unicode'),
392 ('clone_uri_ssh_tmpl', 'rhodecode_clone_uri_ssh_tmpl', 'unicode'),
409 ('clone_uri_ssh_tmpl', 'rhodecode_clone_uri_ssh_tmpl', 'unicode'),
393 ('support_url', 'rhodecode_support_url', 'unicode'),
410 ('support_url', 'rhodecode_support_url', 'unicode'),
394 ('show_revision_number', 'rhodecode_show_revision_number', 'bool'),
411 ('show_revision_number', 'rhodecode_show_revision_number', 'bool'),
395 ('show_sha_length', 'rhodecode_show_sha_length', 'int'),
412 ('show_sha_length', 'rhodecode_show_sha_length', 'int'),
396 ]
413 ]
397 for setting, form_key, type_ in settings:
414 for setting, form_key, type_ in settings:
398 sett = SettingsModel().create_or_update_setting(
415 sett = SettingsModel().create_or_update_setting(
399 setting, form_result[form_key], type_)
416 setting, form_result[form_key], type_)
400 Session().add(sett)
417 Session().add(sett)
401
418
402 Session().commit()
419 Session().commit()
403 SettingsModel().invalidate_settings_cache()
420 SettingsModel().invalidate_settings_cache()
404 h.flash(_('Updated visualisation settings'), category='success')
421 h.flash(_('Updated visualisation settings'), category='success')
405 except Exception:
422 except Exception:
406 log.exception("Exception updating visualization settings")
423 log.exception("Exception updating visualization settings")
407 h.flash(_('Error occurred while updating '
424 h.flash(_('Error occurred while updating '
408 'visualisation settings'),
425 'visualisation settings'),
409 category='error')
426 category='error')
410
427
411 raise HTTPFound(h.route_path('admin_settings_visual'))
428 raise HTTPFound(h.route_path('admin_settings_visual'))
412
429
413 @LoginRequired()
430 @LoginRequired()
414 @HasPermissionAllDecorator('hg.admin')
431 @HasPermissionAllDecorator('hg.admin')
415 def settings_issuetracker(self):
432 def settings_issuetracker(self):
416 c = self.load_default_context()
433 c = self.load_default_context()
417 c.active = 'issuetracker'
434 c.active = 'issuetracker'
418 defaults = c.rc_config
435 defaults = c.rc_config
419
436
420 entry_key = 'rhodecode_issuetracker_pat_'
437 entry_key = 'rhodecode_issuetracker_pat_'
421
438
422 c.issuetracker_entries = {}
439 c.issuetracker_entries = {}
423 for k, v in defaults.items():
440 for k, v in defaults.items():
424 if k.startswith(entry_key):
441 if k.startswith(entry_key):
425 uid = k[len(entry_key):]
442 uid = k[len(entry_key):]
426 c.issuetracker_entries[uid] = None
443 c.issuetracker_entries[uid] = None
427
444
428 for uid in c.issuetracker_entries:
445 for uid in c.issuetracker_entries:
429 c.issuetracker_entries[uid] = AttributeDict({
446 c.issuetracker_entries[uid] = AttributeDict({
430 'pat': defaults.get('rhodecode_issuetracker_pat_' + uid),
447 'pat': defaults.get('rhodecode_issuetracker_pat_' + uid),
431 'url': defaults.get('rhodecode_issuetracker_url_' + uid),
448 'url': defaults.get('rhodecode_issuetracker_url_' + uid),
432 'pref': defaults.get('rhodecode_issuetracker_pref_' + uid),
449 'pref': defaults.get('rhodecode_issuetracker_pref_' + uid),
433 'desc': defaults.get('rhodecode_issuetracker_desc_' + uid),
450 'desc': defaults.get('rhodecode_issuetracker_desc_' + uid),
434 })
451 })
435
452
436 return self._get_template_context(c)
453 return self._get_template_context(c)
437
454
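For illustration, a small self-contained sketch (plain Python, hypothetical keys) of the prefix-stripping done in settings_issuetracker above, which turns flat 'rhodecode_issuetracker_*_<uid>' settings into per-uid entries:

# Hedged sketch, not the production code: collect per-uid issue tracker
# entries from flat settings keyed as 'rhodecode_issuetracker_<field>_<uid>'.
defaults = {  # hypothetical settings dict
    'rhodecode_issuetracker_pat_abc123': r'#(\d+)',
    'rhodecode_issuetracker_url_abc123': 'https://tracker.example.com/issue/${id}',
    'rhodecode_issuetracker_pref_abc123': 'EXAMPLE',
    'rhodecode_issuetracker_desc_abc123': 'example tracker',
}

entry_key = 'rhodecode_issuetracker_pat_'
entries = {}
for k in defaults:
    if k.startswith(entry_key):
        uid = k[len(entry_key):]  # everything after the 'pat' prefix is the uid
        entries[uid] = {
            'pat': defaults.get('rhodecode_issuetracker_pat_' + uid),
            'url': defaults.get('rhodecode_issuetracker_url_' + uid),
            'pref': defaults.get('rhodecode_issuetracker_pref_' + uid),
            'desc': defaults.get('rhodecode_issuetracker_desc_' + uid),
        }

# entries == {'abc123': {'pat': ..., 'url': ..., 'pref': 'EXAMPLE', 'desc': 'example tracker'}}
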
438 @LoginRequired()
455 @LoginRequired()
439 @HasPermissionAllDecorator('hg.admin')
456 @HasPermissionAllDecorator('hg.admin')
440 @CSRFRequired()
457 @CSRFRequired()
441 def settings_issuetracker_test(self):
458 def settings_issuetracker_test(self):
442 error_container = []
459 error_container = []
443
460
444 urlified_commit = h.urlify_commit_message(
461 urlified_commit = h.urlify_commit_message(
445 self.request.POST.get('test_text', ''),
462 self.request.POST.get('test_text', ''),
446 'repo_group/test_repo1', error_container=error_container)
463 'repo_group/test_repo1', error_container=error_container)
447 if error_container:
464 if error_container:
448 def converter(inp):
465 def converter(inp):
449 return h.html_escape(inp)
466 return h.html_escape(inp)
450
467
451 return 'ERRORS: ' + '\n'.join(map(converter, error_container))
468 return 'ERRORS: ' + '\n'.join(map(converter, error_container))
452
469
453 return urlified_commit
470 return urlified_commit
454
471
455 @LoginRequired()
472 @LoginRequired()
456 @HasPermissionAllDecorator('hg.admin')
473 @HasPermissionAllDecorator('hg.admin')
457 @CSRFRequired()
474 @CSRFRequired()
458 def settings_issuetracker_update(self):
475 def settings_issuetracker_update(self):
459 _ = self.request.translate
476 _ = self.request.translate
460 self.load_default_context()
477 self.load_default_context()
461 settings_model = IssueTrackerSettingsModel()
478 settings_model = IssueTrackerSettingsModel()
462
479
463 try:
480 try:
464 form = IssueTrackerPatternsForm(self.request.translate)()
481 form = IssueTrackerPatternsForm(self.request.translate)()
465 data = form.to_python(self.request.POST)
482 data = form.to_python(self.request.POST)
466 except formencode.Invalid as errors:
483 except formencode.Invalid as errors:
467 log.exception('Failed to add new pattern')
484 log.exception('Failed to add new pattern')
468 error = errors
485 error = errors
469 h.flash(_(f'Invalid issue tracker pattern: {error}'),
486 h.flash(_(f'Invalid issue tracker pattern: {error}'),
470 category='error')
487 category='error')
471 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
488 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
472
489
473 if data:
490 if data:
474 for uid in data.get('delete_patterns', []):
491 for uid in data.get('delete_patterns', []):
475 settings_model.delete_entries(uid)
492 settings_model.delete_entries(uid)
476
493
477 for pattern in data.get('patterns', []):
494 for pattern in data.get('patterns', []):
478 for setting, value, type_ in pattern:
495 for setting, value, type_ in pattern:
479 sett = settings_model.create_or_update_setting(
496 sett = settings_model.create_or_update_setting(
480 setting, value, type_)
497 setting, value, type_)
481 Session().add(sett)
498 Session().add(sett)
482
499
483 Session().commit()
500 Session().commit()
484
501
485 SettingsModel().invalidate_settings_cache()
502 SettingsModel().invalidate_settings_cache()
486 h.flash(_('Updated issue tracker entries'), category='success')
503 h.flash(_('Updated issue tracker entries'), category='success')
487 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
504 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
488
505
489 @LoginRequired()
506 @LoginRequired()
490 @HasPermissionAllDecorator('hg.admin')
507 @HasPermissionAllDecorator('hg.admin')
491 @CSRFRequired()
508 @CSRFRequired()
492 def settings_issuetracker_delete(self):
509 def settings_issuetracker_delete(self):
493 _ = self.request.translate
510 _ = self.request.translate
494 self.load_default_context()
511 self.load_default_context()
495 uid = self.request.POST.get('uid')
512 uid = self.request.POST.get('uid')
496 try:
513 try:
497 IssueTrackerSettingsModel().delete_entries(uid)
514 IssueTrackerSettingsModel().delete_entries(uid)
498 except Exception:
515 except Exception:
499 log.exception('Failed to delete issue tracker setting %s', uid)
516 log.exception('Failed to delete issue tracker setting %s', uid)
500 raise HTTPNotFound()
517 raise HTTPNotFound()
501
518
502 SettingsModel().invalidate_settings_cache()
519 SettingsModel().invalidate_settings_cache()
503 h.flash(_('Removed issue tracker entry.'), category='success')
520 h.flash(_('Removed issue tracker entry.'), category='success')
504
521
505 return {'deleted': uid}
522 return {'deleted': uid}
506
523
507 @LoginRequired()
524 @LoginRequired()
508 @HasPermissionAllDecorator('hg.admin')
525 @HasPermissionAllDecorator('hg.admin')
509 def settings_email(self):
526 def settings_email(self):
510 c = self.load_default_context()
527 c = self.load_default_context()
511 c.active = 'email'
528 c.active = 'email'
512 c.rhodecode_ini = rhodecode.CONFIG
529 c.rhodecode_ini = rhodecode.CONFIG
513
530
514 data = render('rhodecode:templates/admin/settings/settings.mako',
531 data = render('rhodecode:templates/admin/settings/settings.mako',
515 self._get_template_context(c), self.request)
532 self._get_template_context(c), self.request)
516 html = formencode.htmlfill.render(
533 html = formencode.htmlfill.render(
517 data,
534 data,
518 defaults=self._form_defaults(),
535 defaults=self._form_defaults(),
519 encoding="UTF-8",
536 encoding="UTF-8",
520 force_defaults=False
537 force_defaults=False
521 )
538 )
522 return Response(html)
539 return Response(html)
523
540
524 @LoginRequired()
541 @LoginRequired()
525 @HasPermissionAllDecorator('hg.admin')
542 @HasPermissionAllDecorator('hg.admin')
526 @CSRFRequired()
543 @CSRFRequired()
527 def settings_email_update(self):
544 def settings_email_update(self):
528 _ = self.request.translate
545 _ = self.request.translate
529 c = self.load_default_context()
546 c = self.load_default_context()
530 c.active = 'email'
547 c.active = 'email'
531
548
532 test_email = self.request.POST.get('test_email')
549 test_email = self.request.POST.get('test_email')
533
550
534 if not test_email:
551 if not test_email:
535 h.flash(_('Please enter email address'), category='error')
552 h.flash(_('Please enter email address'), category='error')
536 raise HTTPFound(h.route_path('admin_settings_email'))
553 raise HTTPFound(h.route_path('admin_settings_email'))
537
554
538 email_kwargs = {
555 email_kwargs = {
539 'date': datetime.datetime.now(),
556 'date': datetime.datetime.now(),
540 'user': self._rhodecode_db_user
557 'user': self._rhodecode_db_user
541 }
558 }
542
559
543 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
560 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
544 EmailNotificationModel.TYPE_EMAIL_TEST, **email_kwargs)
561 EmailNotificationModel.TYPE_EMAIL_TEST, **email_kwargs)
545
562
546 recipients = [test_email] if test_email else None
563 recipients = [test_email] if test_email else None
547
564
548 run_task(tasks.send_email, recipients, subject,
565 run_task(tasks.send_email, recipients, subject,
549 email_body_plaintext, email_body)
566 email_body_plaintext, email_body)
550
567
551 h.flash(_('Send email task created'), category='success')
568 h.flash(_('Send email task created'), category='success')
552 raise HTTPFound(h.route_path('admin_settings_email'))
569 raise HTTPFound(h.route_path('admin_settings_email'))
553
570
554 @LoginRequired()
571 @LoginRequired()
555 @HasPermissionAllDecorator('hg.admin')
572 @HasPermissionAllDecorator('hg.admin')
556 def settings_hooks(self):
573 def settings_hooks(self):
557 c = self.load_default_context()
574 c = self.load_default_context()
558 c.active = 'hooks'
575 c.active = 'hooks'
559
576
560 model = SettingsModel()
577 model = SettingsModel()
561 c.hooks = model.get_builtin_hooks()
578 c.hooks = model.get_builtin_hooks()
562 c.custom_hooks = model.get_custom_hooks()
579 c.custom_hooks = model.get_custom_hooks()
563
580
564 data = render('rhodecode:templates/admin/settings/settings.mako',
581 data = render('rhodecode:templates/admin/settings/settings.mako',
565 self._get_template_context(c), self.request)
582 self._get_template_context(c), self.request)
566 html = formencode.htmlfill.render(
583 html = formencode.htmlfill.render(
567 data,
584 data,
568 defaults=self._form_defaults(),
585 defaults=self._form_defaults(),
569 encoding="UTF-8",
586 encoding="UTF-8",
570 force_defaults=False
587 force_defaults=False
571 )
588 )
572 return Response(html)
589 return Response(html)
573
590
574 @LoginRequired()
591 @LoginRequired()
575 @HasPermissionAllDecorator('hg.admin')
592 @HasPermissionAllDecorator('hg.admin')
576 @CSRFRequired()
593 @CSRFRequired()
577 def settings_hooks_update(self):
594 def settings_hooks_update(self):
578 _ = self.request.translate
595 _ = self.request.translate
579 c = self.load_default_context()
596 c = self.load_default_context()
580 c.active = 'hooks'
597 c.active = 'hooks'
581 if c.visual.allow_custom_hooks_settings:
598 if c.visual.allow_custom_hooks_settings:
582 ui_key = self.request.POST.get('new_hook_ui_key')
599 ui_key = self.request.POST.get('new_hook_ui_key')
583 ui_value = self.request.POST.get('new_hook_ui_value')
600 ui_value = self.request.POST.get('new_hook_ui_value')
584
601
585 hook_id = self.request.POST.get('hook_id')
602 hook_id = self.request.POST.get('hook_id')
586 new_hook = False
603 new_hook = False
587
604
588 model = SettingsModel()
605 model = SettingsModel()
589 try:
606 try:
590 if ui_value and ui_key:
607 if ui_value and ui_key:
591 model.create_or_update_hook(ui_key, ui_value)
608 model.create_or_update_hook(ui_key, ui_value)
592 h.flash(_('Added new hook'), category='success')
609 h.flash(_('Added new hook'), category='success')
593 new_hook = True
610 new_hook = True
594 elif hook_id:
611 elif hook_id:
595 RhodeCodeUi.delete(hook_id)
612 RhodeCodeUi.delete(hook_id)
596 Session().commit()
613 Session().commit()
597
614
598 # check for edits
615 # check for edits
599 update = False
616 update = False
600 _d = self.request.POST.dict_of_lists()
617 _d = self.request.POST.dict_of_lists()
601 for k, v in zip(_d.get('hook_ui_key', []),
618 for k, v in zip(_d.get('hook_ui_key', []),
602 _d.get('hook_ui_value_new', [])):
619 _d.get('hook_ui_value_new', [])):
603 model.create_or_update_hook(k, v)
620 model.create_or_update_hook(k, v)
604 update = True
621 update = True
605
622
606 if update and not new_hook:
623 if update and not new_hook:
607 h.flash(_('Updated hooks'), category='success')
624 h.flash(_('Updated hooks'), category='success')
608 Session().commit()
625 Session().commit()
609 except Exception:
626 except Exception:
610 log.exception("Exception during hook creation")
627 log.exception("Exception during hook creation")
611 h.flash(_('Error occurred during hook creation'),
628 h.flash(_('Error occurred during hook creation'),
612 category='error')
629 category='error')
613
630
614 raise HTTPFound(h.route_path('admin_settings_hooks'))
631 raise HTTPFound(h.route_path('admin_settings_hooks'))
615
632
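The edit loop in settings_hooks_update pairs the parallel 'hook_ui_key' and 'hook_ui_value_new' lists posted by the form. A hedged standalone sketch of that pairing, with the WebOb request replaced by a plain dict:

# Hedged sketch: the POST body carries parallel lists of existing hook keys
# and their edited values; zip() pairs them up for create_or_update_hook().
posted = {  # hypothetical result of request.POST.dict_of_lists()
    'hook_ui_key': ['hooks.example_one', 'hooks.example_two'],
    'hook_ui_value_new': ['python:example.module:hook_one', 'python:example.module:hook_two'],
}

for key, value in zip(posted.get('hook_ui_key', []),
                      posted.get('hook_ui_value_new', [])):
    # the view calls model.create_or_update_hook(key, value) here
    print(f'would update hook {key!r} -> {value!r}')
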
616 @LoginRequired()
633 @LoginRequired()
617 @HasPermissionAllDecorator('hg.admin')
634 @HasPermissionAllDecorator('hg.admin')
618 def settings_search(self):
635 def settings_search(self):
619 c = self.load_default_context()
636 c = self.load_default_context()
620 c.active = 'search'
637 c.active = 'search'
621
638
622 c.searcher = searcher_from_config(self.request.registry.settings)
639 c.searcher = searcher_from_config(self.request.registry.settings)
623 c.statistics = c.searcher.statistics(self.request.translate)
640 c.statistics = c.searcher.statistics(self.request.translate)
624
641
625 return self._get_template_context(c)
642 return self._get_template_context(c)
626
643
627 @LoginRequired()
644 @LoginRequired()
628 @HasPermissionAllDecorator('hg.admin')
645 @HasPermissionAllDecorator('hg.admin')
629 def settings_labs(self):
646 def settings_labs(self):
630 c = self.load_default_context()
647 c = self.load_default_context()
631 if not c.labs_active:
648 if not c.labs_active:
632 raise HTTPFound(h.route_path('admin_settings'))
649 raise HTTPFound(h.route_path('admin_settings'))
633
650
634 c.active = 'labs'
651 c.active = 'labs'
635 c.lab_settings = _LAB_SETTINGS
652 c.lab_settings = _LAB_SETTINGS
636
653
637 data = render('rhodecode:templates/admin/settings/settings.mako',
654 data = render('rhodecode:templates/admin/settings/settings.mako',
638 self._get_template_context(c), self.request)
655 self._get_template_context(c), self.request)
639 html = formencode.htmlfill.render(
656 html = formencode.htmlfill.render(
640 data,
657 data,
641 defaults=self._form_defaults(),
658 defaults=self._form_defaults(),
642 encoding="UTF-8",
659 encoding="UTF-8",
643 force_defaults=False
660 force_defaults=False
644 )
661 )
645 return Response(html)
662 return Response(html)
646
663
647 @LoginRequired()
664 @LoginRequired()
648 @HasPermissionAllDecorator('hg.admin')
665 @HasPermissionAllDecorator('hg.admin')
649 @CSRFRequired()
666 @CSRFRequired()
650 def settings_labs_update(self):
667 def settings_labs_update(self):
651 _ = self.request.translate
668 _ = self.request.translate
652 c = self.load_default_context()
669 c = self.load_default_context()
653 c.active = 'labs'
670 c.active = 'labs'
654
671
655 application_form = LabsSettingsForm(self.request.translate)()
672 application_form = LabsSettingsForm(self.request.translate)()
656 try:
673 try:
657 form_result = application_form.to_python(dict(self.request.POST))
674 form_result = application_form.to_python(dict(self.request.POST))
658 except formencode.Invalid as errors:
675 except formencode.Invalid as errors:
659 h.flash(
676 h.flash(
660 _("Some form inputs contain invalid data."),
677 _("Some form inputs contain invalid data."),
661 category='error')
678 category='error')
662 data = render('rhodecode:templates/admin/settings/settings.mako',
679 data = render('rhodecode:templates/admin/settings/settings.mako',
663 self._get_template_context(c), self.request)
680 self._get_template_context(c), self.request)
664 html = formencode.htmlfill.render(
681 html = formencode.htmlfill.render(
665 data,
682 data,
666 defaults=errors.value,
683 defaults=errors.value,
667 errors=errors.unpack_errors() or {},
684 errors=errors.unpack_errors() or {},
668 prefix_error=False,
685 prefix_error=False,
669 encoding="UTF-8",
686 encoding="UTF-8",
670 force_defaults=False
687 force_defaults=False
671 )
688 )
672 return Response(html)
689 return Response(html)
673
690
674 try:
691 try:
675 session = Session()
692 session = Session()
676 for setting in _LAB_SETTINGS:
693 for setting in _LAB_SETTINGS:
677 setting_name = setting.key[len('rhodecode_'):]
694 setting_name = setting.key[len('rhodecode_'):]
678 sett = SettingsModel().create_or_update_setting(
695 sett = SettingsModel().create_or_update_setting(
679 setting_name, form_result[setting.key], setting.type)
696 setting_name, form_result[setting.key], setting.type)
680 session.add(sett)
697 session.add(sett)
681
698
682 except Exception:
699 except Exception:
683 log.exception('Exception while updating lab settings')
700 log.exception('Exception while updating lab settings')
684 h.flash(_('Error occurred while updating labs settings'),
701 h.flash(_('Error occurred while updating labs settings'),
685 category='error')
702 category='error')
686 else:
703 else:
687 Session().commit()
704 Session().commit()
688 SettingsModel().invalidate_settings_cache()
705 SettingsModel().invalidate_settings_cache()
689 h.flash(_('Updated Labs settings'), category='success')
706 h.flash(_('Updated Labs settings'), category='success')
690 raise HTTPFound(h.route_path('admin_settings_labs'))
707 raise HTTPFound(h.route_path('admin_settings_labs'))
691
708
692 data = render('rhodecode:templates/admin/settings/settings.mako',
709 data = render('rhodecode:templates/admin/settings/settings.mako',
693 self._get_template_context(c), self.request)
710 self._get_template_context(c), self.request)
694 html = formencode.htmlfill.render(
711 html = formencode.htmlfill.render(
695 data,
712 data,
696 defaults=self._form_defaults(),
713 defaults=self._form_defaults(),
697 encoding="UTF-8",
714 encoding="UTF-8",
698 force_defaults=False
715 force_defaults=False
699 )
716 )
700 return Response(html)
717 return Response(html)
701
718
702
719
703 # :param key: name of the setting including the 'rhodecode_' prefix
720 # :param key: name of the setting including the 'rhodecode_' prefix
704 # :param type: the RhodeCodeSetting type to use.
721 # :param type: the RhodeCodeSetting type to use.
705 # :param group: the i18ned group in which we should display this setting
722 # :param group: the i18ned group in which we should display this setting
706 # :param label: the i18ned label we should display for this setting
723 # :param label: the i18ned label we should display for this setting
707 # :param help: the i18ned help we should display for this setting
724 # :param help: the i18ned help we should display for this setting
708 LabSetting = collections.namedtuple(
725 LabSetting = collections.namedtuple(
709 'LabSetting', ('key', 'type', 'group', 'label', 'help'))
726 'LabSetting', ('key', 'type', 'group', 'label', 'help'))
710
727
711
728
712 # This list has to be kept in sync with the form
729 # This list has to be kept in sync with the form
713 # rhodecode.model.forms.LabsSettingsForm.
730 # rhodecode.model.forms.LabsSettingsForm.
714 _LAB_SETTINGS = [
731 _LAB_SETTINGS = [
715
732
716 ]
733 ]
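The commented fields above describe the LabSetting shape; a hedged, purely hypothetical example of what an entry in _LAB_SETTINGS could look like (the list ships empty and must stay in sync with rhodecode.model.forms.LabsSettingsForm):

# Hypothetical entry for illustration only; the real strings would be
# wrapped for i18n and a matching field added to LabsSettingsForm.
_EXAMPLE_LAB_SETTINGS = [
    LabSetting(
        key='rhodecode_example_feature',    # includes the 'rhodecode_' prefix
        type='bool',                        # RhodeCodeSetting type to use
        group='Experimental',               # group shown in the labs UI
        label='Enable example feature',     # label shown next to the input
        help='Toggles a hypothetical experimental feature.',
    ),
]
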
@@ -1,895 +1,928 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Utilities library for RhodeCode
20 Utilities library for RhodeCode
21 """
21 """
22
22
23 import datetime
23 import datetime
24 import importlib
24 import importlib
25
25
26 import decorator
26 import decorator
27 import logging
27 import logging
28 import os
28 import os
29 import re
29 import re
30 import sys
30 import sys
31 import shutil
31 import shutil
32 import socket
32 import socket
33 import tempfile
33 import tempfile
34 import traceback
34 import traceback
35 import tarfile
35 import tarfile
36
36
37 from functools import wraps
37 from functools import wraps
38 from os.path import join as jn
38 from os.path import join as jn
39
39
40 import paste
40 import paste
41 import pkg_resources
41 import pkg_resources
42 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
42 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
43
43
44 from mako import exceptions
44 from mako import exceptions
45
45
46 import rhodecode
46 import rhodecode
47 from rhodecode import ConfigGet
47 from rhodecode import ConfigGet
48 from rhodecode.lib.exceptions import HTTPBranchProtected, HTTPLockedRepo, ClientNotSupported
48 from rhodecode.lib.exceptions import HTTPBranchProtected, HTTPLockedRepo, ClientNotSupported
49 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
49 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
50 from rhodecode.lib.type_utils import AttributeDict
50 from rhodecode.lib.type_utils import AttributeDict
51 from rhodecode.lib.str_utils import safe_bytes, safe_str
51 from rhodecode.lib.str_utils import safe_bytes, safe_str
52 from rhodecode.lib.vcs.backends.base import Config
52 from rhodecode.lib.vcs.backends.base import Config
53 from rhodecode.lib.vcs.exceptions import VCSError
53 from rhodecode.lib.vcs.exceptions import VCSError
54 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
54 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
55 from rhodecode.lib.ext_json import sjson as json
55 from rhodecode.lib.ext_json import sjson as json
56 from rhodecode.model import meta
56 from rhodecode.model import meta
57 from rhodecode.model.db import (
57 from rhodecode.model.db import (
58 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
58 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
59 from rhodecode.model.meta import Session
59 from rhodecode.model.meta import Session
60
60
61
61
62 log = logging.getLogger(__name__)
62 log = logging.getLogger(__name__)
63
63
64 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
64 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
65
65
66 # String which contains characters that are not allowed in slug names for
66 # String which contains characters that are not allowed in slug names for
67 # repositories or repository groups. It is properly escaped to use it in
67 # repositories or repository groups. It is properly escaped to use it in
68 # regular expressions.
68 # regular expressions.
69 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
69 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
70
70
71 # Regex that matches forbidden characters in repo/group slugs.
71 # Regex that matches forbidden characters in repo/group slugs.
72 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
72 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
73
73
74 # Regex that matches allowed characters in repo/group slugs.
74 # Regex that matches allowed characters in repo/group slugs.
75 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
75 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
76
76
77 # Regex that matches whole repo/group slugs.
77 # Regex that matches whole repo/group slugs.
78 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
78 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
79
79
80 _license_cache = None
80 _license_cache = None
81
81
82
82
83 def adopt_for_celery(func):
83 def adopt_for_celery(func):
84 """
84 """
85 Decorator designed to adopt hooks (from rhodecode.lib.hooks_base)
85 Decorator designed to adopt hooks (from rhodecode.lib.hooks_base)
86 for further usage as a celery tasks.
86 for further usage as a celery tasks.
87 """
87 """
88 @wraps(func)
88 @wraps(func)
89 def wrapper(extras):
89 def wrapper(extras):
90 extras = AttributeDict(extras)
90 extras = AttributeDict(extras)
91
91
92 try:
92 try:
93 # HooksResponse implements to_json method which must be used there.
93 # HooksResponse implements to_json method which must be used there.
94 return func(extras).to_json()
94 return func(extras).to_json()
95 except HTTPBranchProtected as error:
95 except HTTPBranchProtected as error:
96 # Those special cases don't need error reporting. It's a case of
96 # Those special cases don't need error reporting. It's a case of
97 # locked repo or protected branch
97 # locked repo or protected branch
98 error_args = error.args
98 error_args = error.args
99 return {
99 return {
100 'status': error.code,
100 'status': error.code,
101 'output': error.explanation,
101 'output': error.explanation,
102 'exception': type(error).__name__,
102 'exception': type(error).__name__,
103 'exception_args': error_args,
103 'exception_args': error_args,
104 'exception_traceback': '',
104 'exception_traceback': '',
105 }
105 }
106 except ClientNotSupported as error:
106 except ClientNotSupported as error:
107 # Those special cases don't need error reporting. It's a case of
107 # Those special cases don't need error reporting. It's a case of
108 # locked repo or protected branch
108 # locked repo or protected branch
109 error_args = error.args
109 error_args = error.args
110 return {
110 return {
111 'status': error.code,
111 'status': error.code,
112 'output': error.explanation,
112 'output': error.explanation,
113 'exception': type(error).__name__,
113 'exception': type(error).__name__,
114 'exception_args': error_args,
114 'exception_args': error_args,
115 'exception_traceback': '',
115 'exception_traceback': '',
116 }
116 }
117 except HTTPLockedRepo as error:
117 except HTTPLockedRepo as error:
118 # Those special cases don't need error reporting. It's a case of
118 # Those special cases don't need error reporting. It's a case of
119 # locked repo or protected branch
119 # locked repo or protected branch
120 error_args = error.args
120 error_args = error.args
121 return {
121 return {
122 'status': error.code,
122 'status': error.code,
123 'output': error.explanation,
123 'output': error.explanation,
124 'exception': type(error).__name__,
124 'exception': type(error).__name__,
125 'exception_args': error_args,
125 'exception_args': error_args,
126 'exception_traceback': '',
126 'exception_traceback': '',
127 }
127 }
128 except Exception as e:
128 except Exception as e:
129 return {
129 return {
130 'status': 128,
130 'status': 128,
131 'output': '',
131 'output': '',
132 'exception': type(e).__name__,
132 'exception': type(e).__name__,
133 'exception_args': e.args,
133 'exception_args': e.args,
134 'exception_traceback': traceback.format_exc(),
134 'exception_traceback': traceback.format_exc(),
135 }
135 }
136 return wrapper
136 return wrapper
137
137
138
138
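A hedged sketch of what adopt_for_celery provides to a wrapped hook: the wrapper accepts a plain dict of extras, converts it to an AttributeDict, and always returns a JSON-serialisable dict, with exceptions translated into a status/exception payload. The hook and response class below are made up for illustration:

# Hypothetical hook for illustration; real hooks live in
# rhodecode.lib.hooks_base and return objects exposing .to_json().
class FakeHookResponse:
    def __init__(self, status, output):
        self.status = status
        self.output = output

    def to_json(self):
        return {'status': self.status, 'output': self.output}


@adopt_for_celery
def example_hook(extras):
    # extras arrives as an AttributeDict, so attribute access works
    return FakeHookResponse(0, f'processed repo {extras.repository}')


result = example_hook({'repository': 'some/repo'})
# -> {'status': 0, 'output': 'processed repo some/repo'}
# an exception inside the hook would instead yield status 128 with the
# exception name, args and traceback captured in the returned dict
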
139 def repo_name_slug(value):
139 def repo_name_slug(value):
140 """
140 """
141 Return a slug of the repository name.
141 Return a slug of the repository name.
142 This function is called on each creation/modification
142 This function is called on each creation/modification
143 of a repository to prevent bad names in the repo.
143 of a repository to prevent bad names in the repo.
144 """
144 """
145
145
146 replacement_char = '-'
146 replacement_char = '-'
147
147
148 slug = strip_tags(value)
148 slug = strip_tags(value)
149 slug = convert_accented_entities(slug)
149 slug = convert_accented_entities(slug)
150 slug = convert_misc_entities(slug)
150 slug = convert_misc_entities(slug)
151
151
152 slug = SLUG_BAD_CHAR_RE.sub('', slug)
152 slug = SLUG_BAD_CHAR_RE.sub('', slug)
153 slug = re.sub(r'[\s]+', '-', slug)
153 slug = re.sub(r'[\s]+', '-', slug)
154 slug = collapse(slug, replacement_char)
154 slug = collapse(slug, replacement_char)
155
155
156 return slug
156 return slug
157
157
158
158
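Roughly, the pipeline above strips markup and entities, drops forbidden characters, and collapses whitespace into dashes; a few hedged, illustrative expectations (exact output depends on the webhelpers2 conversions applied first):

# Illustrative expectations only, not authoritative test cases:
#   repo_name_slug('My Repo Name')  -> 'My-Repo-Name'    (whitespace collapsed to '-')
#   repo_name_slug('bad:*chars?')   -> 'badchars'        (forbidden characters dropped)
#   repo_name_slug('a    b')        -> 'a-b'             (runs of separators collapsed)
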
159 #==============================================================================
159 #==============================================================================
160 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
160 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
161 #==============================================================================
161 #==============================================================================
162 def get_repo_slug(request):
162 def get_repo_slug(request):
163 _repo = ''
163 _repo = ''
164
164
165 if hasattr(request, 'db_repo_name'):
165 if hasattr(request, 'db_repo_name'):
166 # if our request has set a db reference, use it for the name; this
166 # if our request has set a db reference, use it for the name; this
167 # translates the example.com/_<id> into proper repo names
167 # translates the example.com/_<id> into proper repo names
168 _repo = request.db_repo_name
168 _repo = request.db_repo_name
169 elif getattr(request, 'matchdict', None):
169 elif getattr(request, 'matchdict', None):
170 # pyramid
170 # pyramid
171 _repo = request.matchdict.get('repo_name')
171 _repo = request.matchdict.get('repo_name')
172
172
173 if _repo:
173 if _repo:
174 _repo = _repo.rstrip('/')
174 _repo = _repo.rstrip('/')
175 return _repo
175 return _repo
176
176
177
177
178 def get_repo_group_slug(request):
178 def get_repo_group_slug(request):
179 _group = ''
179 _group = ''
180 if hasattr(request, 'db_repo_group'):
180 if hasattr(request, 'db_repo_group'):
181 # if our request has set a db reference, use it for the name; this
181 # if our request has set a db reference, use it for the name; this
182 # translates the example.com/_<id> into proper repo group names
182 # translates the example.com/_<id> into proper repo group names
183 _group = request.db_repo_group.group_name
183 _group = request.db_repo_group.group_name
184 elif getattr(request, 'matchdict', None):
184 elif getattr(request, 'matchdict', None):
185 # pyramid
185 # pyramid
186 _group = request.matchdict.get('repo_group_name')
186 _group = request.matchdict.get('repo_group_name')
187
187
188 if _group:
188 if _group:
189 _group = _group.rstrip('/')
189 _group = _group.rstrip('/')
190 return _group
190 return _group
191
191
192
192
193 def get_user_group_slug(request):
193 def get_user_group_slug(request):
194 _user_group = ''
194 _user_group = ''
195
195
196 if hasattr(request, 'db_user_group'):
196 if hasattr(request, 'db_user_group'):
197 _user_group = request.db_user_group.users_group_name
197 _user_group = request.db_user_group.users_group_name
198 elif getattr(request, 'matchdict', None):
198 elif getattr(request, 'matchdict', None):
199 # pyramid
199 # pyramid
200 _user_group = request.matchdict.get('user_group_id')
200 _user_group = request.matchdict.get('user_group_id')
201 _user_group_name = request.matchdict.get('user_group_name')
201 _user_group_name = request.matchdict.get('user_group_name')
202 try:
202 try:
203 if _user_group:
203 if _user_group:
204 _user_group = UserGroup.get(_user_group)
204 _user_group = UserGroup.get(_user_group)
205 elif _user_group_name:
205 elif _user_group_name:
206 _user_group = UserGroup.get_by_group_name(_user_group_name)
206 _user_group = UserGroup.get_by_group_name(_user_group_name)
207
207
208 if _user_group:
208 if _user_group:
209 _user_group = _user_group.users_group_name
209 _user_group = _user_group.users_group_name
210 except Exception:
210 except Exception:
211 log.exception('Failed to get user group by id and name')
211 log.exception('Failed to get user group by id and name')
212 # catch all failures here
212 # catch all failures here
213 return None
213 return None
214
214
215 return _user_group
215 return _user_group
216
216
217
217
218 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
218 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
219 """
219 """
220 Scans the given path for repos and returns (name, (type, path)) tuples
220 Scans the given path for repos and returns (name, (type, path)) tuples
221
221
222 :param path: path to scan for repositories
222 :param path: path to scan for repositories
223 :param recursive: recursive search and return names with subdirs in front
223 :param recursive: recursive search and return names with subdirs in front
224 """
224 """
225
225
226 # remove ending slash for better results
226 # remove ending slash for better results
227 path = path.rstrip(os.sep)
227 path = path.rstrip(os.sep)
228 log.debug('now scanning in %s location recursive:%s...', path, recursive)
228 log.debug('now scanning in %s location recursive:%s...', path, recursive)
229
229
230 def _get_repos(p):
230 def _get_repos(p):
231 dirpaths = get_dirpaths(p)
231 dirpaths = get_dirpaths(p)
232 if not _is_dir_writable(p):
232 if not _is_dir_writable(p):
233 log.warning('repo path without write access: %s', p)
233 log.warning('repo path without write access: %s', p)
234
234
235 for dirpath in dirpaths:
235 for dirpath in dirpaths:
236 if os.path.isfile(os.path.join(p, dirpath)):
236 if os.path.isfile(os.path.join(p, dirpath)):
237 continue
237 continue
238 cur_path = os.path.join(p, dirpath)
238 cur_path = os.path.join(p, dirpath)
239
239
240 # skip removed repos
240 # skip removed repos
241 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
241 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
242 continue
242 continue
243
243
244 # skip .<something> dirs
244 # skip .<something> dirs
245 if dirpath.startswith('.'):
245 if dirpath.startswith('.'):
246 continue
246 continue
247
247
248 try:
248 try:
249 scm_info = get_scm(cur_path)
249 scm_info = get_scm(cur_path)
250 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
250 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
251 except VCSError:
251 except VCSError:
252 if not recursive:
252 if not recursive:
253 continue
253 continue
254 # check if this dir contains other repos for recursive scan
254 # check if this dir contains other repos for recursive scan
255 rec_path = os.path.join(p, dirpath)
255 rec_path = os.path.join(p, dirpath)
256 if os.path.isdir(rec_path):
256 if os.path.isdir(rec_path):
257 yield from _get_repos(rec_path)
257 yield from _get_repos(rec_path)
258
258
259 return _get_repos(path)
259 return _get_repos(path)
260
260
261
261
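A hedged usage sketch: per the docstring, the generator yields (name, (type, path)) items for every repository found under the given storage path (the path below is hypothetical):

# Hedged usage sketch; storage path is an assumed example location.
storage_path = '/var/opt/rhodecode_repo_store'

for name, (scm_type, repo_path) in get_filesystem_repos(storage_path, recursive=True):
    print(f'{scm_type}: {name} ({repo_path})')
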
262 def get_dirpaths(p: str) -> list:
262 def get_dirpaths(p: str) -> list:
263 try:
263 try:
264 # OS-independent way of checking if we have at least read-only
264 # OS-independent way of checking if we have at least read-only
265 # access or not.
265 # access or not.
266 dirpaths = os.listdir(p)
266 dirpaths = os.listdir(p)
267 except OSError:
267 except OSError:
268 log.warning('ignoring repo path without read access: %s', p)
268 log.warning('ignoring repo path without read access: %s', p)
269 return []
269 return []
270
270
271 # os.listdir has a quirk: if a unicode string is passed into it, then it tries to
271 # os.listdir has a quirk: if a unicode string is passed into it, then it tries to
272 # decode paths and suddenly returns unicode objects itself. The items it
272 # decode paths and suddenly returns unicode objects itself. The items it
273 # cannot decode are returned as strings and cause issues.
273 # cannot decode are returned as strings and cause issues.
274 #
274 #
275 # Those paths are ignored here until a solid solution for path handling has
275 # Those paths are ignored here until a solid solution for path handling has
276 # been built.
276 # been built.
277 expected_type = type(p)
277 expected_type = type(p)
278
278
279 def _has_correct_type(item):
279 def _has_correct_type(item):
280 if type(item) is not expected_type:
280 if type(item) is not expected_type:
281 log.error(
281 log.error(
282 "Ignoring path %s since it cannot be decoded into str.",
282 "Ignoring path %s since it cannot be decoded into str.",
283 # Using "repr" to make sure that we see the byte value in case
283 # Using "repr" to make sure that we see the byte value in case
284 # of a support request.
284 # of a support request.
285 repr(item))
285 repr(item))
286 return False
286 return False
287 return True
287 return True
288
288
289 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
289 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
290
290
291 return dirpaths
291 return dirpaths
292
292
293
293
294 def _is_dir_writable(path):
294 def _is_dir_writable(path):
295 """
295 """
296 Probe if `path` is writable.
296 Probe if `path` is writable.
297
297
298 Due to trouble on Cygwin / Windows, this is actually probing if it is
298 Due to trouble on Cygwin / Windows, this is actually probing if it is
299 possible to create a file inside of `path`, stat does not produce reliable
299 possible to create a file inside of `path`, stat does not produce reliable
300 results in this case.
300 results in this case.
301 """
301 """
302 try:
302 try:
303 with tempfile.TemporaryFile(dir=path):
303 with tempfile.TemporaryFile(dir=path):
304 pass
304 pass
305 except OSError:
305 except OSError:
306 return False
306 return False
307 return True
307 return True
308
308
309
309
310 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
310 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
311 """
311 """
312 Returns True if the given path is a valid repository, False otherwise.
312 Returns True if the given path is a valid repository, False otherwise.
313 If the expect_scm param is given, also compare whether the detected scm
313 If the expect_scm param is given, also compare whether the detected scm
314 matches the expected one. If explicit_scm is given, don't try to
314 matches the expected one. If explicit_scm is given, don't try to
315 detect the scm, just use the given one to check if the repo is valid.
315 detect the scm, just use the given one to check if the repo is valid.
316
316
317 :param repo_name:
317 :param repo_name:
318 :param base_path:
318 :param base_path:
319 :param expect_scm:
319 :param expect_scm:
320 :param explicit_scm:
320 :param explicit_scm:
321 :param config:
321 :param config:
322
322
323 :return True: if given path is a valid repository
323 :return True: if given path is a valid repository
324 """
324 """
325 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
325 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
326 log.debug('Checking if `%s` is a valid path for repository. '
326 log.debug('Checking if `%s` is a valid path for repository. '
327 'Explicit type: %s', repo_name, explicit_scm)
327 'Explicit type: %s', repo_name, explicit_scm)
328
328
329 try:
329 try:
330 if explicit_scm:
330 if explicit_scm:
331 detected_scms = [get_scm_backend(explicit_scm)(
331 detected_scms = [get_scm_backend(explicit_scm)(
332 full_path, config=config).alias]
332 full_path, config=config).alias]
333 else:
333 else:
334 detected_scms = get_scm(full_path)
334 detected_scms = get_scm(full_path)
335
335
336 if expect_scm:
336 if expect_scm:
337 return detected_scms[0] == expect_scm
337 return detected_scms[0] == expect_scm
338 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
338 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
339 return True
339 return True
340 except VCSError:
340 except VCSError:
341 log.debug('path: %s is not a valid repo !', full_path)
341 log.debug('path: %s is not a valid repo !', full_path)
342 return False
342 return False
343
343
344
344
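A short hedged sketch of a typical call, e.g. during a rescan, to check whether a directory under the repo store really is a repository of the expected type:

# Hedged usage sketch; the repo name is hypothetical, the base path comes
# from the helper defined further down in this module.
base_path = get_rhodecode_repo_store_path()

if is_valid_repo('some-group/some-repo', base_path, expect_scm='git'):
    print('directory holds a git repository')
else:
    print('not a repository, or not the expected scm')
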
345 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
345 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
346 """
346 """
347 Returns True if a given path is a repository group, False otherwise
347 Returns True if a given path is a repository group, False otherwise
348
348
349 :param repo_group_name:
349 :param repo_group_name:
350 :param base_path:
350 :param base_path:
351 """
351 """
352 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
352 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
353 log.debug('Checking if `%s` is a valid path for repository group',
353 log.debug('Checking if `%s` is a valid path for repository group',
354 repo_group_name)
354 repo_group_name)
355
355
356 # check if it's not a repo
356 # check if it's not a repo
357 if is_valid_repo(repo_group_name, base_path):
357 if is_valid_repo(repo_group_name, base_path):
358 log.debug('Repo called %s exists, it is not a valid repo group', repo_group_name)
358 log.debug('Repo called %s exists, it is not a valid repo group', repo_group_name)
359 return False
359 return False
360
360
361 try:
361 try:
362 # we need to check bare git repos at higher level
362 # we need to check bare git repos at higher level
363 # since we might match branches/hooks/info/objects or possible
363 # since we might match branches/hooks/info/objects or possible
364 # other things inside bare git repo
364 # other things inside bare git repo
365 maybe_repo = os.path.dirname(full_path)
365 maybe_repo = os.path.dirname(full_path)
366 if maybe_repo == base_path:
366 if maybe_repo == base_path:
367 # skip root level repo check; we know root location CANNOT BE a repo group
367 # skip root level repo check; we know root location CANNOT BE a repo group
368 return False
368 return False
369
369
370 scm_ = get_scm(maybe_repo)
370 scm_ = get_scm(maybe_repo)
371 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
371 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
372 return False
372 return False
373 except VCSError:
373 except VCSError:
374 pass
374 pass
375
375
376 # check if it's a valid path
376 # check if it's a valid path
377 if skip_path_check or os.path.isdir(full_path):
377 if skip_path_check or os.path.isdir(full_path):
378 log.debug('path: %s is a valid repo group !', full_path)
378 log.debug('path: %s is a valid repo group !', full_path)
379 return True
379 return True
380
380
381 log.debug('path: %s is not a valid repo group !', full_path)
381 log.debug('path: %s is not a valid repo group !', full_path)
382 return False
382 return False
383
383
384
384
385 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
385 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
386 while True:
386 while True:
387 ok = input(prompt)
387 ok = input(prompt)
388 if ok.lower() in ('y', 'ye', 'yes'):
388 if ok.lower() in ('y', 'ye', 'yes'):
389 return True
389 return True
390 if ok.lower() in ('n', 'no', 'nop', 'nope'):
390 if ok.lower() in ('n', 'no', 'nop', 'nope'):
391 return False
391 return False
392 retries = retries - 1
392 retries = retries - 1
393 if retries < 0:
393 if retries < 0:
394 raise OSError
394 raise OSError
395 print(complaint)
395 print(complaint)
396
396
397 # propagated from mercurial documentation
397 # propagated from mercurial documentation
398 ui_sections = [
398 ui_sections = [
399 'alias', 'auth',
399 'alias', 'auth',
400 'decode/encode', 'defaults',
400 'decode/encode', 'defaults',
401 'diff', 'email',
401 'diff', 'email',
402 'extensions', 'format',
402 'extensions', 'format',
403 'merge-patterns', 'merge-tools',
403 'merge-patterns', 'merge-tools',
404 'hooks', 'http_proxy',
404 'hooks', 'http_proxy',
405 'smtp', 'patch',
405 'smtp', 'patch',
406 'paths', 'profiling',
406 'paths', 'profiling',
407 'server', 'trusted',
407 'server', 'trusted',
408 'ui', 'web', ]
408 'ui', 'web', ]
409
409
410
410
411 def prepare_config_data(clear_session=True, repo=None):
411 def prepare_config_data(clear_session=True, repo=None):
412 """
412 """
413 Read the configuration data from the database and *.ini files, and return configuration
413 Read the configuration data from the database and *.ini files, and return configuration
414 tuples.
414 tuples.
415 """
415 """
416 from rhodecode.model.settings import VcsSettingsModel
416 from rhodecode.model.settings import VcsSettingsModel
417
417
418 sa = meta.Session()
418 sa = meta.Session()
419 settings_model = VcsSettingsModel(repo=repo, sa=sa)
419 settings_model = VcsSettingsModel(repo=repo, sa=sa)
420
420
421 ui_settings = settings_model.get_ui_settings()
421 ui_settings = settings_model.get_ui_settings()
422
422
423 ui_data = []
423 ui_data = []
424 config = [
424 config = [
425 ('web', 'push_ssl', 'false'),
425 ('web', 'push_ssl', 'false'),
426 ]
426 ]
427 for setting in ui_settings:
427 for setting in ui_settings:
428 # skip certain deprecated keys that might be still in DB
428 # skip certain deprecated keys that might be still in DB
429 if f"{setting.section}_{setting.key}" in ['extensions_hgsubversion']:
429 if f"{setting.section}_{setting.key}" in ['extensions_hgsubversion']:
430 continue
430 continue
431
431
432 # Todo: remove this section once the transition to *.ini files is completed
432 # Todo: remove this section once the transition to *.ini files is completed
433 if setting.section in ('largefiles', 'vcs_git_lfs'):
433 if setting.section in ('largefiles', 'vcs_git_lfs'):
434 if setting.key != 'enabled':
434 if setting.key != 'enabled':
435 continue
435 continue
436 if setting.active:
436 if setting.active:
437 ui_data.append((setting.section, setting.key, setting.value))
437 ui_data.append((setting.section, setting.key, setting.value))
438 config.append((
438 config.append((
439 safe_str(setting.section), safe_str(setting.key),
439 safe_str(setting.section), safe_str(setting.key),
440 safe_str(setting.value)))
440 safe_str(setting.value)))
441 if setting.key == 'push_ssl':
441 if setting.key == 'push_ssl':
442 # force the push_ssl requirement to False; this setting is deprecated and must always be False
442 # force the push_ssl requirement to False; this setting is deprecated and must always be False
443 config.append((
443 config.append((
444 safe_str(setting.section), safe_str(setting.key), False))
444 safe_str(setting.section), safe_str(setting.key), False))
445 config_getter = ConfigGet()
445 config_getter = ConfigGet()
446 config.append(('vcs_git_lfs', 'store_location', config_getter.get_str('vcs.git.lfs.storage_location')))
446 config.append(('vcs_git_lfs', 'store_location', config_getter.get_str('vcs.git.lfs.storage_location')))
447 config.append(('largefiles', 'usercache', config_getter.get_str('vcs.hg.largefiles.storage_location')))
447 config.append(('largefiles', 'usercache', config_getter.get_str('vcs.hg.largefiles.storage_location')))
448 log.debug(
448 log.debug(
449 'settings ui from db@repo[%s]: %s',
449 'settings ui from db@repo[%s]: %s',
450 repo,
450 repo,
451 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
451 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
452 if clear_session:
452 if clear_session:
453 meta.Session.remove()
453 meta.Session.remove()
454
454
455 # TODO: mikhail: probably it makes no sense to re-read hooks information.
455 # TODO: mikhail: probably it makes no sense to re-read hooks information.
456 # It's already there and activated/deactivated
456 # It's already there and activated/deactivated
457 skip_entries = []
457 skip_entries = []
458 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
458 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
459 if 'pull' not in enabled_hook_classes:
459 if 'pull' not in enabled_hook_classes:
460 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
460 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
461 if 'push' not in enabled_hook_classes:
461 if 'push' not in enabled_hook_classes:
462 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
462 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
463 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
463 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
464 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
464 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
465
465
466 config = [entry for entry in config if entry[:2] not in skip_entries]
466 config = [entry for entry in config if entry[:2] not in skip_entries]
467
467
468 return config
468 return config
469
469
470
470
471 def make_db_config(clear_session=True, repo=None):
471 def make_db_config(clear_session=True, repo=None):
472 """
472 """
473 Create a :class:`Config` instance based on the values in the database.
473 Create a :class:`Config` instance based on the values in the database.
474 """
474 """
475 config = Config()
475 config = Config()
476 config_data = prepare_config_data(clear_session=clear_session, repo=repo)
476 config_data = prepare_config_data(clear_session=clear_session, repo=repo)
477 for section, option, value in config_data:
477 for section, option, value in config_data:
478 config.set(section, option, value)
478 config.set(section, option, value)
479 return config
479 return config
480
480
481
481
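A hedged sketch of the flow: prepare_config_data merges DB ui settings with values taken from the *.ini config, and make_db_config simply feeds each resulting (section, option, value) tuple into a vcs Config via config.set():

# Hedged usage sketch; the repo name is hypothetical. Inspect the
# (section, option, value) tuples before they are loaded into a Config.
for section, option, value in prepare_config_data(clear_session=False, repo='some-group/some-repo'):
    if section in ('largefiles', 'vcs_git_lfs'):
        print(f'[{section}] {option} = {value}')

# make_db_config() performs exactly this iteration and calls
# config.set(section, option, value) for every tuple.
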
482 def get_enabled_hook_classes(ui_settings):
482 def get_enabled_hook_classes(ui_settings):
483 """
483 """
484 Return the enabled hook classes.
484 Return the enabled hook classes.
485
485
486 :param ui_settings: List of ui_settings as returned
486 :param ui_settings: List of ui_settings as returned
487 by :meth:`VcsSettingsModel.get_ui_settings`
487 by :meth:`VcsSettingsModel.get_ui_settings`
488
488
489 :return: a list with the enabled hook classes. The order is not guaranteed.
489 :return: a list with the enabled hook classes. The order is not guaranteed.
490 :rtype: list
490 :rtype: list
491 """
491 """
492 enabled_hooks = []
492 enabled_hooks = []
493 active_hook_keys = [
493 active_hook_keys = [
494 key for section, key, value, active in ui_settings
494 key for section, key, value, active in ui_settings
495 if section == 'hooks' and active]
495 if section == 'hooks' and active]
496
496
497 hook_names = {
497 hook_names = {
498 RhodeCodeUi.HOOK_PUSH: 'push',
498 RhodeCodeUi.HOOK_PUSH: 'push',
499 RhodeCodeUi.HOOK_PULL: 'pull',
499 RhodeCodeUi.HOOK_PULL: 'pull',
500 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
500 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
501 }
501 }
502
502
503 for key in active_hook_keys:
503 for key in active_hook_keys:
504 hook = hook_names.get(key)
504 hook = hook_names.get(key)
505 if hook:
505 if hook:
506 enabled_hooks.append(hook)
506 enabled_hooks.append(hook)
507
507
508 return enabled_hooks
508 return enabled_hooks
509
509
510
510
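The ui_settings argument is a sequence of (section, key, value, active) tuples; a hedged example of the filtering and mapping performed:

# Hedged example: only active entries in the 'hooks' section whose key is
# one of the known hook keys end up in the result.
ui_settings = [  # hypothetical (section, key, value, active) tuples
    ('hooks', RhodeCodeUi.HOOK_PUSH, 'python:...', True),
    ('hooks', RhodeCodeUi.HOOK_PULL, 'python:...', False),  # inactive -> skipped
    ('web', 'push_ssl', 'false', True),                     # wrong section -> skipped
]

assert get_enabled_hook_classes(ui_settings) == ['push']
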
511 def set_rhodecode_config(config):
511 def set_rhodecode_config(config):
512 """
512 """
513 Updates pyramid config with new settings from database
513 Updates pyramid config with new settings from database
514
514
515 :param config:
515 :param config:
516 """
516 """
517 from rhodecode.model.settings import SettingsModel
517 from rhodecode.model.settings import SettingsModel
518 app_settings = SettingsModel().get_all_settings()
518 app_settings = SettingsModel().get_all_settings()
519
519
520 for k, v in list(app_settings.items()):
520 for k, v in list(app_settings.items()):
521 config[k] = v
521 config[k] = v
522
522
523
523
524 def get_rhodecode_realm():
524 def get_rhodecode_realm():
525 """
525 """
526 Return the rhodecode realm from database.
526 Return the rhodecode realm from database.
527 """
527 """
528 from rhodecode.model.settings import SettingsModel
528 from rhodecode.model.settings import SettingsModel
529 realm = SettingsModel().get_setting_by_name('realm')
529 realm = SettingsModel().get_setting_by_name('realm')
530 return safe_str(realm.app_settings_value)
530 return safe_str(realm.app_settings_value)
531
531
532
532
533 def get_rhodecode_repo_store_path():
533 def get_rhodecode_repo_store_path():
534 """
534 """
535 Returns the base path. The base path is the filesystem path which points
535 Returns the base path. The base path is the filesystem path which points
536 to the repository store.
536 to the repository store.
537 """
537 """
538
538
539 import rhodecode
539 import rhodecode
540 return rhodecode.CONFIG['repo_store.path']
540 return rhodecode.CONFIG['repo_store.path']
541
541
542
542
543 def map_groups(path):
543 def map_groups(path):
544 """
544 """
545 Given a full path to a repository, create all nested groups that this
545 Given a full path to a repository, create all nested groups that this
546 repo is inside. This function creates parent-child relationships between
546 repo is inside. This function creates parent-child relationships between
547 groups and creates default perms for all new groups.
547 groups and creates default perms for all new groups.
548
548
549 :param path: full path to the repository
549 :param path: full path to the repository
550 """
550 """
551 from rhodecode.model.repo_group import RepoGroupModel
551 from rhodecode.model.repo_group import RepoGroupModel
552 sa = meta.Session()
552 sa = meta.Session()
553 groups = path.split(Repository.NAME_SEP)
553 groups = path.split(Repository.NAME_SEP)
554 parent = None
554 parent = None
555 group = None
555 group = None
556
556
557 # last element is repo in nested groups structure
557 # last element is repo in nested groups structure
558 groups = groups[:-1]
558 groups = groups[:-1]
559 rgm = RepoGroupModel(sa)
559 rgm = RepoGroupModel(sa)
560 owner = User.get_first_super_admin()
560 owner = User.get_first_super_admin()
561 for lvl, group_name in enumerate(groups):
561 for lvl, group_name in enumerate(groups):
562 group_name = '/'.join(groups[:lvl] + [group_name])
562 group_name = '/'.join(groups[:lvl] + [group_name])
563 group = RepoGroup.get_by_group_name(group_name)
563 group = RepoGroup.get_by_group_name(group_name)
564 desc = '%s group' % group_name
564 desc = '%s group' % group_name
565
565
566 # skip folders that are now removed repos
566 # skip folders that are now removed repos
567 if REMOVED_REPO_PAT.match(group_name):
567 if REMOVED_REPO_PAT.match(group_name):
568 break
568 break
569
569
570 if group is None:
570 if group is None:
571 log.debug('creating group level: %s group_name: %s',
571 log.debug('creating group level: %s group_name: %s',
572 lvl, group_name)
572 lvl, group_name)
573 group = RepoGroup(group_name, parent)
573 group = RepoGroup(group_name, parent)
574 group.group_description = desc
574 group.group_description = desc
575 group.user = owner
575 group.user = owner
576 sa.add(group)
576 sa.add(group)
577 perm_obj = rgm._create_default_perms(group)
577 perm_obj = rgm._create_default_perms(group)
578 sa.add(perm_obj)
578 sa.add(perm_obj)
579 sa.flush()
579 sa.flush()
580
580
581 parent = group
581 parent = group
582 return group
582 return group
583
583
584
584
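# Illustrative sketch (not part of the changeset): map_groups() takes the full
# repo path, creates every missing parent group and returns the deepest one; the
# last path segment (the repo itself) is skipped. The path is a hypothetical
# example and an initialized RhodeCode environment is assumed.
from rhodecode.lib.utils import map_groups

def example_map_groups():
    # creates repo groups 'customers' and 'customers/acme' if they do not exist,
    # returns the 'customers/acme' RepoGroup
    return map_groups('customers/acme/backend-repo')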
585 def repo2db_mapper(initial_repo_list, remove_obsolete=False, force_hooks_rebuild=False):
585 def repo2db_mapper(initial_repo_list, force_hooks_rebuild=False):
586 """
586 """
587 maps all repos given in initial_repo_list, non existing repositories
587 maps all repos given in initial_repo_list; non-existing repositories
588 are created, if remove_obsolete is True it also checks for db entries
588 are created.
589 that are not in initial_repo_list and removes them.
590
591 :param initial_repo_list: list of repositories found by scanning methods
592 :param remove_obsolete: check for obsolete entries in database
593 """
589 """
594 from rhodecode.model.repo import RepoModel
590 from rhodecode.model.repo import RepoModel
595 from rhodecode.model.repo_group import RepoGroupModel
596 from rhodecode.model.settings import SettingsModel
591 from rhodecode.model.settings import SettingsModel
597
592
598 sa = meta.Session()
593 sa = meta.Session()
599 repo_model = RepoModel()
594 repo_model = RepoModel()
600 user = User.get_first_super_admin()
595 user = User.get_first_super_admin()
601 added = []
596 added = []
597 errors = []
602
598
603 # creation defaults
599 # creation defaults
604 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
600 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
605 enable_statistics = defs.get('repo_enable_statistics')
601 enable_statistics = defs.get('repo_enable_statistics')
606 enable_locking = defs.get('repo_enable_locking')
602 enable_locking = defs.get('repo_enable_locking')
607 enable_downloads = defs.get('repo_enable_downloads')
603 enable_downloads = defs.get('repo_enable_downloads')
608 private = defs.get('repo_private')
604 private = defs.get('repo_private')
609
605
610 for name, repo in list(initial_repo_list.items()):
606 for name, repo in list(initial_repo_list.items()):
611 group = map_groups(name)
607 group = map_groups(name)
612 str_name = safe_str(name)
608 str_name = safe_str(name)
613 db_repo = repo_model.get_by_repo_name(str_name)
609 db_repo = repo_model.get_by_repo_name(str_name)
614
610
615 # found a repo that is on the filesystem but not in the RhodeCode database
611 # found a repo that is on the filesystem but not in the RhodeCode database
616 if not db_repo:
612 if not db_repo:
617 log.info('repository `%s` not found in the database, creating now', name)
613 log.info('repository `%s` not found in the database, creating now', name)
618 added.append(name)
614 added.append(name)
619 desc = (repo.description
615 desc = repo.description if repo.description != 'unknown' else f'{name} repository'
620 if repo.description != 'unknown'
621 else '%s repository' % name)
622
616
623 db_repo = repo_model._create_repo(
617 db_repo = repo_model._create_repo(
624 repo_name=name,
618 repo_name=name,
625 repo_type=repo.alias,
619 repo_type=repo.alias,
626 description=desc,
620 description=desc,
627 repo_group=getattr(group, 'group_id', None),
621 repo_group=getattr(group, 'group_id', None),
628 owner=user,
622 owner=user,
629 enable_locking=enable_locking,
623 enable_locking=enable_locking,
630 enable_downloads=enable_downloads,
624 enable_downloads=enable_downloads,
631 enable_statistics=enable_statistics,
625 enable_statistics=enable_statistics,
632 private=private,
626 private=private,
633 state=Repository.STATE_CREATED
627 state=Repository.STATE_CREATED
634 )
628 )
635 sa.commit()
629 sa.commit()
630
631 try:
632 config = db_repo._config
633 config.set('extensions', 'largefiles', '')
634 scm_repo = db_repo.scm_instance(config=config)
635 except Exception:
636 log.error(traceback.format_exc())
637 errors.append(f'getting vcs instance for {name} failed')
638 continue
639
640 try:
641 db_repo.update_commit_cache(recursive=False)
642 except Exception:
643 log.error(traceback.format_exc())
644 errors.append(f'update_commit_cache for {name} failed')
645 continue
646
647 try:
648 scm_repo.install_hooks(force=force_hooks_rebuild)
649 except Exception:
650 log.error(traceback.format_exc())
651 errors.append(f'install_hooks for {name} failed')
652 continue
653
654 try:
636 # we just added that repo, so make sure we update the server info
655 # we just added that repo, so make sure we update the server info
637 if db_repo.repo_type == 'git':
656 if db_repo.repo_type == 'git':
638 git_repo = db_repo.scm_instance()
639 # update repository server-info
657 # update repository server-info
640 log.debug('Running update server info')
658 log.debug('Running update server info')
641 git_repo._update_server_info(force=True)
659 scm_repo._update_server_info(force=True)
642
660 except Exception:
643 db_repo.update_commit_cache(recursive=False)
661 log.error(traceback.format_exc())
662 errors.append(f'update_server_info for {name} failed')
663 continue
644
664
645 config = db_repo._config
665 return added, errors
646 config.set('extensions', 'largefiles', '')
647 repo = db_repo.scm_instance(config=config)
648 repo.install_hooks(force=force_hooks_rebuild)
649
666
667 def repo2db_cleanup(skip_repos=None, skip_groups=None):
668 from rhodecode.model.repo import RepoModel
669 from rhodecode.model.repo_group import RepoGroupModel
670
671 sa = meta.Session()
650 removed = []
672 removed = []
651 if remove_obsolete:
673 errors = []
652 # remove from database those repositories that are not in the filesystem
674
653 for repo in sa.query(Repository).all():
675
654 if repo.repo_name not in list(initial_repo_list.keys()):
676 all_repos = Repository.execute(
655 log.debug("Removing non-existing repository found in db `%s`",
677 Repository.select(Repository)\
656 repo.repo_name)
678 .order_by(Repository.repo_name)
657 try:
679 ).scalars()
658 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
659 sa.commit()
660 removed.append(repo.repo_name)
661 except Exception:
662 # don't hold further removals on error
663 log.error(traceback.format_exc())
664 sa.rollback()
665
680
666 def splitter(full_repo_name):
681 # remove from database those repositories that are not in the filesystem
667 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
682 for db_repo in all_repos:
668 gr_name = None
683 db_repo_name = db_repo.repo_name
669 if len(_parts) == 2:
684 if skip_repos and db_repo_name in skip_repos:
670 gr_name = _parts[0]
685 log.debug('Skipping repo `%s`', db_repo_name)
671 return gr_name
686 continue
687 try:
688 instance = db_repo.scm_instance()
689 except Exception:
690 instance = None
672
691
673 initial_repo_group_list = [splitter(x) for x in
692 if not instance:
674 list(initial_repo_list.keys()) if splitter(x)]
693 log.debug("Removing non-existing repository found in db `%s`", db_repo_name)
694 try:
695 RepoModel(sa).delete(db_repo, forks='detach', fs_remove=False, call_events=False)
696 sa.commit()
697 removed.append(db_repo_name)
698 except Exception:
699 # don't hold further removals on error
700 log.error(traceback.format_exc())
701 sa.rollback()
702 errors.append(db_repo_name)
675
703
676 # remove from database those repository groups that are not in the
704 # remove from database those repository groups that are not in the
677 # filesystem; due to parent-child relationships we need to delete them
705 # filesystem; due to parent-child relationships we need to delete them
678 # in a specific order, most nested first
706 # in a specific order, most nested first
679 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
707 all_groups = RepoGroup.execute(
680 def nested_sort(gr):
708 RepoGroup.select(RepoGroup.group_name)\
681 return len(gr.split('/'))
709 .order_by(RepoGroup.group_name)
682 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
710 ).scalars().all()
683 if group_name not in initial_repo_group_list:
711
684 repo_group = RepoGroup.get_by_group_name(group_name)
712 def nested_sort(gr):
685 if (repo_group.children.all() or
713 return len(gr.split('/'))
686 not RepoGroupModel().check_exist_filesystem(
687 group_name=group_name, exc_on_failure=False)):
688 continue
689
714
690 log.info(
715 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
691 'Removing non-existing repository group found in db `%s`',
716 if skip_groups and group_name in skip_groups:
692 group_name)
717 log.debug('Skipping repo group `%s`', group_name)
693 try:
718 continue
694 RepoGroupModel(sa).delete(group_name, fs_remove=False)
719
695 sa.commit()
720 repo_group = RepoGroup.get_by_group_name(group_name)
696 removed.append(group_name)
721
697 except Exception:
722 if repo_group.children.all() or not RepoGroupModel().check_exist_filesystem(group_name=group_name, exc_on_failure=False):
698 # don't hold further removals on error
723 continue
699 log.exception(
724
700 'Unable to remove repository group `%s`',
725 log.info('Removing non-existing repository group found in db `%s`', group_name)
701 group_name)
702 sa.rollback()
703 raise
704
726
705 return added, removed
727 try:
728 RepoGroupModel(sa).delete(group_name, fs_remove=False, call_events=False)
729 sa.commit()
730 removed.append(group_name)
731 except Exception:
732 # don't hold further removals on error
733 log.exception('Unable to remove repository group `%s`', group_name)
734 sa.rollback()
735 errors.append(group_name)
736
737 return removed, errors
738
706
739
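# Illustrative sketch (not part of the changeset): with this change the remap and
# cleanup logic is split into two independent calls, each returning what it did
# plus a list of per-item errors instead of aborting. Assumes an initialized
# environment; scanned_repos is typically the {repo_name: vcs instance} dict
# produced by a filesystem scan (e.g. ScmModel().repo_scan()).
from rhodecode.lib.utils import repo2db_mapper, repo2db_cleanup

def example_remap_and_cleanup(scanned_repos):
    added, add_errors = repo2db_mapper(scanned_repos, force_hooks_rebuild=False)
    removed, cleanup_errors = repo2db_cleanup(skip_repos=None, skip_groups=None)
    return added, removed, add_errors + cleanup_errors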
707 def deep_reload_package(package_name):
740 def deep_reload_package(package_name):
708 """
741 """
709 Deeply reload a package by removing it and its submodules from sys.modules,
742 Deeply reload a package by removing it and its submodules from sys.modules,
710 then re-importing it.
743 then re-importing it.
711 """
744 """
712 # Remove the package and its submodules from sys.modules
745 # Remove the package and its submodules from sys.modules
713 to_reload = [name for name in sys.modules if name == package_name or name.startswith(package_name + ".")]
746 to_reload = [name for name in sys.modules if name == package_name or name.startswith(package_name + ".")]
714 for module_name in to_reload:
747 for module_name in to_reload:
715 del sys.modules[module_name]
748 del sys.modules[module_name]
716 log.debug(f"Removed module from cache: {module_name}")
749 log.debug(f"Removed module from cache: {module_name}")
717
750
718 # Re-import the package
751 # Re-import the package
719 package = importlib.import_module(package_name)
752 package = importlib.import_module(package_name)
720 log.debug(f"Re-imported package: {package_name}")
753 log.debug(f"Re-imported package: {package_name}")
721
754
722 return package
755 return package
723
756
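# Illustrative sketch (not part of the changeset): deep_reload_package() is how
# load_rcextensions() below reloads an already-imported rcextensions package
# within a running process; any importable package name works the same way.
from rhodecode.lib.utils import deep_reload_package

def example_deep_reload():
    # drops 'rcextensions' and every 'rcextensions.*' module from sys.modules,
    # then re-imports and returns the fresh package object
    return deep_reload_package('rcextensions')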
724 def load_rcextensions(root_path):
757 def load_rcextensions(root_path):
725 import rhodecode
758 import rhodecode
726 from rhodecode.config import conf
759 from rhodecode.config import conf
727
760
728 path = os.path.join(root_path)
761 path = os.path.join(root_path)
729 deep_reload = path in sys.path
762 deep_reload = path in sys.path
730 sys.path.insert(0, path)
763 sys.path.insert(0, path)
731
764
732 try:
765 try:
733 rcextensions = __import__('rcextensions', fromlist=[''])
766 rcextensions = __import__('rcextensions', fromlist=[''])
734 except ImportError:
767 except ImportError:
735 if os.path.isdir(os.path.join(path, 'rcextensions')):
768 if os.path.isdir(os.path.join(path, 'rcextensions')):
736 log.warning('Unable to load rcextensions from %s', path)
769 log.warning('Unable to load rcextensions from %s', path)
737 rcextensions = None
770 rcextensions = None
738
771
739 if rcextensions:
772 if rcextensions:
740 if deep_reload:
773 if deep_reload:
741 rcextensions = deep_reload_package('rcextensions')
774 rcextensions = deep_reload_package('rcextensions')
742 log.info('Loaded rcextensions from %s...', rcextensions)
775 log.info('Loaded rcextensions from %s...', rcextensions)
743 rhodecode.EXTENSIONS = rcextensions
776 rhodecode.EXTENSIONS = rcextensions
744
777
745 # Additional mappings that are not present in the pygments lexers
778 # Additional mappings that are not present in the pygments lexers
746 conf.LANGUAGES_EXTENSIONS_MAP.update(
779 conf.LANGUAGES_EXTENSIONS_MAP.update(
747 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
780 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
748
781
749
782
750 def get_custom_lexer(extension):
783 def get_custom_lexer(extension):
751 """
784 """
752 returns a custom lexer if it is defined in rcextensions module, or None
785 returns a custom lexer if it is defined in rcextensions module, or None
753 if there's no custom lexer defined
786 if there's no custom lexer defined
754 """
787 """
755 import rhodecode
788 import rhodecode
756 from pygments import lexers
789 from pygments import lexers
757
790
758 # custom override made by RhodeCode
791 # custom override made by RhodeCode
759 if extension in ['mako']:
792 if extension in ['mako']:
760 return lexers.get_lexer_by_name('html+mako')
793 return lexers.get_lexer_by_name('html+mako')
761
794
762 # check if we didn't define this extension as other lexer
795 # check if we didn't define this extension as other lexer
763 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
796 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
764 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
797 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
765 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
798 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
766 return lexers.get_lexer_by_name(_lexer_name)
799 return lexers.get_lexer_by_name(_lexer_name)
767
800
768
801
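# Illustrative sketch (not part of the changeset): EXTRA_LEXERS in the rcextensions
# package maps a file extension to a pygments lexer name, which get_custom_lexer()
# above resolves. The 'tmpl' -> 'html+jinja' mapping is a made-up example, not a
# shipped default.
from rhodecode.lib.utils import get_custom_lexer

def example_custom_lexer():
    # returns the pygments 'html+jinja' lexer if rcextensions defines
    # EXTRA_LEXERS = {'tmpl': 'html+jinja'}, otherwise None
    return get_custom_lexer('tmpl')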
769 #==============================================================================
802 #==============================================================================
770 # TEST FUNCTIONS AND CREATORS
803 # TEST FUNCTIONS AND CREATORS
771 #==============================================================================
804 #==============================================================================
772 def create_test_index(repo_location, config):
805 def create_test_index(repo_location, config):
773 """
806 """
774 Makes default test index.
807 Makes default test index.
775 """
808 """
776 try:
809 try:
777 import rc_testdata
810 import rc_testdata
778 except ImportError:
811 except ImportError:
779 raise ImportError('Failed to import rc_testdata, '
812 raise ImportError('Failed to import rc_testdata, '
780 'please make sure this package is installed from requirements_test.txt')
813 'please make sure this package is installed from requirements_test.txt')
781
814
782 rc_testdata.extract_search_index(
815 rc_testdata.extract_search_index(
783 'vcs_search_index', os.path.dirname(config['search.location']))
816 'vcs_search_index', os.path.dirname(config['search.location']))
784
817
785
818
786 def create_test_directory(test_path):
819 def create_test_directory(test_path):
787 """
820 """
788 Create test directory if it doesn't exist.
821 Create test directory if it doesn't exist.
789 """
822 """
790 if not os.path.isdir(test_path):
823 if not os.path.isdir(test_path):
791 log.debug('Creating testdir %s', test_path)
824 log.debug('Creating testdir %s', test_path)
792 os.makedirs(test_path)
825 os.makedirs(test_path)
793
826
794
827
795 def create_test_database(test_path, config):
828 def create_test_database(test_path, config):
796 """
829 """
797 Makes a fresh database.
830 Makes a fresh database.
798 """
831 """
799 from rhodecode.lib.db_manage import DbManage
832 from rhodecode.lib.db_manage import DbManage
800 from rhodecode.lib.utils2 import get_encryption_key
833 from rhodecode.lib.utils2 import get_encryption_key
801
834
802 # PART ONE create db
835 # PART ONE create db
803 dbconf = config['sqlalchemy.db1.url']
836 dbconf = config['sqlalchemy.db1.url']
804 enc_key = get_encryption_key(config)
837 enc_key = get_encryption_key(config)
805
838
806 log.debug('making test db %s', dbconf)
839 log.debug('making test db %s', dbconf)
807
840
808 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
841 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
809 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
842 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
810 dbmanage.create_tables(override=True)
843 dbmanage.create_tables(override=True)
811 dbmanage.set_db_version()
844 dbmanage.set_db_version()
812 # for tests dynamically set new root paths based on generated content
845 # for tests dynamically set new root paths based on generated content
813 dbmanage.create_settings(dbmanage.config_prompt(test_path))
846 dbmanage.create_settings(dbmanage.config_prompt(test_path))
814 dbmanage.create_default_user()
847 dbmanage.create_default_user()
815 dbmanage.create_test_admin_and_users()
848 dbmanage.create_test_admin_and_users()
816 dbmanage.create_permissions()
849 dbmanage.create_permissions()
817 dbmanage.populate_default_permissions()
850 dbmanage.populate_default_permissions()
818 Session().commit()
851 Session().commit()
819
852
820
853
821 def create_test_repositories(test_path, config):
854 def create_test_repositories(test_path, config):
822 """
855 """
823 Creates test repositories in the temporary directory. Repositories are
856 Creates test repositories in the temporary directory. Repositories are
824 extracted from archives within the rc_testdata package.
857 extracted from archives within the rc_testdata package.
825 """
858 """
826 try:
859 try:
827 import rc_testdata
860 import rc_testdata
828 except ImportError:
861 except ImportError:
829 raise ImportError('Failed to import rc_testdata, '
862 raise ImportError('Failed to import rc_testdata, '
830 'please make sure this package is installed from requirements_test.txt')
863 'please make sure this package is installed from requirements_test.txt')
831
864
832 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
865 from rhodecode.bootstrap import HG_REPO, GIT_REPO, SVN_REPO
833
866
834 log.debug('making test vcs repositories at %s', test_path)
867 log.debug('making test vcs repositories at %s', test_path)
835
868
836 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
869 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
837 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
870 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
838
871
839 # Note: Subversion is in the process of being integrated with the system,
872 # Note: Subversion is in the process of being integrated with the system,
840 # until we have a properly packed version of the test svn repository, this
873 # until we have a properly packed version of the test svn repository, this
841 # tries to copy over the repo from a package "rc_testdata"
874 # tries to copy over the repo from a package "rc_testdata"
842 svn_repo_path = rc_testdata.get_svn_repo_archive()
875 svn_repo_path = rc_testdata.get_svn_repo_archive()
843 with tarfile.open(svn_repo_path) as tar:
876 with tarfile.open(svn_repo_path) as tar:
844 tar.extractall(jn(test_path, SVN_REPO))
877 tar.extractall(jn(test_path, SVN_REPO))
845
878
846
879
847 def password_changed(auth_user, session):
880 def password_changed(auth_user, session):
848 # Never report password change in case of default user or anonymous user.
881 # Never report password change in case of default user or anonymous user.
849 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
882 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
850 return False
883 return False
851
884
852 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
885 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
853 rhodecode_user = session.get('rhodecode_user', {})
886 rhodecode_user = session.get('rhodecode_user', {})
854 session_password_hash = rhodecode_user.get('password', '')
887 session_password_hash = rhodecode_user.get('password', '')
855 return password_hash != session_password_hash
888 return password_hash != session_password_hash
856
889
857
890
858 def read_opensource_licenses():
891 def read_opensource_licenses():
859 global _license_cache
892 global _license_cache
860
893
861 if not _license_cache:
894 if not _license_cache:
862 licenses = pkg_resources.resource_string(
895 licenses = pkg_resources.resource_string(
863 'rhodecode', 'config/licenses.json')
896 'rhodecode', 'config/licenses.json')
864 _license_cache = json.loads(licenses)
897 _license_cache = json.loads(licenses)
865
898
866 return _license_cache
899 return _license_cache
867
900
868
901
869 def generate_platform_uuid():
902 def generate_platform_uuid():
870 """
903 """
871 Generates platform UUID based on its name
904 Generates platform UUID based on its name
872 """
905 """
873 import platform
906 import platform
874
907
875 try:
908 try:
876 uuid_list = [platform.platform()]
909 uuid_list = [platform.platform()]
877 return sha256_safe(':'.join(uuid_list))
910 return sha256_safe(':'.join(uuid_list))
878 except Exception as e:
911 except Exception as e:
879 log.error('Failed to generate host uuid: %s', e)
912 log.error('Failed to generate host uuid: %s', e)
880 return 'UNDEFINED'
913 return 'UNDEFINED'
881
914
882
915
883 def send_test_email(recipients, email_body='TEST EMAIL'):
916 def send_test_email(recipients, email_body='TEST EMAIL'):
884 """
917 """
885 Simple code for generating test emails.
918 Simple code for generating test emails.
886 Usage::
919 Usage::
887
920
888 from rhodecode.lib import utils
921 from rhodecode.lib import utils
889 utils.send_test_email(['test@example.com'])
922 utils.send_test_email(['test@example.com'])
890 """
923 """
891 from rhodecode.lib.celerylib import tasks, run_task
924 from rhodecode.lib.celerylib import tasks, run_task
892
925
893 email_body = email_body_plaintext = email_body
926 email_body = email_body_plaintext = email_body
894 subject = f'SUBJECT FROM: {socket.gethostname()}'
927 subject = f'SUBJECT FROM: {socket.gethostname()}'
895 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
928 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
@@ -1,93 +1,94 b''
1 # Copyright (C) 2014-2024 RhodeCode GmbH
1 # Copyright (C) 2014-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 VCS Backends module
20 VCS Backends module
21 """
21 """
22
22
23 import os
23 import os
24 import logging
24 import logging
25
25
26 from rhodecode import typing
26 from rhodecode import typing
27
27
28 from rhodecode.lib.vcs.conf import settings
28 from rhodecode.lib.vcs.conf import settings
29 from rhodecode.lib.vcs.exceptions import VCSError
29 from rhodecode.lib.vcs.exceptions import VCSError
30 from rhodecode.lib.vcs.utils.helpers import get_scm
30 from rhodecode.lib.vcs.utils.helpers import get_scm
31 from rhodecode.lib.vcs.utils.imports import import_class
31 from rhodecode.lib.vcs.utils.imports import import_class
32
32
33
33
34 log = logging.getLogger(__name__)
34 log = logging.getLogger(__name__)
35
35
36
36
37 def get_vcs_instance(repo_path, *args, **kwargs) -> typing.VCSRepo | None:
37 def get_vcs_instance(repo_path, *args, **kwargs) -> typing.VCSRepo | None:
38 """
38 """
39 Given a path to a repository an instance of the corresponding vcs backend
39 Given a path to a repository an instance of the corresponding vcs backend
40 repository class is created and returned. If no repository can be found
40 repository class is created and returned. If no repository can be found
41 for the path it returns None. Arguments and keyword arguments are passed
41 for the path it returns None. Arguments and keyword arguments are passed
42 to the vcs backend repository class.
42 to the vcs backend repository class.
43 """
43 """
44 from rhodecode.lib.utils2 import safe_str
44 from rhodecode.lib.utils2 import safe_str
45
45
46 explicit_vcs_alias = kwargs.pop('_vcs_alias', None)
46 explicit_vcs_alias = kwargs.pop('_vcs_alias', None)
47 try:
47 try:
48 vcs_alias = safe_str(explicit_vcs_alias or get_scm(repo_path)[0])
48 vcs_alias = safe_str(explicit_vcs_alias or get_scm(repo_path)[0])
49 log.debug(
49 log.debug(
50 'Creating instance of %s repository from %s', vcs_alias,
50 'Creating instance of %s repository from %s', vcs_alias,
51 safe_str(repo_path))
51 safe_str(repo_path))
52 backend = get_backend(vcs_alias)
52 backend = get_backend(vcs_alias)
53
53
54 if explicit_vcs_alias:
54 if explicit_vcs_alias:
55 # do final verification of existence of the path, this does the
55 # do final verification of existence of the path, this does the
56 # same as get_scm() call which we skip in explicit_vcs_alias
56 # same as get_scm() call which we skip in explicit_vcs_alias
57 if not os.path.isdir(repo_path):
57 if not os.path.isdir(repo_path):
58 raise VCSError(f"Given path {repo_path} is not a directory")
58 raise VCSError(f"Given path {repo_path} is not a directory")
59 except VCSError:
59 except VCSError:
60 log.exception(
60 log.exception(
61 'Perhaps this repository is in db and not in '
61 'Perhaps this repository is in db and not in filesystem. '
62 'filesystem run rescan repositories with '
62 'Run the cleanup filesystem option from admin settings under Remap and rescan'
63 '"destroy old data" option from admin panel')
63 )
64
64 return None
65 return None
65
66
66 return backend(repo_path=repo_path, *args, **kwargs)
67 return backend(repo_path=repo_path, *args, **kwargs)
67
68
68
69
69 def get_backend(alias) -> typing.VCSRepoClass:
70 def get_backend(alias) -> typing.VCSRepoClass:
70 """
71 """
71 Returns ``Repository`` class identified by the given alias or raises
72 Returns ``Repository`` class identified by the given alias or raises
72 VCSError if alias is not recognized or backend class cannot be imported.
73 VCSError if alias is not recognized or backend class cannot be imported.
73 """
74 """
74 if alias not in settings.BACKENDS:
75 if alias not in settings.BACKENDS:
75 raise VCSError(
76 raise VCSError(
76 f"Given alias '{alias}' is not recognized! "
77 f"Given alias '{alias}' is not recognized! "
77 f"Allowed aliases:{settings.BACKENDS.keys()}")
78 f"Allowed aliases:{settings.BACKENDS.keys()}")
78 backend_path = settings.BACKENDS[alias]
79 backend_path = settings.BACKENDS[alias]
79 klass = import_class(backend_path)
80 klass = import_class(backend_path)
80 return klass
81 return klass
81
82
82
83
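# Illustrative sketch (not part of the changeset): resolving a backend class by
# alias versus instantiating a repository straight from a path. The path is a
# hypothetical example and a configured vcsserver connection is assumed.
from rhodecode.lib.vcs.backends import get_backend, get_vcs_instance

def example_backend_usage(repo_path='/srv/repos/some-group/some-repo'):
    GitRepository = get_backend('git')   # raises VCSError for unknown aliases
    repo = get_vcs_instance(repo_path)   # returns None when no repo exists at the path
    return GitRepository, repo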
83 def get_supported_backends():
84 def get_supported_backends():
84 """
85 """
85 Returns list of aliases of supported backends.
86 Returns list of aliases of supported backends.
86 """
87 """
87 return settings.BACKENDS.keys()
88 return settings.BACKENDS.keys()
88
89
89
90
90 def get_vcsserver_service_data():
91 def get_vcsserver_service_data():
91 from rhodecode.lib.vcs import connection
92 from rhodecode.lib.vcs import connection
92 return connection.Service.get_vcsserver_service_data()
93 return connection.Service.get_vcsserver_service_data()
93
94
@@ -1,1212 +1,1219 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import os
19 import os
20 import re
20 import re
21 import shutil
21 import shutil
22 import time
22 import time
23 import logging
23 import logging
24 import traceback
24 import traceback
25 import datetime
25 import datetime
26
26
27 from pyramid.threadlocal import get_current_request
27 from pyramid.threadlocal import get_current_request
28 from sqlalchemy.orm import aliased
28 from sqlalchemy.orm import aliased
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode import events
31 from rhodecode import events
32 from rhodecode.lib.auth import HasUserGroupPermissionAny
32 from rhodecode.lib.auth import HasUserGroupPermissionAny
33 from rhodecode.lib.caching_query import FromCache
33 from rhodecode.lib.caching_query import FromCache
34 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError, AttachedArtifactsError
34 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError, AttachedArtifactsError
35 from rhodecode.lib import hooks_base
35 from rhodecode.lib import hooks_base
36 from rhodecode.lib.user_log_filter import user_log_filter
36 from rhodecode.lib.user_log_filter import user_log_filter
37 from rhodecode.lib.utils import make_db_config
37 from rhodecode.lib.utils import make_db_config
38 from rhodecode.lib.utils2 import (
38 from rhodecode.lib.utils2 import (
39 safe_str, remove_prefix, obfuscate_url_pw,
39 safe_str, remove_prefix, obfuscate_url_pw,
40 get_current_rhodecode_user, safe_int, action_logger_generic)
40 get_current_rhodecode_user, safe_int, action_logger_generic)
41 from rhodecode.lib.vcs.backends import get_backend
41 from rhodecode.lib.vcs.backends import get_backend
42 from rhodecode.lib.vcs.nodes import NodeKind
42 from rhodecode.lib.vcs.nodes import NodeKind
43 from rhodecode.model import BaseModel
43 from rhodecode.model import BaseModel
44 from rhodecode.model.db import (
44 from rhodecode.model.db import (
45 _hash_key, func, case, joinedload, or_, in_filter_generator,
45 _hash_key, func, case, joinedload, or_, in_filter_generator,
46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 from rhodecode.model.permission import PermissionModel
49 from rhodecode.model.permission import PermissionModel
50 from rhodecode.model.settings import VcsSettingsModel
50 from rhodecode.model.settings import VcsSettingsModel
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54
54
55 class RepoModel(BaseModel):
55 class RepoModel(BaseModel):
56
56
57 cls = Repository
57 cls = Repository
58
58
59 def _get_user_group(self, users_group):
59 def _get_user_group(self, users_group):
60 return self._get_instance(UserGroup, users_group,
60 return self._get_instance(UserGroup, users_group,
61 callback=UserGroup.get_by_group_name)
61 callback=UserGroup.get_by_group_name)
62
62
63 def _get_repo_group(self, repo_group):
63 def _get_repo_group(self, repo_group):
64 return self._get_instance(RepoGroup, repo_group,
64 return self._get_instance(RepoGroup, repo_group,
65 callback=RepoGroup.get_by_group_name)
65 callback=RepoGroup.get_by_group_name)
66
66
67 def _create_default_perms(self, repository, private):
67 def _create_default_perms(self, repository, private):
68 # create default permission
68 # create default permission
69 default = 'repository.read'
69 default = 'repository.read'
70 def_user = User.get_default_user()
70 def_user = User.get_default_user()
71 for p in def_user.user_perms:
71 for p in def_user.user_perms:
72 if p.permission.permission_name.startswith('repository.'):
72 if p.permission.permission_name.startswith('repository.'):
73 default = p.permission.permission_name
73 default = p.permission.permission_name
74 break
74 break
75
75
76 default_perm = 'repository.none' if private else default
76 default_perm = 'repository.none' if private else default
77
77
78 repo_to_perm = UserRepoToPerm()
78 repo_to_perm = UserRepoToPerm()
79 repo_to_perm.permission = Permission.get_by_key(default_perm)
79 repo_to_perm.permission = Permission.get_by_key(default_perm)
80
80
81 repo_to_perm.repository = repository
81 repo_to_perm.repository = repository
82 repo_to_perm.user = def_user
82 repo_to_perm.user = def_user
83
83
84 return repo_to_perm
84 return repo_to_perm
85
85
86 def get(self, repo_id):
86 def get(self, repo_id):
87 repo = self.sa.query(Repository) \
87 repo = self.sa.query(Repository) \
88 .filter(Repository.repo_id == repo_id)
88 .filter(Repository.repo_id == repo_id)
89
89
90 return repo.scalar()
90 return repo.scalar()
91
91
92 def get_repo(self, repository):
92 def get_repo(self, repository):
93 return self._get_repo(repository)
93 return self._get_repo(repository)
94
94
95 def get_by_repo_name(self, repo_name, cache=False):
95 def get_by_repo_name(self, repo_name, cache=False):
96 repo = self.sa.query(Repository) \
96 repo = self.sa.query(Repository) \
97 .filter(Repository.repo_name == repo_name)
97 .filter(Repository.repo_name == repo_name)
98
98
99 if cache:
99 if cache:
100 name_key = _hash_key(repo_name)
100 name_key = _hash_key(repo_name)
101 repo = repo.options(
101 repo = repo.options(
102 FromCache("sql_cache_short", f"get_repo_{name_key}"))
102 FromCache("sql_cache_short", f"get_repo_{name_key}"))
103 return repo.scalar()
103 return repo.scalar()
104
104
105 def _extract_id_from_repo_name(self, repo_name):
105 def _extract_id_from_repo_name(self, repo_name):
106 if repo_name.startswith('/'):
106 if repo_name.startswith('/'):
107 repo_name = repo_name.lstrip('/')
107 repo_name = repo_name.lstrip('/')
108 by_id_match = re.match(r'^_(\d+)', repo_name)
108 by_id_match = re.match(r'^_(\d+)', repo_name)
109 if by_id_match:
109 if by_id_match:
110 return by_id_match.groups()[0]
110 return by_id_match.groups()[0]
111
111
112 def get_repo_by_id(self, repo_name):
112 def get_repo_by_id(self, repo_name):
113 """
113 """
114 Extracts repo_name by id from special urls.
114 Extracts repo_name by id from special urls.
115 Example url is _11/repo_name
115 Example url is _11/repo_name
116
116
117 :param repo_name:
117 :param repo_name:
118 :return: repo object if matched else None
118 :return: repo object if matched else None
119 """
119 """
120 _repo_id = None
120 _repo_id = None
121 try:
121 try:
122 _repo_id = self._extract_id_from_repo_name(repo_name)
122 _repo_id = self._extract_id_from_repo_name(repo_name)
123 if _repo_id:
123 if _repo_id:
124 return self.get(_repo_id)
124 return self.get(_repo_id)
125 except Exception:
125 except Exception:
126 log.exception('Failed to extract repo_name from URL')
126 log.exception('Failed to extract repo_name from URL')
127 if _repo_id:
127 if _repo_id:
128 Session().rollback()
128 Session().rollback()
129
129
130 return None
130 return None
131
131
132 def get_repos_for_root(self, root, traverse=False):
132 def get_repos_for_root(self, root, traverse=False):
133 if traverse:
133 if traverse:
134 like_expression = u'{}%'.format(safe_str(root))
134 like_expression = u'{}%'.format(safe_str(root))
135 repos = Repository.query().filter(
135 repos = Repository.query().filter(
136 Repository.repo_name.like(like_expression)).all()
136 Repository.repo_name.like(like_expression)).all()
137 else:
137 else:
138 if root and not isinstance(root, RepoGroup):
138 if root and not isinstance(root, RepoGroup):
139 raise ValueError(
139 raise ValueError(
140 'Root must be an instance '
140 'Root must be an instance '
141 'of RepoGroup, got:{} instead'.format(type(root)))
141 'of RepoGroup, got:{} instead'.format(type(root)))
142 repos = Repository.query().filter(Repository.group == root).all()
142 repos = Repository.query().filter(Repository.group == root).all()
143 return repos
143 return repos
144
144
145 def get_url(self, repo, request=None, permalink=False):
145 def get_url(self, repo, request=None, permalink=False):
146 if not request:
146 if not request:
147 request = get_current_request()
147 request = get_current_request()
148
148
149 if not request:
149 if not request:
150 return
150 return
151
151
152 if permalink:
152 if permalink:
153 return request.route_url(
153 return request.route_url(
154 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
154 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
155 else:
155 else:
156 return request.route_url(
156 return request.route_url(
157 'repo_summary', repo_name=safe_str(repo.repo_name))
157 'repo_summary', repo_name=safe_str(repo.repo_name))
158
158
159 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
159 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
160 if not request:
160 if not request:
161 request = get_current_request()
161 request = get_current_request()
162
162
163 if not request:
163 if not request:
164 return
164 return
165
165
166 if permalink:
166 if permalink:
167 return request.route_url(
167 return request.route_url(
168 'repo_commit', repo_name=safe_str(repo.repo_id),
168 'repo_commit', repo_name=safe_str(repo.repo_id),
169 commit_id=commit_id)
169 commit_id=commit_id)
170
170
171 else:
171 else:
172 return request.route_url(
172 return request.route_url(
173 'repo_commit', repo_name=safe_str(repo.repo_name),
173 'repo_commit', repo_name=safe_str(repo.repo_name),
174 commit_id=commit_id)
174 commit_id=commit_id)
175
175
176 def get_repo_log(self, repo, filter_term):
176 def get_repo_log(self, repo, filter_term):
177 repo_log = UserLog.query()\
177 repo_log = UserLog.query()\
178 .filter(or_(UserLog.repository_id == repo.repo_id,
178 .filter(or_(UserLog.repository_id == repo.repo_id,
179 UserLog.repository_name == repo.repo_name))\
179 UserLog.repository_name == repo.repo_name))\
180 .options(joinedload(UserLog.user))\
180 .options(joinedload(UserLog.user))\
181 .options(joinedload(UserLog.repository))\
181 .options(joinedload(UserLog.repository))\
182 .order_by(UserLog.action_date.desc())
182 .order_by(UserLog.action_date.desc())
183
183
184 repo_log = user_log_filter(repo_log, filter_term)
184 repo_log = user_log_filter(repo_log, filter_term)
185 return repo_log
185 return repo_log
186
186
187 @classmethod
187 @classmethod
188 def update_commit_cache(cls, repositories=None):
188 def update_commit_cache(cls, repositories=None):
189 if not repositories:
189 if not repositories:
190 repositories = Repository.getAll()
190 repositories = Repository.getAll()
191 for repo in repositories:
191 for repo in repositories:
192 repo.update_commit_cache()
192 repo.update_commit_cache()
193
193
194 def get_repos_as_dict(self, repo_list=None, admin=False,
194 def get_repos_as_dict(self, repo_list=None, admin=False,
195 super_user_actions=False, short_name=None):
195 super_user_actions=False, short_name=None):
196
196
197 _render = get_current_request().get_partial_renderer(
197 _render = get_current_request().get_partial_renderer(
198 'rhodecode:templates/data_table/_dt_elements.mako')
198 'rhodecode:templates/data_table/_dt_elements.mako')
199 c = _render.get_call_context()
199 c = _render.get_call_context()
200 h = _render.get_helpers()
200 h = _render.get_helpers()
201
201
202 def quick_menu(repo_name):
202 def quick_menu(repo_name):
203 return _render('quick_menu', repo_name)
203 return _render('quick_menu', repo_name)
204
204
205 def repo_lnk(name, rtype, rstate, private, archived, fork_repo_name):
205 def repo_lnk(name, rtype, rstate, private, archived, fork_repo_name):
206 if short_name is not None:
206 if short_name is not None:
207 short_name_var = short_name
207 short_name_var = short_name
208 else:
208 else:
209 short_name_var = not admin
209 short_name_var = not admin
210 return _render('repo_name', name, rtype, rstate, private, archived, fork_repo_name,
210 return _render('repo_name', name, rtype, rstate, private, archived, fork_repo_name,
211 short_name=short_name_var, admin=False)
211 short_name=short_name_var, admin=False)
212
212
213 def last_change(last_change):
213 def last_change(last_change):
214 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
214 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
215 ts = time.time()
215 ts = time.time()
216 utc_offset = (datetime.datetime.fromtimestamp(ts)
216 utc_offset = (datetime.datetime.fromtimestamp(ts)
217 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
217 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
218 last_change = last_change + datetime.timedelta(seconds=utc_offset)
218 last_change = last_change + datetime.timedelta(seconds=utc_offset)
219
219
220 return _render("last_change", last_change)
220 return _render("last_change", last_change)
221
221
222 def rss_lnk(repo_name):
222 def rss_lnk(repo_name):
223 return _render("rss", repo_name)
223 return _render("rss", repo_name)
224
224
225 def atom_lnk(repo_name):
225 def atom_lnk(repo_name):
226 return _render("atom", repo_name)
226 return _render("atom", repo_name)
227
227
228 def last_rev(repo_name, cs_cache):
228 def last_rev(repo_name, cs_cache):
229 return _render('revision', repo_name, cs_cache.get('revision'),
229 return _render('revision', repo_name, cs_cache.get('revision'),
230 cs_cache.get('raw_id'), cs_cache.get('author'),
230 cs_cache.get('raw_id'), cs_cache.get('author'),
231 cs_cache.get('message'), cs_cache.get('date'))
231 cs_cache.get('message'), cs_cache.get('date'))
232
232
233 def desc(desc):
233 def desc(desc):
234 return _render('repo_desc', desc, c.visual.stylify_metatags)
234 return _render('repo_desc', desc, c.visual.stylify_metatags)
235
235
236 def state(repo_state):
236 def state(repo_state):
237 return _render("repo_state", repo_state)
237 return _render("repo_state", repo_state)
238
238
239 def repo_actions(repo_name):
239 def repo_actions(repo_name):
240 return _render('repo_actions', repo_name, super_user_actions)
240 return _render('repo_actions', repo_name, super_user_actions)
241
241
242 def user_profile(username):
242 def user_profile(username):
243 return _render('user_profile', username)
243 return _render('user_profile', username)
244
244
245 repos_data = []
245 repos_data = []
246 for repo in repo_list:
246 for repo in repo_list:
247 # NOTE(marcink): because we use only raw column we need to load it like that
247 # NOTE(marcink): because we use only raw column we need to load it like that
248 changeset_cache = Repository._load_changeset_cache(
248 changeset_cache = Repository._load_changeset_cache(
249 repo.repo_id, repo._changeset_cache)
249 repo.repo_id, repo._changeset_cache)
250
250
251 row = {
251 row = {
252 "menu": quick_menu(repo.repo_name),
252 "menu": quick_menu(repo.repo_name),
253
253
254 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
254 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
255 repo.private, repo.archived, repo.fork_repo_name),
255 repo.private, repo.archived, repo.fork_repo_name),
256
256
257 "desc": desc(h.escape(repo.description)),
257 "desc": desc(h.escape(repo.description)),
258
258
259 "last_change": last_change(repo.updated_on),
259 "last_change": last_change(repo.updated_on),
260
260
261 "last_changeset": last_rev(repo.repo_name, changeset_cache),
261 "last_changeset": last_rev(repo.repo_name, changeset_cache),
262 "last_changeset_raw": changeset_cache.get('revision'),
262 "last_changeset_raw": changeset_cache.get('revision'),
263
263
264 "owner": user_profile(repo.owner_username),
264 "owner": user_profile(repo.owner_username),
265
265
266 "state": state(repo.repo_state),
266 "state": state(repo.repo_state),
267 "rss": rss_lnk(repo.repo_name),
267 "rss": rss_lnk(repo.repo_name),
268 "atom": atom_lnk(repo.repo_name),
268 "atom": atom_lnk(repo.repo_name),
269 }
269 }
270 if admin:
270 if admin:
271 row.update({
271 row.update({
272 "action": repo_actions(repo.repo_name),
272 "action": repo_actions(repo.repo_name),
273 })
273 })
274 repos_data.append(row)
274 repos_data.append(row)
275
275
276 return repos_data
276 return repos_data
277
277
278 def get_repos_data_table(
278 def get_repos_data_table(
279 self, draw, start, limit,
279 self, draw, start, limit,
280 search_q, order_by, order_dir,
280 search_q, order_by, order_dir,
281 auth_user, repo_group_id):
281 auth_user, repo_group_id):
282 from rhodecode.model.scm import RepoList
282 from rhodecode.model.scm import RepoList
283
283
284 _perms = ['repository.read', 'repository.write', 'repository.admin']
284 _perms = ['repository.read', 'repository.write', 'repository.admin']
285
285
286 repos = Repository.query() \
286 repos = Repository.query() \
287 .filter(Repository.group_id == repo_group_id) \
287 .filter(Repository.group_id == repo_group_id) \
288 .all()
288 .all()
289 auth_repo_list = RepoList(
289 auth_repo_list = RepoList(
290 repos, perm_set=_perms,
290 repos, perm_set=_perms,
291 extra_kwargs=dict(user=auth_user))
291 extra_kwargs=dict(user=auth_user))
292
292
293 allowed_ids = [-1]
293 allowed_ids = [-1]
294 for repo in auth_repo_list:
294 for repo in auth_repo_list:
295 allowed_ids.append(repo.repo_id)
295 allowed_ids.append(repo.repo_id)
296
296
297 repos_data_total_count = Repository.query() \
297 repos_data_total_count = Repository.query() \
298 .filter(Repository.group_id == repo_group_id) \
298 .filter(Repository.group_id == repo_group_id) \
299 .filter(or_(
299 .filter(or_(
300 # generate multiple IN to fix limitation problems
300 # generate multiple IN to fix limitation problems
301 *in_filter_generator(Repository.repo_id, allowed_ids))
301 *in_filter_generator(Repository.repo_id, allowed_ids))
302 ) \
302 ) \
303 .count()
303 .count()
304
304
305 RepoFork = aliased(Repository)
305 RepoFork = aliased(Repository)
306 OwnerUser = aliased(User)
306 OwnerUser = aliased(User)
307 base_q = Session.query(
307 base_q = Session.query(
308 Repository.repo_id,
308 Repository.repo_id,
309 Repository.repo_name,
309 Repository.repo_name,
310 Repository.description,
310 Repository.description,
311 Repository.repo_type,
311 Repository.repo_type,
312 Repository.repo_state,
312 Repository.repo_state,
313 Repository.private,
313 Repository.private,
314 Repository.archived,
314 Repository.archived,
315 Repository.updated_on,
315 Repository.updated_on,
316 Repository._changeset_cache,
316 Repository._changeset_cache,
317 RepoFork.repo_name.label('fork_repo_name'),
317 RepoFork.repo_name.label('fork_repo_name'),
318 OwnerUser.username.label('owner_username'),
318 OwnerUser.username.label('owner_username'),
319 ) \
319 ) \
320 .filter(Repository.group_id == repo_group_id) \
320 .filter(Repository.group_id == repo_group_id) \
321 .filter(or_(
321 .filter(or_(
322 # generate multiple IN to fix limitation problems
322 # generate multiple IN to fix limitation problems
323 *in_filter_generator(Repository.repo_id, allowed_ids))
323 *in_filter_generator(Repository.repo_id, allowed_ids))
324 ) \
324 ) \
325 .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id) \
325 .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id) \
326 .join(OwnerUser, Repository.user_id == OwnerUser.user_id)
326 .join(OwnerUser, Repository.user_id == OwnerUser.user_id)
327
327
328 repos_data_total_filtered_count = base_q.count()
328 repos_data_total_filtered_count = base_q.count()
329
329
330 sort_defined = False
330 sort_defined = False
331 if order_by == 'repo_name':
331 if order_by == 'repo_name':
332 sort_col = func.lower(Repository.repo_name)
332 sort_col = func.lower(Repository.repo_name)
333 sort_defined = True
333 sort_defined = True
334 elif order_by == 'user_username':
334 elif order_by == 'user_username':
335 sort_col = User.username
335 sort_col = User.username
336 else:
336 else:
337 sort_col = getattr(Repository, order_by, None)
337 sort_col = getattr(Repository, order_by, None)
338
338
339 if sort_defined or sort_col:
339 if sort_defined or sort_col:
340 if order_dir == 'asc':
340 if order_dir == 'asc':
341 sort_col = sort_col.asc()
341 sort_col = sort_col.asc()
342 else:
342 else:
343 sort_col = sort_col.desc()
343 sort_col = sort_col.desc()
344
344
345 base_q = base_q.order_by(sort_col)
345 base_q = base_q.order_by(sort_col)
346 base_q = base_q.offset(start).limit(limit)
346 base_q = base_q.offset(start).limit(limit)
347
347
348 repos_list = base_q.all()
348 repos_list = base_q.all()
349
349
350 repos_data = RepoModel().get_repos_as_dict(
350 repos_data = RepoModel().get_repos_as_dict(
351 repo_list=repos_list, admin=False)
351 repo_list=repos_list, admin=False)
352
352
353 data = ({
353 data = ({
354 'draw': draw,
354 'draw': draw,
355 'data': repos_data,
355 'data': repos_data,
356 'recordsTotal': repos_data_total_count,
356 'recordsTotal': repos_data_total_count,
357 'recordsFiltered': repos_data_total_filtered_count,
357 'recordsFiltered': repos_data_total_filtered_count,
358 })
358 })
359 return data
359 return data
360
360
361 def _get_defaults(self, repo_name):
361 def _get_defaults(self, repo_name):
362 """
362 """
363 Gets information about the repository and returns a dict for
363 Gets information about the repository and returns a dict for
364 usage in forms
364 usage in forms
365
365
366 :param repo_name:
366 :param repo_name:
367 """
367 """
368
368
369 repo_info = Repository.get_by_repo_name(repo_name)
369 repo_info = Repository.get_by_repo_name(repo_name)
370
370
371 if repo_info is None:
371 if repo_info is None:
372 return None
372 return None
373
373
374 defaults = repo_info.get_dict()
374 defaults = repo_info.get_dict()
375 defaults['repo_name'] = repo_info.just_name
375 defaults['repo_name'] = repo_info.just_name
376
376
377 groups = repo_info.groups_with_parents
377 groups = repo_info.groups_with_parents
378 parent_group = groups[-1] if groups else None
378 parent_group = groups[-1] if groups else None
379
379
380 # we use -1 because that is how an empty group is marked in the HTML form
380 # we use -1 because that is how an empty group is marked in the HTML form
381 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
381 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
382
382
383 keys_to_process = (
383 keys_to_process = (
384 {'k': 'repo_type', 'strip': False},
384 {'k': 'repo_type', 'strip': False},
385 {'k': 'repo_enable_downloads', 'strip': True},
385 {'k': 'repo_enable_downloads', 'strip': True},
386 {'k': 'repo_description', 'strip': True},
386 {'k': 'repo_description', 'strip': True},
387 {'k': 'repo_enable_locking', 'strip': True},
387 {'k': 'repo_enable_locking', 'strip': True},
388 {'k': 'repo_landing_rev', 'strip': True},
388 {'k': 'repo_landing_rev', 'strip': True},
389 {'k': 'clone_uri', 'strip': False},
389 {'k': 'clone_uri', 'strip': False},
390 {'k': 'push_uri', 'strip': False},
390 {'k': 'push_uri', 'strip': False},
391 {'k': 'repo_private', 'strip': True},
391 {'k': 'repo_private', 'strip': True},
392 {'k': 'repo_enable_statistics', 'strip': True}
392 {'k': 'repo_enable_statistics', 'strip': True}
393 )
393 )
394
394
395 for item in keys_to_process:
395 for item in keys_to_process:
396 attr = item['k']
396 attr = item['k']
397 if item['strip']:
397 if item['strip']:
398 attr = remove_prefix(item['k'], 'repo_')
398 attr = remove_prefix(item['k'], 'repo_')
399
399
400 val = defaults[attr]
400 val = defaults[attr]
401 if item['k'] == 'repo_landing_rev':
401 if item['k'] == 'repo_landing_rev':
402 val = ':'.join(defaults[attr])
402 val = ':'.join(defaults[attr])
403 defaults[item['k']] = val
403 defaults[item['k']] = val
404 if item['k'] == 'clone_uri':
404 if item['k'] == 'clone_uri':
405 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
405 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
406 if item['k'] == 'push_uri':
406 if item['k'] == 'push_uri':
407 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
407 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
408
408
409 # fill owner
409 # fill owner
410 if repo_info.user:
410 if repo_info.user:
411 defaults.update({'user': repo_info.user.username})
411 defaults.update({'user': repo_info.user.username})
412 else:
412 else:
413 replacement_user = User.get_first_super_admin().username
413 replacement_user = User.get_first_super_admin().username
414 defaults.update({'user': replacement_user})
414 defaults.update({'user': replacement_user})
415
415
416 return defaults
416 return defaults
417
417
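    # Illustrative sketch, not part of the original module: _get_defaults() returns a
    # flat dict keyed the same way as the repo settings form fields, so a caller could
    # (assuming a repository named 'mygroup/myrepo' exists) do something like:
    #
    #   defaults = RepoModel()._get_defaults('mygroup/myrepo')
    #   if defaults is not None:
    #       assert defaults['repo_name'] == 'myrepo'      # just the name, group prefix stripped
    #       print(defaults['repo_group'])                 # parent group id, or -1 for root level
    #       print(defaults['repo_landing_rev'])           # e.g. 'branch:default', re-joined with ':'
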
418 def update(self, repo, **kwargs):
418 def update(self, repo, **kwargs):
419 try:
419 try:
420 cur_repo = self._get_repo(repo)
420 cur_repo = self._get_repo(repo)
421 source_repo_name = cur_repo.repo_name
421 source_repo_name = cur_repo.repo_name
422
422
423 affected_user_ids = []
423 affected_user_ids = []
424 if 'user' in kwargs:
424 if 'user' in kwargs:
425 old_owner_id = cur_repo.user.user_id
425 old_owner_id = cur_repo.user.user_id
426 new_owner = User.get_by_username(kwargs['user'])
426 new_owner = User.get_by_username(kwargs['user'])
427 cur_repo.user = new_owner
427 cur_repo.user = new_owner
428
428
429 if old_owner_id != new_owner.user_id:
429 if old_owner_id != new_owner.user_id:
430 affected_user_ids = [new_owner.user_id, old_owner_id]
430 affected_user_ids = [new_owner.user_id, old_owner_id]
431
431
432 if 'repo_group' in kwargs:
432 if 'repo_group' in kwargs:
433 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
433 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
434 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
434 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
435
435
436 update_keys = [
436 update_keys = [
437 (1, 'repo_description'),
437 (1, 'repo_description'),
438 (1, 'repo_landing_rev'),
438 (1, 'repo_landing_rev'),
439 (1, 'repo_private'),
439 (1, 'repo_private'),
440 (1, 'repo_enable_downloads'),
440 (1, 'repo_enable_downloads'),
441 (1, 'repo_enable_locking'),
441 (1, 'repo_enable_locking'),
442 (1, 'repo_enable_statistics'),
442 (1, 'repo_enable_statistics'),
443 (0, 'clone_uri'),
443 (0, 'clone_uri'),
444 (0, 'push_uri'),
444 (0, 'push_uri'),
445 (0, 'fork_id')
445 (0, 'fork_id')
446 ]
446 ]
447 for strip, k in update_keys:
447 for strip, k in update_keys:
448 if k in kwargs:
448 if k in kwargs:
449 val = kwargs[k]
449 val = kwargs[k]
450 if strip:
450 if strip:
451 k = remove_prefix(k, 'repo_')
451 k = remove_prefix(k, 'repo_')
452
452
453 setattr(cur_repo, k, val)
453 setattr(cur_repo, k, val)
454
454
455 new_name = source_repo_name
455 new_name = source_repo_name
456 if 'repo_name' in kwargs:
456 if 'repo_name' in kwargs:
457 new_name = cur_repo.get_new_name(kwargs['repo_name'])
457 new_name = cur_repo.get_new_name(kwargs['repo_name'])
458 cur_repo.repo_name = new_name
458 cur_repo.repo_name = new_name
459
459
460 if 'repo_private' in kwargs:
460 if 'repo_private' in kwargs:
461 # if private flag is set to True, reset default permission to NONE
461 # if private flag is set to True, reset default permission to NONE
462 set_private_to = kwargs.get('repo_private')
462 set_private_to = kwargs.get('repo_private')
463 if set_private_to:
463 if set_private_to:
464 EMPTY_PERM = 'repository.none'
464 EMPTY_PERM = 'repository.none'
465 RepoModel().grant_user_permission(
465 RepoModel().grant_user_permission(
466 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
466 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
467 )
467 )
468 if set_private_to != cur_repo.private:
468 if set_private_to != cur_repo.private:
# NOTE(dan): when the repo private mode changes we need to notify all USERS,
# simply because this value is now different than it was before
471 affected_user_ids = User.get_all_user_ids()
471 affected_user_ids = User.get_all_user_ids()
472
472
473 if kwargs.get('repo_landing_rev'):
473 if kwargs.get('repo_landing_rev'):
474 landing_rev_val = kwargs['repo_landing_rev']
474 landing_rev_val = kwargs['repo_landing_rev']
475 RepoModel().set_landing_rev(cur_repo, landing_rev_val)
475 RepoModel().set_landing_rev(cur_repo, landing_rev_val)
476
476
477 # handle extra fields
477 # handle extra fields
478 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
478 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
479 k = RepositoryField.un_prefix_key(field)
479 k = RepositoryField.un_prefix_key(field)
480 ex_field = RepositoryField.get_by_key_name(
480 ex_field = RepositoryField.get_by_key_name(
481 key=k, repo=cur_repo)
481 key=k, repo=cur_repo)
482 if ex_field:
482 if ex_field:
483 ex_field.field_value = kwargs[field]
483 ex_field.field_value = kwargs[field]
484 self.sa.add(ex_field)
484 self.sa.add(ex_field)
485
485
486 self.sa.add(cur_repo)
486 self.sa.add(cur_repo)
487
487
488 if source_repo_name != new_name:
488 if source_repo_name != new_name:
489 # rename repository
489 # rename repository
490 self._rename_filesystem_repo(
490 self._rename_filesystem_repo(
491 old=source_repo_name, new=new_name)
491 old=source_repo_name, new=new_name)
492
492
493 if affected_user_ids:
493 if affected_user_ids:
494 PermissionModel().trigger_permission_flush(affected_user_ids)
494 PermissionModel().trigger_permission_flush(affected_user_ids)
495
495
496 return cur_repo
496 return cur_repo
497 except Exception:
497 except Exception:
498 log.error(traceback.format_exc())
498 log.error(traceback.format_exc())
499 raise
499 raise
500
500
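    # Illustrative sketch, not part of the original module: update() takes the form-style
    # 'repo_*' keys listed in update_keys above and strips the prefix before writing the
    # attribute, so a (hypothetical) call could look like:
    #
    #   repo = RepoModel().update('mygroup/myrepo',
    #                             repo_description='new description',
    #                             repo_private=True,
    #                             repo_landing_rev='branch:default')
    #
    # Note that changes are only added to the session here; committing is left to the
    # caller (e.g. Session().commit()).
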
501 def _create_repo(self, repo_name, repo_type, description, owner,
501 def _create_repo(self, repo_name, repo_type, description, owner,
502 private=False, clone_uri=None, repo_group=None,
502 private=False, clone_uri=None, repo_group=None,
503 landing_rev=None, fork_of=None,
503 landing_rev=None, fork_of=None,
504 copy_fork_permissions=False, enable_statistics=False,
504 copy_fork_permissions=False, enable_statistics=False,
505 enable_locking=False, enable_downloads=False,
505 enable_locking=False, enable_downloads=False,
506 copy_group_permissions=False,
506 copy_group_permissions=False,
507 state=Repository.STATE_PENDING):
507 state=Repository.STATE_PENDING):
508 """
508 """
Create a repository inside the database with PENDING state; this should only
be executed by create(), with the exception of importing existing repos
512 """
512 """
513 from rhodecode.model.scm import ScmModel
513 from rhodecode.model.scm import ScmModel
514
514
515 owner = self._get_user(owner)
515 owner = self._get_user(owner)
516 fork_of = self._get_repo(fork_of)
516 fork_of = self._get_repo(fork_of)
517 repo_group = self._get_repo_group(safe_int(repo_group))
517 repo_group = self._get_repo_group(safe_int(repo_group))
518 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
518 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
519 landing_rev = landing_rev or default_landing_ref
519 landing_rev = landing_rev or default_landing_ref
520
520
521 try:
521 try:
522 repo_name = safe_str(repo_name)
522 repo_name = safe_str(repo_name)
523 description = safe_str(description)
523 description = safe_str(description)
524 # repo name is just a name of repository
524 # repo name is just a name of repository
525 # while repo_name_full is a full qualified name that is combined
525 # while repo_name_full is a full qualified name that is combined
526 # with name and path of group
526 # with name and path of group
527 repo_name_full = repo_name
527 repo_name_full = repo_name
528 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
528 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
529
529
530 new_repo = Repository()
530 new_repo = Repository()
531 new_repo.repo_state = state
531 new_repo.repo_state = state
532 new_repo.enable_statistics = False
532 new_repo.enable_statistics = False
533 new_repo.repo_name = repo_name_full
533 new_repo.repo_name = repo_name_full
534 new_repo.repo_type = repo_type
534 new_repo.repo_type = repo_type
535 new_repo.user = owner
535 new_repo.user = owner
536 new_repo.group = repo_group
536 new_repo.group = repo_group
537 new_repo.description = description or repo_name
537 new_repo.description = description or repo_name
538 new_repo.private = private
538 new_repo.private = private
539 new_repo.archived = False
539 new_repo.archived = False
540 new_repo.clone_uri = clone_uri
540 new_repo.clone_uri = clone_uri
541 new_repo.landing_rev = landing_rev
541 new_repo.landing_rev = landing_rev
542
542
543 new_repo.enable_statistics = enable_statistics
543 new_repo.enable_statistics = enable_statistics
544 new_repo.enable_locking = enable_locking
544 new_repo.enable_locking = enable_locking
545 new_repo.enable_downloads = enable_downloads
545 new_repo.enable_downloads = enable_downloads
546
546
547 if repo_group:
547 if repo_group:
548 new_repo.enable_locking = repo_group.enable_locking
548 new_repo.enable_locking = repo_group.enable_locking
549
549
550 if fork_of:
550 if fork_of:
551 parent_repo = fork_of
551 parent_repo = fork_of
552 new_repo.fork = parent_repo
552 new_repo.fork = parent_repo
553
553
554 events.trigger(events.RepoPreCreateEvent(new_repo))
554 events.trigger(events.RepoPreCreateEvent(new_repo))
555
555
556 self.sa.add(new_repo)
556 self.sa.add(new_repo)
557
557
558 EMPTY_PERM = 'repository.none'
558 EMPTY_PERM = 'repository.none'
559 if fork_of and copy_fork_permissions:
559 if fork_of and copy_fork_permissions:
560 repo = fork_of
560 repo = fork_of
561 user_perms = UserRepoToPerm.query() \
561 user_perms = UserRepoToPerm.query() \
562 .filter(UserRepoToPerm.repository == repo).all()
562 .filter(UserRepoToPerm.repository == repo).all()
563 group_perms = UserGroupRepoToPerm.query() \
563 group_perms = UserGroupRepoToPerm.query() \
564 .filter(UserGroupRepoToPerm.repository == repo).all()
564 .filter(UserGroupRepoToPerm.repository == repo).all()
565
565
566 for perm in user_perms:
566 for perm in user_perms:
567 UserRepoToPerm.create(
567 UserRepoToPerm.create(
568 perm.user, new_repo, perm.permission)
568 perm.user, new_repo, perm.permission)
569
569
570 for perm in group_perms:
570 for perm in group_perms:
571 UserGroupRepoToPerm.create(
571 UserGroupRepoToPerm.create(
572 perm.users_group, new_repo, perm.permission)
572 perm.users_group, new_repo, perm.permission)
573 # in case we copy permissions and also set this repo to private
573 # in case we copy permissions and also set this repo to private
574 # override the default user permission to make it a private repo
574 # override the default user permission to make it a private repo
575 if private:
575 if private:
576 RepoModel(self.sa).grant_user_permission(
576 RepoModel(self.sa).grant_user_permission(
577 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
577 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
578
578
579 elif repo_group and copy_group_permissions:
579 elif repo_group and copy_group_permissions:
580 user_perms = UserRepoGroupToPerm.query() \
580 user_perms = UserRepoGroupToPerm.query() \
581 .filter(UserRepoGroupToPerm.group == repo_group).all()
581 .filter(UserRepoGroupToPerm.group == repo_group).all()
582
582
583 group_perms = UserGroupRepoGroupToPerm.query() \
583 group_perms = UserGroupRepoGroupToPerm.query() \
584 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
584 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
585
585
586 for perm in user_perms:
586 for perm in user_perms:
587 perm_name = perm.permission.permission_name.replace(
587 perm_name = perm.permission.permission_name.replace(
588 'group.', 'repository.')
588 'group.', 'repository.')
589 perm_obj = Permission.get_by_key(perm_name)
589 perm_obj = Permission.get_by_key(perm_name)
590 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
590 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
591
591
592 for perm in group_perms:
592 for perm in group_perms:
593 perm_name = perm.permission.permission_name.replace(
593 perm_name = perm.permission.permission_name.replace(
594 'group.', 'repository.')
594 'group.', 'repository.')
595 perm_obj = Permission.get_by_key(perm_name)
595 perm_obj = Permission.get_by_key(perm_name)
596 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
596 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
597
597
598 if private:
598 if private:
599 RepoModel(self.sa).grant_user_permission(
599 RepoModel(self.sa).grant_user_permission(
600 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
600 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
601
601
602 else:
602 else:
603 perm_obj = self._create_default_perms(new_repo, private)
603 perm_obj = self._create_default_perms(new_repo, private)
604 self.sa.add(perm_obj)
604 self.sa.add(perm_obj)
605
605
606 # now automatically start following this repository as owner
606 # now automatically start following this repository as owner
607 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
607 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
608
608
609 # we need to flush here, in order to check if database won't
609 # we need to flush here, in order to check if database won't
610 # throw any exceptions, create filesystem dirs at the very end
610 # throw any exceptions, create filesystem dirs at the very end
611 self.sa.flush()
611 self.sa.flush()
612 events.trigger(events.RepoCreateEvent(new_repo, actor=owner))
612 events.trigger(events.RepoCreateEvent(new_repo, actor=owner))
613 return new_repo
613 return new_repo
614
614
615 except Exception:
615 except Exception:
616 log.error(traceback.format_exc())
616 log.error(traceback.format_exc())
617 raise
617 raise
618
618
619 def create(self, form_data, cur_user):
619 def create(self, form_data, cur_user):
620 """
620 """
621 Create repository using celery tasks
621 Create repository using celery tasks
622
622
623 :param form_data:
623 :param form_data:
624 :param cur_user:
624 :param cur_user:
625 """
625 """
626 from rhodecode.lib.celerylib import tasks, run_task
626 from rhodecode.lib.celerylib import tasks, run_task
627 return run_task(tasks.create_repo, form_data, cur_user)
627 return run_task(tasks.create_repo, form_data, cur_user)
628
628
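    # Illustrative sketch, not part of the original module: create() only schedules the
    # celery task and returns whatever run_task() gives back. Based on the parameters of
    # _create_repo() above, the (assumed, not confirmed by this file) form_data would
    # carry keys such as:
    #
    #   form_data = {
    #       'repo_name': 'mygroup/myrepo',
    #       'repo_type': 'git',
    #       'repo_description': 'example',
    #       'repo_owner': 'admin',          # assumed key name
    #   }
    #   task = RepoModel().create(form_data, cur_user='admin')
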
629 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
629 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
630 perm_deletions=None, check_perms=True,
630 perm_deletions=None, check_perms=True,
631 cur_user=None):
631 cur_user=None):
632 if not perm_additions:
632 if not perm_additions:
633 perm_additions = []
633 perm_additions = []
634 if not perm_updates:
634 if not perm_updates:
635 perm_updates = []
635 perm_updates = []
636 if not perm_deletions:
636 if not perm_deletions:
637 perm_deletions = []
637 perm_deletions = []
638
638
639 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
639 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
640
640
641 changes = {
641 changes = {
642 'added': [],
642 'added': [],
643 'updated': [],
643 'updated': [],
644 'deleted': [],
644 'deleted': [],
645 'default_user_changed': None
645 'default_user_changed': None
646 }
646 }
647
647
648 repo = self._get_repo(repo)
648 repo = self._get_repo(repo)
649
649
650 # update permissions
650 # update permissions
651 for member_id, perm, member_type in perm_updates:
651 for member_id, perm, member_type in perm_updates:
652 member_id = int(member_id)
652 member_id = int(member_id)
653 if member_type == 'user':
653 if member_type == 'user':
654 member_name = User.get(member_id).username
654 member_name = User.get(member_id).username
655 if member_name == User.DEFAULT_USER:
655 if member_name == User.DEFAULT_USER:
656 # NOTE(dan): detect if we changed permissions for default user
656 # NOTE(dan): detect if we changed permissions for default user
657 perm_obj = self.sa.query(UserRepoToPerm) \
657 perm_obj = self.sa.query(UserRepoToPerm) \
658 .filter(UserRepoToPerm.user_id == member_id) \
658 .filter(UserRepoToPerm.user_id == member_id) \
659 .filter(UserRepoToPerm.repository == repo) \
659 .filter(UserRepoToPerm.repository == repo) \
660 .scalar()
660 .scalar()
661 if perm_obj and perm_obj.permission.permission_name != perm:
661 if perm_obj and perm_obj.permission.permission_name != perm:
662 changes['default_user_changed'] = True
662 changes['default_user_changed'] = True
663
663
664 # this updates also current one if found
664 # this updates also current one if found
665 self.grant_user_permission(
665 self.grant_user_permission(
666 repo=repo, user=member_id, perm=perm)
666 repo=repo, user=member_id, perm=perm)
667 elif member_type == 'user_group':
667 elif member_type == 'user_group':
668 # check if we have permissions to alter this usergroup
668 # check if we have permissions to alter this usergroup
669 member_name = UserGroup.get(member_id).users_group_name
669 member_name = UserGroup.get(member_id).users_group_name
670 if not check_perms or HasUserGroupPermissionAny(
670 if not check_perms or HasUserGroupPermissionAny(
671 *req_perms)(member_name, user=cur_user):
671 *req_perms)(member_name, user=cur_user):
672 self.grant_user_group_permission(
672 self.grant_user_group_permission(
673 repo=repo, group_name=member_id, perm=perm)
673 repo=repo, group_name=member_id, perm=perm)
674 else:
674 else:
675 raise ValueError("member_type must be 'user' or 'user_group' "
675 raise ValueError("member_type must be 'user' or 'user_group' "
676 "got {} instead".format(member_type))
676 "got {} instead".format(member_type))
677 changes['updated'].append({'type': member_type, 'id': member_id,
677 changes['updated'].append({'type': member_type, 'id': member_id,
678 'name': member_name, 'new_perm': perm})
678 'name': member_name, 'new_perm': perm})
679
679
680 # set new permissions
680 # set new permissions
681 for member_id, perm, member_type in perm_additions:
681 for member_id, perm, member_type in perm_additions:
682 member_id = int(member_id)
682 member_id = int(member_id)
683 if member_type == 'user':
683 if member_type == 'user':
684 member_name = User.get(member_id).username
684 member_name = User.get(member_id).username
685 self.grant_user_permission(
685 self.grant_user_permission(
686 repo=repo, user=member_id, perm=perm)
686 repo=repo, user=member_id, perm=perm)
687 elif member_type == 'user_group':
687 elif member_type == 'user_group':
688 # check if we have permissions to alter this usergroup
688 # check if we have permissions to alter this usergroup
689 member_name = UserGroup.get(member_id).users_group_name
689 member_name = UserGroup.get(member_id).users_group_name
690 if not check_perms or HasUserGroupPermissionAny(
690 if not check_perms or HasUserGroupPermissionAny(
691 *req_perms)(member_name, user=cur_user):
691 *req_perms)(member_name, user=cur_user):
692 self.grant_user_group_permission(
692 self.grant_user_group_permission(
693 repo=repo, group_name=member_id, perm=perm)
693 repo=repo, group_name=member_id, perm=perm)
694 else:
694 else:
695 raise ValueError("member_type must be 'user' or 'user_group' "
695 raise ValueError("member_type must be 'user' or 'user_group' "
696 "got {} instead".format(member_type))
696 "got {} instead".format(member_type))
697
697
698 changes['added'].append({'type': member_type, 'id': member_id,
698 changes['added'].append({'type': member_type, 'id': member_id,
699 'name': member_name, 'new_perm': perm})
699 'name': member_name, 'new_perm': perm})
700 # delete permissions
700 # delete permissions
701 for member_id, perm, member_type in perm_deletions:
701 for member_id, perm, member_type in perm_deletions:
702 member_id = int(member_id)
702 member_id = int(member_id)
703 if member_type == 'user':
703 if member_type == 'user':
704 member_name = User.get(member_id).username
704 member_name = User.get(member_id).username
705 self.revoke_user_permission(repo=repo, user=member_id)
705 self.revoke_user_permission(repo=repo, user=member_id)
706 elif member_type == 'user_group':
706 elif member_type == 'user_group':
707 # check if we have permissions to alter this usergroup
707 # check if we have permissions to alter this usergroup
708 member_name = UserGroup.get(member_id).users_group_name
708 member_name = UserGroup.get(member_id).users_group_name
709 if not check_perms or HasUserGroupPermissionAny(
709 if not check_perms or HasUserGroupPermissionAny(
710 *req_perms)(member_name, user=cur_user):
710 *req_perms)(member_name, user=cur_user):
711 self.revoke_user_group_permission(
711 self.revoke_user_group_permission(
712 repo=repo, group_name=member_id)
712 repo=repo, group_name=member_id)
713 else:
713 else:
714 raise ValueError("member_type must be 'user' or 'user_group' "
714 raise ValueError("member_type must be 'user' or 'user_group' "
715 "got {} instead".format(member_type))
715 "got {} instead".format(member_type))
716
716
717 changes['deleted'].append({'type': member_type, 'id': member_id,
717 changes['deleted'].append({'type': member_type, 'id': member_id,
718 'name': member_name, 'new_perm': perm})
718 'name': member_name, 'new_perm': perm})
719 return changes
719 return changes
720
720
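    # Illustrative sketch, not part of the original module: each entry in perm_additions,
    # perm_updates and perm_deletions is a (member_id, permission_name, member_type) triple,
    # where member_type is 'user' or 'user_group', e.g. (placeholder ids and user):
    #
    #   changes = RepoModel().update_permissions(
    #       'mygroup/myrepo',
    #       perm_additions=[(some_user_id, 'repository.write', 'user')],
    #       perm_deletions=[(some_group_id, 'repository.read', 'user_group')],
    #       cur_user=request_user)
    #   # changes -> {'added': [...], 'updated': [...], 'deleted': [...],
    #   #             'default_user_changed': None or True}
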
721 def create_fork(self, form_data, cur_user):
721 def create_fork(self, form_data, cur_user):
722 """
722 """
723 Simple wrapper into executing celery task for fork creation
723 Simple wrapper into executing celery task for fork creation
724
724
725 :param form_data:
725 :param form_data:
726 :param cur_user:
726 :param cur_user:
727 """
727 """
728 from rhodecode.lib.celerylib import tasks, run_task
728 from rhodecode.lib.celerylib import tasks, run_task
729 return run_task(tasks.create_repo_fork, form_data, cur_user)
729 return run_task(tasks.create_repo_fork, form_data, cur_user)
730
730
731 def archive(self, repo):
731 def archive(self, repo):
732 """
732 """
733 Archive given repository. Set archive flag.
733 Archive given repository. Set archive flag.
734
734
735 :param repo:
735 :param repo:
736 """
736 """
737 repo = self._get_repo(repo)
737 repo = self._get_repo(repo)
738 if repo:
738 if repo:
739
739
740 try:
740 try:
741 repo.archived = True
741 repo.archived = True
742 self.sa.add(repo)
742 self.sa.add(repo)
743 self.sa.commit()
743 self.sa.commit()
744 except Exception:
744 except Exception:
745 log.error(traceback.format_exc())
745 log.error(traceback.format_exc())
746 raise
746 raise
747
747
    def delete(self, repo, forks=None, pull_requests=None, artifacts=None, fs_remove=True, cur_user=None, call_events=True):
        """
        Delete given repository, the forks parameter defines what to do with
        attached forks. Throws AttachedForksError if the deleted repo has
        attached forks.

        :param repo:
        :param forks: str 'delete' or 'detach'
        :param pull_requests: str 'delete' or None
        :param artifacts: str 'delete' or None
        :param fs_remove: remove(archive) repo from filesystem
        :param cur_user: user performing the deletion, defaults to the current rhodecode user
        :param call_events: trigger the pre/post delete events and the delete_repository hook
        """
        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', None)
        repo = self._get_repo(repo)
        if not repo:
            return False

        if forks == 'detach':
            for r in repo.forks:
                r.fork = None
                self.sa.add(r)
        elif forks == 'delete':
            for r in repo.forks:
                self.delete(r, forks='delete')
        elif [f for f in repo.forks]:
            raise AttachedForksError()

        # check for pull requests
        pr_sources = repo.pull_requests_source
        pr_targets = repo.pull_requests_target
        if pull_requests != 'delete' and (pr_sources or pr_targets):
            raise AttachedPullRequestsError()

        artifacts_objs = repo.artifacts
        if artifacts == 'delete':
            for a in artifacts_objs:
                self.sa.delete(a)
        elif [a for a in artifacts_objs]:
            raise AttachedArtifactsError()

        old_repo_dict = repo.get_dict()
        if call_events:
            events.trigger(events.RepoPreDeleteEvent(repo))

        try:
            self.sa.delete(repo)
            if fs_remove:
                self._delete_filesystem_repo(repo)
            else:
                log.debug('skipping removal from filesystem')
            old_repo_dict.update({
                'deleted_by': cur_user,
                'deleted_on': time.time(),
            })
            if call_events:
                hooks_base.delete_repository(**old_repo_dict)
                events.trigger(events.RepoDeleteEvent(repo))
        except Exception:
            log.error(traceback.format_exc())
            raise

        return True

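    # Illustrative sketch, not part of the original module: a deletion that detaches forks
    # and keeps events/hooks enabled (placeholder repo name):
    #
    #   deleted = RepoModel().delete('mygroup/myrepo', forks='detach', pull_requests=None)
    #   # -> True on success, False if the repo could not be resolved; raises
    #   #    AttachedForksError / AttachedPullRequestsError / AttachedArtifactsError
    #   #    when attached objects exist and no explicit action was requested for them.
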
805 def grant_user_permission(self, repo, user, perm):
812 def grant_user_permission(self, repo, user, perm):
806 """
813 """
807 Grant permission for user on given repository, or update existing one
814 Grant permission for user on given repository, or update existing one
808 if found
815 if found
809
816
810 :param repo: Instance of Repository, repository_id, or repository name
817 :param repo: Instance of Repository, repository_id, or repository name
811 :param user: Instance of User, user_id or username
818 :param user: Instance of User, user_id or username
812 :param perm: Instance of Permission, or permission_name
819 :param perm: Instance of Permission, or permission_name
813 """
820 """
814 user = self._get_user(user)
821 user = self._get_user(user)
815 repo = self._get_repo(repo)
822 repo = self._get_repo(repo)
816 permission = self._get_perm(perm)
823 permission = self._get_perm(perm)
817
824
818 # check if we have that permission already
825 # check if we have that permission already
819 obj = self.sa.query(UserRepoToPerm) \
826 obj = self.sa.query(UserRepoToPerm) \
820 .filter(UserRepoToPerm.user == user) \
827 .filter(UserRepoToPerm.user == user) \
821 .filter(UserRepoToPerm.repository == repo) \
828 .filter(UserRepoToPerm.repository == repo) \
822 .scalar()
829 .scalar()
823 if obj is None:
830 if obj is None:
824 # create new !
831 # create new !
825 obj = UserRepoToPerm()
832 obj = UserRepoToPerm()
826 obj.repository = repo
833 obj.repository = repo
827 obj.user = user
834 obj.user = user
828 obj.permission = permission
835 obj.permission = permission
829 self.sa.add(obj)
836 self.sa.add(obj)
830 log.debug('Granted perm %s to %s on %s', perm, user, repo)
837 log.debug('Granted perm %s to %s on %s', perm, user, repo)
831 action_logger_generic(
838 action_logger_generic(
832 'granted permission: {} to user: {} on repo: {}'.format(
839 'granted permission: {} to user: {} on repo: {}'.format(
833 perm, user, repo), namespace='security.repo')
840 perm, user, repo), namespace='security.repo')
834 return obj
841 return obj
835
842
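    # Illustrative sketch, not part of the original module: perm accepts either a Permission
    # instance or a permission name string, e.g. (placeholder values):
    #
    #   RepoModel().grant_user_permission(repo='mygroup/myrepo', user='john',
    #                                     perm='repository.write')
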
836 def revoke_user_permission(self, repo, user):
843 def revoke_user_permission(self, repo, user):
837 """
844 """
838 Revoke permission for user on given repository
845 Revoke permission for user on given repository
839
846
840 :param repo: Instance of Repository, repository_id, or repository name
847 :param repo: Instance of Repository, repository_id, or repository name
841 :param user: Instance of User, user_id or username
848 :param user: Instance of User, user_id or username
842 """
849 """
843
850
844 user = self._get_user(user)
851 user = self._get_user(user)
845 repo = self._get_repo(repo)
852 repo = self._get_repo(repo)
846
853
847 obj = self.sa.query(UserRepoToPerm) \
854 obj = self.sa.query(UserRepoToPerm) \
848 .filter(UserRepoToPerm.repository == repo) \
855 .filter(UserRepoToPerm.repository == repo) \
849 .filter(UserRepoToPerm.user == user) \
856 .filter(UserRepoToPerm.user == user) \
850 .scalar()
857 .scalar()
851 if obj:
858 if obj:
852 self.sa.delete(obj)
859 self.sa.delete(obj)
853 log.debug('Revoked perm on %s on %s', repo, user)
860 log.debug('Revoked perm on %s on %s', repo, user)
854 action_logger_generic(
861 action_logger_generic(
855 'revoked permission from user: {} on repo: {}'.format(
862 'revoked permission from user: {} on repo: {}'.format(
856 user, repo), namespace='security.repo')
863 user, repo), namespace='security.repo')
857
864
858 def grant_user_group_permission(self, repo, group_name, perm):
865 def grant_user_group_permission(self, repo, group_name, perm):
859 """
866 """
860 Grant permission for user group on given repository, or update
867 Grant permission for user group on given repository, or update
861 existing one if found
868 existing one if found
862
869
863 :param repo: Instance of Repository, repository_id, or repository name
870 :param repo: Instance of Repository, repository_id, or repository name
864 :param group_name: Instance of UserGroup, users_group_id,
871 :param group_name: Instance of UserGroup, users_group_id,
865 or user group name
872 or user group name
866 :param perm: Instance of Permission, or permission_name
873 :param perm: Instance of Permission, or permission_name
867 """
874 """
868 repo = self._get_repo(repo)
875 repo = self._get_repo(repo)
869 group_name = self._get_user_group(group_name)
876 group_name = self._get_user_group(group_name)
870 permission = self._get_perm(perm)
877 permission = self._get_perm(perm)
871
878
872 # check if we have that permission already
879 # check if we have that permission already
873 obj = self.sa.query(UserGroupRepoToPerm) \
880 obj = self.sa.query(UserGroupRepoToPerm) \
874 .filter(UserGroupRepoToPerm.users_group == group_name) \
881 .filter(UserGroupRepoToPerm.users_group == group_name) \
875 .filter(UserGroupRepoToPerm.repository == repo) \
882 .filter(UserGroupRepoToPerm.repository == repo) \
876 .scalar()
883 .scalar()
877
884
878 if obj is None:
885 if obj is None:
879 # create new
886 # create new
880 obj = UserGroupRepoToPerm()
887 obj = UserGroupRepoToPerm()
881
888
882 obj.repository = repo
889 obj.repository = repo
883 obj.users_group = group_name
890 obj.users_group = group_name
884 obj.permission = permission
891 obj.permission = permission
885 self.sa.add(obj)
892 self.sa.add(obj)
886 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
893 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
887 action_logger_generic(
894 action_logger_generic(
888 'granted permission: {} to usergroup: {} on repo: {}'.format(
895 'granted permission: {} to usergroup: {} on repo: {}'.format(
889 perm, group_name, repo), namespace='security.repo')
896 perm, group_name, repo), namespace='security.repo')
890
897
891 return obj
898 return obj
892
899
893 def revoke_user_group_permission(self, repo, group_name):
900 def revoke_user_group_permission(self, repo, group_name):
894 """
901 """
895 Revoke permission for user group on given repository
902 Revoke permission for user group on given repository
896
903
897 :param repo: Instance of Repository, repository_id, or repository name
904 :param repo: Instance of Repository, repository_id, or repository name
898 :param group_name: Instance of UserGroup, users_group_id,
905 :param group_name: Instance of UserGroup, users_group_id,
899 or user group name
906 or user group name
900 """
907 """
901 repo = self._get_repo(repo)
908 repo = self._get_repo(repo)
902 group_name = self._get_user_group(group_name)
909 group_name = self._get_user_group(group_name)
903
910
904 obj = self.sa.query(UserGroupRepoToPerm) \
911 obj = self.sa.query(UserGroupRepoToPerm) \
905 .filter(UserGroupRepoToPerm.repository == repo) \
912 .filter(UserGroupRepoToPerm.repository == repo) \
906 .filter(UserGroupRepoToPerm.users_group == group_name) \
913 .filter(UserGroupRepoToPerm.users_group == group_name) \
907 .scalar()
914 .scalar()
908 if obj:
915 if obj:
909 self.sa.delete(obj)
916 self.sa.delete(obj)
910 log.debug('Revoked perm to %s on %s', repo, group_name)
917 log.debug('Revoked perm to %s on %s', repo, group_name)
911 action_logger_generic(
918 action_logger_generic(
912 'revoked permission from usergroup: {} on repo: {}'.format(
919 'revoked permission from usergroup: {} on repo: {}'.format(
913 group_name, repo), namespace='security.repo')
920 group_name, repo), namespace='security.repo')
914
921
915 def delete_stats(self, repo_name):
922 def delete_stats(self, repo_name):
916 """
923 """
917 removes stats for given repo
924 removes stats for given repo
918
925
919 :param repo_name:
926 :param repo_name:
920 """
927 """
921 repo = self._get_repo(repo_name)
928 repo = self._get_repo(repo_name)
922 try:
929 try:
923 obj = self.sa.query(Statistics) \
930 obj = self.sa.query(Statistics) \
924 .filter(Statistics.repository == repo).scalar()
931 .filter(Statistics.repository == repo).scalar()
925 if obj:
932 if obj:
926 self.sa.delete(obj)
933 self.sa.delete(obj)
927 except Exception:
934 except Exception:
928 log.error(traceback.format_exc())
935 log.error(traceback.format_exc())
929 raise
936 raise
930
937
931 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
938 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
932 field_type='str', field_desc=''):
939 field_type='str', field_desc=''):
933
940
934 repo = self._get_repo(repo_name)
941 repo = self._get_repo(repo_name)
935
942
936 new_field = RepositoryField()
943 new_field = RepositoryField()
937 new_field.repository = repo
944 new_field.repository = repo
938 new_field.field_key = field_key
945 new_field.field_key = field_key
939 new_field.field_type = field_type # python type
946 new_field.field_type = field_type # python type
940 new_field.field_value = field_value
947 new_field.field_value = field_value
941 new_field.field_desc = field_desc
948 new_field.field_desc = field_desc
942 new_field.field_label = field_label
949 new_field.field_label = field_label
943 self.sa.add(new_field)
950 self.sa.add(new_field)
944 return new_field
951 return new_field
945
952
946 def delete_repo_field(self, repo_name, field_key):
953 def delete_repo_field(self, repo_name, field_key):
947 repo = self._get_repo(repo_name)
954 repo = self._get_repo(repo_name)
948 field = RepositoryField.get_by_key_name(field_key, repo)
955 field = RepositoryField.get_by_key_name(field_key, repo)
949 if field:
956 if field:
950 self.sa.delete(field)
957 self.sa.delete(field)
951
958
952 def set_landing_rev(self, repo, landing_rev_name):
959 def set_landing_rev(self, repo, landing_rev_name):
953 if landing_rev_name.startswith('branch:'):
960 if landing_rev_name.startswith('branch:'):
954 landing_rev_name = landing_rev_name.split('branch:')[-1]
961 landing_rev_name = landing_rev_name.split('branch:')[-1]
955 scm_instance = repo.scm_instance()
962 scm_instance = repo.scm_instance()
956 if scm_instance:
963 if scm_instance:
957 return scm_instance._remote.set_head_ref(landing_rev_name)
964 return scm_instance._remote.set_head_ref(landing_rev_name)
958
965
959 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
966 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
960 clone_uri=None, repo_store_location=None,
967 clone_uri=None, repo_store_location=None,
961 use_global_config=False, install_hooks=True):
968 use_global_config=False, install_hooks=True):
962 """
969 """
963 makes repository on filesystem. It's group aware means it'll create
970 makes repository on filesystem. It's group aware means it'll create
964 a repository within a group, and alter the paths accordingly of
971 a repository within a group, and alter the paths accordingly of
965 group location
972 group location
966
973
967 :param repo_name:
974 :param repo_name:
968 :param alias:
975 :param alias:
969 :param parent:
976 :param parent:
970 :param clone_uri:
977 :param clone_uri:
971 :param repo_store_location:
978 :param repo_store_location:
972 """
979 """
973 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
980 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
974 from rhodecode.model.scm import ScmModel
981 from rhodecode.model.scm import ScmModel
975
982
976 if Repository.NAME_SEP in repo_name:
983 if Repository.NAME_SEP in repo_name:
977 raise ValueError(
984 raise ValueError(
978 'repo_name must not contain groups got `%s`' % repo_name)
985 'repo_name must not contain groups got `%s`' % repo_name)
979
986
980 if isinstance(repo_group, RepoGroup):
987 if isinstance(repo_group, RepoGroup):
981 new_parent_path = os.sep.join(repo_group.full_path_splitted)
988 new_parent_path = os.sep.join(repo_group.full_path_splitted)
982 else:
989 else:
983 new_parent_path = repo_group or ''
990 new_parent_path = repo_group or ''
984
991
985 if repo_store_location:
992 if repo_store_location:
986 _paths = [repo_store_location]
993 _paths = [repo_store_location]
987 else:
994 else:
988 _paths = [self.repos_path, new_parent_path, repo_name]
995 _paths = [self.repos_path, new_parent_path, repo_name]
989 # we need to make it str for mercurial
996 # we need to make it str for mercurial
990 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
997 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
991
998
992 # check if this path is not a repository
999 # check if this path is not a repository
993 if is_valid_repo(repo_path, self.repos_path):
1000 if is_valid_repo(repo_path, self.repos_path):
994 raise Exception(f'This path {repo_path} is a valid repository')
1001 raise Exception(f'This path {repo_path} is a valid repository')
995
1002
996 # check if this path is a group
1003 # check if this path is a group
997 if is_valid_repo_group(repo_path, self.repos_path):
1004 if is_valid_repo_group(repo_path, self.repos_path):
998 raise Exception(f'This path {repo_path} is a valid group')
1005 raise Exception(f'This path {repo_path} is a valid group')
999
1006
1000 log.info('creating repo %s in %s from url: `%s`',
1007 log.info('creating repo %s in %s from url: `%s`',
1001 repo_name, safe_str(repo_path),
1008 repo_name, safe_str(repo_path),
1002 obfuscate_url_pw(clone_uri))
1009 obfuscate_url_pw(clone_uri))
1003
1010
1004 backend = get_backend(repo_type)
1011 backend = get_backend(repo_type)
1005
1012
1006 config_repo = None if use_global_config else repo_name
1013 config_repo = None if use_global_config else repo_name
1007 if config_repo and new_parent_path:
1014 if config_repo and new_parent_path:
1008 config_repo = Repository.NAME_SEP.join(
1015 config_repo = Repository.NAME_SEP.join(
1009 (new_parent_path, config_repo))
1016 (new_parent_path, config_repo))
1010 config = make_db_config(clear_session=False, repo=config_repo)
1017 config = make_db_config(clear_session=False, repo=config_repo)
1011 config.set('extensions', 'largefiles', '')
1018 config.set('extensions', 'largefiles', '')
1012
1019
1013 # patch and reset hooks section of UI config to not run any
1020 # patch and reset hooks section of UI config to not run any
1014 # hooks on creating remote repo
1021 # hooks on creating remote repo
1015 config.clear_section('hooks')
1022 config.clear_section('hooks')
1016
1023
1017 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
1024 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
1018 if repo_type == 'git':
1025 if repo_type == 'git':
1019 repo = backend(
1026 repo = backend(
1020 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
1027 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
1021 with_wire={"cache": False})
1028 with_wire={"cache": False})
1022 else:
1029 else:
1023 repo = backend(
1030 repo = backend(
1024 repo_path, config=config, create=True, src_url=clone_uri,
1031 repo_path, config=config, create=True, src_url=clone_uri,
1025 with_wire={"cache": False})
1032 with_wire={"cache": False})
1026
1033
1027 if install_hooks:
1034 if install_hooks:
1028 repo.install_hooks()
1035 repo.install_hooks()
1029
1036
1030 log.debug('Created repo %s with %s backend',
1037 log.debug('Created repo %s with %s backend',
1031 safe_str(repo_name), safe_str(repo_type))
1038 safe_str(repo_name), safe_str(repo_type))
1032 return repo
1039 return repo
1033
1040
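    # Illustrative sketch, not part of the original module: the backend returned by
    # get_backend(repo_type) is the vcs repository class, so the creation above is roughly
    # equivalent to (git case, placeholder path, no clone source):
    #
    #   backend = get_backend('git')
    #   repo = backend('/repos/mygroup/myrepo', config=config, create=True,
    #                  src_url=None, bare=True, with_wire={"cache": False})
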
1034 def _rename_filesystem_repo(self, old, new):
1041 def _rename_filesystem_repo(self, old, new):
1035 """
1042 """
1036 renames repository on filesystem
1043 renames repository on filesystem
1037
1044
1038 :param old: old name
1045 :param old: old name
1039 :param new: new name
1046 :param new: new name
1040 """
1047 """
1041 log.info('renaming repo from %s to %s', old, new)
1048 log.info('renaming repo from %s to %s', old, new)
1042
1049
1043 old_path = os.path.join(self.repos_path, old)
1050 old_path = os.path.join(self.repos_path, old)
1044 new_path = os.path.join(self.repos_path, new)
1051 new_path = os.path.join(self.repos_path, new)
1045 if os.path.isdir(new_path):
1052 if os.path.isdir(new_path):
1046 raise Exception(
1053 raise Exception(
1047 'Was trying to rename to already existing dir %s' % new_path
1054 'Was trying to rename to already existing dir %s' % new_path
1048 )
1055 )
1049 shutil.move(old_path, new_path)
1056 shutil.move(old_path, new_path)
1050
1057
1051 def _delete_filesystem_repo(self, repo):
1058 def _delete_filesystem_repo(self, repo):
1052 """
1059 """
removes the repo from the filesystem; the removal is actually done by adding
an rm__ prefix to the dir and renaming the internal .hg/.git dirs, so this
repository is no longer valid for rhodecode. It can be un-deleted later on
by reverting the renames on this repository
1057
1064
1058 :param repo: repo object
1065 :param repo: repo object
1059 """
1066 """
1060 rm_path = os.path.join(self.repos_path, repo.repo_name)
1067 rm_path = os.path.join(self.repos_path, repo.repo_name)
1061 repo_group = repo.group
1068 repo_group = repo.group
1062 log.info("delete_filesystem_repo: removing repository %s", rm_path)
1069 log.info("delete_filesystem_repo: removing repository %s", rm_path)
# disable hg/git internals so the directory doesn't get detected as a repo
1064 alias = repo.repo_type
1071 alias = repo.repo_type
1065
1072
1066 config = make_db_config(clear_session=False)
1073 config = make_db_config(clear_session=False)
1067 config.set('extensions', 'largefiles', '')
1074 config.set('extensions', 'largefiles', '')
1068 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1075 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1069
1076
1070 # skip this for bare git repos
1077 # skip this for bare git repos
1071 if not bare:
1078 if not bare:
1072 # disable VCS repo
1079 # disable VCS repo
1073 vcs_path = os.path.join(rm_path, '.%s' % alias)
1080 vcs_path = os.path.join(rm_path, '.%s' % alias)
1074 if os.path.exists(vcs_path):
1081 if os.path.exists(vcs_path):
1075 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1082 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1076
1083
1077 _now = datetime.datetime.now()
1084 _now = datetime.datetime.now()
1078 _ms = str(_now.microsecond).rjust(6, '0')
1085 _ms = str(_now.microsecond).rjust(6, '0')
1079 _d = 'rm__{}__{}'.format(_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1086 _d = 'rm__{}__{}'.format(_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1080 repo.just_name)
1087 repo.just_name)
1081 if repo_group:
1088 if repo_group:
1082 # if repository is in group, prefix the removal path with the group
1089 # if repository is in group, prefix the removal path with the group
1083 args = repo_group.full_path_splitted + [_d]
1090 args = repo_group.full_path_splitted + [_d]
1084 _d = os.path.join(*args)
1091 _d = os.path.join(*args)
1085
1092
1086 if os.path.isdir(rm_path):
1093 if os.path.isdir(rm_path):
1087 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1094 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1088
1095
1089 # finally cleanup diff-cache if it exists
1096 # finally cleanup diff-cache if it exists
1090 cached_diffs_dir = repo.cached_diffs_dir
1097 cached_diffs_dir = repo.cached_diffs_dir
1091 if os.path.isdir(cached_diffs_dir):
1098 if os.path.isdir(cached_diffs_dir):
1092 shutil.rmtree(cached_diffs_dir)
1099 shutil.rmtree(cached_diffs_dir)
1093
1100
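    # Illustrative sketch, not part of the original module: the filesystem "removal" above
    # is really a rename, so a deleted repo ends up under the repo store roughly as:
    #
    #   <repos_path>/<optional group path>/rm__20240521_134501_000042__myrepo
    #
    # (timestamped with microseconds, per the 'rm__{}__{}' format string), with its inner
    # .hg/.git directory renamed to rm__.hg / rm__.git for non-bare repos. Undeleting is a
    # matter of reverting those renames by hand.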
1094
1101
1095 class ReadmeFinder:
1102 class ReadmeFinder:
1096 """
1103 """
1097 Utility which knows how to find a readme for a specific commit.
1104 Utility which knows how to find a readme for a specific commit.
1098
1105
1099 The main idea is that this is a configurable algorithm. When creating an
1106 The main idea is that this is a configurable algorithm. When creating an
1100 instance you can define parameters, currently only the `default_renderer`.
1107 instance you can define parameters, currently only the `default_renderer`.
Based on this configuration the method :meth:`search` behaves slightly
differently.
1103 """
1110 """
1104
1111
1105 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
1112 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
1106 path_re = re.compile(r'^docs?', re.IGNORECASE)
1113 path_re = re.compile(r'^docs?', re.IGNORECASE)
1107
1114
1108 default_priorities = {
1115 default_priorities = {
1109 None: 0,
1116 None: 0,
1110 '.rst': 1,
1117 '.rst': 1,
1111 '.md': 1,
1118 '.md': 1,
1112 '.rest': 2,
1119 '.rest': 2,
1113 '.mkdn': 2,
1120 '.mkdn': 2,
1114 '.text': 2,
1121 '.text': 2,
1115 '.txt': 3,
1122 '.txt': 3,
1116 '.mdown': 3,
1123 '.mdown': 3,
1117 '.markdown': 4,
1124 '.markdown': 4,
1118 }
1125 }
1119
1126
1120 path_priority = {
1127 path_priority = {
1121 'doc': 0,
1128 'doc': 0,
1122 'docs': 1,
1129 'docs': 1,
1123 }
1130 }
1124
1131
1125 FALLBACK_PRIORITY = 99
1132 FALLBACK_PRIORITY = 99
1126
1133
1127 RENDERER_TO_EXTENSION = {
1134 RENDERER_TO_EXTENSION = {
1128 'rst': ['.rst', '.rest'],
1135 'rst': ['.rst', '.rest'],
'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
1130 }
1137 }
1131
1138
1132 def __init__(self, default_renderer=None):
1139 def __init__(self, default_renderer=None):
1133 self._default_renderer = default_renderer
1140 self._default_renderer = default_renderer
1134 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
1141 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
1135 default_renderer, [])
1142 default_renderer, [])
1136
1143
1137 def search(self, commit, path='/'):
1144 def search(self, commit, path='/'):
1138 """
1145 """
1139 Find a readme in the given `commit`.
1146 Find a readme in the given `commit`.
1140 """
1147 """
1141 # firstly, check the PATH type if it is actually a DIR
1148 # firstly, check the PATH type if it is actually a DIR
1142 if commit.get_node(path).kind != NodeKind.DIR:
1149 if commit.get_node(path).kind != NodeKind.DIR:
1143 return None
1150 return None
1144
1151
1145 nodes = commit.get_nodes(path)
1152 nodes = commit.get_nodes(path)
1146 matches = self._match_readmes(nodes)
1153 matches = self._match_readmes(nodes)
1147 matches = self._sort_according_to_priority(matches)
1154 matches = self._sort_according_to_priority(matches)
1148 if matches:
1155 if matches:
1149 return matches[0].node
1156 return matches[0].node
1150
1157
1151 paths = self._match_paths(nodes)
1158 paths = self._match_paths(nodes)
1152 paths = self._sort_paths_according_to_priority(paths)
1159 paths = self._sort_paths_according_to_priority(paths)
1153 for path in paths:
1160 for path in paths:
1154 match = self.search(commit, path=path)
1161 match = self.search(commit, path=path)
1155 if match:
1162 if match:
1156 return match
1163 return match
1157
1164
1158 return None
1165 return None
1159
1166
1160 def _match_readmes(self, nodes):
1167 def _match_readmes(self, nodes):
1161 for node in nodes:
1168 for node in nodes:
1162 if not node.is_file():
1169 if not node.is_file():
1163 continue
1170 continue
1164 path = node.path.rsplit('/', 1)[-1]
1171 path = node.path.rsplit('/', 1)[-1]
1165 match = self.readme_re.match(path)
1172 match = self.readme_re.match(path)
1166 if match:
1173 if match:
1167 extension = match.group(1)
1174 extension = match.group(1)
1168 yield ReadmeMatch(node, match, self._priority(extension))
1175 yield ReadmeMatch(node, match, self._priority(extension))
1169
1176
1170 def _match_paths(self, nodes):
1177 def _match_paths(self, nodes):
1171 for node in nodes:
1178 for node in nodes:
1172 if not node.is_dir():
1179 if not node.is_dir():
1173 continue
1180 continue
1174 match = self.path_re.match(node.path)
1181 match = self.path_re.match(node.path)
1175 if match:
1182 if match:
1176 yield node.path
1183 yield node.path
1177
1184
1178 def _priority(self, extension):
1185 def _priority(self, extension):
1179 renderer_priority = (
1186 renderer_priority = (
1180 0 if extension in self._renderer_extensions else 1)
1187 0 if extension in self._renderer_extensions else 1)
1181 extension_priority = self.default_priorities.get(
1188 extension_priority = self.default_priorities.get(
1182 extension, self.FALLBACK_PRIORITY)
1189 extension, self.FALLBACK_PRIORITY)
1183 return (renderer_priority, extension_priority)
1190 return (renderer_priority, extension_priority)
1184
1191
1185 def _sort_according_to_priority(self, matches):
1192 def _sort_according_to_priority(self, matches):
1186
1193
1187 def priority_and_path(match):
1194 def priority_and_path(match):
1188 return (match.priority, match.path)
1195 return (match.priority, match.path)
1189
1196
1190 return sorted(matches, key=priority_and_path)
1197 return sorted(matches, key=priority_and_path)
1191
1198
1192 def _sort_paths_according_to_priority(self, paths):
1199 def _sort_paths_according_to_priority(self, paths):
1193
1200
1194 def priority_and_path(path):
1201 def priority_and_path(path):
1195 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1202 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1196
1203
1197 return sorted(paths, key=priority_and_path)
1204 return sorted(paths, key=priority_and_path)
1198
1205
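    # Illustrative sketch, not part of the original module: given a commit object from a
    # repo's scm_instance(), the finder checks the root and then doc/docs folders:
    #
    #   finder = ReadmeFinder(default_renderer='markdown')
    #   readme_node = finder.search(commit)       # a file node such as README.md, or None
    #   if readme_node is not None:
    #       print(readme_node.path)
    #
    # Extensions matching the configured renderer are preferred via the
    # (renderer_priority, extension_priority) tuple computed in _priority().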
1199
1206
1200 class ReadmeMatch:
1207 class ReadmeMatch:
1201
1208
1202 def __init__(self, node, match, priority):
1209 def __init__(self, node, match, priority):
1203 self.node = node
1210 self.node = node
1204 self._match = match
1211 self._match = match
1205 self.priority = priority
1212 self.priority = priority
1206
1213
1207 @property
1214 @property
1208 def path(self):
1215 def path(self):
1209 return self.node.path
1216 return self.node.path
1210
1217
1211 def __repr__(self):
1218 def __repr__(self):
return f'<ReadmeMatch {self.path} priority={self.priority}>'
@@ -1,889 +1,892 b''
1 # Copyright (C) 2011-2024 RhodeCode GmbH
1 # Copyright (C) 2011-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 """
20 """
21 repo group model for RhodeCode
21 repo group model for RhodeCode
22 """
22 """
23
23
24 import os
24 import os
25 import datetime
25 import datetime
26 import itertools
26 import itertools
27 import logging
27 import logging
28 import shutil
28 import shutil
29 import time
29 import time
30 import traceback
30 import traceback
31 import string
31 import string
32
32
33
33
34 from rhodecode import events
34 from rhodecode import events
35 from rhodecode.model import BaseModel
35 from rhodecode.model import BaseModel
36 from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator,
36 from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator,
37 Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
37 Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
38 UserGroup, Repository)
38 UserGroup, Repository)
39 from rhodecode.model.permission import PermissionModel
39 from rhodecode.model.permission import PermissionModel
40 from rhodecode.model.settings import SettingsModel
40 from rhodecode.model.settings import SettingsModel
41 from rhodecode.lib.caching_query import FromCache
41 from rhodecode.lib.caching_query import FromCache
42 from rhodecode.lib.utils2 import action_logger_generic
42 from rhodecode.lib.utils2 import action_logger_generic
43
43
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
47 class RepoGroupModel(BaseModel):
47 class RepoGroupModel(BaseModel):
48
48
49 cls = RepoGroup
49 cls = RepoGroup
50 PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
50 PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
51 PERSONAL_GROUP_PATTERN = '${username}' # default
51 PERSONAL_GROUP_PATTERN = '${username}' # default
52
52
53 def _get_user_group(self, users_group):
53 def _get_user_group(self, users_group):
54 return self._get_instance(UserGroup, users_group,
54 return self._get_instance(UserGroup, users_group,
55 callback=UserGroup.get_by_group_name)
55 callback=UserGroup.get_by_group_name)
56
56
57 def _get_repo_group(self, repo_group):
57 def _get_repo_group(self, repo_group):
58 return self._get_instance(RepoGroup, repo_group,
58 return self._get_instance(RepoGroup, repo_group,
59 callback=RepoGroup.get_by_group_name)
59 callback=RepoGroup.get_by_group_name)
60
60
61 def get_repo_group(self, repo_group):
61 def get_repo_group(self, repo_group):
62 return self._get_repo_group(repo_group)
62 return self._get_repo_group(repo_group)
63
63
64 def get_by_group_name(self, repo_group_name, cache=None):
64 def get_by_group_name(self, repo_group_name, cache=None):
65 repo = self.sa.query(RepoGroup) \
65 repo = self.sa.query(RepoGroup) \
66 .filter(RepoGroup.group_name == repo_group_name)
66 .filter(RepoGroup.group_name == repo_group_name)
67
67
68 if cache:
68 if cache:
69 name_key = _hash_key(repo_group_name)
69 name_key = _hash_key(repo_group_name)
70 repo = repo.options(
70 repo = repo.options(
71 FromCache("sql_cache_short", f"get_repo_group_{name_key}"))
71 FromCache("sql_cache_short", f"get_repo_group_{name_key}"))
72 return repo.scalar()
72 return repo.scalar()
73
73
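As a rough usage sketch (not part of this changeset; the group name below is invented), the optional cache flag routes the lookup through the short-lived `sql_cache_short` region:

    # illustrative only: look up a group, optionally via the SQL cache region
    group = RepoGroupModel().get_by_group_name('web/projects', cache=True)
    if group is None:
        log.debug('group not found')
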
74 def get_default_create_personal_repo_group(self):
74 def get_default_create_personal_repo_group(self):
75 value = SettingsModel().get_setting_by_name(
75 value = SettingsModel().get_setting_by_name(
76 'create_personal_repo_group')
76 'create_personal_repo_group')
77 return value.app_settings_value if value else False
77 return value.app_settings_value if value else False
78
78
79 def get_personal_group_name_pattern(self):
79 def get_personal_group_name_pattern(self):
80 value = SettingsModel().get_setting_by_name(
80 value = SettingsModel().get_setting_by_name(
81 'personal_repo_group_pattern')
81 'personal_repo_group_pattern')
82 val = value.app_settings_value if value else None
82 val = value.app_settings_value if value else None
83 group_template = val or self.PERSONAL_GROUP_PATTERN
83 group_template = val or self.PERSONAL_GROUP_PATTERN
84
84
85 group_template = group_template.lstrip('/')
85 group_template = group_template.lstrip('/')
86 return group_template
86 return group_template
87
87
88 def get_personal_group_name(self, user):
88 def get_personal_group_name(self, user):
89 template = self.get_personal_group_name_pattern()
89 template = self.get_personal_group_name_pattern()
90 return string.Template(template).safe_substitute(
90 return string.Template(template).safe_substitute(
91 username=user.username,
91 username=user.username,
92 user_id=user.user_id,
92 user_id=user.user_id,
93 first_name=user.first_name,
93 first_name=user.first_name,
94 last_name=user.last_name,
94 last_name=user.last_name,
95 )
95 )
96
96
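For illustration only (the pattern and user values are invented), this is the same `string.Template` substitution the personal-group pattern goes through; `safe_substitute` ignores unused keywords and leaves unknown placeholders untouched:

    import string
    template = 'people/${username}'                      # hypothetical pattern
    name = string.Template(template).safe_substitute(
        username='jdoe', user_id=42, first_name='John', last_name='Doe')
    # name == 'people/jdoe'
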
97 def create_personal_repo_group(self, user, commit_early=True):
97 def create_personal_repo_group(self, user, commit_early=True):
98 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
98 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
99 personal_repo_group_name = self.get_personal_group_name(user)
99 personal_repo_group_name = self.get_personal_group_name(user)
100
100
101 # create a new one
101 # create a new one
102 RepoGroupModel().create(
102 RepoGroupModel().create(
103 group_name=personal_repo_group_name,
103 group_name=personal_repo_group_name,
104 group_description=desc,
104 group_description=desc,
105 owner=user.username,
105 owner=user.username,
106 personal=True,
106 personal=True,
107 commit_early=commit_early)
107 commit_early=commit_early)
108
108
109 def _create_default_perms(self, new_group):
109 def _create_default_perms(self, new_group):
110 # create default permission
110 # create default permission
111 default_perm = 'group.read'
111 default_perm = 'group.read'
112 def_user = User.get_default_user()
112 def_user = User.get_default_user()
113 for p in def_user.user_perms:
113 for p in def_user.user_perms:
114 if p.permission.permission_name.startswith('group.'):
114 if p.permission.permission_name.startswith('group.'):
115 default_perm = p.permission.permission_name
115 default_perm = p.permission.permission_name
116 break
116 break
117
117
118 repo_group_to_perm = UserRepoGroupToPerm()
118 repo_group_to_perm = UserRepoGroupToPerm()
119 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
119 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
120
120
121 repo_group_to_perm.group = new_group
121 repo_group_to_perm.group = new_group
122 repo_group_to_perm.user = def_user
122 repo_group_to_perm.user = def_user
123 return repo_group_to_perm
123 return repo_group_to_perm
124
124
125 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
125 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
126 get_object=False):
126 get_object=False):
127 """
127 """
128 Gets the group name and a parent group name from the given group name.
128 Gets the group name and a parent group name from the given group name.
129 If repo_in_path is set to a true value, we assume the full path also
129 If repo_in_path is set to a true value, we assume the full path also
130 includes the repo name; in that case we drop the last element.
130 includes the repo name; in that case we drop the last element.
131
131
132 :param group_name_full:
132 :param group_name_full:
133 """
133 """
134 split_paths = 1
134 split_paths = 1
135 if repo_in_path:
135 if repo_in_path:
136 split_paths = 2
136 split_paths = 2
137 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
137 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
138
138
139 if repo_in_path and len(_parts) > 1:
139 if repo_in_path and len(_parts) > 1:
140 # such case last element is the repo_name
140 # such case last element is the repo_name
141 _parts.pop(-1)
141 _parts.pop(-1)
142 group_name_cleaned = _parts[-1] # just the group name
142 group_name_cleaned = _parts[-1] # just the group name
143 parent_repo_group_name = None
143 parent_repo_group_name = None
144
144
145 if len(_parts) > 1:
145 if len(_parts) > 1:
146 parent_repo_group_name = _parts[0]
146 parent_repo_group_name = _parts[0]
147
147
148 parent_group = None
148 parent_group = None
149 if parent_repo_group_name:
149 if parent_repo_group_name:
150 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
150 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
151
151
152 if get_object:
152 if get_object:
153 return group_name_cleaned, parent_repo_group_name, parent_group
153 return group_name_cleaned, parent_repo_group_name, parent_group
154
154
155 return group_name_cleaned, parent_repo_group_name
155 return group_name_cleaned, parent_repo_group_name
156
156
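A worked example of the split above, assuming `RepoGroup.url_sep()` is `'/'` and `repo_in_path=True` (all names invented):

    parts = 'parent/child/repo'.rsplit('/', 2)    # split_paths == 2
    parts.pop(-1)                                  # drop the trailing repo name
    group_name_cleaned = parts[-1]                 # 'child'
    parent_repo_group_name = parts[0]              # 'parent'
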
157 def check_exist_filesystem(self, group_name, exc_on_failure=True):
157 def check_exist_filesystem(self, group_name, exc_on_failure=True):
158 create_path = os.path.join(self.repos_path, group_name)
158 create_path = os.path.join(self.repos_path, group_name)
159 log.debug('creating new group in %s', create_path)
159 log.debug('checking FS presence for repo group in %s', create_path)
160
160
161 if os.path.isdir(create_path):
161 if os.path.isdir(create_path):
162 if exc_on_failure:
162 if exc_on_failure:
163 abs_create_path = os.path.abspath(create_path)
163 abs_create_path = os.path.abspath(create_path)
164 raise Exception(f'Directory `{abs_create_path}` already exists !')
164 raise Exception(f'Directory `{abs_create_path}` already exists !')
165 return False
165 return False
166 return True
166 return True
167
167
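A small sketch of how this check behaves (the path is illustrative): it returns True when the target directory does not exist yet, and with `exc_on_failure=False` it returns False instead of raising when it does:

    can_create = RepoGroupModel().check_exist_filesystem(
        'projects/api', exc_on_failure=False)
    if not can_create:
        log.warning('target directory already exists, skipping')
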
168 def _create_group(self, group_name):
168 def _create_group(self, group_name):
169 """
169 """
170 makes repository group on filesystem
170 makes repository group on filesystem
171
171
172 :param group_name: full group name, including any parent
172 :param group_name: full group name, including any parent
173 path, to create on the filesystem
173 path, to create on the filesystem
174 """
174 """
175
175
176 self.check_exist_filesystem(group_name)
176 self.check_exist_filesystem(group_name)
177 create_path = os.path.join(self.repos_path, group_name)
177 create_path = os.path.join(self.repos_path, group_name)
178 log.debug('creating new group in %s', create_path)
178 log.debug('creating new group in %s', create_path)
179 os.makedirs(create_path, mode=0o755)
179 os.makedirs(create_path, mode=0o755)
180 log.debug('created group in %s', create_path)
180 log.debug('created group in %s', create_path)
181
181
182 def _rename_group(self, old, new):
182 def _rename_group(self, old, new):
183 """
183 """
184 Renames a group on filesystem
184 Renames a group on filesystem
185
185
186 :param group_name:
186 :param group_name:
187 """
187 """
188
188
189 if old == new:
189 if old == new:
190 log.debug('skipping group rename')
190 log.debug('skipping group rename')
191 return
191 return
192
192
193 log.debug('renaming repository group from %s to %s', old, new)
193 log.debug('renaming repository group from %s to %s', old, new)
194
194
195 old_path = os.path.join(self.repos_path, old)
195 old_path = os.path.join(self.repos_path, old)
196 new_path = os.path.join(self.repos_path, new)
196 new_path = os.path.join(self.repos_path, new)
197
197
198 log.debug('renaming repos paths from %s to %s', old_path, new_path)
198 log.debug('renaming repos paths from %s to %s', old_path, new_path)
199
199
200 if os.path.isdir(new_path):
200 if os.path.isdir(new_path):
201 raise Exception('Was trying to rename to an already '
201 raise Exception('Was trying to rename to an already '
202 'existing dir %s' % new_path)
202 'existing dir %s' % new_path)
203 shutil.move(old_path, new_path)
203 shutil.move(old_path, new_path)
204
204
205 def _delete_filesystem_group(self, group, force_delete=False):
205 def _delete_filesystem_group(self, group, force_delete=False):
206 """
206 """
207 Deletes a group from a filesystem
207 Deletes a group from a filesystem
208
208
209 :param group: instance of group from database
209 :param group: instance of group from database
210 :param force_delete: use shutil rmtree to remove all objects
210 :param force_delete: use shutil rmtree to remove all objects
211 """
211 """
212 paths = group.full_path.split(RepoGroup.url_sep())
212 paths = group.full_path.split(RepoGroup.url_sep())
213 paths = os.sep.join(paths)
213 paths = os.sep.join(paths)
214
214
215 rm_path = os.path.join(self.repos_path, paths)
215 rm_path = os.path.join(self.repos_path, paths)
216 log.info("Removing group %s", rm_path)
216 log.info("Removing group %s", rm_path)
217 # delete only if that path really exists
217 # delete only if that path really exists
218 if os.path.isdir(rm_path):
218 if os.path.isdir(rm_path):
219 if force_delete:
219 if force_delete:
220 shutil.rmtree(rm_path)
220 shutil.rmtree(rm_path)
221 else:
221 else:
222 # archive that group
222 # archive that group
223 _now = datetime.datetime.now()
223 _now = datetime.datetime.now()
224 _ms = str(_now.microsecond).rjust(6, '0')
224 _ms = str(_now.microsecond).rjust(6, '0')
225 _d = 'rm__{}_GROUP_{}'.format(
225 _d = 'rm__{}_GROUP_{}'.format(
226 _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
226 _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
227 shutil.move(rm_path, os.path.join(self.repos_path, _d))
227 shutil.move(rm_path, os.path.join(self.repos_path, _d))
228
228
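For reference, a sketch of the archive name a non-forced delete produces (timestamp and group name are examples):

    import datetime
    _now = datetime.datetime(2024, 1, 15, 10, 30, 45, 123)
    _ms = str(_now.microsecond).rjust(6, '0')
    archived = 'rm__{}_GROUP_{}'.format(
        _now.strftime('%Y%m%d_%H%M%S_' + _ms), 'docs')
    # archived == 'rm__20240115_103045_000123_GROUP_docs'
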
229 def create(self, group_name, group_description, owner, just_db=False,
229 def create(self, group_name, group_description, owner, just_db=False,
230 copy_permissions=False, personal=None, commit_early=True):
230 copy_permissions=False, personal=None, commit_early=True):
231
231
232 (group_name_cleaned,
232 (group_name_cleaned,
233 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)
233 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)
234
234
235 parent_group = None
235 parent_group = None
236 if parent_group_name:
236 if parent_group_name:
237 parent_group = self._get_repo_group(parent_group_name)
237 parent_group = self._get_repo_group(parent_group_name)
238 if not parent_group:
238 if not parent_group:
239 # we tried to create a nested group, but the parent does not
239 # we tried to create a nested group, but the parent does not
240 # exist
240 # exist
241 raise ValueError(
241 raise ValueError(
242 'Parent group `%s` given in `%s` group name '
242 'Parent group `%s` given in `%s` group name '
243 'is not yet existing.' % (parent_group_name, group_name))
243 'is not yet existing.' % (parent_group_name, group_name))
244
244
245 # because we are doing a cleanup, we need to check if such a directory
245 # because we are doing a cleanup, we need to check if such a directory
246 # already exists. If we don't do that, we can accidentally delete an
246 # already exists. If we don't do that, we can accidentally delete an
247 # existing directory via cleanup, which can cause data issues, since
247 # existing directory via cleanup, which can cause data issues, since
248 # delete renames the folder to a special syntax that later cleanup
248 # delete renames the folder to a special syntax that later cleanup
249 # functions can delete
249 # functions can delete
250 cleanup_group = self.check_exist_filesystem(group_name,
250 cleanup_group = self.check_exist_filesystem(group_name,
251 exc_on_failure=False)
251 exc_on_failure=False)
252 user = self._get_user(owner)
252 user = self._get_user(owner)
253 if not user:
253 if not user:
254 raise ValueError(f'Owner {owner} not found as rhodecode user')
254 raise ValueError(f'Owner {owner} not found as rhodecode user')
255
255
256 try:
256 try:
257 new_repo_group = RepoGroup()
257 new_repo_group = RepoGroup()
258 new_repo_group.user = user
258 new_repo_group.user = user
259 new_repo_group.group_description = group_description or group_name
259 new_repo_group.group_description = group_description or group_name
260 new_repo_group.parent_group = parent_group
260 new_repo_group.parent_group = parent_group
261 new_repo_group.group_name = group_name
261 new_repo_group.group_name = group_name
262 new_repo_group.personal = personal
262 new_repo_group.personal = personal
263
263
264 self.sa.add(new_repo_group)
264 self.sa.add(new_repo_group)
265
265
266 # create an ADMIN permission for the owner unless we're a super admin;
266 # create an ADMIN permission for the owner unless we're a super admin;
267 # later the owner should go into the owner field of groups
267 # later the owner should go into the owner field of groups
268 if not user.is_admin:
268 if not user.is_admin:
269 self.grant_user_permission(repo_group=new_repo_group,
269 self.grant_user_permission(repo_group=new_repo_group,
270 user=owner, perm='group.admin')
270 user=owner, perm='group.admin')
271
271
272 if parent_group and copy_permissions:
272 if parent_group and copy_permissions:
273 # copy permissions from parent
273 # copy permissions from parent
274 user_perms = UserRepoGroupToPerm.query() \
274 user_perms = UserRepoGroupToPerm.query() \
275 .filter(UserRepoGroupToPerm.group == parent_group).all()
275 .filter(UserRepoGroupToPerm.group == parent_group).all()
276
276
277 group_perms = UserGroupRepoGroupToPerm.query() \
277 group_perms = UserGroupRepoGroupToPerm.query() \
278 .filter(UserGroupRepoGroupToPerm.group == parent_group).all()
278 .filter(UserGroupRepoGroupToPerm.group == parent_group).all()
279
279
280 for perm in user_perms:
280 for perm in user_perms:
281 # don't copy over the permission for the user who is creating
281 # don't copy over the permission for the user who is creating
282 # this group; if they are not a super admin they get the admin
282 # this group; if they are not a super admin they get the admin
283 # permission set above
283 # permission set above
284 if perm.user != user or user.is_admin:
284 if perm.user != user or user.is_admin:
285 UserRepoGroupToPerm.create(
285 UserRepoGroupToPerm.create(
286 perm.user, new_repo_group, perm.permission)
286 perm.user, new_repo_group, perm.permission)
287
287
288 for perm in group_perms:
288 for perm in group_perms:
289 UserGroupRepoGroupToPerm.create(
289 UserGroupRepoGroupToPerm.create(
290 perm.users_group, new_repo_group, perm.permission)
290 perm.users_group, new_repo_group, perm.permission)
291 else:
291 else:
292 perm_obj = self._create_default_perms(new_repo_group)
292 perm_obj = self._create_default_perms(new_repo_group)
293 self.sa.add(perm_obj)
293 self.sa.add(perm_obj)
294
294
295 # now commit the changes early, so we are sure everything is in
295 # now commit the changes early, so we are sure everything is in
296 # the database.
296 # the database.
297 if commit_early:
297 if commit_early:
298 self.sa.commit()
298 self.sa.commit()
299 if not just_db:
299 if not just_db:
300 self._create_group(new_repo_group.group_name)
300 self._create_group(new_repo_group.group_name)
301
301
302 # trigger the post hook
302 # trigger the post hook
303 from rhodecode.lib import hooks_base
303 from rhodecode.lib import hooks_base
304 repo_group = RepoGroup.get_by_group_name(group_name)
304 repo_group = RepoGroup.get_by_group_name(group_name)
305
305
306 # update repo group commit caches initially
306 # update repo group commit caches initially
307 repo_group.update_commit_cache()
307 repo_group.update_commit_cache()
308
308
309 hooks_base.create_repository_group(
309 hooks_base.create_repository_group(
310 created_by=user.username, **repo_group.get_dict())
310 created_by=user.username, **repo_group.get_dict())
311
311
312 # Trigger create event.
312 # Trigger create event.
313 events.trigger(events.RepoGroupCreateEvent(repo_group))
313 events.trigger(events.RepoGroupCreateEvent(repo_group))
314
314
315 return new_repo_group
315 return new_repo_group
316 except Exception:
316 except Exception:
317 self.sa.rollback()
317 self.sa.rollback()
318 log.exception('Exception occurred when creating repository group, '
318 log.exception('Exception occurred when creating repository group, '
319 'doing cleanup...')
319 'doing cleanup...')
320 # rollback things manually !
320 # rollback things manually !
321 repo_group = RepoGroup.get_by_group_name(group_name)
321 repo_group = RepoGroup.get_by_group_name(group_name)
322 if repo_group:
322 if repo_group:
323 RepoGroup.delete(repo_group.group_id)
323 RepoGroup.delete(repo_group.group_id)
324 self.sa.commit()
324 self.sa.commit()
325 if cleanup_group:
325 if cleanup_group:
326 RepoGroupModel()._delete_filesystem_group(repo_group)
326 RepoGroupModel()._delete_filesystem_group(repo_group)
327 raise
327 raise
328
328
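A minimal usage sketch (group name, description and owner are invented; assumes an initialized database session). With the default `commit_early=True` the record is committed before the directory is created on disk:

    new_group = RepoGroupModel().create(
        group_name='projects/backend',
        group_description='Backend projects',
        owner='admin',
        copy_permissions=False)
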
329 def update_permissions(
329 def update_permissions(
330 self, repo_group, perm_additions=None, perm_updates=None,
330 self, repo_group, perm_additions=None, perm_updates=None,
331 perm_deletions=None, recursive=None, check_perms=True,
331 perm_deletions=None, recursive=None, check_perms=True,
332 cur_user=None):
332 cur_user=None):
333 from rhodecode.model.repo import RepoModel
333 from rhodecode.model.repo import RepoModel
334 from rhodecode.lib.auth import HasUserGroupPermissionAny
334 from rhodecode.lib.auth import HasUserGroupPermissionAny
335
335
336 if not perm_additions:
336 if not perm_additions:
337 perm_additions = []
337 perm_additions = []
338 if not perm_updates:
338 if not perm_updates:
339 perm_updates = []
339 perm_updates = []
340 if not perm_deletions:
340 if not perm_deletions:
341 perm_deletions = []
341 perm_deletions = []
342
342
343 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
343 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
344
344
345 changes = {
345 changes = {
346 'added': [],
346 'added': [],
347 'updated': [],
347 'updated': [],
348 'deleted': [],
348 'deleted': [],
349 'default_user_changed': None
349 'default_user_changed': None
350 }
350 }
351
351
352 def _set_perm_user(_obj: RepoGroup | Repository, _user_obj: User, _perm):
352 def _set_perm_user(_obj: RepoGroup | Repository, _user_obj: User, _perm):
353
353
354 if isinstance(_obj, RepoGroup):
354 if isinstance(_obj, RepoGroup):
355 self.grant_user_permission(repo_group=_obj, user=_user_obj, perm=_perm)
355 self.grant_user_permission(repo_group=_obj, user=_user_obj, perm=_perm)
356 elif isinstance(_obj, Repository):
356 elif isinstance(_obj, Repository):
357 # private repos do not allow changing the default
357 # private repos do not allow changing the default
358 # permissions using recursive mode
358 # permissions using recursive mode
359 if _obj.private and _user_obj.username == User.DEFAULT_USER:
359 if _obj.private and _user_obj.username == User.DEFAULT_USER:
360 log.debug('Skipping private repo %s for user %s', _obj, _user_obj)
360 log.debug('Skipping private repo %s for user %s', _obj, _user_obj)
361 return
361 return
362
362
363 # we set a group permission, so we have to switch to the repo permission definition
363 # we set a group permission, so we have to switch to the repo permission definition
364 new_perm = _perm.replace('group.', 'repository.')
364 new_perm = _perm.replace('group.', 'repository.')
365 RepoModel().grant_user_permission(repo=_obj, user=_user_obj, perm=new_perm)
365 RepoModel().grant_user_permission(repo=_obj, user=_user_obj, perm=new_perm)
366
366
367 def _set_perm_group(_obj: RepoGroup | Repository, users_group: UserGroup, _perm):
367 def _set_perm_group(_obj: RepoGroup | Repository, users_group: UserGroup, _perm):
368 if isinstance(_obj, RepoGroup):
368 if isinstance(_obj, RepoGroup):
369 self.grant_user_group_permission(repo_group=_obj, group_name=users_group, perm=_perm)
369 self.grant_user_group_permission(repo_group=_obj, group_name=users_group, perm=_perm)
370 elif isinstance(_obj, Repository):
370 elif isinstance(_obj, Repository):
371 # we set a group permission, so we have to switch to the repo permission definition
371 # we set a group permission, so we have to switch to the repo permission definition
372 new_perm = _perm.replace('group.', 'repository.')
372 new_perm = _perm.replace('group.', 'repository.')
373 RepoModel().grant_user_group_permission(repo=_obj, group_name=users_group, perm=new_perm)
373 RepoModel().grant_user_group_permission(repo=_obj, group_name=users_group, perm=new_perm)
374
374
375 def _revoke_perm_user(_obj: RepoGroup | Repository, _user_obj: User):
375 def _revoke_perm_user(_obj: RepoGroup | Repository, _user_obj: User):
376 if isinstance(_obj, RepoGroup):
376 if isinstance(_obj, RepoGroup):
377 self.revoke_user_permission(repo_group=_obj, user=_user_obj)
377 self.revoke_user_permission(repo_group=_obj, user=_user_obj)
378 elif isinstance(_obj, Repository):
378 elif isinstance(_obj, Repository):
379 # private repos do not allow changing the default
379 # private repos do not allow changing the default
380 # permissions using recursive mode; also there's no revocation for the default user, just an update
380 # permissions using recursive mode; also there's no revocation for the default user, just an update
381 if _user_obj.username == User.DEFAULT_USER:
381 if _user_obj.username == User.DEFAULT_USER:
382 log.debug('Skipping private repo %s for user %s', _obj, _user_obj)
382 log.debug('Skipping private repo %s for user %s', _obj, _user_obj)
383 return
383 return
384 RepoModel().revoke_user_permission(repo=_obj, user=_user_obj)
384 RepoModel().revoke_user_permission(repo=_obj, user=_user_obj)
385
385
386 def _revoke_perm_group(_obj: RepoGroup | Repository, user_group: UserGroup):
386 def _revoke_perm_group(_obj: RepoGroup | Repository, user_group: UserGroup):
387 if isinstance(_obj, RepoGroup):
387 if isinstance(_obj, RepoGroup):
388 self.revoke_user_group_permission(repo_group=_obj, group_name=user_group)
388 self.revoke_user_group_permission(repo_group=_obj, group_name=user_group)
389 elif isinstance(_obj, Repository):
389 elif isinstance(_obj, Repository):
390 RepoModel().revoke_user_group_permission(repo=_obj, group_name=user_group)
390 RepoModel().revoke_user_group_permission(repo=_obj, group_name=user_group)
391
391
392 # start updates
392 # start updates
393 log.debug('Now updating permissions for %s in recursive mode:%s',
393 log.debug('Now updating permissions for %s in recursive mode:%s',
394 repo_group, recursive)
394 repo_group, recursive)
395
395
396 # initialize check function, we'll call that multiple times
396 # initialize check function, we'll call that multiple times
397 has_group_perm = HasUserGroupPermissionAny(*req_perms)
397 has_group_perm = HasUserGroupPermissionAny(*req_perms)
398
398
399 for obj in repo_group.recursive_groups_and_repos():
399 for obj in repo_group.recursive_groups_and_repos():
400 # iterated obj is an instance of a repos group or repository in
400 # iterated obj is an instance of a repos group or repository in
401 # that group, recursive option can be: none, repos, groups, all
401 # that group, recursive option can be: none, repos, groups, all
402 if recursive == 'all':
402 if recursive == 'all':
403 obj = obj
403 obj = obj
404 elif recursive == 'repos':
404 elif recursive == 'repos':
405 # skip groups, other than this one
405 # skip groups, other than this one
406 if isinstance(obj, RepoGroup) and not obj == repo_group:
406 if isinstance(obj, RepoGroup) and not obj == repo_group:
407 continue
407 continue
408 elif recursive == 'groups':
408 elif recursive == 'groups':
409 # skip repos
409 # skip repos
410 if isinstance(obj, Repository):
410 if isinstance(obj, Repository):
411 continue
411 continue
412 else: # recursive == 'none':
412 else: # recursive == 'none':
413 # DEFAULT option - don't apply to iterated objects
413 # DEFAULT option - don't apply to iterated objects
414 # we also break at the end of this loop if we are not
414 # we also break at the end of this loop if we are not
415 # in recursive mode
415 # in recursive mode
416 obj = repo_group
416 obj = repo_group
417
417
418 change_obj = obj.get_api_data()
418 change_obj = obj.get_api_data()
419
419
420 # update permissions
420 # update permissions
421 for member_id, perm, member_type in perm_updates:
421 for member_id, perm, member_type in perm_updates:
422 member_id = int(member_id)
422 member_id = int(member_id)
423 if member_type == 'user':
423 if member_type == 'user':
424 member_obj = User.get(member_id)
424 member_obj = User.get(member_id)
425 member_name = member_obj.username
425 member_name = member_obj.username
426 if isinstance(obj, RepoGroup) and obj == repo_group and member_name == User.DEFAULT_USER:
426 if isinstance(obj, RepoGroup) and obj == repo_group and member_name == User.DEFAULT_USER:
427 # NOTE(dan): detect if we changed permissions for default user
427 # NOTE(dan): detect if we changed permissions for default user
428 perm_obj = self.sa.query(UserRepoGroupToPerm) \
428 perm_obj = self.sa.query(UserRepoGroupToPerm) \
429 .filter(UserRepoGroupToPerm.user_id == member_id) \
429 .filter(UserRepoGroupToPerm.user_id == member_id) \
430 .filter(UserRepoGroupToPerm.group == repo_group) \
430 .filter(UserRepoGroupToPerm.group == repo_group) \
431 .scalar()
431 .scalar()
432 if perm_obj and perm_obj.permission.permission_name != perm:
432 if perm_obj and perm_obj.permission.permission_name != perm:
433 changes['default_user_changed'] = True
433 changes['default_user_changed'] = True
434
434
435 # this also updates the current one if found
435 # this also updates the current one if found
436 _set_perm_user(obj, member_obj, perm)
436 _set_perm_user(obj, member_obj, perm)
437 elif member_type == 'user_group':
437 elif member_type == 'user_group':
438 member_obj = UserGroup.get(member_id)
438 member_obj = UserGroup.get(member_id)
439 member_name = member_obj.users_group_name
439 member_name = member_obj.users_group_name
440 if not check_perms or has_group_perm(member_name, user=cur_user):
440 if not check_perms or has_group_perm(member_name, user=cur_user):
441 _set_perm_group(obj, member_obj, perm)
441 _set_perm_group(obj, member_obj, perm)
442 else:
442 else:
443 raise ValueError(
443 raise ValueError(
444 f"member_type must be 'user' or 'user_group' got {member_type} instead"
444 f"member_type must be 'user' or 'user_group' got {member_type} instead"
445 )
445 )
446
446
447 changes['updated'].append(
447 changes['updated'].append(
448 {'change_obj': change_obj, 'type': member_type,
448 {'change_obj': change_obj, 'type': member_type,
449 'id': member_id, 'name': member_name, 'new_perm': perm})
449 'id': member_id, 'name': member_name, 'new_perm': perm})
450
450
451 # set new permissions
451 # set new permissions
452 for member_id, perm, member_type in perm_additions:
452 for member_id, perm, member_type in perm_additions:
453 member_id = int(member_id)
453 member_id = int(member_id)
454 if member_type == 'user':
454 if member_type == 'user':
455 member_obj = User.get(member_id)
455 member_obj = User.get(member_id)
456 member_name = member_obj.username
456 member_name = member_obj.username
457 _set_perm_user(obj, member_obj, perm)
457 _set_perm_user(obj, member_obj, perm)
458 elif member_type == 'user_group':
458 elif member_type == 'user_group':
459 # check if we have permissions to alter this usergroup
459 # check if we have permissions to alter this usergroup
460 member_obj = UserGroup.get(member_id)
460 member_obj = UserGroup.get(member_id)
461 member_name = member_obj.users_group_name
461 member_name = member_obj.users_group_name
462 if not check_perms or has_group_perm(member_name, user=cur_user):
462 if not check_perms or has_group_perm(member_name, user=cur_user):
463 _set_perm_group(obj, member_obj, perm)
463 _set_perm_group(obj, member_obj, perm)
464 else:
464 else:
465 raise ValueError(
465 raise ValueError(
466 f"member_type must be 'user' or 'user_group' got {member_type} instead"
466 f"member_type must be 'user' or 'user_group' got {member_type} instead"
467 )
467 )
468
468
469 changes['added'].append(
469 changes['added'].append(
470 {'change_obj': change_obj, 'type': member_type,
470 {'change_obj': change_obj, 'type': member_type,
471 'id': member_id, 'name': member_name, 'new_perm': perm})
471 'id': member_id, 'name': member_name, 'new_perm': perm})
472
472
473 # delete permissions
473 # delete permissions
474 for member_id, perm, member_type in perm_deletions:
474 for member_id, perm, member_type in perm_deletions:
475 member_id = int(member_id)
475 member_id = int(member_id)
476 if member_type == 'user':
476 if member_type == 'user':
477 member_obj = User.get(member_id)
477 member_obj = User.get(member_id)
478 member_name = member_obj.username
478 member_name = member_obj.username
479 _revoke_perm_user(obj, member_obj)
479 _revoke_perm_user(obj, member_obj)
480 elif member_type == 'user_group':
480 elif member_type == 'user_group':
481 # check if we have permissions to alter this usergroup
481 # check if we have permissions to alter this usergroup
482 member_obj = UserGroup.get(member_id)
482 member_obj = UserGroup.get(member_id)
483 member_name = member_obj.users_group_name
483 member_name = member_obj.users_group_name
484 if not check_perms or has_group_perm(member_name, user=cur_user):
484 if not check_perms or has_group_perm(member_name, user=cur_user):
485 _revoke_perm_group(obj, member_obj)
485 _revoke_perm_group(obj, member_obj)
486 else:
486 else:
487 raise ValueError(
487 raise ValueError(
488 f"member_type must be 'user' or 'user_group' got {member_type} instead"
488 f"member_type must be 'user' or 'user_group' got {member_type} instead"
489 )
489 )
490 changes['deleted'].append(
490 changes['deleted'].append(
491 {'change_obj': change_obj, 'type': member_type,
491 {'change_obj': change_obj, 'type': member_type,
492 'id': member_id, 'name': member_name, 'new_perm': perm})
492 'id': member_id, 'name': member_name, 'new_perm': perm})
493
493
494 # if it's not a recursive call for all/repos/groups,
494 # if it's not a recursive call for all/repos/groups,
495 # break the loop and don't proceed with other changes
495 # break the loop and don't proceed with other changes
496 if recursive not in ['all', 'repos', 'groups']:
496 if recursive not in ['all', 'repos', 'groups']:
497 break
497 break
498
498
499 return changes
499 return changes
500
500
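A sketch of the expected argument shapes (ids and names are invented): each entry is a `(member_id, permission, member_type)` tuple with `member_type` of `'user'` or `'user_group'`, and `recursive` is one of `'none'`, `'repos'`, `'groups'`, `'all'`:

    repo_group = RepoGroupModel().get_by_group_name('projects')
    changes = RepoGroupModel().update_permissions(
        repo_group=repo_group,
        perm_updates=[('2', 'group.write', 'user')],
        perm_additions=[('5', 'group.read', 'user_group')],
        perm_deletions=[],
        recursive='none',
        cur_user='admin')
    # changes -> {'added': [...], 'updated': [...], 'deleted': [...],
    #             'default_user_changed': None}
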
501 def update(self, repo_group, form_data):
501 def update(self, repo_group, form_data):
502 try:
502 try:
503 repo_group = self._get_repo_group(repo_group)
503 repo_group = self._get_repo_group(repo_group)
504 old_path = repo_group.full_path
504 old_path = repo_group.full_path
505
505
506 # change properties
506 # change properties
507 if 'group_description' in form_data:
507 if 'group_description' in form_data:
508 repo_group.group_description = form_data['group_description']
508 repo_group.group_description = form_data['group_description']
509
509
510 if 'enable_locking' in form_data:
510 if 'enable_locking' in form_data:
511 repo_group.enable_locking = form_data['enable_locking']
511 repo_group.enable_locking = form_data['enable_locking']
512
512
513 if 'group_parent_id' in form_data:
513 if 'group_parent_id' in form_data:
514 parent_group = (
514 parent_group = (
515 self._get_repo_group(form_data['group_parent_id']))
515 self._get_repo_group(form_data['group_parent_id']))
516 repo_group.group_parent_id = (
516 repo_group.group_parent_id = (
517 parent_group.group_id if parent_group else None)
517 parent_group.group_id if parent_group else None)
518 repo_group.parent_group = parent_group
518 repo_group.parent_group = parent_group
519
519
520 # mikhail: to update the full_path, we have to explicitly
520 # mikhail: to update the full_path, we have to explicitly
521 # update group_name
521 # update group_name
522 group_name = form_data.get('group_name', repo_group.name)
522 group_name = form_data.get('group_name', repo_group.name)
523 repo_group.group_name = repo_group.get_new_name(group_name)
523 repo_group.group_name = repo_group.get_new_name(group_name)
524
524
525 new_path = repo_group.full_path
525 new_path = repo_group.full_path
526
526
527 affected_user_ids = []
527 affected_user_ids = []
528 if 'user' in form_data:
528 if 'user' in form_data:
529 old_owner_id = repo_group.user.user_id
529 old_owner_id = repo_group.user.user_id
530 new_owner = User.get_by_username(form_data['user'])
530 new_owner = User.get_by_username(form_data['user'])
531 repo_group.user = new_owner
531 repo_group.user = new_owner
532
532
533 if old_owner_id != new_owner.user_id:
533 if old_owner_id != new_owner.user_id:
534 affected_user_ids = [new_owner.user_id, old_owner_id]
534 affected_user_ids = [new_owner.user_id, old_owner_id]
535
535
536 self.sa.add(repo_group)
536 self.sa.add(repo_group)
537
537
538 # iterate over all members of this group and do fixes
538 # iterate over all members of this group and do fixes
539 # set locking if given
539 # set locking if given
540 # if obj is a RepoGroup also fix the name of the group according
540 # if obj is a RepoGroup also fix the name of the group according
541 # to the parent
541 # to the parent
542 # if obj is a Repo fix its name
542 # if obj is a Repo fix its name
543 # this can be a potentially heavy operation
543 # this can be a potentially heavy operation
544 for obj in repo_group.recursive_groups_and_repos():
544 for obj in repo_group.recursive_groups_and_repos():
545 # set the value from its parent
545 # set the value from its parent
546 obj.enable_locking = repo_group.enable_locking
546 obj.enable_locking = repo_group.enable_locking
547 if isinstance(obj, RepoGroup):
547 if isinstance(obj, RepoGroup):
548 new_name = obj.get_new_name(obj.name)
548 new_name = obj.get_new_name(obj.name)
549 log.debug('Fixing group %s to new name %s',
549 log.debug('Fixing group %s to new name %s',
550 obj.group_name, new_name)
550 obj.group_name, new_name)
551 obj.group_name = new_name
551 obj.group_name = new_name
552
552
553 elif isinstance(obj, Repository):
553 elif isinstance(obj, Repository):
554 # we need to get all repositories from this new group and
554 # we need to get all repositories from this new group and
555 # rename them accordingly to new group path
555 # rename them accordingly to new group path
556 new_name = obj.get_new_name(obj.just_name)
556 new_name = obj.get_new_name(obj.just_name)
557 log.debug('Fixing repo %s to new name %s',
557 log.debug('Fixing repo %s to new name %s',
558 obj.repo_name, new_name)
558 obj.repo_name, new_name)
559 obj.repo_name = new_name
559 obj.repo_name = new_name
560
560
561 self.sa.add(obj)
561 self.sa.add(obj)
562
562
563 self._rename_group(old_path, new_path)
563 self._rename_group(old_path, new_path)
564
564
565 # Trigger update event.
565 # Trigger update event.
566 events.trigger(events.RepoGroupUpdateEvent(repo_group))
566 events.trigger(events.RepoGroupUpdateEvent(repo_group))
567
567
568 if affected_user_ids:
568 if affected_user_ids:
569 PermissionModel().trigger_permission_flush(affected_user_ids)
569 PermissionModel().trigger_permission_flush(affected_user_ids)
570
570
571 return repo_group
571 return repo_group
572 except Exception:
572 except Exception:
573 log.error(traceback.format_exc())
573 log.error(traceback.format_exc())
574 raise
574 raise
575
575
576 def delete(self, repo_group, force_delete=False, fs_remove=True):
576 def delete(self, repo_group, force_delete=False, fs_remove=True, call_events=True):
577 repo_group = self._get_repo_group(repo_group)
577 repo_group = self._get_repo_group(repo_group)
578 if not repo_group:
578 if not repo_group:
579 return False
579 return False
580 repo_group_name = repo_group.group_name
580 try:
581 try:
581 self.sa.delete(repo_group)
582 self.sa.delete(repo_group)
582 if fs_remove:
583 if fs_remove:
583 self._delete_filesystem_group(repo_group, force_delete)
584 self._delete_filesystem_group(repo_group, force_delete)
584 else:
585 else:
585 log.debug('skipping removal from filesystem')
586 log.debug('skipping removal from filesystem')
586
587
587 # Trigger delete event.
588 # Trigger delete event.
588 events.trigger(events.RepoGroupDeleteEvent(repo_group))
589 if call_events:
589 return True
590 events.trigger(events.RepoGroupDeleteEvent(repo_group))
590
591
591 except Exception:
592 except Exception:
592 log.error('Error removing repo_group %s', repo_group)
593 log.error('Error removing repo_group %s', repo_group_name)
593 raise
594 raise
594
595
596 return True
597
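With the `call_events` flag introduced in this changeset, a bulk cleanup pass can defer both event handling and filesystem removal; a short sketch (the group name is illustrative):

    RepoGroupModel().delete(
        'old/group', force_delete=False, fs_remove=False, call_events=False)
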
595 def grant_user_permission(self, repo_group, user, perm):
598 def grant_user_permission(self, repo_group, user, perm):
596 """
599 """
597 Grant permission for user on given repository group, or update
600 Grant permission for user on given repository group, or update
598 existing one if found
601 existing one if found
599
602
600 :param repo_group: Instance of RepoGroup, repositories_group_id,
603 :param repo_group: Instance of RepoGroup, repositories_group_id,
601 or repositories_group name
604 or repositories_group name
602 :param user: Instance of User, user_id or username
605 :param user: Instance of User, user_id or username
603 :param perm: Instance of Permission, or permission_name
606 :param perm: Instance of Permission, or permission_name
604 """
607 """
605
608
606 repo_group = self._get_repo_group(repo_group)
609 repo_group = self._get_repo_group(repo_group)
607 user = self._get_user(user)
610 user = self._get_user(user)
608 permission = self._get_perm(perm)
611 permission = self._get_perm(perm)
609
612
610 # check if we have that permission already
613 # check if we have that permission already
611 obj = self.sa.query(UserRepoGroupToPerm)\
614 obj = self.sa.query(UserRepoGroupToPerm)\
612 .filter(UserRepoGroupToPerm.user == user)\
615 .filter(UserRepoGroupToPerm.user == user)\
613 .filter(UserRepoGroupToPerm.group == repo_group)\
616 .filter(UserRepoGroupToPerm.group == repo_group)\
614 .scalar()
617 .scalar()
615 if obj is None:
618 if obj is None:
616 # create new !
619 # create new !
617 obj = UserRepoGroupToPerm()
620 obj = UserRepoGroupToPerm()
618 obj.group = repo_group
621 obj.group = repo_group
619 obj.user = user
622 obj.user = user
620 obj.permission = permission
623 obj.permission = permission
621 self.sa.add(obj)
624 self.sa.add(obj)
622 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
625 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
623 action_logger_generic(
626 action_logger_generic(
624 'granted permission: {} to user: {} on repogroup: {}'.format(
627 'granted permission: {} to user: {} on repogroup: {}'.format(
625 perm, user, repo_group), namespace='security.repogroup')
628 perm, user, repo_group), namespace='security.repogroup')
626 return obj
629 return obj
627
630
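A short usage sketch of the permission helpers (group, user and permission names are examples):

    model = RepoGroupModel()
    model.grant_user_permission(repo_group='projects', user='jdoe', perm='group.read')
    model.revoke_user_permission(repo_group='projects', user='jdoe')
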
628 def revoke_user_permission(self, repo_group, user):
631 def revoke_user_permission(self, repo_group, user):
629 """
632 """
630 Revoke permission for user on given repository group
633 Revoke permission for user on given repository group
631
634
632 :param repo_group: Instance of RepoGroup, repositories_group_id,
635 :param repo_group: Instance of RepoGroup, repositories_group_id,
633 or repositories_group name
636 or repositories_group name
634 :param user: Instance of User, user_id or username
637 :param user: Instance of User, user_id or username
635 """
638 """
636
639
637 repo_group = self._get_repo_group(repo_group)
640 repo_group = self._get_repo_group(repo_group)
638 user = self._get_user(user)
641 user = self._get_user(user)
639
642
640 obj = self.sa.query(UserRepoGroupToPerm)\
643 obj = self.sa.query(UserRepoGroupToPerm)\
641 .filter(UserRepoGroupToPerm.user == user)\
644 .filter(UserRepoGroupToPerm.user == user)\
642 .filter(UserRepoGroupToPerm.group == repo_group)\
645 .filter(UserRepoGroupToPerm.group == repo_group)\
643 .scalar()
646 .scalar()
644 if obj:
647 if obj:
645 self.sa.delete(obj)
648 self.sa.delete(obj)
646 log.debug('Revoked perm on %s on %s', repo_group, user)
649 log.debug('Revoked perm on %s on %s', repo_group, user)
647 action_logger_generic(
650 action_logger_generic(
648 'revoked permission from user: {} on repogroup: {}'.format(
651 'revoked permission from user: {} on repogroup: {}'.format(
649 user, repo_group), namespace='security.repogroup')
652 user, repo_group), namespace='security.repogroup')
650
653
651 def grant_user_group_permission(self, repo_group, group_name, perm):
654 def grant_user_group_permission(self, repo_group, group_name, perm):
652 """
655 """
653 Grant permission for user group on given repository group, or update
656 Grant permission for user group on given repository group, or update
654 existing one if found
657 existing one if found
655
658
656 :param repo_group: Instance of RepoGroup, repositories_group_id,
659 :param repo_group: Instance of RepoGroup, repositories_group_id,
657 or repositories_group name
660 or repositories_group name
658 :param group_name: Instance of UserGroup, users_group_id,
661 :param group_name: Instance of UserGroup, users_group_id,
659 or user group name
662 or user group name
660 :param perm: Instance of Permission, or permission_name
663 :param perm: Instance of Permission, or permission_name
661 """
664 """
662 repo_group = self._get_repo_group(repo_group)
665 repo_group = self._get_repo_group(repo_group)
663 group_name = self._get_user_group(group_name)
666 group_name = self._get_user_group(group_name)
664 permission = self._get_perm(perm)
667 permission = self._get_perm(perm)
665
668
666 # check if we have that permission already
669 # check if we have that permission already
667 obj = self.sa.query(UserGroupRepoGroupToPerm)\
670 obj = self.sa.query(UserGroupRepoGroupToPerm)\
668 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
671 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
669 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
672 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
670 .scalar()
673 .scalar()
671
674
672 if obj is None:
675 if obj is None:
673 # create new
676 # create new
674 obj = UserGroupRepoGroupToPerm()
677 obj = UserGroupRepoGroupToPerm()
675
678
676 obj.group = repo_group
679 obj.group = repo_group
677 obj.users_group = group_name
680 obj.users_group = group_name
678 obj.permission = permission
681 obj.permission = permission
679 self.sa.add(obj)
682 self.sa.add(obj)
680 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
683 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
681 action_logger_generic(
684 action_logger_generic(
682 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
685 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
683 perm, group_name, repo_group), namespace='security.repogroup')
686 perm, group_name, repo_group), namespace='security.repogroup')
684 return obj
687 return obj
685
688
686 def revoke_user_group_permission(self, repo_group, group_name):
689 def revoke_user_group_permission(self, repo_group, group_name):
687 """
690 """
688 Revoke permission for user group on given repository group
691 Revoke permission for user group on given repository group
689
692
690 :param repo_group: Instance of RepoGroup, repositories_group_id,
693 :param repo_group: Instance of RepoGroup, repositories_group_id,
691 or repositories_group name
694 or repositories_group name
692 :param group_name: Instance of UserGroup, users_group_id,
695 :param group_name: Instance of UserGroup, users_group_id,
693 or user group name
696 or user group name
694 """
697 """
695 repo_group = self._get_repo_group(repo_group)
698 repo_group = self._get_repo_group(repo_group)
696 group_name = self._get_user_group(group_name)
699 group_name = self._get_user_group(group_name)
697
700
698 obj = self.sa.query(UserGroupRepoGroupToPerm)\
701 obj = self.sa.query(UserGroupRepoGroupToPerm)\
699 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
702 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
700 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
703 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
701 .scalar()
704 .scalar()
702 if obj:
705 if obj:
703 self.sa.delete(obj)
706 self.sa.delete(obj)
704 log.debug('Revoked perm to %s on %s', repo_group, group_name)
707 log.debug('Revoked perm to %s on %s', repo_group, group_name)
705 action_logger_generic(
708 action_logger_generic(
706 'revoked permission from usergroup: {} on repogroup: {}'.format(
709 'revoked permission from usergroup: {} on repogroup: {}'.format(
707 group_name, repo_group), namespace='security.repogroup')
710 group_name, repo_group), namespace='security.repogroup')
708
711
709 @classmethod
712 @classmethod
710 def update_commit_cache(cls, repo_groups=None):
713 def update_commit_cache(cls, repo_groups=None):
711 if not repo_groups:
714 if not repo_groups:
712 repo_groups = RepoGroup.getAll()
715 repo_groups = RepoGroup.getAll()
713 for repo_group in repo_groups:
716 for repo_group in repo_groups:
714 repo_group.update_commit_cache()
717 repo_group.update_commit_cache()
715
718
716 def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
719 def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
717 super_user_actions=False):
720 super_user_actions=False):
718
721
719 from pyramid.threadlocal import get_current_request
722 from pyramid.threadlocal import get_current_request
720 _render = get_current_request().get_partial_renderer(
723 _render = get_current_request().get_partial_renderer(
721 'rhodecode:templates/data_table/_dt_elements.mako')
724 'rhodecode:templates/data_table/_dt_elements.mako')
722 c = _render.get_call_context()
725 c = _render.get_call_context()
723 h = _render.get_helpers()
726 h = _render.get_helpers()
724
727
725 def quick_menu(repo_group_name):
728 def quick_menu(repo_group_name):
726 return _render('quick_repo_group_menu', repo_group_name)
729 return _render('quick_repo_group_menu', repo_group_name)
727
730
728 def repo_group_lnk(repo_group_name):
731 def repo_group_lnk(repo_group_name):
729 return _render('repo_group_name', repo_group_name)
732 return _render('repo_group_name', repo_group_name)
730
733
731 def last_change(last_change):
734 def last_change(last_change):
732 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
735 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
733 ts = time.time()
736 ts = time.time()
734 utc_offset = (datetime.datetime.fromtimestamp(ts)
737 utc_offset = (datetime.datetime.fromtimestamp(ts)
735 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
738 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
736 last_change = last_change + datetime.timedelta(seconds=utc_offset)
739 last_change = last_change + datetime.timedelta(seconds=utc_offset)
737 return _render("last_change", last_change)
740 return _render("last_change", last_change)
738
741
739 def desc(desc, personal):
742 def desc(desc, personal):
740 return _render(
743 return _render(
741 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
744 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
742
745
743 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
746 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
744 return _render(
747 return _render(
745 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
748 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
746
749
747 def repo_group_name(repo_group_name, children_groups):
750 def repo_group_name(repo_group_name, children_groups):
748 return _render("repo_group_name", repo_group_name, children_groups)
751 return _render("repo_group_name", repo_group_name, children_groups)
749
752
750 def user_profile(username):
753 def user_profile(username):
751 return _render('user_profile', username)
754 return _render('user_profile', username)
752
755
753 repo_group_data = []
756 repo_group_data = []
754 for group in repo_group_list:
757 for group in repo_group_list:
755 # NOTE(marcink): because we use only the raw column we need to load it like this
758 # NOTE(marcink): because we use only the raw column we need to load it like this
756 changeset_cache = RepoGroup._load_changeset_cache(
759 changeset_cache = RepoGroup._load_changeset_cache(
757 '', group._changeset_cache)
760 '', group._changeset_cache)
758 last_commit_change = RepoGroup._load_commit_change(changeset_cache)
761 last_commit_change = RepoGroup._load_commit_change(changeset_cache)
759 row = {
762 row = {
760 "menu": quick_menu(group.group_name),
763 "menu": quick_menu(group.group_name),
761 "name": repo_group_lnk(group.group_name),
764 "name": repo_group_lnk(group.group_name),
762 "name_raw": group.group_name,
765 "name_raw": group.group_name,
763
766
764 "last_change": last_change(last_commit_change),
767 "last_change": last_change(last_commit_change),
765
768
766 "last_changeset": "",
769 "last_changeset": "",
767 "last_changeset_raw": "",
770 "last_changeset_raw": "",
768
771
769 "desc": desc(h.escape(group.group_description), group.personal),
772 "desc": desc(h.escape(group.group_description), group.personal),
770 "top_level_repos": 0,
773 "top_level_repos": 0,
771 "owner": user_profile(group.User.username)
774 "owner": user_profile(group.User.username)
772 }
775 }
773 if admin:
776 if admin:
774 repo_count = group.repositories.count()
777 repo_count = group.repositories.count()
775 children_groups = list(map(
778 children_groups = list(map(
776 h.safe_str,
779 h.safe_str,
777 itertools.chain((g.name for g in group.parents),
780 itertools.chain((g.name for g in group.parents),
778 (x.name for x in [group]))))
781 (x.name for x in [group]))))
779 row.update({
782 row.update({
780 "action": repo_group_actions(
783 "action": repo_group_actions(
781 group.group_id, group.group_name, repo_count),
784 group.group_id, group.group_name, repo_count),
782 "top_level_repos": repo_count,
785 "top_level_repos": repo_count,
783 "name": repo_group_name(group.group_name, children_groups),
786 "name": repo_group_name(group.group_name, children_groups),
784
787
785 })
788 })
786 repo_group_data.append(row)
789 repo_group_data.append(row)
787
790
788 return repo_group_data
791 return repo_group_data
789
792
790 def get_repo_groups_data_table(
793 def get_repo_groups_data_table(
791 self, draw, start, limit,
794 self, draw, start, limit,
792 search_q, order_by, order_dir,
795 search_q, order_by, order_dir,
793 auth_user, repo_group_id):
796 auth_user, repo_group_id):
794 from rhodecode.model.scm import RepoGroupList
797 from rhodecode.model.scm import RepoGroupList
795
798
796 _perms = ['group.read', 'group.write', 'group.admin']
799 _perms = ['group.read', 'group.write', 'group.admin']
797 repo_groups = RepoGroup.query() \
800 repo_groups = RepoGroup.query() \
798 .filter(RepoGroup.group_parent_id == repo_group_id) \
801 .filter(RepoGroup.group_parent_id == repo_group_id) \
799 .all()
802 .all()
800 auth_repo_group_list = RepoGroupList(
803 auth_repo_group_list = RepoGroupList(
801 repo_groups, perm_set=_perms,
804 repo_groups, perm_set=_perms,
802 extra_kwargs=dict(user=auth_user))
805 extra_kwargs=dict(user=auth_user))
803
806
804 allowed_ids = [-1]
807 allowed_ids = [-1]
805 for repo_group in auth_repo_group_list:
808 for repo_group in auth_repo_group_list:
806 allowed_ids.append(repo_group.group_id)
809 allowed_ids.append(repo_group.group_id)
807
810
808 repo_groups_data_total_count = RepoGroup.query() \
811 repo_groups_data_total_count = RepoGroup.query() \
809 .filter(RepoGroup.group_parent_id == repo_group_id) \
812 .filter(RepoGroup.group_parent_id == repo_group_id) \
810 .filter(or_(
813 .filter(or_(
811 # generate multiple IN to fix limitation problems
814 # generate multiple IN to fix limitation problems
812 *in_filter_generator(RepoGroup.group_id, allowed_ids))
815 *in_filter_generator(RepoGroup.group_id, allowed_ids))
813 ) \
816 ) \
814 .count()
817 .count()
815
818
816 base_q = Session.query(
819 base_q = Session.query(
817 RepoGroup.group_name,
820 RepoGroup.group_name,
818 RepoGroup.group_name_hash,
821 RepoGroup.group_name_hash,
819 RepoGroup.group_description,
822 RepoGroup.group_description,
820 RepoGroup.group_id,
823 RepoGroup.group_id,
821 RepoGroup.personal,
824 RepoGroup.personal,
822 RepoGroup.updated_on,
825 RepoGroup.updated_on,
823 RepoGroup._changeset_cache,
826 RepoGroup._changeset_cache,
824 User,
827 User,
825 ) \
828 ) \
826 .filter(RepoGroup.group_parent_id == repo_group_id) \
829 .filter(RepoGroup.group_parent_id == repo_group_id) \
827 .filter(or_(
830 .filter(or_(
828 # generate multiple IN to fix limitation problems
831 # generate multiple IN to fix limitation problems
829 *in_filter_generator(RepoGroup.group_id, allowed_ids))
832 *in_filter_generator(RepoGroup.group_id, allowed_ids))
830 ) \
833 ) \
831 .join(User, User.user_id == RepoGroup.user_id) \
834 .join(User, User.user_id == RepoGroup.user_id) \
832 .group_by(RepoGroup, User)
835 .group_by(RepoGroup, User)
833
836
834 repo_groups_data_total_filtered_count = base_q.count()
837 repo_groups_data_total_filtered_count = base_q.count()
835
838
836 sort_defined = False
839 sort_defined = False
837
840
838 if order_by == 'group_name':
841 if order_by == 'group_name':
839 sort_col = func.lower(RepoGroup.group_name)
842 sort_col = func.lower(RepoGroup.group_name)
840 sort_defined = True
843 sort_defined = True
841 elif order_by == 'user_username':
844 elif order_by == 'user_username':
842 sort_col = User.username
845 sort_col = User.username
843 else:
846 else:
844 sort_col = getattr(RepoGroup, order_by, None)
847 sort_col = getattr(RepoGroup, order_by, None)
845
848
846 if sort_defined or sort_col:
849 if sort_defined or sort_col:
847 if order_dir == 'asc':
850 if order_dir == 'asc':
848 sort_col = sort_col.asc()
851 sort_col = sort_col.asc()
849 else:
852 else:
850 sort_col = sort_col.desc()
853 sort_col = sort_col.desc()
851
854
852 base_q = base_q.order_by(sort_col)
855 base_q = base_q.order_by(sort_col)
853 base_q = base_q.offset(start).limit(limit)
856 base_q = base_q.offset(start).limit(limit)
854
857
855 repo_group_list = base_q.all()
858 repo_group_list = base_q.all()
856
859
857 repo_groups_data = RepoGroupModel().get_repo_groups_as_dict(
860 repo_groups_data = RepoGroupModel().get_repo_groups_as_dict(
858 repo_group_list=repo_group_list, admin=False)
861 repo_group_list=repo_group_list, admin=False)
859
862
860 data = ({
863 data = ({
861 'draw': draw,
864 'draw': draw,
862 'data': repo_groups_data,
865 'data': repo_groups_data,
863 'recordsTotal': repo_groups_data_total_count,
866 'recordsTotal': repo_groups_data_total_count,
864 'recordsFiltered': repo_groups_data_total_filtered_count,
867 'recordsFiltered': repo_groups_data_total_filtered_count,
865 })
868 })
866 return data
869 return data
867
870
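The returned dict follows the usual server-side DataTables contract; a sketch with invented counts:

    data = {
        'draw': 1,                 # echo of the client-side draw counter
        'data': [],                # rows from get_repo_groups_as_dict()
        'recordsTotal': 12,        # child groups under repo_group_id
        'recordsFiltered': 7,      # rows left after permission filtering
    }
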
868 def _get_defaults(self, repo_group_name):
871 def _get_defaults(self, repo_group_name):
869 repo_group = RepoGroup.get_by_group_name(repo_group_name)
872 repo_group = RepoGroup.get_by_group_name(repo_group_name)
870
873
871 if repo_group is None:
874 if repo_group is None:
872 return None
875 return None
873
876
874 defaults = repo_group.get_dict()
877 defaults = repo_group.get_dict()
875 defaults['repo_group_name'] = repo_group.name
878 defaults['repo_group_name'] = repo_group.name
876 defaults['repo_group_description'] = repo_group.group_description
879 defaults['repo_group_description'] = repo_group.group_description
877 defaults['repo_group_enable_locking'] = repo_group.enable_locking
880 defaults['repo_group_enable_locking'] = repo_group.enable_locking
878
881
879 # we use -1 because this is how we mark an empty group in the HTML form
882 # we use -1 because this is how we mark an empty group in the HTML form
880 defaults['repo_group'] = defaults['group_parent_id'] or -1
883 defaults['repo_group'] = defaults['group_parent_id'] or -1
881
884
882 # fill owner
885 # fill owner
883 if repo_group.user:
886 if repo_group.user:
884 defaults.update({'user': repo_group.user.username})
887 defaults.update({'user': repo_group.user.username})
885 else:
888 else:
886 replacement_user = User.get_first_super_admin().username
889 replacement_user = User.get_first_super_admin().username
887 defaults.update({'user': replacement_user})
890 defaults.update({'user': replacement_user})
888
891
889 return defaults
892 return defaults
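Note on the query built above: the permitted group IDs are spread across several IN clauses (via in_filter_generator combined with or_) so that a single statement never exceeds database limits on bound parameters. The real RhodeCode helper lives in the model layer and may differ in signature and chunk size; the following is only a minimal sketch of the idea, with an arbitrarily chosen chunk size.

import itertools

def in_filter_generator(column, ids, chunk_size=300):
    # Yield one `column.in_(chunk)` clause per slice of ids; wrapped in
    # or_(), the slices behave like one large IN filter while keeping the
    # number of bound parameters per clause small.
    if not ids:
        # or_() over no clauses matches nothing, which is the desired
        # outcome when no group is permitted.
        return
    it = iter(ids)
    while chunk := list(itertools.islice(it, chunk_size)):
        yield column.in_(chunk)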
@@ -1,422 +1,422 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 import io
18 import io
19 import shlex
19 import shlex
20
20
21 import math
21 import math
22 import re
22 import re
23 import os
23 import os
24 import datetime
24 import datetime
25 import logging
25 import logging
26 import queue
26 import queue
27 import subprocess
27 import subprocess
28
28
29
29
30 from dateutil.parser import parse
30 from dateutil.parser import parse
31 from pyramid.interfaces import IRoutesMapper
31 from pyramid.interfaces import IRoutesMapper
32 from pyramid.settings import asbool
32 from pyramid.settings import asbool
33 from pyramid.path import AssetResolver
33 from pyramid.path import AssetResolver
34 from threading import Thread
34 from threading import Thread
35
35
36 from rhodecode.config.jsroutes import generate_jsroutes_content
36 from rhodecode.config.jsroutes import generate_jsroutes_content
37 from rhodecode.lib.base import get_auth_user
37 from rhodecode.lib.base import get_auth_user
38 from rhodecode.lib.celerylib.loader import set_celery_conf
38 from rhodecode.lib.celerylib.loader import set_celery_conf
39
39
40 import rhodecode
40 import rhodecode
41
41
42
42
43 log = logging.getLogger(__name__)
43 log = logging.getLogger(__name__)
44
44
45
45
46 def add_renderer_globals(event):
46 def add_renderer_globals(event):
47 from rhodecode.lib import helpers
47 from rhodecode.lib import helpers
48
48
49 # TODO: When executed in pyramid view context the request is not available
49 # TODO: When executed in pyramid view context the request is not available
50 # in the event. Find a better solution to get the request.
50 # in the event. Find a better solution to get the request.
51 from pyramid.threadlocal import get_current_request
51 from pyramid.threadlocal import get_current_request
52 request = event['request'] or get_current_request()
52 request = event['request'] or get_current_request()
53
53
54 # Add Pyramid translation as '_' to context
54 # Add Pyramid translation as '_' to context
55 event['_'] = request.translate
55 event['_'] = request.translate
56 event['_ungettext'] = request.plularize
56 event['_ungettext'] = request.plularize
57 event['h'] = helpers
57 event['h'] = helpers
58
58
59
59
60 def set_user_lang(event):
60 def set_user_lang(event):
61 request = event.request
61 request = event.request
62 cur_user = getattr(request, 'user', None)
62 cur_user = getattr(request, 'user', None)
63
63
64 if cur_user:
64 if cur_user:
65 user_lang = cur_user.get_instance().user_data.get('language')
65 user_lang = cur_user.get_instance().user_data.get('language')
66 if user_lang:
66 if user_lang:
67 log.debug('lang: setting current user:%s language to: %s', cur_user, user_lang)
67 log.debug('lang: setting current user:%s language to: %s', cur_user, user_lang)
68 event.request._LOCALE_ = user_lang
68 event.request._LOCALE_ = user_lang
69
69
70
70
71 def update_celery_conf(event):
71 def update_celery_conf(event):
72 log.debug('Setting celery config from new request')
72 log.debug('Setting celery config from new request')
73 set_celery_conf(request=event.request, registry=event.request.registry)
73 set_celery_conf(request=event.request, registry=event.request.registry)
74
74
75
75
76 def add_request_user_context(event):
76 def add_request_user_context(event):
77 """
77 """
78 Adds auth user into request context
78 Adds auth user into request context
79 """
79 """
80
80
81 request = event.request
81 request = event.request
82 # access req_id as soon as possible
82 # access req_id as soon as possible
83 req_id = request.req_id
83 req_id = request.req_id
84
84
85 if hasattr(request, 'vcs_call'):
85 if hasattr(request, 'vcs_call'):
86 # skip vcs calls
86 # skip vcs calls
87 return
87 return
88
88
89 if hasattr(request, 'rpc_method'):
89 if hasattr(request, 'rpc_method'):
90 # skip api calls
90 # skip api calls
91 return
91 return
92
92
93 auth_user, auth_token = get_auth_user(request)
93 auth_user, auth_token = get_auth_user(request)
94 request.user = auth_user
94 request.user = auth_user
95 request.user_auth_token = auth_token
95 request.user_auth_token = auth_token
96 request.environ['rc_auth_user'] = auth_user
96 request.environ['rc_auth_user'] = auth_user
97 request.environ['rc_auth_user_id'] = str(auth_user.user_id)
97 request.environ['rc_auth_user_id'] = str(auth_user.user_id)
98 request.environ['rc_req_id'] = req_id
98 request.environ['rc_req_id'] = req_id
99
99
100
100
101 def reset_log_bucket(event):
101 def reset_log_bucket(event):
102 """
102 """
103 reset the log bucket on new request
103 reset the log bucket on new request
104 """
104 """
105 request = event.request
105 request = event.request
106 request.req_id_records_init()
106 request.req_id_records_init()
107
107
108
108
109 def scan_repositories_if_enabled(event):
109 def scan_repositories_if_enabled(event):
110 """
110 """
111 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
111 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
112 does a repository scan if enabled in the settings.
112 does a repository scan if enabled in the settings.
113 """
113 """
114
114
115 settings = event.app.registry.settings
115 settings = event.app.registry.settings
116 vcs_server_enabled = settings['vcs.server.enable']
116 vcs_server_enabled = settings['vcs.server.enable']
117 import_on_startup = settings['startup.import_repos']
117 import_on_startup = settings['startup.import_repos']
118
118
119 if vcs_server_enabled and import_on_startup:
119 if vcs_server_enabled and import_on_startup:
120 from rhodecode.model.scm import ScmModel
120 from rhodecode.model.scm import ScmModel
121 from rhodecode.lib.utils import repo2db_mapper
121 from rhodecode.lib.utils import repo2db_mapper
122 scm = ScmModel()
122 scm = ScmModel()
123 repositories = scm.repo_scan(scm.repos_path)
123 repositories = scm.repo_scan(scm.repos_path)
124 repo2db_mapper(repositories, remove_obsolete=False)
124 repo2db_mapper(repositories)
125
125
126
126
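With the call above reduced to repo2db_mapper(repositories), the startup subscriber is add-only: repositories found on disk but missing from the database get created, and nothing is removed here. Deleting database records for repositories that disappeared from the filesystem is handled by a separate cleanup action (see the storage-mapping template further below). The snippet below is an illustrative, self-contained sketch of that add-only decision; the helper name is hypothetical and not part of RhodeCode's API.

def plan_additions(found_on_disk: dict, known_in_db: set) -> list:
    # Add-only mapping: anything present on disk but unknown to the
    # database is scheduled for creation; nothing is ever deleted here.
    return sorted(name for name in found_on_disk if name not in known_in_db)

print(plan_additions({'group-a/repo1': object(), 'repo2': object()}, {'repo2'}))
# ['group-a/repo1']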
127 def write_metadata_if_needed(event):
127 def write_metadata_if_needed(event):
128 """
128 """
129 Writes upgrade metadata
129 Writes upgrade metadata
130 """
130 """
131 import rhodecode
131 import rhodecode
132 from rhodecode.lib import system_info
132 from rhodecode.lib import system_info
133 from rhodecode.lib import ext_json
133 from rhodecode.lib import ext_json
134
134
135 fname = '.rcmetadata.json'
135 fname = '.rcmetadata.json'
136 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
136 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
137 metadata_destination = os.path.join(ini_loc, fname)
137 metadata_destination = os.path.join(ini_loc, fname)
138
138
139 def get_update_age():
139 def get_update_age():
140 now = datetime.datetime.utcnow()
140 now = datetime.datetime.utcnow()
141
141
142 with open(metadata_destination, 'rb') as f:
142 with open(metadata_destination, 'rb') as f:
143 data = ext_json.json.loads(f.read())
143 data = ext_json.json.loads(f.read())
144 if 'created_on' in data:
144 if 'created_on' in data:
145 update_date = parse(data['created_on'])
145 update_date = parse(data['created_on'])
146 diff = now - update_date
146 diff = now - update_date
147 return diff.total_seconds() / 60.0
147 return diff.total_seconds() / 60.0
148
148
149 return 0
149 return 0
150
150
151 def write():
151 def write():
152 configuration = system_info.SysInfo(
152 configuration = system_info.SysInfo(
153 system_info.rhodecode_config)()['value']
153 system_info.rhodecode_config)()['value']
154 license_token = configuration['config']['license_token']
154 license_token = configuration['config']['license_token']
155
155
156 setup = dict(
156 setup = dict(
157 workers=configuration['config']['server:main'].get(
157 workers=configuration['config']['server:main'].get(
158 'workers', '?'),
158 'workers', '?'),
159 worker_type=configuration['config']['server:main'].get(
159 worker_type=configuration['config']['server:main'].get(
160 'worker_class', 'sync'),
160 'worker_class', 'sync'),
161 )
161 )
162 dbinfo = system_info.SysInfo(system_info.database_info)()['value']
162 dbinfo = system_info.SysInfo(system_info.database_info)()['value']
163 del dbinfo['url']
163 del dbinfo['url']
164
164
165 metadata = dict(
165 metadata = dict(
166 desc='upgrade metadata info',
166 desc='upgrade metadata info',
167 license_token=license_token,
167 license_token=license_token,
168 created_on=datetime.datetime.utcnow().isoformat(),
168 created_on=datetime.datetime.utcnow().isoformat(),
169 usage=system_info.SysInfo(system_info.usage_info)()['value'],
169 usage=system_info.SysInfo(system_info.usage_info)()['value'],
170 platform=system_info.SysInfo(system_info.platform_type)()['value'],
170 platform=system_info.SysInfo(system_info.platform_type)()['value'],
171 database=dbinfo,
171 database=dbinfo,
172 cpu=system_info.SysInfo(system_info.cpu)()['value'],
172 cpu=system_info.SysInfo(system_info.cpu)()['value'],
173 memory=system_info.SysInfo(system_info.memory)()['value'],
173 memory=system_info.SysInfo(system_info.memory)()['value'],
174 setup=setup
174 setup=setup
175 )
175 )
176
176
177 with open(metadata_destination, 'wb') as f:
177 with open(metadata_destination, 'wb') as f:
178 f.write(ext_json.json.dumps(metadata))
178 f.write(ext_json.json.dumps(metadata))
179
179
180 settings = event.app.registry.settings
180 settings = event.app.registry.settings
181 if settings.get('metadata.skip'):
181 if settings.get('metadata.skip'):
182 return
182 return
183
183
184 # only write this every 24h, workers restart caused unwanted delays
184 # only write this every 24h, workers restart caused unwanted delays
185 try:
185 try:
186 age_in_min = get_update_age()
186 age_in_min = get_update_age()
187 except Exception:
187 except Exception:
188 age_in_min = 0
188 age_in_min = 0
189
189
190 if age_in_min and age_in_min < 60 * 24:  # skip if written within the last 24h
190 if age_in_min and age_in_min < 60 * 24:  # skip if written within the last 24h
191 return
191 return
192
192
193 try:
193 try:
194 write()
194 write()
195 except Exception:
195 except Exception:
196 pass
196 pass
197
197
198
198
199 def write_usage_data(event):
199 def write_usage_data(event):
200 import rhodecode
200 import rhodecode
201 from rhodecode.lib import system_info
201 from rhodecode.lib import system_info
202 from rhodecode.lib import ext_json
202 from rhodecode.lib import ext_json
203
203
204 settings = event.app.registry.settings
204 settings = event.app.registry.settings
205 instance_tag = settings.get('metadata.write_usage_tag')
205 instance_tag = settings.get('metadata.write_usage_tag')
206 if not settings.get('metadata.write_usage'):
206 if not settings.get('metadata.write_usage'):
207 return
207 return
208
208
209 def get_update_age(dest_file):
209 def get_update_age(dest_file):
210 now = datetime.datetime.now(datetime.UTC)
210 now = datetime.datetime.now(datetime.UTC)
211
211
212 with open(dest_file, 'rb') as f:
212 with open(dest_file, 'rb') as f:
213 data = ext_json.json.loads(f.read())
213 data = ext_json.json.loads(f.read())
214 if 'created_on' in data:
214 if 'created_on' in data:
215 update_date = parse(data['created_on'])
215 update_date = parse(data['created_on'])
216 diff = now - update_date
216 diff = now - update_date
217 return math.ceil(diff.total_seconds() / 60.0)
217 return math.ceil(diff.total_seconds() / 60.0)
218
218
219 return 0
219 return 0
220
220
221 utc_date = datetime.datetime.now(datetime.UTC)
221 utc_date = datetime.datetime.now(datetime.UTC)
222 hour_quarter = int(math.ceil((utc_date.hour + utc_date.minute/60.0) / 6.))
222 hour_quarter = int(math.ceil((utc_date.hour + utc_date.minute/60.0) / 6.))
223 fname = f'.rc_usage_{utc_date.year}{utc_date.month:02d}{utc_date.day:02d}_{hour_quarter}.json'
223 fname = f'.rc_usage_{utc_date.year}{utc_date.month:02d}{utc_date.day:02d}_{hour_quarter}.json'
224 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
224 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
225
225
226 usage_dir = os.path.join(ini_loc, '.rcusage')
226 usage_dir = os.path.join(ini_loc, '.rcusage')
227 if not os.path.isdir(usage_dir):
227 if not os.path.isdir(usage_dir):
228 os.makedirs(usage_dir)
228 os.makedirs(usage_dir)
229 usage_metadata_destination = os.path.join(usage_dir, fname)
229 usage_metadata_destination = os.path.join(usage_dir, fname)
230
230
231 try:
231 try:
232 age_in_min = get_update_age(usage_metadata_destination)
232 age_in_min = get_update_age(usage_metadata_destination)
233 except Exception:
233 except Exception:
234 age_in_min = 0
234 age_in_min = 0
235
235
236 # write every 6th hour
236 # write every 6th hour
237 if age_in_min and age_in_min < 60 * 6:
237 if age_in_min and age_in_min < 60 * 6:
238 log.debug('Usage file created %s minutes ago, skipping (threshold: %s minutes)...',
238 log.debug('Usage file created %s minutes ago, skipping (threshold: %s minutes)...',
239 age_in_min, 60 * 6)
239 age_in_min, 60 * 6)
240 return
240 return
241
241
242 def write(dest_file):
242 def write(dest_file):
243 configuration = system_info.SysInfo(system_info.rhodecode_config)()['value']
243 configuration = system_info.SysInfo(system_info.rhodecode_config)()['value']
244 license_token = configuration['config']['license_token']
244 license_token = configuration['config']['license_token']
245
245
246 metadata = dict(
246 metadata = dict(
247 desc='Usage data',
247 desc='Usage data',
248 instance_tag=instance_tag,
248 instance_tag=instance_tag,
249 license_token=license_token,
249 license_token=license_token,
250 created_on=datetime.datetime.utcnow().isoformat(),
250 created_on=datetime.datetime.utcnow().isoformat(),
251 usage=system_info.SysInfo(system_info.usage_info)()['value'],
251 usage=system_info.SysInfo(system_info.usage_info)()['value'],
252 )
252 )
253
253
254 with open(dest_file, 'wb') as f:
254 with open(dest_file, 'wb') as f:
255 f.write(ext_json.formatted_json(metadata))
255 f.write(ext_json.formatted_json(metadata))
256
256
257 try:
257 try:
258 log.debug('Writing usage file at: %s', usage_metadata_destination)
258 log.debug('Writing usage file at: %s', usage_metadata_destination)
259 write(usage_metadata_destination)
259 write(usage_metadata_destination)
260 except Exception:
260 except Exception:
261 pass
261 pass
262
262
263
263
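The usage file name derived above buckets the current UTC time into six-hour windows, so at most a few files are written per day and repeated worker restarts within one window reuse the same name. A small, self-contained check of that bucketing, reusing the exact formula from the function:

import math
import datetime

def usage_file_name(utc_date: datetime.datetime) -> str:
    # Same six-hour bucketing as above: 07:30 UTC falls into quarter 2,
    # 23:59 UTC into quarter 4.
    hour_quarter = int(math.ceil((utc_date.hour + utc_date.minute / 60.0) / 6.))
    return f'.rc_usage_{utc_date.year}{utc_date.month:02d}{utc_date.day:02d}_{hour_quarter}.json'

print(usage_file_name(datetime.datetime(2024, 5, 1, 7, 30)))
# .rc_usage_20240501_2.json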
264 def write_js_routes_if_enabled(event):
264 def write_js_routes_if_enabled(event):
265 registry = event.app.registry
265 registry = event.app.registry
266
266
267 mapper = registry.queryUtility(IRoutesMapper)
267 mapper = registry.queryUtility(IRoutesMapper)
268 _argument_prog = re.compile(r'\{(.*?)\}|:\((.*)\)')
268 _argument_prog = re.compile(r'\{(.*?)\}|:\((.*)\)')
269
269
270 def _extract_route_information(route):
270 def _extract_route_information(route):
271 """
271 """
272 Convert a route into tuple(name, path, args), eg:
272 Convert a route into tuple(name, path, args), eg:
273 ('show_user', '/profile/%(username)s', ['username'])
273 ('show_user', '/profile/%(username)s', ['username'])
274 """
274 """
275
275
276 route_path = route.pattern
276 route_path = route.pattern
277 pattern = route.pattern
277 pattern = route.pattern
278
278
279 def replace(matchobj):
279 def replace(matchobj):
280 if matchobj.group(1):
280 if matchobj.group(1):
281 return "%%(%s)s" % matchobj.group(1).split(':')[0]
281 return "%%(%s)s" % matchobj.group(1).split(':')[0]
282 else:
282 else:
283 return "%%(%s)s" % matchobj.group(2)
283 return "%%(%s)s" % matchobj.group(2)
284
284
285 route_path = _argument_prog.sub(replace, route_path)
285 route_path = _argument_prog.sub(replace, route_path)
286
286
287 if not route_path.startswith('/'):
287 if not route_path.startswith('/'):
288 route_path = f'/{route_path}'
288 route_path = f'/{route_path}'
289
289
290 return (
290 return (
291 route.name,
291 route.name,
292 route_path,
292 route_path,
293 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
293 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
294 for arg in _argument_prog.findall(pattern)]
294 for arg in _argument_prog.findall(pattern)]
295 )
295 )
296
296
297 def get_routes():
297 def get_routes():
298 # pyramid routes
298 # pyramid routes
299 for route in mapper.get_routes():
299 for route in mapper.get_routes():
300 if not route.name.startswith('__'):
300 if not route.name.startswith('__'):
301 yield _extract_route_information(route)
301 yield _extract_route_information(route)
302
302
303 if asbool(registry.settings.get('generate_js_files', 'false')):
303 if asbool(registry.settings.get('generate_js_files', 'false')):
304 static_path = AssetResolver().resolve('rhodecode:public').abspath()
304 static_path = AssetResolver().resolve('rhodecode:public').abspath()
305 jsroutes = get_routes()
305 jsroutes = get_routes()
306 jsroutes_file_content = generate_jsroutes_content(jsroutes)
306 jsroutes_file_content = generate_jsroutes_content(jsroutes)
307 jsroutes_file_path = os.path.join(
307 jsroutes_file_path = os.path.join(
308 static_path, 'js', 'rhodecode', 'routes.js')
308 static_path, 'js', 'rhodecode', 'routes.js')
309
309
310 try:
310 try:
311 with open(jsroutes_file_path, 'w', encoding='utf-8') as f:
311 with open(jsroutes_file_path, 'w', encoding='utf-8') as f:
312 f.write(jsroutes_file_content)
312 f.write(jsroutes_file_content)
313 log.debug('generated JS files in %s', jsroutes_file_path)
313 log.debug('generated JS files in %s', jsroutes_file_path)
314 except Exception:
314 except Exception:
315 log.exception('Failed to write routes.js into %s', jsroutes_file_path)
315 log.exception('Failed to write routes.js into %s', jsroutes_file_path)
316
316
317
317
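The route extraction above turns Pyramid route patterns into printf-style paths plus argument lists for the generated routes.js. It can be exercised in isolation; the sketch below reuses the same regular expression and replacement logic on two sample patterns (the second pattern is made up for illustration and is not necessarily a real RhodeCode route).

import re

# Same placeholder pattern as above: matches `{name}` or `:(name)`.
_argument_prog = re.compile(r'\{(.*?)\}|:\((.*)\)')

def to_js_route(pattern: str):
    def replace(matchobj):
        if matchobj.group(1):
            return "%%(%s)s" % matchobj.group(1).split(':')[0]
        return "%%(%s)s" % matchobj.group(2)

    path = _argument_prog.sub(replace, pattern)
    if not path.startswith('/'):
        path = f'/{path}'
    args = [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
            for arg in _argument_prog.findall(pattern)]
    return path, args

print(to_js_route('/profile/{username}'))
# ('/profile/%(username)s', ['username'])
print(to_js_route('repo/{repo_name:.*?[^/]}/files'))
# ('/repo/%(repo_name)s/files', ['repo_name'])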
318 def import_license_if_present(event):
318 def import_license_if_present(event):
319 """
319 """
320 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
320 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
321 imports a license key based on the presence of the file.
321 imports a license key based on the presence of the file.
322 """
322 """
323 settings = event.app.registry.settings
323 settings = event.app.registry.settings
324
324
325 rhodecode_edition_id = settings.get('rhodecode.edition_id')
325 rhodecode_edition_id = settings.get('rhodecode.edition_id')
326 license_file_path = settings.get('license.import_path')
326 license_file_path = settings.get('license.import_path')
327 force = settings.get('license.import_path_mode') == 'force'
327 force = settings.get('license.import_path_mode') == 'force'
328
328
329 if license_file_path and rhodecode_edition_id == 'EE':
329 if license_file_path and rhodecode_edition_id == 'EE':
330 log.debug('license.import_path= is set importing license from %s', license_file_path)
330 log.debug('license.import_path= is set importing license from %s', license_file_path)
331 from rhodecode.model.meta import Session
331 from rhodecode.model.meta import Session
332 from rhodecode.model.license import apply_license_from_file
332 from rhodecode.model.license import apply_license_from_file
333 try:
333 try:
334 apply_license_from_file(license_file_path, force=force)
334 apply_license_from_file(license_file_path, force=force)
335 Session().commit()
335 Session().commit()
336 except OSError:
336 except OSError:
337 log.exception('Failed to import license from %s, make sure this file exists', license_file_path)
337 log.exception('Failed to import license from %s, make sure this file exists', license_file_path)
338
338
339
339
340 class Subscriber(object):
340 class Subscriber(object):
341 """
341 """
342 Base class for subscribers to the pyramid event system.
342 Base class for subscribers to the pyramid event system.
343 """
343 """
344 def __call__(self, event):
344 def __call__(self, event):
345 self.run(event)
345 self.run(event)
346
346
347 def run(self, event):
347 def run(self, event):
348 raise NotImplementedError('Subclass has to implement this.')
348 raise NotImplementedError('Subclass has to implement this.')
349
349
350
350
351 class AsyncSubscriber(Subscriber):
351 class AsyncSubscriber(Subscriber):
352 """
352 """
353 Subscriber that handles the execution of events in a separate task to not
353 Subscriber that handles the execution of events in a separate task to not
354 block the execution of the code which triggers the event. It puts the
354 block the execution of the code which triggers the event. It puts the
355 received events into a queue from which the worker process takes them in
355 received events into a queue from which the worker process takes them in
356 order.
356 order.
357 """
357 """
358 def __init__(self):
358 def __init__(self):
359 self._stop = False
359 self._stop = False
360 self._eventq = queue.Queue()
360 self._eventq = queue.Queue()
361 self._worker = self.create_worker()
361 self._worker = self.create_worker()
362 self._worker.start()
362 self._worker.start()
363
363
364 def __call__(self, event):
364 def __call__(self, event):
365 self._eventq.put(event)
365 self._eventq.put(event)
366
366
367 def create_worker(self):
367 def create_worker(self):
368 worker = Thread(target=self.do_work)
368 worker = Thread(target=self.do_work)
369 worker.daemon = True
369 worker.daemon = True
370 return worker
370 return worker
371
371
372 def stop_worker(self):
372 def stop_worker(self):
373 self._stop = True  # signal do_work() to exit after draining the queue
373 self._stop = True  # signal do_work() to exit after draining the queue
374 self._eventq.put(None)
374 self._eventq.put(None)
375 self._worker.join()
375 self._worker.join()
376
376
377 def do_work(self):
377 def do_work(self):
378 while not self._stop:
378 while not self._stop:
379 event = self._eventq.get()
379 event = self._eventq.get()
380 if event is not None:
380 if event is not None:
381 self.run(event)
381 self.run(event)
382
382
383
383
384 class AsyncSubprocessSubscriber(AsyncSubscriber):
384 class AsyncSubprocessSubscriber(AsyncSubscriber):
385 """
385 """
386 Subscriber that uses the subprocess module to execute a command if an
386 Subscriber that uses the subprocess module to execute a command if an
387 event is received. Events are handled asynchronously::
387 event is received. Events are handled asynchronously::
388
388
389 subscriber = AsyncSubprocessSubscriber('ls -la', timeout=10)
389 subscriber = AsyncSubprocessSubscriber('ls -la', timeout=10)
390 subscriber(dummyEvent) # running __call__(event)
390 subscriber(dummyEvent) # running __call__(event)
391
391
392 """
392 """
393
393
394 def __init__(self, cmd, timeout=None):
394 def __init__(self, cmd, timeout=None):
395 if not isinstance(cmd, (list, tuple)):
395 if not isinstance(cmd, (list, tuple)):
396 cmd = shlex.split(cmd)
396 cmd = shlex.split(cmd)
397 super().__init__()
397 super().__init__()
398 self._cmd = cmd
398 self._cmd = cmd
399 self._timeout = timeout
399 self._timeout = timeout
400
400
401 def run(self, event):
401 def run(self, event):
402 cmd = self._cmd
402 cmd = self._cmd
403 timeout = self._timeout
403 timeout = self._timeout
404 log.debug('Executing command %s.', cmd)
404 log.debug('Executing command %s.', cmd)
405
405
406 try:
406 try:
407 output = subprocess.check_output(
407 output = subprocess.check_output(
408 cmd, timeout=timeout, stderr=subprocess.STDOUT)
408 cmd, timeout=timeout, stderr=subprocess.STDOUT)
409 log.debug('Command finished %s', cmd)
409 log.debug('Command finished %s', cmd)
410 if output:
410 if output:
411 log.debug('Command output: %s', output)
411 log.debug('Command output: %s', output)
412 except subprocess.TimeoutExpired as e:
412 except subprocess.TimeoutExpired as e:
413 log.exception('Timeout while executing command.')
413 log.exception('Timeout while executing command.')
414 if e.output:
414 if e.output:
415 log.error('Command output: %s', e.output)
415 log.error('Command output: %s', e.output)
416 except subprocess.CalledProcessError as e:
416 except subprocess.CalledProcessError as e:
417 log.exception('Error while executing command.')
417 log.exception('Error while executing command.')
418 if e.output:
418 if e.output:
419 log.error('Command output: %s', e.output)
419 log.error('Command output: %s', e.output)
420 except Exception:
420 except Exception:
421 log.exception(
421 log.exception(
422 'Exception while executing command %s.', cmd)
422 'Exception while executing command %s.', cmd)
@@ -1,33 +1,45 b''
1 ${h.secure_form(h.route_path('admin_settings_mapping_update'), request=request)}
1
2
2
3 <div class="panel panel-default">
3 <div class="panel panel-default">
4 <div class="panel-heading">
4 <div class="panel-heading">
5 <h3 class="panel-title">${_('Import New Groups or Repositories')}</h3>
5 <h3 class="panel-title">${_('Import new repository groups and repositories')}</h3>
6 </div>
6 </div>
7 <div class="panel-body">
7 <div class="panel-body">
8
8 ${h.secure_form(h.route_path('admin_settings_mapping_create'), request=request)}
9 <p>
9 <p>
10 ${_('This function will scann all data under the current storage path location at')} <code>${c.storage_path}</code>
10 ${_('This function will scan all data under the current storage path location at')} <code>${c.storage_path}</code><br/>
11 ${_('Each folder will be imported as a new repository group, and each repository found will also be imported into the root level or the corresponding repository group')}
11 </p>
12 </p>
12
13
13 <div class="checkbox">
14 <div class="checkbox">
14 ${h.checkbox('destroy',True)}
15 <label for="destroy">${_('Destroy old data')}</label>
16 </div>
17 <span class="help-block">${_('In case a repository or a group was deleted from the filesystem and it still exists in the database, check this option to remove obsolete data from the database.')}</span>
18
19 <div class="checkbox">
20 ${h.checkbox('invalidate',True)}
15 ${h.checkbox('invalidate',True)}
21 <label for="invalidate"> ${_('Invalidate cache for all repositories')}</label>
16 <label for="invalidate"> ${_('Invalidate cache for all repositories')}</label>
22 </div>
17 </div>
23 <span class="help-block">${_('Cache data for each repository will be cleared when this option is selected. Use this to reload data and clear cache keys.')}</span>
18 <span class="help-block">${_('Cache data for each repository will be cleared when this option is selected. Use this to reload data and clear cache keys.')}</span>
24
19
25 <div class="buttons">
20 <div class="buttons">
26 ${h.submit('rescan',_('Rescan Filesystem'),class_="btn")}
21 ${h.submit('rescan',_('Scan filesystem'),class_="btn")}
27 </div>
22 </div>
28
23 ${h.end_form()}
29 </div>
24 </div>
30 </div>
25 </div>
31
26
32
27
33 ${h.end_form()}
28 <div class="panel panel-default">
29 <div class="panel-heading">
30 <h3 class="panel-title">${_('Clean up removed repository groups and repositories')}</h3>
31 </div>
32 <div class="panel-body">
33 ${h.secure_form(h.route_path('admin_settings_mapping_cleanup'), request=request)}
34 <p>
35 ${_('This function will scan all data under the current storage path location at')} <code>${c.storage_path}</code>
36 ${_('Then it will remove all repository groups and repositories that are no longer present in the filesystem.')}
37 </p>
38
39 <div class="buttons">
40 ${h.submit('rescan',_('Cleanup filesystem'),class_="btn btn-danger")}
41 </div>
42 ${h.end_form()}
43 </div>
44 </div>
45
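The cleanup form above describes the removal half of the split workflow: scan the storage path, then drop repository and repository-group records that no longer have a matching directory on disk. A self-contained sketch of that decision, using plain filesystem checks rather than RhodeCode's actual implementation:

import os

def find_stale_entries(storage_path: str, db_repo_names: list) -> list:
    # A database entry is considered stale when its expected location under
    # the storage path no longer exists on disk.
    return [name for name in db_repo_names
            if not os.path.isdir(os.path.join(storage_path, name))]

# Example: if /srv/repos contains only 'team/website', then
# find_stale_entries('/srv/repos', ['team/website', 'team/retired-service'])
# reports ['team/retired-service'] as a cleanup candidate.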
@@ -1,1695 +1,1697 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import collections
19 import collections
20 import datetime
20 import datetime
21 import os
21 import os
22 import re
22 import re
23 import pprint
23 import pprint
24 import shutil
24 import shutil
25 import socket
25 import socket
26 import subprocess
26 import subprocess
27 import time
27 import time
28 import uuid
28 import uuid
29 import dateutil.tz
29 import dateutil.tz
30 import logging
30 import logging
31 import functools
31 import functools
32 import textwrap
32 import textwrap
33
33
34 import mock
34 import mock
35 import pyramid.testing
35 import pyramid.testing
36 import pytest
36 import pytest
37 import colander
37 import colander
38 import requests
38 import requests
39 import pyramid.paster
39 import pyramid.paster
40
40
41 import rhodecode
41 import rhodecode
42 import rhodecode.lib
42 import rhodecode.lib
43 from rhodecode.model.changeset_status import ChangesetStatusModel
43 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.comment import CommentsModel
44 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.db import (
45 from rhodecode.model.db import (
46 PullRequest,
46 PullRequest,
47 PullRequestReviewers,
47 PullRequestReviewers,
48 Repository,
48 Repository,
49 RhodeCodeSetting,
49 RhodeCodeSetting,
50 ChangesetStatus,
50 ChangesetStatus,
51 RepoGroup,
51 RepoGroup,
52 UserGroup,
52 UserGroup,
53 RepoRhodeCodeUi,
53 RepoRhodeCodeUi,
54 RepoRhodeCodeSetting,
54 RepoRhodeCodeSetting,
55 RhodeCodeUi,
55 RhodeCodeUi,
56 )
56 )
57 from rhodecode.model.meta import Session
57 from rhodecode.model.meta import Session
58 from rhodecode.model.pull_request import PullRequestModel
58 from rhodecode.model.pull_request import PullRequestModel
59 from rhodecode.model.repo import RepoModel
59 from rhodecode.model.repo import RepoModel
60 from rhodecode.model.repo_group import RepoGroupModel
60 from rhodecode.model.repo_group import RepoGroupModel
61 from rhodecode.model.user import UserModel
61 from rhodecode.model.user import UserModel
62 from rhodecode.model.settings import VcsSettingsModel
62 from rhodecode.model.settings import VcsSettingsModel
63 from rhodecode.model.user_group import UserGroupModel
63 from rhodecode.model.user_group import UserGroupModel
64 from rhodecode.model.integration import IntegrationModel
64 from rhodecode.model.integration import IntegrationModel
65 from rhodecode.integrations import integration_type_registry
65 from rhodecode.integrations import integration_type_registry
66 from rhodecode.integrations.types.base import IntegrationTypeBase
66 from rhodecode.integrations.types.base import IntegrationTypeBase
67 from rhodecode.lib.utils import repo2db_mapper
67 from rhodecode.lib.utils import repo2db_mapper
68 from rhodecode.lib.str_utils import safe_bytes
68 from rhodecode.lib.str_utils import safe_bytes
69 from rhodecode.lib.hash_utils import sha1_safe
69 from rhodecode.lib.hash_utils import sha1_safe
70 from rhodecode.lib.vcs.backends import get_backend
70 from rhodecode.lib.vcs.backends import get_backend
71 from rhodecode.lib.vcs.nodes import FileNode
71 from rhodecode.lib.vcs.nodes import FileNode
72 from rhodecode.lib.base import bootstrap_config
72 from rhodecode.lib.base import bootstrap_config
73 from rhodecode.tests import (
73 from rhodecode.tests import (
74 login_user_session,
74 login_user_session,
75 get_new_dir,
75 get_new_dir,
76 utils,
76 utils,
77 TESTS_TMP_PATH,
77 TESTS_TMP_PATH,
78 TEST_USER_ADMIN_LOGIN,
78 TEST_USER_ADMIN_LOGIN,
79 TEST_USER_REGULAR_LOGIN,
79 TEST_USER_REGULAR_LOGIN,
80 TEST_USER_REGULAR2_LOGIN,
80 TEST_USER_REGULAR2_LOGIN,
81 TEST_USER_REGULAR_PASS,
81 TEST_USER_REGULAR_PASS,
82 console_printer,
82 console_printer,
83 )
83 )
84 from rhodecode.tests.utils import set_anonymous_access
84 from rhodecode.tests.utils import set_anonymous_access
85 from rhodecode.tests.fixtures.rc_fixture import Fixture
85 from rhodecode.tests.fixtures.rc_fixture import Fixture
86 from rhodecode.config import utils as config_utils
86 from rhodecode.config import utils as config_utils
87
87
88 log = logging.getLogger(__name__)
88 log = logging.getLogger(__name__)
89
89
90
90
91 def cmp(a, b):
91 def cmp(a, b):
92 # backport cmp from python2 so we can still use it in the custom code in this module
92 # backport cmp from python2 so we can still use it in the custom code in this module
93 return (a > b) - (a < b)
93 return (a > b) - (a < b)
94
94
95
95
96 @pytest.fixture(scope="session")
96 @pytest.fixture(scope="session")
97 def http_environ_session():
97 def http_environ_session():
98 """
98 """
99 Allow to use "http_environ" in session scope.
99 Allow to use "http_environ" in session scope.
100 """
100 """
101 return plain_http_environ()
101 return plain_http_environ()
102
102
103
103
104 def plain_http_host_stub():
104 def plain_http_host_stub():
105 """
105 """
106 Value of HTTP_HOST in the test run.
106 Value of HTTP_HOST in the test run.
107 """
107 """
108 return "example.com:80"
108 return "example.com:80"
109
109
110
110
111 def plain_config_stub(request, request_stub):
111 def plain_config_stub(request, request_stub):
112 """
112 """
113 Set up pyramid.testing and return the Configurator.
113 Set up pyramid.testing and return the Configurator.
114 """
114 """
115
115
116 config = bootstrap_config(request=request_stub)
116 config = bootstrap_config(request=request_stub)
117
117
118 @request.addfinalizer
118 @request.addfinalizer
119 def cleanup():
119 def cleanup():
120 pyramid.testing.tearDown()
120 pyramid.testing.tearDown()
121
121
122 return config
122 return config
123
123
124
124
125 def plain_request_stub():
125 def plain_request_stub():
126 """
126 """
127 Stub request object.
127 Stub request object.
128 """
128 """
129 from rhodecode.lib.base import bootstrap_request
129 from rhodecode.lib.base import bootstrap_request
130
130
131 _request = bootstrap_request(scheme="https")
131 _request = bootstrap_request(scheme="https")
132 return _request
132 return _request
133
133
134
134
135 @pytest.fixture()
135 @pytest.fixture()
136 def http_host_stub():
136 def http_host_stub():
137 """
137 """
138 Value of HTTP_HOST in the test run.
138 Value of HTTP_HOST in the test run.
139 """
139 """
140 return plain_http_host_stub()
140 return plain_http_host_stub()
141
141
142
142
143 def plain_http_host_only_stub():
143 def plain_http_host_only_stub():
144 """
144 """
145 Value of HTTP_HOST in the test run.
145 Value of HTTP_HOST in the test run.
146 """
146 """
147 return plain_http_host_stub().split(":")[0]
147 return plain_http_host_stub().split(":")[0]
148
148
149
149
150 @pytest.fixture()
150 @pytest.fixture()
151 def http_host_only_stub():
151 def http_host_only_stub():
152 """
152 """
153 Value of HTTP_HOST in the test run.
153 Value of HTTP_HOST in the test run.
154 """
154 """
155 return plain_http_host_only_stub()
155 return plain_http_host_only_stub()
156
156
157
157
158 def plain_http_environ():
158 def plain_http_environ():
159 """
159 """
160 HTTP extra environ keys.
160 HTTP extra environ keys.
161
161
162 Used by the test application as well as for setting up the pylons
162 Used by the test application as well as for setting up the pylons
163 environment. In the case of the fixture "app" it should be possible
163 environment. In the case of the fixture "app" it should be possible
164 to override this for a specific test case.
164 to override this for a specific test case.
165 """
165 """
166 return {
166 return {
167 "SERVER_NAME": plain_http_host_only_stub(),
167 "SERVER_NAME": plain_http_host_only_stub(),
168 "SERVER_PORT": plain_http_host_stub().split(":")[1],
168 "SERVER_PORT": plain_http_host_stub().split(":")[1],
169 "HTTP_HOST": plain_http_host_stub(),
169 "HTTP_HOST": plain_http_host_stub(),
170 "HTTP_USER_AGENT": "rc-test-agent",
170 "HTTP_USER_AGENT": "rc-test-agent",
171 "REQUEST_METHOD": "GET",
171 "REQUEST_METHOD": "GET",
172 }
172 }
173
173
174
174
175 @pytest.fixture(scope="session")
175 @pytest.fixture(scope="session")
176 def baseapp(request, ini_config, http_environ_session, available_port_factory, vcsserver_factory, celery_factory):
176 def baseapp(request, ini_config, http_environ_session, available_port_factory, vcsserver_factory, celery_factory):
177 from rhodecode.lib.config_utils import get_app_config
177 from rhodecode.lib.config_utils import get_app_config
178 from rhodecode.config.middleware import make_pyramid_app
178 from rhodecode.config.middleware import make_pyramid_app
179
179
180 log.info("Using the RhodeCode configuration:%s", ini_config)
180 log.info("Using the RhodeCode configuration:%s", ini_config)
181 pyramid.paster.setup_logging(ini_config)
181 pyramid.paster.setup_logging(ini_config)
182
182
183 settings = get_app_config(ini_config)
183 settings = get_app_config(ini_config)
184 store_dir = os.path.dirname(ini_config)
184 store_dir = os.path.dirname(ini_config)
185
185
186 # start vcsserver
186 # start vcsserver
187 _vcsserver_port = available_port_factory()
187 _vcsserver_port = available_port_factory()
188 vcsserver_instance = vcsserver_factory(
188 vcsserver_instance = vcsserver_factory(
189 request,
189 request,
190 store_dir=store_dir,
190 store_dir=store_dir,
191 port=_vcsserver_port,
191 port=_vcsserver_port,
192 info_prefix="base-app-"
192 info_prefix="base-app-"
193 )
193 )
194
194
195 settings["vcs.server"] = vcsserver_instance.bind_addr
195 settings["vcs.server"] = vcsserver_instance.bind_addr
196
196
197 # we skip setting store_dir for baseapp, it's internally set via testing rhodecode.ini
197 # we skip setting store_dir for baseapp, it's internally set via testing rhodecode.ini
198 # settings['repo_store.path'] = str(store_dir)
198 # settings['repo_store.path'] = str(store_dir)
199 console_printer(f' :warning: [green]pytest-setup[/green] Starting base pyramid-app: {ini_config}')
199 console_printer(f' :warning: [green]pytest-setup[/green] Starting base pyramid-app: {ini_config}')
200 pyramid_baseapp = make_pyramid_app({"__file__": ini_config}, **settings)
200 pyramid_baseapp = make_pyramid_app({"__file__": ini_config}, **settings)
201
201
202 # start celery
202 # start celery
203 celery_factory(
203 celery_factory(
204 request,
204 request,
205 store_dir=store_dir,
205 store_dir=store_dir,
206 port=None,
206 port=None,
207 info_prefix="base-app-",
207 info_prefix="base-app-",
208 overrides=(
208 overrides=(
209 {'handler_console': {'level': 'DEBUG'}},
209 {'handler_console': {'level': 'DEBUG'}},
210 {'app:main': {'vcs.server': vcsserver_instance.bind_addr}},
210 {'app:main': {'vcs.server': vcsserver_instance.bind_addr}},
211 {'app:main': {'repo_store.path': store_dir}}
211 {'app:main': {'repo_store.path': store_dir}}
212 )
212 )
213 )
213 )
214
214
215 return pyramid_baseapp
215 return pyramid_baseapp
216
216
217
217
218 @pytest.fixture(scope="session")
218 @pytest.fixture(scope="session")
219 def app_settings(baseapp, ini_config):
219 def app_settings(baseapp, ini_config):
220 """
220 """
221 Settings dictionary used to create the app.
221 Settings dictionary used to create the app.
222
222
223 Parses the ini file and passes the result through the sanitize and apply
223 Parses the ini file and passes the result through the sanitize and apply
224 defaults mechanism in `rhodecode.config.middleware`.
224 defaults mechanism in `rhodecode.config.middleware`.
225 """
225 """
226 return baseapp.config.get_settings()
226 return baseapp.config.get_settings()
227
227
228
228
229 @pytest.fixture(scope="session")
229 @pytest.fixture(scope="session")
230 def db_connection(ini_settings):
230 def db_connection(ini_settings):
231 # Initialize the database connection.
231 # Initialize the database connection.
232 config_utils.initialize_database(ini_settings)
232 config_utils.initialize_database(ini_settings)
233
233
234
234
235 LoginData = collections.namedtuple("LoginData", ("csrf_token", "user"))
235 LoginData = collections.namedtuple("LoginData", ("csrf_token", "user"))
236
236
237
237
238 def _autologin_user(app, *args):
238 def _autologin_user(app, *args):
239 session = login_user_session(app, *args)
239 session = login_user_session(app, *args)
240 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
240 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
241 return LoginData(csrf_token, session["rhodecode_user"])
241 return LoginData(csrf_token, session["rhodecode_user"])
242
242
243
243
244 @pytest.fixture()
244 @pytest.fixture()
245 def autologin_user(app):
245 def autologin_user(app):
246 """
246 """
247 Utility fixture which makes sure that the admin user is logged in
247 Utility fixture which makes sure that the admin user is logged in
248 """
248 """
249 return _autologin_user(app)
249 return _autologin_user(app)
250
250
251
251
252 @pytest.fixture()
252 @pytest.fixture()
253 def autologin_regular_user(app):
253 def autologin_regular_user(app):
254 """
254 """
255 Utility fixture which makes sure that the regular user is logged in
255 Utility fixture which makes sure that the regular user is logged in
256 """
256 """
257 return _autologin_user(app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
257 return _autologin_user(app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
258
258
259
259
260 @pytest.fixture(scope="function")
260 @pytest.fixture(scope="function")
261 def csrf_token(request, autologin_user):
261 def csrf_token(request, autologin_user):
262 return autologin_user.csrf_token
262 return autologin_user.csrf_token
263
263
264
264
265 @pytest.fixture(scope="function")
265 @pytest.fixture(scope="function")
266 def xhr_header(request):
266 def xhr_header(request):
267 return {"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"}
267 return {"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"}
268
268
269
269
270 @pytest.fixture()
270 @pytest.fixture()
271 def real_crypto_backend(monkeypatch):
271 def real_crypto_backend(monkeypatch):
272 """
272 """
273 Switch the production crypto backend on for this test.
273 Switch the production crypto backend on for this test.
274
274
275 During the test run the crypto backend is replaced with a faster
275 During the test run the crypto backend is replaced with a faster
276 implementation based on the MD5 algorithm.
276 implementation based on the MD5 algorithm.
277 """
277 """
278 monkeypatch.setattr(rhodecode, "is_test", False)
278 monkeypatch.setattr(rhodecode, "is_test", False)
279
279
280
280
281 @pytest.fixture(scope="class")
281 @pytest.fixture(scope="class")
282 def index_location(request, baseapp):
282 def index_location(request, baseapp):
283 index_location = baseapp.config.get_settings()["search.location"]
283 index_location = baseapp.config.get_settings()["search.location"]
284 if request.cls:
284 if request.cls:
285 request.cls.index_location = index_location
285 request.cls.index_location = index_location
286 return index_location
286 return index_location
287
287
288
288
289 @pytest.fixture(scope="session", autouse=True)
289 @pytest.fixture(scope="session", autouse=True)
290 def tests_tmp_path(request):
290 def tests_tmp_path(request):
291 """
291 """
292 Create temporary directory to be used during the test session.
292 Create temporary directory to be used during the test session.
293 """
293 """
294 if not os.path.exists(TESTS_TMP_PATH):
294 if not os.path.exists(TESTS_TMP_PATH):
295 os.makedirs(TESTS_TMP_PATH)
295 os.makedirs(TESTS_TMP_PATH)
296
296
297 if not request.config.getoption("--keep-tmp-path"):
297 if not request.config.getoption("--keep-tmp-path"):
298
298
299 @request.addfinalizer
299 @request.addfinalizer
300 def remove_tmp_path():
300 def remove_tmp_path():
301 shutil.rmtree(TESTS_TMP_PATH)
301 shutil.rmtree(TESTS_TMP_PATH)
302
302
303 return TESTS_TMP_PATH
303 return TESTS_TMP_PATH
304
304
305
305
306 @pytest.fixture()
306 @pytest.fixture()
307 def test_repo_group(request):
307 def test_repo_group(request):
308 """
308 """
309 Create a temporary repository group, and destroy it after
309 Create a temporary repository group, and destroy it after
310 usage automatically
310 usage automatically
311 """
311 """
312 fixture = Fixture()
312 fixture = Fixture()
313 repogroupid = "test_repo_group_%s" % str(time.time()).replace(".", "")
313 repogroupid = "test_repo_group_%s" % str(time.time()).replace(".", "")
314 repo_group = fixture.create_repo_group(repogroupid)
314 repo_group = fixture.create_repo_group(repogroupid)
315
315
316 def _cleanup():
316 def _cleanup():
317 fixture.destroy_repo_group(repogroupid)
317 fixture.destroy_repo_group(repogroupid)
318
318
319 request.addfinalizer(_cleanup)
319 request.addfinalizer(_cleanup)
320 return repo_group
320 return repo_group
321
321
322
322
323 @pytest.fixture()
323 @pytest.fixture()
324 def test_user_group(request):
324 def test_user_group(request):
325 """
325 """
326 Create a temporary user group, and destroy it after
326 Create a temporary user group, and destroy it after
327 usage automatically
327 usage automatically
328 """
328 """
329 fixture = Fixture()
329 fixture = Fixture()
330 usergroupid = "test_user_group_%s" % str(time.time()).replace(".", "")
330 usergroupid = "test_user_group_%s" % str(time.time()).replace(".", "")
331 user_group = fixture.create_user_group(usergroupid)
331 user_group = fixture.create_user_group(usergroupid)
332
332
333 def _cleanup():
333 def _cleanup():
334 fixture.destroy_user_group(user_group)
334 fixture.destroy_user_group(user_group)
335
335
336 request.addfinalizer(_cleanup)
336 request.addfinalizer(_cleanup)
337 return user_group
337 return user_group
338
338
339
339
340 @pytest.fixture(scope="session")
340 @pytest.fixture(scope="session")
341 def test_repo(request):
341 def test_repo(request):
342 container = TestRepoContainer()
342 container = TestRepoContainer()
343 request.addfinalizer(container._cleanup)
343 request.addfinalizer(container._cleanup)
344 return container
344 return container
345
345
346
346
347 class TestRepoContainer(object):
347 class TestRepoContainer(object):
348 """
348 """
349 Container for test repositories which are used read only.
349 Container for test repositories which are used read only.
350
350
351 Repositories will be created on demand and re-used during the lifetime
351 Repositories will be created on demand and re-used during the lifetime
352 of this object.
352 of this object.
353
353
354 Usage to get the svn test repository "minimal"::
354 Usage to get the svn test repository "minimal"::
355
355
356 test_repo = TestRepoContainer()
356 test_repo = TestRepoContainer()
357 repo = test_repo('minimal', 'svn')
357 repo = test_repo('minimal', 'svn')
358
358
359 """
359 """
360
360
361 dump_extractors = {
361 dump_extractors = {
362 "git": utils.extract_git_repo_from_dump,
362 "git": utils.extract_git_repo_from_dump,
363 "hg": utils.extract_hg_repo_from_dump,
363 "hg": utils.extract_hg_repo_from_dump,
364 "svn": utils.extract_svn_repo_from_dump,
364 "svn": utils.extract_svn_repo_from_dump,
365 }
365 }
366
366
367 def __init__(self):
367 def __init__(self):
368 self._cleanup_repos = []
368 self._cleanup_repos = []
369 self._fixture = Fixture()
369 self._fixture = Fixture()
370 self._repos = {}
370 self._repos = {}
371
371
372 def __call__(self, dump_name, backend_alias, config=None):
372 def __call__(self, dump_name, backend_alias, config=None):
373 key = (dump_name, backend_alias)
373 key = (dump_name, backend_alias)
374 if key not in self._repos:
374 if key not in self._repos:
375 repo = self._create_repo(dump_name, backend_alias, config)
375 repo = self._create_repo(dump_name, backend_alias, config)
376 self._repos[key] = repo.repo_id
376 self._repos[key] = repo.repo_id
377 return Repository.get(self._repos[key])
377 return Repository.get(self._repos[key])
378
378
379 def _create_repo(self, dump_name, backend_alias, config):
379 def _create_repo(self, dump_name, backend_alias, config):
380 repo_name = f"{backend_alias}-{dump_name}"
380 repo_name = f"{backend_alias}-{dump_name}"
381 backend = get_backend(backend_alias)
381 backend = get_backend(backend_alias)
382 dump_extractor = self.dump_extractors[backend_alias]
382 dump_extractor = self.dump_extractors[backend_alias]
383 repo_path = dump_extractor(dump_name, repo_name)
383 repo_path = dump_extractor(dump_name, repo_name)
384
384
385 vcs_repo = backend(repo_path, config=config)
385 vcs_repo = backend(repo_path, config=config)
386 repo2db_mapper({repo_name: vcs_repo})
386 repo2db_mapper({repo_name: vcs_repo})
387
387
388 repo = RepoModel().get_by_repo_name(repo_name)
388 repo = RepoModel().get_by_repo_name(repo_name)
389 self._cleanup_repos.append(repo_name)
389 self._cleanup_repos.append(repo_name)
390 return repo
390 return repo
391
391
392 def _cleanup(self):
392 def _cleanup(self):
393 for repo_name in reversed(self._cleanup_repos):
393 for repo_name in reversed(self._cleanup_repos):
394 self._fixture.destroy_repo(repo_name)
394 self._fixture.destroy_repo(repo_name)
395
395
396
396
397 def backend_base(request, backend_alias, test_repo):
397 def backend_base(request, backend_alias, test_repo):
398 if backend_alias not in request.config.getoption("--backends"):
398 if backend_alias not in request.config.getoption("--backends"):
399 pytest.skip(f"Backend {backend_alias} not selected.")
399 pytest.skip(f"Backend {backend_alias} not selected.")
400
400
401 utils.check_xfail_backends(request.node, backend_alias)
401 utils.check_xfail_backends(request.node, backend_alias)
402 utils.check_skip_backends(request.node, backend_alias)
402 utils.check_skip_backends(request.node, backend_alias)
403
403
404 repo_name = "vcs_test_%s" % (backend_alias,)
404 repo_name = "vcs_test_%s" % (backend_alias,)
405 backend = Backend(
405 backend = Backend(
406 alias=backend_alias, repo_name=repo_name, test_name=request.node.name, test_repo_container=test_repo
406 alias=backend_alias, repo_name=repo_name, test_name=request.node.name, test_repo_container=test_repo
407 )
407 )
408 request.addfinalizer(backend.cleanup)
408 request.addfinalizer(backend.cleanup)
409 return backend
409 return backend
410
410
411
411
412 @pytest.fixture()
412 @pytest.fixture()
413 def backend(request, backend_alias, baseapp, test_repo):
413 def backend(request, backend_alias, baseapp, test_repo):
414 """
414 """
415 Parametrized fixture which represents a single backend implementation.
415 Parametrized fixture which represents a single backend implementation.
416
416
417 It respects the option `--backends` to focus the test run on specific
417 It respects the option `--backends` to focus the test run on specific
418 backend implementations.
418 backend implementations.
419
419
420 It also supports `pytest.mark.xfail_backends` to mark tests as failing
420 It also supports `pytest.mark.xfail_backends` to mark tests as failing
421 for specific backends. This is intended as a utility for incremental
421 for specific backends. This is intended as a utility for incremental
422 development of a new backend implementation.
422 development of a new backend implementation.
423 """
423 """
424 return backend_base(request, backend_alias, test_repo)
424 return backend_base(request, backend_alias, test_repo)
425
425
426
426
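The parametrized backend fixture above is consumed like any other pytest fixture; below is a hypothetical test sketch (the assertion is illustrative only, relying on the create_repo helper and default_branch_name property shown elsewhere in this module).

def test_new_repo_uses_default_branch(backend):
    # Runs once per backend selected via --backends (git, hg, svn).
    repo = backend.create_repo()
    assert repo is not None
    assert backend.default_branch_name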
427 @pytest.fixture()
427 @pytest.fixture()
428 def backend_git(request, baseapp, test_repo):
428 def backend_git(request, baseapp, test_repo):
429 return backend_base(request, "git", test_repo)
429 return backend_base(request, "git", test_repo)
430
430
431
431
432 @pytest.fixture()
432 @pytest.fixture()
433 def backend_hg(request, baseapp, test_repo):
433 def backend_hg(request, baseapp, test_repo):
434 return backend_base(request, "hg", test_repo)
434 return backend_base(request, "hg", test_repo)
435
435
436
436
437 @pytest.fixture()
437 @pytest.fixture()
438 def backend_svn(request, baseapp, test_repo):
438 def backend_svn(request, baseapp, test_repo):
439 return backend_base(request, "svn", test_repo)
439 return backend_base(request, "svn", test_repo)
440
440
441
441
442 @pytest.fixture()
442 @pytest.fixture()
443 def backend_random(backend_git):
443 def backend_random(backend_git):
444 """
444 """
445 Use this to express that your tests need "a backend".
445 Use this to express that your tests need "a backend".
446
446
447 A few of our tests need a backend, so that we can run the code. This
447 A few of our tests need a backend, so that we can run the code. This
448 fixture is intended to be used for such cases. It will pick one of the
448 fixture is intended to be used for such cases. It will pick one of the
449 backends and run the tests.
449 backends and run the tests.
450
450
451 The fixture `backend` would run the test multiple times for each
451 The fixture `backend` would run the test multiple times for each
452 available backend which is a pure waste of time if the test is
452 available backend which is a pure waste of time if the test is
453 independent of the backend type.
453 independent of the backend type.
454 """
454 """
455 # TODO: johbo: Change this to pick a random backend
455 # TODO: johbo: Change this to pick a random backend
456 return backend_git
456 return backend_git
457
457
458
458
@pytest.fixture()
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git


@pytest.fixture()
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub
    """
    return backend_stub.create_repo()

class Backend(object):
    """
    Represents the test configuration for one supported backend.

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    invalid_repo_name = re.compile(r"[^0-9a-zA-Z]+")
    _master_repo = None
    _master_repo_path = ""
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo_id(self):
        # just fake some repo_id
        return self.repo.repo_id

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository

        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name if the backend has a default
        branch. Otherwise it will point to a valid head which can serve as
        the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = vcsrepo.DEFAULT_BRANCH_NAME or vcsrepo.commit_ids[-1]
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the commit map (commit message to raw_id) for the last
        created repository.
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This makes it easy to create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        self._master_repo_path = self._master_repo.repo_full_path

        return self._commit_ids

    def create_repo(self, commits=None, number_of_commits=0, heads=None, name_suffix="", bare=False, **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of commit
            names which shall be pulled in from the master repository.
        :param name_suffix: adds a special suffix to the generated repo name
        :param bare: set the repo as bare (no checkout)
        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        commits = commits or [{"message": f"Commit {x} of {self.repo_name}"} for x in range(number_of_commits)]
        vcs_repo = repo.scm_instance()
        vcs_repo.count()
        self._add_commits_to_repo(vcs_repo, commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads, do_fetch=False):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsrepo = repo.scm_instance()
        vcsrepo.config.clear_section("hooks")
        commit_ids = [self._commit_ids[h] for h in heads]
        if do_fetch:
            vcsrepo.fetch(self._master_repo_path, commit_ids=commit_ids)
        vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)

    def create_fork(self):
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=""):
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        return "%s_%s" % (self.invalid_repo_name.sub("_", self._test_name), len(self._cleanup_repos))

    def ensure_file(self, filename, content=b"Test content\n"):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {
                "added": [
                    FileNode(filename, content=content),
                ]
            },
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == "git":
            refs = {}
            for message in self._commit_ids:
                cleanup_message = message.replace(" ", "")
                ref_name = f"refs/test-refs/{cleanup_message}"
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        for ref_name, ref_val in refs.items():
            repo.set_refs(ref_name, ref_val)

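# Illustrative sketch (not part of the original code): building a small
# scenario with the `Backend` helper above. Commit messages double as keys in
# the returned commit map, and `heads` pulls selected commits from the master
# repository into a derived one. All names used here are hypothetical.
#
#     commit_ids = backend.create_master_repo([
#         {"message": "c1"},
#         {"message": "c2"},
#     ])
#     derived = backend.create_repo(heads=["c1"])  # pulls in commit "c1" only
#     assert commit_ids["c1"] in derived.scm_instance().commit_ids
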
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    invalid_repo_name = re.compile(r"[^0-9a-zA-Z]+")

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias).scm_instance()

    def __repr__(self):
        return f"{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})"

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None, bare=False):
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        commits = commits or [{"message": "Commit %s of %s" % (x, repo_name)} for x in range(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        return "{}_{}".format(self.invalid_repo_name.sub("_", self._test_name), len(self._cleanup_repos))

    def add_file(self, repo, filename, content="Test content\n"):
        imc = repo.in_memory_commit
        imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content)))
        imc.commit(message="Automatic commit from vcsbackend fixture", author="Automatic <automatic@rhodecode.com>")

    def ensure_file(self, filename, content="Test content\n"):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)

def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend:
    if backend_alias not in request.config.getoption("--backends"):
        pytest.skip("Backend %s not selected." % (backend_alias,))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = f"vcs_test_{backend_alias}"
    repo_path = os.path.join(tests_tmp_path, repo_name)
    backend = VcsBackend(
        alias=backend_alias, repo_path=repo_path, test_name=request.node.name, test_repo_container=test_repo
    )
    request.addfinalizer(backend.cleanup)
    return backend


@pytest.fixture()
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)


@pytest.fixture()
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, "git", tests_tmp_path, baseapp, test_repo)


@pytest.fixture()
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, "hg", tests_tmp_path, baseapp, test_repo)


@pytest.fixture()
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, "svn", tests_tmp_path, baseapp, test_repo)


@pytest.fixture()
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git

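# Illustrative sketch (not part of the original fixtures): the `vcsbackend`
# fixture works on the plain vcs layer, without database models. The test
# name and file name below are hypothetical.
#
#     def test_example_vcs_only(vcsbackend):
#         repo = vcsbackend.create_repo(number_of_commits=1)
#         vcsbackend.add_file(repo, "README.rst", content="hello\n")
#         assert len(repo.commit_ids) >= 1
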
def _add_commits_to_repo(vcs_repo, commits):
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit

    for idx, commit in enumerate(commits):
        message = str(commit.get("message", f"Commit {idx}"))

        for node in commit.get("added", []):
            imc.add(FileNode(safe_bytes(node.path), content=node.content))
        for node in commit.get("changed", []):
            imc.change(FileNode(safe_bytes(node.path), content=node.content))
        for node in commit.get("removed", []):
            imc.remove(FileNode(safe_bytes(node.path)))

        parents = [vcs_repo.get_commit(commit_id=commit_ids[p]) for p in commit.get("parents", [])]

        operations = ("added", "changed", "removed")
        if not any((commit.get(o) for o in operations)):
            imc.add(FileNode(b"file_%b" % safe_bytes(str(idx)), content=safe_bytes(message)))

        commit = imc.commit(
            message=message,
            author=str(commit.get("author", "Automatic <automatic@rhodecode.com>")),
            date=commit.get("date"),
            branch=commit.get("branch"),
            parents=parents,
        )

        commit_ids[commit.message] = commit.raw_id

    return commit_ids

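# Illustrative sketch (not part of the original code): the shape of the
# `commits` structure accepted by `_add_commits_to_repo` and by the
# `create_repo(commits=...)` helpers above. File names and messages are
# hypothetical; every key except `message` is optional.
#
#     commits = [
#         {"message": "add docs", "added": [FileNode(b"docs/index.rst", content=b"index\n")]},
#         {"message": "update docs", "changed": [FileNode(b"docs/index.rst", content=b"v2\n")]},
#         {"message": "drop docs", "removed": [FileNode(b"docs/index.rst")]},
#     ]
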
@pytest.fixture()
def reposerver(request):
    """
    Allows serving a backend repository
    """

    repo_server = RepoServer()
    request.addfinalizer(repo_server.cleanup)
    return repo_server


class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        if vcsrepo.alias != "svn":
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess.Popen(
            ["svnserve", "-d", "--foreground", "--listen-host", "localhost", "--root", vcsrepo.path]
        )
        self._cleanup_servers.append(proc)
        self.url = "svn://localhost"

    def cleanup(self):
        for proc in self._cleanup_servers:
            proc.terminate()

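# Illustrative sketch (not part of the original code): serving a Subversion
# test repository through the `reposerver` fixture above. The test name is
# hypothetical; only Subversion repositories are supported by RepoServer.
#
#     def test_example_serves_svn_repo(reposerver, vcsbackend_svn):
#         repo = vcsbackend_svn.create_repo(number_of_commits=1)
#         reposerver.serve(repo)
#         # reposerver.url now points at "svn://localhost"
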
@pytest.fixture()
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """

    util = PRTestUtility(backend)
    request.addfinalizer(util.cleanup)

    return util

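# Illustrative sketch (not part of the original code): a typical interaction
# with `pr_util` in a test. The test name and flags are hypothetical; they
# only mirror the utility methods defined on PRTestUtility below.
#
#     def test_example_pull_request_lifecycle(pr_util):
#         pull_request = pr_util.create_pull_request(mergeable=True)
#         pr_util.create_comment()
#         pr_util.approve()
#         pr_util.close()
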
class PRTestUtility(object):
    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None
    commit_ids: dict

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
        self,
        commits=None,
        target_head=None,
        source_head=None,
        revisions=None,
        approved=False,
        author=None,
        mergeable=False,
        enable_notifications=True,
        name_suffix="",
        reviewers=None,
        observers=None,
        title="Test",
        description="Description",
    ):
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch("rhodecode.model.notification.NotificationModel.create")
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {"message": "c1"},
                    {"message": "c2"},
                    {"message": "c3"},
                ]
                target_head = "c1"
                source_head = "c2"
                revisions = ["c2"]

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                "created_by": self.author,
                "source_repo": self.source_repository.repo_name,
                "source_ref": self._default_branch_reference(source_head),
                "target_repo": self.target_repository.repo_name,
                "target_ref": self._default_branch_reference(target_head),
                "revisions": [self.commit_ids[r] for r in revisions],
                "reviewers": reviewers or self._get_reviewers(),
                "observers": observers or self._get_observers(),
                "title": title,
                "description": description,
            }
            self.pull_request = model.create(**self.create_parameters)
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        self.create_status_votes(ChangesetStatus.STATUS_APPROVED, *self.pull_request.reviewers)

    def close(self):
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message, branch: str = None) -> str:
        default_branch = branch or self.backend.default_branch_name
        message = self.commit_ids[commit_message]
        reference = f"branch:{default_branch}:{message}"

        return reference

    def _get_reviewers(self):
        role = PullRequestReviewers.ROLE_REVIEWER
        return [
            (TEST_USER_REGULAR_LOGIN, ["default1"], False, role, []),
            (TEST_USER_REGULAR2_LOGIN, ["default2"], False, role, []),
        ]

    def _get_observers(self):
        return []

    def update_source_repository(self, head=None, do_fetch=False):
        heads = [head or "c3"]
        self.backend.pull_heads(self.source_repository, heads=heads, do_fetch=do_fetch)

    def update_target_repository(self, head=None, do_fetch=False):
        heads = [head or "c3"]
        self.backend.pull_heads(self.target_repository, heads=heads, do_fetch=do_fetch)

    def set_pr_target_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
        full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
        self.pull_request.target_ref = full_ref
        return full_ref

    def set_pr_source_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
        full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
        self.pull_request.source_ref = full_ref
        return full_ref

    def add_one_commit(self, head=None):
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {"branch_name": self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        comment = CommentsModel().create(
            text="Test comment", repo=self.target_repository.repo_name, user=self.author, pull_request=self.pull_request
        )
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(self, linked_to=None, line_no="n1", file_path="file_1"):
        comment = CommentsModel().create(
            text="Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request,
        )
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo, status=status, user=reviewer.user_id, pull_request=self.pull_request
            )

    def set_mergeable(self, value):
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(VcsSettingsModel, "get_general_settings")
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {"rhodecode_pr_merge_enabled": value}

    def cleanup(self):
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()

@pytest.fixture()
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
    return user


@pytest.fixture()
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
    return user

@pytest.fixture()
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    utility = UserUtility(test_name=request.node.name)
    request.addfinalizer(utility.cleanup)
    return utility

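# Illustrative sketch (not part of the original code): `user_util` creates
# users, repositories and groups that its finalizer removes again. The test
# name is hypothetical and the permission string is only an example of
# RhodeCode's permission naming.
#
#     def test_example_grants_read_access(user_util):
#         user = user_util.create_user()
#         repo = user_util.create_repo()
#         user_util.grant_user_permission_to_repo(repo, user, "repository.read")
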
# TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):
    def __init__(self, test_name="test"):
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        for char in ["[", "]"]:
            name = name.replace(char, "_")
        return name

    def create_repo_group(self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        group_name = "{prefix}_repogroup_{count}".format(prefix=self._test_name, count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None, auto_cleanup=True, repo_type="hg", bare=False):
        repo_name = "{prefix}_repository_{count}".format(prefix=self._test_name, count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare
        )
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        user_name = "{prefix}_user_{count}".format(prefix=self._test_name, count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_additional_user_email(self, user, email):
        uem = self.fixture.create_additional_user_email(user=user, email=email)
        return uem

    def create_user_with_group(self):
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None, auto_cleanup=True, **kwargs):
        group_name = "{prefix}_usergroup_{count}".format(prefix=self._test_name, count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(group_name, cur_user=owner, **kwargs)

        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        self.inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(self, repo_group, user, permission_name):
        permission = RepoGroupModel().grant_user_permission(repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append((repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(self, repo_group, user_group, permission_name):
        permission = RepoGroupModel().grant_user_group_permission(repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append((repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(self, repo, user, permission_name):
        permission = RepoModel().grant_user_permission(repo, user, permission_name)
        self.user_repo_permission_ids.append((repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(self, repo, user_group, permission_name):
        permission = RepoModel().grant_user_group_permission(repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append((repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(self, target_user_group, user, permission_name):
        permission = UserGroupModel().grant_user_permission(target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append((target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(self, target_user_group, user_group, permission_name):
        permission = UserGroupModel().grant_user_group_permission(target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append((target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        self.inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def inherit_default_user_permissions(self, user_name, value):
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        self._cleanup_permissions()
        self._cleanup_repos()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

1222 def _cleanup_permissions(self):
1222 def _cleanup_permissions(self):
1223 if self.user_permissions:
1223 if self.user_permissions:
1224 for user_name, permission_name in self.user_permissions:
1224 for user_name, permission_name in self.user_permissions:
1225 self.revoke_user_permission(user_name, permission_name)
1225 self.revoke_user_permission(user_name, permission_name)
1226
1226
1227 for permission in self.user_repo_permission_ids:
1227 for permission in self.user_repo_permission_ids:
1228 RepoModel().revoke_user_permission(*permission)
1228 RepoModel().revoke_user_permission(*permission)
1229
1229
1230 for permission in self.user_group_repo_permission_ids:
1230 for permission in self.user_group_repo_permission_ids:
1231 RepoModel().revoke_user_group_permission(*permission)
1231 RepoModel().revoke_user_group_permission(*permission)
1232
1232
1233 for permission in self.user_repo_group_permission_ids:
1233 for permission in self.user_repo_group_permission_ids:
1234 RepoGroupModel().revoke_user_permission(*permission)
1234 RepoGroupModel().revoke_user_permission(*permission)
1235
1235
1236 for permission in self.user_group_repo_group_permission_ids:
1236 for permission in self.user_group_repo_group_permission_ids:
1237 RepoGroupModel().revoke_user_group_permission(*permission)
1237 RepoGroupModel().revoke_user_group_permission(*permission)
1238
1238
1239 for permission in self.user_user_group_permission_ids:
1239 for permission in self.user_user_group_permission_ids:
1240 UserGroupModel().revoke_user_permission(*permission)
1240 UserGroupModel().revoke_user_permission(*permission)
1241
1241
1242 for permission in self.user_group_user_group_permission_ids:
1242 for permission in self.user_group_user_group_permission_ids:
1243 UserGroupModel().revoke_user_group_permission(*permission)
1243 UserGroupModel().revoke_user_group_permission(*permission)
1244
1244
1245 def _cleanup_repo_groups(self):
1245 def _cleanup_repo_groups(self):
1246 def _repo_group_compare(first_group_id, second_group_id):
1246 def _repo_group_compare(first_group_id, second_group_id):
1247 """
1247 """
1248 Gives higher priority to the groups with the most complex paths
1248 Gives higher priority to the groups with the most complex paths
1249 """
1249 """
1250 first_group = RepoGroup.get(first_group_id)
1250 first_group = RepoGroup.get(first_group_id)
1251 second_group = RepoGroup.get(second_group_id)
1251 second_group = RepoGroup.get(second_group_id)
1252 first_group_parts = len(first_group.group_name.split("/")) if first_group else 0
1252 first_group_parts = len(first_group.group_name.split("/")) if first_group else 0
1253 second_group_parts = len(second_group.group_name.split("/")) if second_group else 0
1253 second_group_parts = len(second_group.group_name.split("/")) if second_group else 0
1254 return cmp(second_group_parts, first_group_parts)
1254 return cmp(second_group_parts, first_group_parts)
1255
1255
1256 sorted_repo_group_ids = sorted(self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
1256 sorted_repo_group_ids = sorted(self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
1257 for repo_group_id in sorted_repo_group_ids:
1257 for repo_group_id in sorted_repo_group_ids:
1258 self.fixture.destroy_repo_group(repo_group_id)
1258 self.fixture.destroy_repo_group(repo_group_id)
1259
1259
1260 def _cleanup_repos(self):
1260 def _cleanup_repos(self):
1261 sorted_repos_ids = sorted(self.repos_ids)
1261 sorted_repos_ids = sorted(self.repos_ids)
1262 for repo_id in sorted_repos_ids:
1262 for repo_id in sorted_repos_ids:
1263 self.fixture.destroy_repo(repo_id)
1263 self.fixture.destroy_repo(repo_id)
1264
1264
1265 def _cleanup_user_groups(self):
1265 def _cleanup_user_groups(self):
1266 def _user_group_compare(first_group_id, second_group_id):
1266 def _user_group_compare(first_group_id, second_group_id):
1267 """
1267 """
1268 Gives higher priority to the groups with the most complex paths
1268 Gives higher priority to the groups with the most complex paths
1269 """
1269 """
1270 first_group = UserGroup.get(first_group_id)
1270 first_group = UserGroup.get(first_group_id)
1271 second_group = UserGroup.get(second_group_id)
1271 second_group = UserGroup.get(second_group_id)
1272 first_group_parts = len(first_group.users_group_name.split("/")) if first_group else 0
1272 first_group_parts = len(first_group.users_group_name.split("/")) if first_group else 0
1273 second_group_parts = len(second_group.users_group_name.split("/")) if second_group else 0
1273 second_group_parts = len(second_group.users_group_name.split("/")) if second_group else 0
1274 return cmp(second_group_parts, first_group_parts)
1274 return cmp(second_group_parts, first_group_parts)
1275
1275
1276 sorted_user_group_ids = sorted(self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
1276 sorted_user_group_ids = sorted(self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
1277 for user_group_id in sorted_user_group_ids:
1277 for user_group_id in sorted_user_group_ids:
1278 self.fixture.destroy_user_group(user_group_id)
1278 self.fixture.destroy_user_group(user_group_id)
1279
1279
1280 def _cleanup_users(self):
1280 def _cleanup_users(self):
1281 for user_id in self.user_ids:
1281 for user_id in self.user_ids:
1282 self.fixture.destroy_user(user_id)
1282 self.fixture.destroy_user(user_id)
1283
1283
1284
1284
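The two *_compare helpers above exist so that the most deeply nested groups are destroyed before their parents. A minimal standalone sketch of the same ordering, assuming plain strings stand in for group names and using an explicit Python 3 replacement for the removed cmp() builtin:

import functools

def _cmp(a, b):
    # Python 3 stand-in for the cmp() builtin used by the comparators above
    return (a > b) - (a < b)

def _by_depth_desc(first_name, second_name):
    # paths with more "/"-separated parts sort first
    return _cmp(len(second_name.split("/")), len(first_name.split("/")))

names = ["parent", "parent/child/grandchild", "parent/child"]
print(sorted(names, key=functools.cmp_to_key(_by_depth_desc)))
# ['parent/child/grandchild', 'parent/child', 'parent']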
1285 @pytest.fixture(scope="session")
1285 @pytest.fixture(scope="session")
1286 def testrun():
1286 def testrun():
1287 return {
1287 return {
1288 "uuid": uuid.uuid4(),
1288 "uuid": uuid.uuid4(),
1289 "start": datetime.datetime.utcnow().isoformat(),
1289 "start": datetime.datetime.utcnow().isoformat(),
1290 "timestamp": int(time.time()),
1290 "timestamp": int(time.time()),
1291 }
1291 }
1292
1292
1293
1293
1294 class AppenlightClient(object):
1294 class AppenlightClient(object):
1295 url_template = "{url}?protocol_version=0.5"
1295 url_template = "{url}?protocol_version=0.5"
1296
1296
1297 def __init__(self, url, api_key, add_server=True, add_timestamp=True, namespace=None, request=None, testrun=None):
1297 def __init__(self, url, api_key, add_server=True, add_timestamp=True, namespace=None, request=None, testrun=None):
1298 self.url = self.url_template.format(url=url)
1298 self.url = self.url_template.format(url=url)
1299 self.api_key = api_key
1299 self.api_key = api_key
1300 self.add_server = add_server
1300 self.add_server = add_server
1301 self.add_timestamp = add_timestamp
1301 self.add_timestamp = add_timestamp
1302 self.namespace = namespace
1302 self.namespace = namespace
1303 self.request = request
1303 self.request = request
1304 self.server = socket.getfqdn(socket.gethostname())
1304 self.server = socket.getfqdn(socket.gethostname())
1305 self.tags_before = {}
1305 self.tags_before = {}
1306 self.tags_after = {}
1306 self.tags_after = {}
1307 self.stats = []
1307 self.stats = []
1308 self.testrun = testrun or {}
1308 self.testrun = testrun or {}
1309
1309
1310 def tag_before(self, tag, value):
1310 def tag_before(self, tag, value):
1311 self.tags_before[tag] = value
1311 self.tags_before[tag] = value
1312
1312
1313 def tag_after(self, tag, value):
1313 def tag_after(self, tag, value):
1314 self.tags_after[tag] = value
1314 self.tags_after[tag] = value
1315
1315
1316 def collect(self, data):
1316 def collect(self, data):
1317 if self.add_server:
1317 if self.add_server:
1318 data.setdefault("server", self.server)
1318 data.setdefault("server", self.server)
1319 if self.add_timestamp:
1319 if self.add_timestamp:
1320 data.setdefault("date", datetime.datetime.utcnow().isoformat())
1320 data.setdefault("date", datetime.datetime.utcnow().isoformat())
1321 if self.namespace:
1321 if self.namespace:
1322 data.setdefault("namespace", self.namespace)
1322 data.setdefault("namespace", self.namespace)
1323 if self.request:
1323 if self.request:
1324 data.setdefault("request", self.request)
1324 data.setdefault("request", self.request)
1325 self.stats.append(data)
1325 self.stats.append(data)
1326
1326
1327 def send_stats(self):
1327 def send_stats(self):
1328 tags = [
1328 tags = [
1329 ("testrun", self.request),
1329 ("testrun", self.request),
1330 ("testrun.start", self.testrun["start"]),
1330 ("testrun.start", self.testrun["start"]),
1331 ("testrun.timestamp", self.testrun["timestamp"]),
1331 ("testrun.timestamp", self.testrun["timestamp"]),
1332 ("test", self.namespace),
1332 ("test", self.namespace),
1333 ]
1333 ]
1334 for key, value in self.tags_before.items():
1334 for key, value in self.tags_before.items():
1335 tags.append((key + ".before", value))
1335 tags.append((key + ".before", value))
1336 try:
1336 try:
1337 delta = self.tags_after[key] - value
1337 delta = self.tags_after[key] - value
1338 tags.append((key + ".delta", delta))
1338 tags.append((key + ".delta", delta))
1339 except Exception:
1339 except Exception:
1340 pass
1340 pass
1341 for key, value in self.tags_after.items():
1341 for key, value in self.tags_after.items():
1342 tags.append((key + ".after", value))
1342 tags.append((key + ".after", value))
1343 self.collect(
1343 self.collect(
1344 {
1344 {
1345 "message": "Collected tags",
1345 "message": "Collected tags",
1346 "tags": tags,
1346 "tags": tags,
1347 }
1347 }
1348 )
1348 )
1349
1349
1350 response = requests.post(
1350 response = requests.post(
1351 self.url,
1351 self.url,
1352 headers={"X-appenlight-api-key": self.api_key},
1352 headers={"X-appenlight-api-key": self.api_key},
1353 json=self.stats,
1353 json=self.stats,
1354 )
1354 )
1355
1355
1356 if response.status_code != 200:
1356 if response.status_code != 200:
1357 pprint.pprint(self.stats)
1357 pprint.pprint(self.stats)
1358 print(response.headers)
1358 print(response.headers)
1359 print(response.text)
1359 print(response.text)
1360 raise Exception("Sending to appenlight failed")
1360 raise Exception("Sending to appenlight failed")
1361
1361
1362
1362
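For reference, a hedged usage sketch of the AppenlightClient defined above; the URL, API key and metric values are placeholders, not real endpoints or data, and the class itself must already be in scope:

client = AppenlightClient(
    url="https://appenlight.example.invalid/api/general_metrics",  # placeholder URL
    api_key="dummy-key",                                           # placeholder key
    namespace="test_example",
    request="testrun-1",
    testrun={"start": "2024-01-01T00:00:00", "timestamp": 1704067200},
)
client.tag_before("memory", 100)
client.tag_after("memory", 120)   # send_stats() also reports memory.delta == 20
client.send_stats()               # POSTs the collected stats; raises if the response is not 200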
1363 @pytest.fixture()
1363 @pytest.fixture()
1364 def gist_util(request, db_connection):
1364 def gist_util(request, db_connection):
1365 """
1365 """
1366 Provides a wired instance of `GistUtility` with integrated cleanup.
1366 Provides a wired instance of `GistUtility` with integrated cleanup.
1367 """
1367 """
1368 utility = GistUtility()
1368 utility = GistUtility()
1369 request.addfinalizer(utility.cleanup)
1369 request.addfinalizer(utility.cleanup)
1370 return utility
1370 return utility
1371
1371
1372
1372
1373 class GistUtility(object):
1373 class GistUtility(object):
1374 def __init__(self):
1374 def __init__(self):
1375 self.fixture = Fixture()
1375 self.fixture = Fixture()
1376 self.gist_ids = []
1376 self.gist_ids = []
1377
1377
1378 def create_gist(self, **kwargs):
1378 def create_gist(self, **kwargs):
1379 gist = self.fixture.create_gist(**kwargs)
1379 gist = self.fixture.create_gist(**kwargs)
1380 self.gist_ids.append(gist.gist_id)
1380 self.gist_ids.append(gist.gist_id)
1381 return gist
1381 return gist
1382
1382
1383 def cleanup(self):
1383 def cleanup(self):
1384 for id_ in self.gist_ids:
1384 for id_ in self.gist_ids:
1385 self.fixture.destroy_gists(str(id_))
1385 self.fixture.destroy_gists(str(id_))
1386
1386
1387
1387
1388 @pytest.fixture()
1388 @pytest.fixture()
1389 def enabled_backends(request):
1389 def enabled_backends(request):
1390 backends = request.config.option.backends
1390 backends = request.config.option.backends
1391 return backends[:]
1391 return backends[:]
1392
1392
1393
1393
1394 @pytest.fixture()
1394 @pytest.fixture()
1395 def settings_util(request, db_connection):
1395 def settings_util(request, db_connection):
1396 """
1396 """
1397 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1397 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1398 """
1398 """
1399 utility = SettingsUtility()
1399 utility = SettingsUtility()
1400 request.addfinalizer(utility.cleanup)
1400 request.addfinalizer(utility.cleanup)
1401 return utility
1401 return utility
1402
1402
1403
1403
1404 class SettingsUtility(object):
1404 class SettingsUtility(object):
1405 def __init__(self):
1405 def __init__(self):
1406 self.rhodecode_ui_ids = []
1406 self.rhodecode_ui_ids = []
1407 self.rhodecode_setting_ids = []
1407 self.rhodecode_setting_ids = []
1408 self.repo_rhodecode_ui_ids = []
1408 self.repo_rhodecode_ui_ids = []
1409 self.repo_rhodecode_setting_ids = []
1409 self.repo_rhodecode_setting_ids = []
1410
1410
1411 def create_repo_rhodecode_ui(self, repo, section, value, key=None, active=True, cleanup=True):
1411 def create_repo_rhodecode_ui(self, repo, section, value, key=None, active=True, cleanup=True):
1412 key = key or sha1_safe(f"{section}{value}{repo.repo_id}")
1412 key = key or sha1_safe(f"{section}{value}{repo.repo_id}")
1413
1413
1414 setting = RepoRhodeCodeUi()
1414 setting = RepoRhodeCodeUi()
1415 setting.repository_id = repo.repo_id
1415 setting.repository_id = repo.repo_id
1416 setting.ui_section = section
1416 setting.ui_section = section
1417 setting.ui_value = value
1417 setting.ui_value = value
1418 setting.ui_key = key
1418 setting.ui_key = key
1419 setting.ui_active = active
1419 setting.ui_active = active
1420 Session().add(setting)
1420 Session().add(setting)
1421 Session().commit()
1421 Session().commit()
1422
1422
1423 if cleanup:
1423 if cleanup:
1424 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1424 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1425 return setting
1425 return setting
1426
1426
1427 def create_rhodecode_ui(self, section, value, key=None, active=True, cleanup=True):
1427 def create_rhodecode_ui(self, section, value, key=None, active=True, cleanup=True):
1428 key = key or sha1_safe(f"{section}{value}")
1428 key = key or sha1_safe(f"{section}{value}")
1429
1429
1430 setting = RhodeCodeUi()
1430 setting = RhodeCodeUi()
1431 setting.ui_section = section
1431 setting.ui_section = section
1432 setting.ui_value = value
1432 setting.ui_value = value
1433 setting.ui_key = key
1433 setting.ui_key = key
1434 setting.ui_active = active
1434 setting.ui_active = active
1435 Session().add(setting)
1435 Session().add(setting)
1436 Session().commit()
1436 Session().commit()
1437
1437
1438 if cleanup:
1438 if cleanup:
1439 self.rhodecode_ui_ids.append(setting.ui_id)
1439 self.rhodecode_ui_ids.append(setting.ui_id)
1440 return setting
1440 return setting
1441
1441
1442 def create_repo_rhodecode_setting(self, repo, name, value, type_, cleanup=True):
1442 def create_repo_rhodecode_setting(self, repo, name, value, type_, cleanup=True):
1443 setting = RepoRhodeCodeSetting(repo.repo_id, key=name, val=value, type=type_)
1443 setting = RepoRhodeCodeSetting(repo.repo_id, key=name, val=value, type=type_)
1444 Session().add(setting)
1444 Session().add(setting)
1445 Session().commit()
1445 Session().commit()
1446
1446
1447 if cleanup:
1447 if cleanup:
1448 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1448 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1449 return setting
1449 return setting
1450
1450
1451 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1451 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1452 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1452 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1453 Session().add(setting)
1453 Session().add(setting)
1454 Session().commit()
1454 Session().commit()
1455
1455
1456 if cleanup:
1456 if cleanup:
1457 self.rhodecode_setting_ids.append(setting.app_settings_id)
1457 self.rhodecode_setting_ids.append(setting.app_settings_id)
1458
1458
1459 return setting
1459 return setting
1460
1460
1461 def cleanup(self):
1461 def cleanup(self):
1462 for id_ in self.rhodecode_ui_ids:
1462 for id_ in self.rhodecode_ui_ids:
1463 setting = RhodeCodeUi.get(id_)
1463 setting = RhodeCodeUi.get(id_)
1464 Session().delete(setting)
1464 Session().delete(setting)
1465
1465
1466 for id_ in self.rhodecode_setting_ids:
1466 for id_ in self.rhodecode_setting_ids:
1467 setting = RhodeCodeSetting.get(id_)
1467 setting = RhodeCodeSetting.get(id_)
1468 Session().delete(setting)
1468 Session().delete(setting)
1469
1469
1470 for id_ in self.repo_rhodecode_ui_ids:
1470 for id_ in self.repo_rhodecode_ui_ids:
1471 setting = RepoRhodeCodeUi.get(id_)
1471 setting = RepoRhodeCodeUi.get(id_)
1472 Session().delete(setting)
1472 Session().delete(setting)
1473
1473
1474 for id_ in self.repo_rhodecode_setting_ids:
1474 for id_ in self.repo_rhodecode_setting_ids:
1475 setting = RepoRhodeCodeSetting.get(id_)
1475 setting = RepoRhodeCodeSetting.get(id_)
1476 Session().delete(setting)
1476 Session().delete(setting)
1477
1477
1478 Session().commit()
1478 Session().commit()
1479
1479
1480
1480
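A short illustration of how the settings_util fixture above is typically consumed in a test; the section and value names are made-up examples, not real RhodeCode settings:

def test_example_ui_setting(settings_util):
    # the created row is registered for cleanup because cleanup=True (the default)
    setting = settings_util.create_rhodecode_ui(
        section="example_section", value="example_value", active=True)
    assert setting.ui_section == "example_section"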
1481 @pytest.fixture()
1481 @pytest.fixture()
1482 def no_notifications(request):
1482 def no_notifications(request):
1483 notification_patcher = mock.patch("rhodecode.model.notification.NotificationModel.create")
1483 notification_patcher = mock.patch("rhodecode.model.notification.NotificationModel.create")
1484 notification_patcher.start()
1484 notification_patcher.start()
1485 request.addfinalizer(notification_patcher.stop)
1485 request.addfinalizer(notification_patcher.stop)
1486
1486
1487
1487
1488 @pytest.fixture(scope="session")
1488 @pytest.fixture(scope="session")
1489 def repeat(request):
1489 def repeat(request):
1490 """
1490 """
1491 The number of repetitions is based on this fixture.
1491 The number of repetitions is based on this fixture.
1492
1492
1493 Slower calls may divide it by 10 or 100. It is chosen so that the
1493 Slower calls may divide it by 10 or 100. It is chosen so that the
1494 tests do not become too slow in our default test suite.
1494 tests do not become too slow in our default test suite.
1495 """
1495 """
1496 return request.config.getoption("--repeat")
1496 return request.config.getoption("--repeat")
1497
1497
1498
1498
1499 @pytest.fixture()
1499 @pytest.fixture()
1500 def rhodecode_fixtures():
1500 def rhodecode_fixtures():
1501 return Fixture()
1501 return Fixture()
1502
1502
1503
1503
1504 @pytest.fixture()
1504 @pytest.fixture()
1505 def context_stub():
1505 def context_stub():
1506 """
1506 """
1507 Stub context object.
1507 Stub context object.
1508 """
1508 """
1509 context = pyramid.testing.DummyResource()
1509 context = pyramid.testing.DummyResource()
1510 return context
1510 return context
1511
1511
1512
1512
1513 @pytest.fixture()
1513 @pytest.fixture()
1514 def StubIntegrationType():
1514 def StubIntegrationType():
1515 class _StubIntegrationType(IntegrationTypeBase):
1515 class _StubIntegrationType(IntegrationTypeBase):
1516 """Test integration type class"""
1516 """Test integration type class"""
1517
1517
1518 key = "test"
1518 key = "test"
1519 display_name = "Test integration type"
1519 display_name = "Test integration type"
1520 description = "A test integration type for testing"
1520 description = "A test integration type for testing"
1521
1521
1522 @classmethod
1522 @classmethod
1523 def icon(cls):
1523 def icon(cls):
1524 return "test_icon_html_image"
1524 return "test_icon_html_image"
1525
1525
1526 def __init__(self, settings):
1526 def __init__(self, settings):
1527 super(_StubIntegrationType, self).__init__(settings)
1527 super(_StubIntegrationType, self).__init__(settings)
1528 self.sent_events = [] # for testing
1528 self.sent_events = [] # for testing
1529
1529
1530 def send_event(self, event):
1530 def send_event(self, event):
1531 self.sent_events.append(event)
1531 self.sent_events.append(event)
1532
1532
1533 def settings_schema(self):
1533 def settings_schema(self):
1534 class SettingsSchema(colander.Schema):
1534 class SettingsSchema(colander.Schema):
1535 test_string_field = colander.SchemaNode(
1535 test_string_field = colander.SchemaNode(
1536 colander.String(),
1536 colander.String(),
1537 missing=colander.required,
1537 missing=colander.required,
1538 title="test string field",
1538 title="test string field",
1539 )
1539 )
1540 test_int_field = colander.SchemaNode(
1540 test_int_field = colander.SchemaNode(
1541 colander.Int(),
1541 colander.Int(),
1542 title="some integer setting",
1542 title="some integer setting",
1543 )
1543 )
1544
1544
1545 return SettingsSchema()
1545 return SettingsSchema()
1546
1546
1547 integration_type_registry.register_integration_type(_StubIntegrationType)
1547 integration_type_registry.register_integration_type(_StubIntegrationType)
1548 return _StubIntegrationType
1548 return _StubIntegrationType
1549
1549
1550
1550
1551 @pytest.fixture()
1551 @pytest.fixture()
1552 def stub_integration_settings():
1552 def stub_integration_settings():
1553 return {
1553 return {
1554 "test_string_field": "some data",
1554 "test_string_field": "some data",
1555 "test_int_field": 100,
1555 "test_int_field": 100,
1556 }
1556 }
1557
1557
1558
1558
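To illustrate how the two fixtures above fit together, a minimal hedged sketch; the event value is a plain stand-in string, any rhodecode event instance would do:

def test_stub_integration_records_events(StubIntegrationType, stub_integration_settings):
    integration = StubIntegrationType(stub_integration_settings)
    integration.send_event("fake-event")        # the stub only records the event
    assert integration.sent_events == ["fake-event"]
    schema = integration.settings_schema()      # colander schema with the two test fields
    assert schema is not None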
1559 @pytest.fixture()
1559 @pytest.fixture()
1560 def repo_integration_stub(request, repo_stub, StubIntegrationType, stub_integration_settings):
1560 def repo_integration_stub(request, repo_stub, StubIntegrationType, stub_integration_settings):
1561 repo_id = repo_stub.repo_id
1561 integration = IntegrationModel().create(
1562 integration = IntegrationModel().create(
1562 StubIntegrationType,
1563 StubIntegrationType,
1563 settings=stub_integration_settings,
1564 settings=stub_integration_settings,
1564 enabled=True,
1565 enabled=True,
1565 name="test repo integration",
1566 name="test repo integration",
1566 repo=repo_stub,
1567 repo=repo_stub,
1567 repo_group=None,
1568 repo_group=None,
1568 child_repos_only=None,
1569 child_repos_only=None,
1569 )
1570 )
1570
1571
1571 @request.addfinalizer
1572 @request.addfinalizer
1572 def cleanup():
1573 def cleanup():
1573 IntegrationModel().delete(integration)
1574 IntegrationModel().delete(integration)
1575 RepoModel().delete(repo_id)
1574
1576
1575 return integration
1577 return integration
1576
1578
1577
1579
1578 @pytest.fixture()
1580 @pytest.fixture()
1579 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType, stub_integration_settings):
1581 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType, stub_integration_settings):
1580 integration = IntegrationModel().create(
1582 integration = IntegrationModel().create(
1581 StubIntegrationType,
1583 StubIntegrationType,
1582 settings=stub_integration_settings,
1584 settings=stub_integration_settings,
1583 enabled=True,
1585 enabled=True,
1584 name="test repogroup integration",
1586 name="test repogroup integration",
1585 repo=None,
1587 repo=None,
1586 repo_group=test_repo_group,
1588 repo_group=test_repo_group,
1587 child_repos_only=True,
1589 child_repos_only=True,
1588 )
1590 )
1589
1591
1590 @request.addfinalizer
1592 @request.addfinalizer
1591 def cleanup():
1593 def cleanup():
1592 IntegrationModel().delete(integration)
1594 IntegrationModel().delete(integration)
1593
1595
1594 return integration
1596 return integration
1595
1597
1596
1598
1597 @pytest.fixture()
1599 @pytest.fixture()
1598 def repogroup_recursive_integration_stub(request, test_repo_group, StubIntegrationType, stub_integration_settings):
1600 def repogroup_recursive_integration_stub(request, test_repo_group, StubIntegrationType, stub_integration_settings):
1599 integration = IntegrationModel().create(
1601 integration = IntegrationModel().create(
1600 StubIntegrationType,
1602 StubIntegrationType,
1601 settings=stub_integration_settings,
1603 settings=stub_integration_settings,
1602 enabled=True,
1604 enabled=True,
1603 name="test recursive repogroup integration",
1605 name="test recursive repogroup integration",
1604 repo=None,
1606 repo=None,
1605 repo_group=test_repo_group,
1607 repo_group=test_repo_group,
1606 child_repos_only=False,
1608 child_repos_only=False,
1607 )
1609 )
1608
1610
1609 @request.addfinalizer
1611 @request.addfinalizer
1610 def cleanup():
1612 def cleanup():
1611 IntegrationModel().delete(integration)
1613 IntegrationModel().delete(integration)
1612
1614
1613 return integration
1615 return integration
1614
1616
1615
1617
1616 @pytest.fixture()
1618 @pytest.fixture()
1617 def global_integration_stub(request, StubIntegrationType, stub_integration_settings):
1619 def global_integration_stub(request, StubIntegrationType, stub_integration_settings):
1618 integration = IntegrationModel().create(
1620 integration = IntegrationModel().create(
1619 StubIntegrationType,
1621 StubIntegrationType,
1620 settings=stub_integration_settings,
1622 settings=stub_integration_settings,
1621 enabled=True,
1623 enabled=True,
1622 name="test global integration",
1624 name="test global integration",
1623 repo=None,
1625 repo=None,
1624 repo_group=None,
1626 repo_group=None,
1625 child_repos_only=None,
1627 child_repos_only=None,
1626 )
1628 )
1627
1629
1628 @request.addfinalizer
1630 @request.addfinalizer
1629 def cleanup():
1631 def cleanup():
1630 IntegrationModel().delete(integration)
1632 IntegrationModel().delete(integration)
1631
1633
1632 return integration
1634 return integration
1633
1635
1634
1636
1635 @pytest.fixture()
1637 @pytest.fixture()
1636 def root_repos_integration_stub(request, StubIntegrationType, stub_integration_settings):
1638 def root_repos_integration_stub(request, StubIntegrationType, stub_integration_settings):
1637 integration = IntegrationModel().create(
1639 integration = IntegrationModel().create(
1638 StubIntegrationType,
1640 StubIntegrationType,
1639 settings=stub_integration_settings,
1641 settings=stub_integration_settings,
1640 enabled=True,
1642 enabled=True,
1641 name="test global integration",
1643 name="test global integration",
1642 repo=None,
1644 repo=None,
1643 repo_group=None,
1645 repo_group=None,
1644 child_repos_only=True,
1646 child_repos_only=True,
1645 )
1647 )
1646
1648
1647 @request.addfinalizer
1649 @request.addfinalizer
1648 def cleanup():
1650 def cleanup():
1649 IntegrationModel().delete(integration)
1651 IntegrationModel().delete(integration)
1650
1652
1651 return integration
1653 return integration
1652
1654
1653
1655
1654 @pytest.fixture()
1656 @pytest.fixture()
1655 def local_dt_to_utc():
1657 def local_dt_to_utc():
1656 def _factory(dt):
1658 def _factory(dt):
1657 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)
1659 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)
1658
1660
1659 return _factory
1661 return _factory
1660
1662
1661
1663
1662 @pytest.fixture()
1664 @pytest.fixture()
1663 def disable_anonymous_user(request, baseapp):
1665 def disable_anonymous_user(request, baseapp):
1664 set_anonymous_access(False)
1666 set_anonymous_access(False)
1665
1667
1666 @request.addfinalizer
1668 @request.addfinalizer
1667 def cleanup():
1669 def cleanup():
1668 set_anonymous_access(True)
1670 set_anonymous_access(True)
1669
1671
1670
1672
1671 @pytest.fixture(scope="module")
1673 @pytest.fixture(scope="module")
1672 def rc_fixture(request):
1674 def rc_fixture(request):
1673 return Fixture()
1675 return Fixture()
1674
1676
1675
1677
1676 @pytest.fixture()
1678 @pytest.fixture()
1677 def repo_groups(request):
1679 def repo_groups(request):
1678 fixture = Fixture()
1680 fixture = Fixture()
1679
1681
1680 session = Session()
1682 session = Session()
1681 zombie_group = fixture.create_repo_group("zombie")
1683 zombie_group = fixture.create_repo_group("zombie")
1682 parent_group = fixture.create_repo_group("parent")
1684 parent_group = fixture.create_repo_group("parent")
1683 child_group = fixture.create_repo_group("parent/child")
1685 child_group = fixture.create_repo_group("parent/child")
1684 groups_in_db = session.query(RepoGroup).all()
1686 groups_in_db = session.query(RepoGroup).all()
1685 assert len(groups_in_db) == 3
1687 assert len(groups_in_db) == 3
1686 assert child_group.group_parent_id == parent_group.group_id
1688 assert child_group.group_parent_id == parent_group.group_id
1687
1689
1688 @request.addfinalizer
1690 @request.addfinalizer
1689 def cleanup():
1691 def cleanup():
1690 fixture.destroy_repo_group(zombie_group)
1692 fixture.destroy_repo_group(zombie_group)
1691 fixture.destroy_repo_group(child_group)
1693 fixture.destroy_repo_group(child_group)
1692 fixture.destroy_repo_group(parent_group)
1694 fixture.destroy_repo_group(parent_group)
1693
1695
1694 return zombie_group, parent_group, child_group
1696 return zombie_group, parent_group, child_group
1695
1697
@@ -1,223 +1,221 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import time
19 import time
20 import pytest
20 import pytest
21
21
22 from rhodecode import events
22 from rhodecode import events
23 from rhodecode.model.repo import RepoModel
23 from rhodecode.tests.fixtures.rc_fixture import Fixture
24 from rhodecode.tests.fixtures.rc_fixture import Fixture
24 from rhodecode.model.db import Session, Integration
25 from rhodecode.model.db import Session, Integration
25 from rhodecode.model.integration import IntegrationModel
26 from rhodecode.model.integration import IntegrationModel
26
27
27
28
28 class TestDeleteScopesDeletesIntegrations(object):
29 class TestDeleteScopesDeletesIntegrations(object):
29 def test_delete_repo_with_integration_deletes_integration(
30 def test_delete_repo_with_integration_deletes_integration(self, repo_integration_stub):
30 self, repo_integration_stub):
31 RepoModel().delete(repo_integration_stub.repo)
31
32 Session().delete(repo_integration_stub.repo)
33 Session().commit()
32 Session().commit()
34 Session().expire_all()
33 Session().expire_all()
35 integration = Integration.get(repo_integration_stub.integration_id)
34 integration = Integration.get(repo_integration_stub.integration_id)
36 assert integration is None
35 assert integration is None
37
36
38 def test_delete_repo_group_with_integration_deletes_integration(
37 def test_delete_repo_group_with_integration_deletes_integration(self, repogroup_integration_stub):
39 self, repogroup_integration_stub):
40
38
41 Session().delete(repogroup_integration_stub.repo_group)
39 Session().delete(repogroup_integration_stub.repo_group)
42 Session().commit()
40 Session().commit()
43 Session().expire_all()
41 Session().expire_all()
44 integration = Integration.get(repogroup_integration_stub.integration_id)
42 integration = Integration.get(repogroup_integration_stub.integration_id)
45 assert integration is None
43 assert integration is None
46
44
47
45
48 count = 1
46 count = 1
49
47
50
48
51 def counter():
49 def counter():
52 global count
50 global count
53 val = count
51 val = count
54 count += 1
52 count += 1
55 return '{}_{}'.format(val, time.time())
53 return f'{val}_{time.time()}'
56
54
57
55
58 @pytest.fixture()
56 @pytest.fixture()
59 def integration_repos(request, StubIntegrationType, stub_integration_settings):
57 def integration_repos(request, StubIntegrationType, stub_integration_settings):
60 """
58 """
61 Create repositories and integrations for testing, and destroy them afterwards.
59 Create repositories and integrations for testing, and destroy them afterwards.
62
60
63 Structure:
61 Structure:
64 root_repo
62 root_repo
65 parent_group/
63 parent_group/
66 parent_repo
64 parent_repo
67 child_group/
65 child_group/
68 child_repo
66 child_repo
69 other_group/
67 other_group/
70 other_repo
68 other_repo
71 """
69 """
72 fixture = Fixture()
70 fixture = Fixture()
73
71
74 parent_group_id = 'int_test_parent_group_{}'.format(counter())
72 parent_group_id = 'int_test_parent_group_{}'.format(counter())
75 parent_group = fixture.create_repo_group(parent_group_id)
73 parent_group = fixture.create_repo_group(parent_group_id)
76
74
77 other_group_id = 'int_test_other_group_{}'.format(counter())
75 other_group_id = 'int_test_other_group_{}'.format(counter())
78 other_group = fixture.create_repo_group(other_group_id)
76 other_group = fixture.create_repo_group(other_group_id)
79
77
80 child_group_id = (
78 child_group_id = (
81 parent_group_id + '/' + 'int_test_child_group_{}'.format(counter()))
79 parent_group_id + '/' + 'int_test_child_group_{}'.format(counter()))
82 child_group = fixture.create_repo_group(child_group_id)
80 child_group = fixture.create_repo_group(child_group_id)
83
81
84 parent_repo_id = 'int_test_parent_repo_{}'.format(counter())
82 parent_repo_id = 'int_test_parent_repo_{}'.format(counter())
85 parent_repo = fixture.create_repo(parent_repo_id, repo_group=parent_group)
83 parent_repo = fixture.create_repo(parent_repo_id, repo_group=parent_group)
86
84
87 child_repo_id = 'int_test_child_repo_{}'.format(counter())
85 child_repo_id = 'int_test_child_repo_{}'.format(counter())
88 child_repo = fixture.create_repo(child_repo_id, repo_group=child_group)
86 child_repo = fixture.create_repo(child_repo_id, repo_group=child_group)
89
87
90 other_repo_id = 'int_test_other_repo_{}'.format(counter())
88 other_repo_id = 'int_test_other_repo_{}'.format(counter())
91 other_repo = fixture.create_repo(other_repo_id, repo_group=other_group)
89 other_repo = fixture.create_repo(other_repo_id, repo_group=other_group)
92
90
93 root_repo_id = 'int_test_repo_root_{}'.format(counter())
91 root_repo_id = 'int_test_repo_root_{}'.format(counter())
94 root_repo = fixture.create_repo(root_repo_id)
92 root_repo = fixture.create_repo(root_repo_id)
95
93
96 integrations = {}
94 integrations = {}
97 for name, repo, repo_group, child_repos_only in [
95 for name, repo, repo_group, child_repos_only in [
98 ('global', None, None, None),
96 ('global', None, None, None),
99 ('root_repos', None, None, True),
97 ('root_repos', None, None, True),
100 ('parent_repo', parent_repo, None, None),
98 ('parent_repo', parent_repo, None, None),
101 ('child_repo', child_repo, None, None),
99 ('child_repo', child_repo, None, None),
102 ('other_repo', other_repo, None, None),
100 ('other_repo', other_repo, None, None),
103 ('root_repo', root_repo, None, None),
101 ('root_repo', root_repo, None, None),
104 ('parent_group', None, parent_group, True),
102 ('parent_group', None, parent_group, True),
105 ('parent_group_recursive', None, parent_group, False),
103 ('parent_group_recursive', None, parent_group, False),
106 ('child_group', None, child_group, True),
104 ('child_group', None, child_group, True),
107 ('child_group_recursive', None, child_group, False),
105 ('child_group_recursive', None, child_group, False),
108 ('other_group', None, other_group, True),
106 ('other_group', None, other_group, True),
109 ('other_group_recursive', None, other_group, False),
107 ('other_group_recursive', None, other_group, False),
110 ]:
108 ]:
111 integrations[name] = IntegrationModel().create(
109 integrations[name] = IntegrationModel().create(
112 StubIntegrationType, settings=stub_integration_settings,
110 StubIntegrationType, settings=stub_integration_settings,
113 enabled=True, name='test %s integration' % name,
111 enabled=True, name='test %s integration' % name,
114 repo=repo, repo_group=repo_group, child_repos_only=child_repos_only)
112 repo=repo, repo_group=repo_group, child_repos_only=child_repos_only)
115
113
116 Session().commit()
114 Session().commit()
117
115
118 def _cleanup():
116 def _cleanup():
119 for integration in integrations.values():
117 for integration in integrations.values():
120 Session.delete(integration)
118 Session.delete(integration)
121
119
122 fixture.destroy_repo(root_repo)
120 fixture.destroy_repo(root_repo)
123 fixture.destroy_repo(child_repo)
121 fixture.destroy_repo(child_repo)
124 fixture.destroy_repo(parent_repo)
122 fixture.destroy_repo(parent_repo)
125 fixture.destroy_repo(other_repo)
123 fixture.destroy_repo(other_repo)
126 fixture.destroy_repo_group(child_group)
124 fixture.destroy_repo_group(child_group)
127 fixture.destroy_repo_group(parent_group)
125 fixture.destroy_repo_group(parent_group)
128 fixture.destroy_repo_group(other_group)
126 fixture.destroy_repo_group(other_group)
129
127
130 request.addfinalizer(_cleanup)
128 request.addfinalizer(_cleanup)
131
129
132 return {
130 return {
133 'integrations': integrations,
131 'integrations': integrations,
134 'repos': {
132 'repos': {
135 'root_repo': root_repo,
133 'root_repo': root_repo,
136 'other_repo': other_repo,
134 'other_repo': other_repo,
137 'parent_repo': parent_repo,
135 'parent_repo': parent_repo,
138 'child_repo': child_repo,
136 'child_repo': child_repo,
139 }
137 }
140 }
138 }
141
139
142
140
143 def test_enabled_integration_repo_scopes(integration_repos):
141 def test_enabled_integration_repo_scopes(integration_repos):
144 integrations = integration_repos['integrations']
142 integrations = integration_repos['integrations']
145 repos = integration_repos['repos']
143 repos = integration_repos['repos']
146
144
147 triggered_integrations = IntegrationModel().get_for_event(
145 triggered_integrations = IntegrationModel().get_for_event(
148 events.RepoEvent(repos['root_repo']))
146 events.RepoEvent(repos['root_repo']))
149
147
150 assert triggered_integrations == [
148 assert triggered_integrations == [
151 integrations['global'],
149 integrations['global'],
152 integrations['root_repos'],
150 integrations['root_repos'],
153 integrations['root_repo'],
151 integrations['root_repo'],
154 ]
152 ]
155
153
156 triggered_integrations = IntegrationModel().get_for_event(
154 triggered_integrations = IntegrationModel().get_for_event(
157 events.RepoEvent(repos['other_repo']))
155 events.RepoEvent(repos['other_repo']))
158
156
159 assert triggered_integrations == [
157 assert triggered_integrations == [
160 integrations['global'],
158 integrations['global'],
161 integrations['other_group'],
159 integrations['other_group'],
162 integrations['other_group_recursive'],
160 integrations['other_group_recursive'],
163 integrations['other_repo'],
161 integrations['other_repo'],
164 ]
162 ]
165
163
166 triggered_integrations = IntegrationModel().get_for_event(
164 triggered_integrations = IntegrationModel().get_for_event(
167 events.RepoEvent(repos['parent_repo']))
165 events.RepoEvent(repos['parent_repo']))
168
166
169 assert triggered_integrations == [
167 assert triggered_integrations == [
170 integrations['global'],
168 integrations['global'],
171 integrations['parent_group'],
169 integrations['parent_group'],
172 integrations['parent_group_recursive'],
170 integrations['parent_group_recursive'],
173 integrations['parent_repo'],
171 integrations['parent_repo'],
174 ]
172 ]
175
173
176 triggered_integrations = IntegrationModel().get_for_event(
174 triggered_integrations = IntegrationModel().get_for_event(
177 events.RepoEvent(repos['child_repo']))
175 events.RepoEvent(repos['child_repo']))
178
176
179 assert triggered_integrations == [
177 assert triggered_integrations == [
180 integrations['global'],
178 integrations['global'],
181 integrations['child_group'],
179 integrations['child_group'],
182 integrations['parent_group_recursive'],
180 integrations['parent_group_recursive'],
183 integrations['child_group_recursive'],
181 integrations['child_group_recursive'],
184 integrations['child_repo'],
182 integrations['child_repo'],
185 ]
183 ]
186
184
187
185
188 def test_disabled_integration_repo_scopes(integration_repos):
186 def test_disabled_integration_repo_scopes(integration_repos):
189 integrations = integration_repos['integrations']
187 integrations = integration_repos['integrations']
190 repos = integration_repos['repos']
188 repos = integration_repos['repos']
191
189
192 for integration in integrations.values():
190 for integration in integrations.values():
193 integration.enabled = False
191 integration.enabled = False
194 Session().commit()
192 Session().commit()
195
193
196 triggered_integrations = IntegrationModel().get_for_event(
194 triggered_integrations = IntegrationModel().get_for_event(
197 events.RepoEvent(repos['root_repo']))
195 events.RepoEvent(repos['root_repo']))
198
196
199 assert triggered_integrations == []
197 assert triggered_integrations == []
200
198
201 triggered_integrations = IntegrationModel().get_for_event(
199 triggered_integrations = IntegrationModel().get_for_event(
202 events.RepoEvent(repos['parent_repo']))
200 events.RepoEvent(repos['parent_repo']))
203
201
204 assert triggered_integrations == []
202 assert triggered_integrations == []
205
203
206 triggered_integrations = IntegrationModel().get_for_event(
204 triggered_integrations = IntegrationModel().get_for_event(
207 events.RepoEvent(repos['child_repo']))
205 events.RepoEvent(repos['child_repo']))
208
206
209 assert triggered_integrations == []
207 assert triggered_integrations == []
210
208
211 triggered_integrations = IntegrationModel().get_for_event(
209 triggered_integrations = IntegrationModel().get_for_event(
212 events.RepoEvent(repos['other_repo']))
210 events.RepoEvent(repos['other_repo']))
213
211
214 assert triggered_integrations == []
212 assert triggered_integrations == []
215
213
216
214
217 def test_enabled_non_repo_integrations(integration_repos):
215 def test_enabled_non_repo_integrations(integration_repos):
218 integrations = integration_repos['integrations']
216 integrations = integration_repos['integrations']
219
217
220 triggered_integrations = IntegrationModel().get_for_event(
218 triggered_integrations = IntegrationModel().get_for_event(
221 events.UserPreCreate({}))
219 events.UserPreCreate({}))
222
220
223 assert triggered_integrations == [integrations['global']]
221 assert triggered_integrations == [integrations['global']]
@@ -1,486 +1,540 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import multiprocessing
19 import multiprocessing
20 import os
20 import os
21 import shutil
21
22
22 import mock
23 import mock
23 import py
24 import py
24 import pytest
25 import pytest
25
26
27 import rhodecode
26 from rhodecode.lib import caching_query
28 from rhodecode.lib import caching_query
27 from rhodecode.lib import utils
29 from rhodecode.lib import utils
28 from rhodecode.lib.str_utils import safe_bytes
30 from rhodecode.lib.str_utils import safe_bytes
29 from rhodecode.model import settings
31 from rhodecode.model import settings
30 from rhodecode.model import db
32 from rhodecode.model import db
31 from rhodecode.model import meta
33 from rhodecode.model import meta
34 from rhodecode.model.meta import Session
32 from rhodecode.model.repo import RepoModel
35 from rhodecode.model.repo import RepoModel
33 from rhodecode.model.repo_group import RepoGroupModel
36 from rhodecode.model.repo_group import RepoGroupModel
37 from rhodecode.model.scm import ScmModel
34 from rhodecode.model.settings import UiSetting, SettingsModel
38 from rhodecode.model.settings import UiSetting, SettingsModel
39 from rhodecode.tests.fixtures.fixture_pyramid import rhodecode_factory
35 from rhodecode.tests.fixtures.rc_fixture import Fixture
40 from rhodecode.tests.fixtures.rc_fixture import Fixture
36 from rhodecode_tools.lib.hash_utils import md5_safe
41 from rhodecode_tools.lib.hash_utils import md5_safe
37 from rhodecode.lib.ext_json import json
42 from rhodecode.lib.ext_json import json
38
43
39 fixture = Fixture()
44 fixture = Fixture()
40
45
41
46
42 def extract_hooks(config):
47 def extract_hooks(config):
43 """Return a dictionary with the hook entries of the given config."""
48 """Return a dictionary with the hook entries of the given config."""
44 hooks = {}
49 hooks = {}
45 config_items = config.serialize()
50 config_items = config.serialize()
46 for section, name, value in config_items:
51 for section, name, value in config_items:
47 if section != 'hooks':
52 if section != 'hooks':
48 continue
53 continue
49 hooks[name] = value
54 hooks[name] = value
50
55
51 return hooks
56 return hooks
52
57
53
58
54 def disable_hooks(request, hooks):
59 def disable_hooks(request, hooks):
55 """Disables the given hooks from the UI settings."""
60 """Disables the given hooks from the UI settings."""
56 session = meta.Session()
61 session = meta.Session()
57
62
58 model = SettingsModel()
63 model = SettingsModel()
59 for hook_key in hooks:
64 for hook_key in hooks:
60 sett = model.get_ui_by_key(hook_key)
65 sett = model.get_ui_by_key(hook_key)
61 sett.ui_active = False
66 sett.ui_active = False
62 session.add(sett)
67 session.add(sett)
63
68
64 # Invalidate cache
69 # Invalidate cache
65 ui_settings = session.query(db.RhodeCodeUi).options(
70 ui_settings = session.query(db.RhodeCodeUi).options(
66 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
71 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
67
72
68 meta.cache.invalidate(
73 meta.cache.invalidate(
69 ui_settings, {},
74 ui_settings, {},
70 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
75 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
71
76
72 ui_settings = session.query(db.RhodeCodeUi).options(
77 ui_settings = session.query(db.RhodeCodeUi).options(
73 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
78 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
74
79
75 meta.cache.invalidate(
80 meta.cache.invalidate(
76 ui_settings, {},
81 ui_settings, {},
77 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
82 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
78
83
79 @request.addfinalizer
84 @request.addfinalizer
80 def rollback():
85 def rollback():
81 session.rollback()
86 session.rollback()
82
87
83
88
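extract_hooks() above simply filters the serialized config down to its hooks section. A minimal sketch with a stand-in config object; the hook name and value are illustrative only, the real tests pass the object returned by utils.make_db_config():

class FakeConfig:
    def serialize(self):
        # (section, name, value) triples, as produced by the real config object
        return [
            ('hooks', 'changegroup.repo_size', 'python:...'),
            ('web', 'push_ssl', 'false'),
        ]

assert extract_hooks(FakeConfig()) == {'changegroup.repo_size': 'python:...'}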
84 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
89 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
85 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
90 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
86 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
91 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
87 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
92 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
88 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
93 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
89 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
94 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
90 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
95 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
91
96
92 HG_HOOKS = frozenset(
97 HG_HOOKS = frozenset(
93 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
98 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
94 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
99 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
95
100
96
101
97 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
102 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
98 ([], HG_HOOKS),
103 ([], HG_HOOKS),
99 (HG_HOOKS, []),
104 (HG_HOOKS, []),
100
105
101 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
106 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
102
107
103 # When a pull/push hook is disabled, its pre-pull/push counterpart should
108 # When a pull/push hook is disabled, its pre-pull/push counterpart should
104 # be disabled too.
109 # be disabled too.
105 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
110 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
106 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
111 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
107 HOOK_PUSH_KEY]),
112 HOOK_PUSH_KEY]),
108 ])
113 ])
109 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
114 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
110 expected_hooks):
115 expected_hooks):
111 disable_hooks(request, disabled_hooks)
116 disable_hooks(request, disabled_hooks)
112
117
113 config = utils.make_db_config()
118 config = utils.make_db_config()
114 hooks = extract_hooks(config)
119 hooks = extract_hooks(config)
115
120
116 assert set(hooks.keys()).intersection(HG_HOOKS) == set(expected_hooks)
121 assert set(hooks.keys()).intersection(HG_HOOKS) == set(expected_hooks)
117
122
118
123
119 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
124 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
120 ([], ['pull', 'push']),
125 ([], ['pull', 'push']),
121 ([HOOK_PUSH], ['pull']),
126 ([HOOK_PUSH], ['pull']),
122 ([HOOK_PULL], ['push']),
127 ([HOOK_PULL], ['push']),
123 ([HOOK_PULL, HOOK_PUSH], []),
128 ([HOOK_PULL, HOOK_PUSH], []),
124 ])
129 ])
125 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
130 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
126 hook_keys = (HOOK_PUSH, HOOK_PULL)
131 hook_keys = (HOOK_PUSH, HOOK_PULL)
127 ui_settings = [
132 ui_settings = [
128 ('hooks', key, 'some value', key not in disabled_hooks)
133 ('hooks', key, 'some value', key not in disabled_hooks)
129 for key in hook_keys]
134 for key in hook_keys]
130
135
131 result = utils.get_enabled_hook_classes(ui_settings)
136 result = utils.get_enabled_hook_classes(ui_settings)
132 assert sorted(result) == expected_hooks
137 assert sorted(result) == expected_hooks
133
138
134
139
135 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
140 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
136 _stub_git_repo(tmpdir.ensure('repo', dir=True))
141 _stub_git_repo(tmpdir.ensure('repo', dir=True))
137 repos = list(utils.get_filesystem_repos(str(tmpdir)))
142 repos = list(utils.get_filesystem_repos(str(tmpdir)))
138 assert repos == [('repo', ('git', tmpdir.join('repo')))]
143 assert repos == [('repo', ('git', tmpdir.join('repo')))]
139
144
140
145
141 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
146 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
142 tmpdir.ensure('not-a-repo', dir=True)
147 tmpdir.ensure('not-a-repo', dir=True)
143 repos = list(utils.get_filesystem_repos(str(tmpdir)))
148 repos = list(utils.get_filesystem_repos(str(tmpdir)))
144 assert repos == []
149 assert repos == []
145
150
146
151
147 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
152 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
148 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
153 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
149 repos = list(utils.get_filesystem_repos(str(tmpdir)))
154 repos = list(utils.get_filesystem_repos(str(tmpdir)))
150 assert repos == []
155 assert repos == []
151
156
152
157
153 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
158 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
154 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
159 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
155 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
160 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
156 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
161 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
157
162
158
163
159 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
164 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
160 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
165 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
161 repos = list(utils.get_filesystem_repos(str(tmpdir)))
166 repos = list(utils.get_filesystem_repos(str(tmpdir)))
162 assert repos == []
167 assert repos == []
163
168
164
169
165 def test_get_filesystem_repos_skips_files(tmpdir):
170 def test_get_filesystem_repos_skips_files(tmpdir):
166 tmpdir.ensure('test-file')
171 tmpdir.ensure('test-file')
167 repos = list(utils.get_filesystem_repos(str(tmpdir)))
172 repos = list(utils.get_filesystem_repos(str(tmpdir)))
168 assert repos == []
173 assert repos == []
169
174
170
175
171 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
176 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
172 removed_repo_name = 'rm__00000000_000000_000000__.stub'
177 removed_repo_name = 'rm__00000000_000000_000000__.stub'
173 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
178 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
174 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
179 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
175 repos = list(utils.get_filesystem_repos(str(tmpdir)))
180 repos = list(utils.get_filesystem_repos(str(tmpdir)))
176 assert repos == []
181 assert repos == []
177
182
178
183
179 def _stub_git_repo(repo_path):
184 def _stub_git_repo(repo_path):
180 """
185 """
181 Make `repo_path` look like a Git repository.
186 Make `repo_path` look like a Git repository.
182 """
187 """
183 repo_path.ensure('.git', dir=True)
188 repo_path.ensure('.git', dir=True)
184
189
185
190
186 def test_get_dirpaths_returns_all_paths_on_str(tmpdir):
191 def test_get_dirpaths_returns_all_paths_on_str(tmpdir):
187 tmpdir.ensure('test-file')
192 tmpdir.ensure('test-file')
188 tmpdir.ensure('test-file-1')
193 tmpdir.ensure('test-file-1')
189 tmp_path = str(tmpdir)
194 tmp_path = str(tmpdir)
190 dirpaths = utils.get_dirpaths(tmp_path)
195 dirpaths = utils.get_dirpaths(tmp_path)
191 assert list(sorted(dirpaths)) == ['test-file', 'test-file-1']
196 assert list(sorted(dirpaths)) == ['test-file', 'test-file-1']
192
197
193
198
194 def test_get_dirpaths_returns_all_paths_on_bytes(tmpdir):
199 def test_get_dirpaths_returns_all_paths_on_bytes(tmpdir):
195 tmpdir.ensure('test-file-bytes')
200 tmpdir.ensure('test-file-bytes')
196 tmp_path = str(tmpdir)
201 tmp_path = str(tmpdir)
197 dirpaths = utils.get_dirpaths(safe_bytes(tmp_path))
202 dirpaths = utils.get_dirpaths(safe_bytes(tmp_path))
198 assert list(sorted(dirpaths)) == [b'test-file-bytes']
203 assert list(sorted(dirpaths)) == [b'test-file-bytes']
199
204
200
205
201 def test_get_dirpaths_returns_all_paths_bytes(
206 def test_get_dirpaths_returns_all_paths_bytes(
202 tmpdir, platform_encodes_filenames):
207 tmpdir, platform_encodes_filenames):
203 if platform_encodes_filenames:
208 if platform_encodes_filenames:
204 pytest.skip("This platform seems to encode filenames.")
209 pytest.skip("This platform seems to encode filenames.")
205 tmpdir.ensure('repo-a-umlaut-\xe4')
210 tmpdir.ensure('repo-a-umlaut-\xe4')
206 dirpaths = utils.get_dirpaths(str(tmpdir))
211 dirpaths = utils.get_dirpaths(str(tmpdir))
207 assert dirpaths == ['repo-a-umlaut-\xe4']
212 assert dirpaths == ['repo-a-umlaut-\xe4']
208
213
209
214
210 def test_get_dirpaths_skips_paths_it_cannot_decode(
215 def test_get_dirpaths_skips_paths_it_cannot_decode(
211 tmpdir, platform_encodes_filenames):
216 tmpdir, platform_encodes_filenames):
212 if platform_encodes_filenames:
217 if platform_encodes_filenames:
213 pytest.skip("This platform seems to encode filenames.")
218 pytest.skip("This platform seems to encode filenames.")
214 path_with_latin1 = 'repo-a-umlaut-\xe4'
219 path_with_latin1 = 'repo-a-umlaut-\xe4'
215 tmp_path = str(tmpdir.ensure(path_with_latin1))
220 tmp_path = str(tmpdir.ensure(path_with_latin1))
216 dirpaths = utils.get_dirpaths(tmp_path)
221 dirpaths = utils.get_dirpaths(tmp_path)
217 assert dirpaths == []
222 assert dirpaths == []
218
223
219
224
220 @pytest.fixture(scope='session')
225 @pytest.fixture(scope='session')
221 def platform_encodes_filenames():
226 def platform_encodes_filenames():
222 """
227 """
223 Boolean indicator of whether the current platform changes filename encodings.
228 Boolean indicator of whether the current platform changes filename encodings.
224 """
229 """
225 path_with_latin1 = 'repo-a-umlaut-\xe4'
230 path_with_latin1 = 'repo-a-umlaut-\xe4'
226 tmpdir = py.path.local.mkdtemp()
231 tmpdir = py.path.local.mkdtemp()
227 tmpdir.ensure(path_with_latin1)
232 tmpdir.ensure(path_with_latin1)
228 read_path = tmpdir.listdir()[0].basename
233 read_path = tmpdir.listdir()[0].basename
229 tmpdir.remove()
234 tmpdir.remove()
230 return path_with_latin1 != read_path
235 return path_with_latin1 != read_path
231
236
232
237
233 def test_repo2db_mapper_groups(repo_groups):
238 def test_repo2db_cleaner_removes_zombie_groups(repo_groups):
234 session = meta.Session()
239 session = meta.Session()
235 zombie_group, parent_group, child_group = repo_groups
240 zombie_group, parent_group, child_group = repo_groups
236 zombie_path = os.path.join(
241 zombie_path = os.path.join(
237 RepoGroupModel().repos_path, zombie_group.full_path)
242 RepoGroupModel().repos_path, zombie_group.full_path)
238 os.rmdir(zombie_path)
243 os.rmdir(zombie_path)
239
244
240 # Avoid removing test repos when calling repo2db_mapper
245 # Avoid removing test repos when calling repo2db_mapper
241 repo_list = {
246 repo_list = [repo.repo_name for repo in session.query(db.Repository).all()]
242 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
247
243 }
248 utils.repo2db_cleanup(skip_repos=repo_list)
244 utils.repo2db_mapper(repo_list, remove_obsolete=True)
245
249
246 groups_in_db = session.query(db.RepoGroup).all()
250 groups_in_db = session.query(db.RepoGroup).all()
247 assert child_group in groups_in_db
251 assert child_group in groups_in_db
248 assert parent_group in groups_in_db
252 assert parent_group in groups_in_db
249 assert zombie_group not in groups_in_db
253 assert zombie_group not in groups_in_db
250
254
251
255
252 def test_repo2db_mapper_enables_largefiles(backend):
256
257 @pytest.mark.backends("hg", "git", "svn")
258 def test_repo2db_cleaner_removes_zombie_repos(backend):
253 repo = backend.create_repo()
259 repo = backend.create_repo()
254 repo_list = {repo.repo_name: 'test'}
260 zombie_path = repo.repo_full_path
255 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
261 shutil.rmtree(zombie_path)
256 utils.repo2db_mapper(repo_list, remove_obsolete=False)
262
257 _, kwargs = scm_mock.call_args
263 removed, errors = utils.repo2db_cleanup()
258 assert kwargs['config'].get('extensions', 'largefiles') == ''
264 assert len(removed) == 1
265 assert not errors
259
266
260
267
261 @pytest.mark.backends("git", "svn")
268 def test_repo2db_mapper_adds_new_repos(request, backend):
269 repo = backend.create_repo()
270 cleanup_repos = []
271 cleanup_groups = []
272 for num in range(5):
273 copy_repo_name = f'{repo.repo_name}-{num}'
274 copy_repo_path = f'{repo.repo_full_path}-{num}'
275
276 shutil.copytree(repo.repo_full_path, copy_repo_path)
277 cleanup_repos.append(copy_repo_name)
278
279 for gr_num in range(5):
280 gr_name = f'my_gr_{gr_num}'
281 dest_gr = os.path.join(os.path.dirname(repo.repo_full_path), gr_name)
282 os.makedirs(dest_gr, exist_ok=True)
283
284 copy_repo_name = f'{gr_name}/{repo.repo_name}-{gr_num}'
285 copy_repo_path = f'{dest_gr}/{repo.repo_name}-{gr_num}'
286
287 shutil.copytree(repo.repo_full_path, copy_repo_path)
288 cleanup_repos.append(copy_repo_name)
289 cleanup_groups.append(gr_name)
290
291 repo_list = ScmModel().repo_scan()
292
293 added, errors = utils.repo2db_mapper(repo_list)
294 Session().commit()
295 assert not errors
296
297 assert len(added) == 10
298
299 @request.addfinalizer
300 def cleanup():
301 for _repo in cleanup_repos:
302 del_result = RepoModel().delete(_repo, call_events=False)
303 Session().commit()
304 assert del_result is True
305
306 for _repo_group in cleanup_groups:
307 del_result = RepoGroupModel().delete(_repo_group, force_delete=True, call_events=False)
308 Session().commit()
309 assert del_result is True
310
311
262 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
312 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
263 repo = backend.create_repo()
313 repo = backend.create_repo()
264 repo_list = {repo.repo_name: 'test'}
314 repo_list = {repo.repo_name: 'test'}
265 utils.repo2db_mapper(repo_list, remove_obsolete=False)
315 added, errors = utils.repo2db_mapper(repo_list)
316 assert not errors
317 assert repo.scm_instance().get_hooks_info() == {'pre_version': rhodecode.__version__, 'post_version': rhodecode.__version__}
266
318
267
319
268 @pytest.mark.backends("git", "svn")
320 @pytest.mark.backends("git", "svn")
269 def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
321 def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
270 repo = backend.create_repo()
322 repo = backend.create_repo()
271 RepoModel().delete(repo, fs_remove=False)
323 RepoModel().delete(repo, fs_remove=False)
272 meta.Session().commit()
324 meta.Session().commit()
273 repo_list = {repo.repo_name: repo.scm_instance()}
325 repo_list = {repo.repo_name: repo.scm_instance()}
274 utils.repo2db_mapper(repo_list, remove_obsolete=False)
326 added, errors = utils.repo2db_mapper(repo_list)
327 assert not errors
328 assert len(added) == 1
275
329
276
330
277 class TestPasswordChanged(object):
331 class TestPasswordChanged(object):
278
332
279 def setup_method(self):
333 def setup_method(self):
280 self.session = {
334 self.session = {
281 'rhodecode_user': {
335 'rhodecode_user': {
282 'password': '0cc175b9c0f1b6a831c399e269772661'
336 'password': '0cc175b9c0f1b6a831c399e269772661'
283 }
337 }
284 }
338 }
285 self.auth_user = mock.Mock()
339 self.auth_user = mock.Mock()
286 self.auth_user.username = 'test'
340 self.auth_user.username = 'test'
287 self.auth_user.password = 'abc123'
341 self.auth_user.password = 'abc123'
288
342
289 def test_returns_false_for_default_user(self):
343 def test_returns_false_for_default_user(self):
290 self.auth_user.username = db.User.DEFAULT_USER
344 self.auth_user.username = db.User.DEFAULT_USER
291 result = utils.password_changed(self.auth_user, self.session)
345 result = utils.password_changed(self.auth_user, self.session)
292 assert result is False
346 assert result is False
293
347
294 def test_returns_false_if_password_was_not_changed(self):
348 def test_returns_false_if_password_was_not_changed(self):
295 self.session['rhodecode_user']['password'] = md5_safe(
349 self.session['rhodecode_user']['password'] = md5_safe(
296 self.auth_user.password)
350 self.auth_user.password)
297 result = utils.password_changed(self.auth_user, self.session)
351 result = utils.password_changed(self.auth_user, self.session)
298 assert result is False
352 assert result is False
299
353
300 def test_returns_true_if_password_was_changed(self):
354 def test_returns_true_if_password_was_changed(self):
301 result = utils.password_changed(self.auth_user, self.session)
355 result = utils.password_changed(self.auth_user, self.session)
302 assert result is True
356 assert result is True
303
357
304 def test_returns_true_if_auth_user_password_is_empty(self):
358 def test_returns_true_if_auth_user_password_is_empty(self):
305 self.auth_user.password = None
359 self.auth_user.password = None
306 result = utils.password_changed(self.auth_user, self.session)
360 result = utils.password_changed(self.auth_user, self.session)
307 assert result is True
361 assert result is True
308
362
309 def test_returns_true_if_session_password_is_empty(self):
363 def test_returns_true_if_session_password_is_empty(self):
310 self.session['rhodecode_user'].pop('password')
364 self.session['rhodecode_user'].pop('password')
311 result = utils.password_changed(self.auth_user, self.session)
365 result = utils.password_changed(self.auth_user, self.session)
312 assert result is True
366 assert result is True
313
367
314
368
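The TestPasswordChanged assertions above pin down how utils.password_changed is expected to behave; a rough, test-derived sketch (not the actual implementation) looks like this:

    def password_changed_sketch(auth_user, session):
        # The default/anonymous user never counts as having changed its password.
        if auth_user.username == db.User.DEFAULT_USER:
            return False
        session_password = session.get('rhodecode_user', {}).get('password')
        # Missing data on either side is treated as "changed".
        if not auth_user.password or not session_password:
            return True
        # The session keeps an md5 of the password; compare it against the current one.
        return session_password != md5_safe(auth_user.password)
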
315 class TestReadOpenSourceLicenses(object):
369 class TestReadOpenSourceLicenses(object):
316 def test_success(self):
370 def test_success(self):
317 utils._license_cache = None
371 utils._license_cache = None
318 json_data = '''
372 json_data = '''
319 {
373 {
320 "python2.7-pytest-2.7.1": {"UNKNOWN": null},
374 "python2.7-pytest-2.7.1": {"UNKNOWN": null},
321 "python2.7-Markdown-2.6.2": {
375 "python2.7-Markdown-2.6.2": {
322 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
376 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
323 }
377 }
324 }
378 }
325 '''
379 '''
326 resource_string_patch = mock.patch.object(
380 resource_string_patch = mock.patch.object(
327 utils.pkg_resources, 'resource_string', return_value=json_data)
381 utils.pkg_resources, 'resource_string', return_value=json_data)
328 with resource_string_patch:
382 with resource_string_patch:
329 result = utils.read_opensource_licenses()
383 result = utils.read_opensource_licenses()
330 assert result == json.loads(json_data)
384 assert result == json.loads(json_data)
331
385
332 def test_caching(self):
386 def test_caching(self):
333 utils._license_cache = {
387 utils._license_cache = {
334 "python2.7-pytest-2.7.1": {
388 "python2.7-pytest-2.7.1": {
335 "UNKNOWN": None
389 "UNKNOWN": None
336 },
390 },
337 "python2.7-Markdown-2.6.2": {
391 "python2.7-Markdown-2.6.2": {
338 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
392 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
339 }
393 }
340 }
394 }
341 resource_patch = mock.patch.object(
395 resource_patch = mock.patch.object(
342 utils.pkg_resources, 'resource_string', side_effect=Exception)
396 utils.pkg_resources, 'resource_string', side_effect=Exception)
343 json_patch = mock.patch.object(
397 json_patch = mock.patch.object(
344 utils.json, 'loads', side_effect=Exception)
398 utils.json, 'loads', side_effect=Exception)
345
399
346 with resource_patch as resource_mock, json_patch as json_mock:
400 with resource_patch as resource_mock, json_patch as json_mock:
347 result = utils.read_opensource_licenses()
401 result = utils.read_opensource_licenses()
348
402
349 assert resource_mock.call_count == 0
403 assert resource_mock.call_count == 0
350 assert json_mock.call_count == 0
404 assert json_mock.call_count == 0
351 assert result == utils._license_cache
405 assert result == utils._license_cache
352
406
353 def test_licenses_file_contains_no_unknown_licenses(self):
407 def test_licenses_file_contains_no_unknown_licenses(self):
354 utils._license_cache = None
408 utils._license_cache = None
355 result = utils.read_opensource_licenses()
409 result = utils.read_opensource_licenses()
356
410
357 for license_data in result:
411 for license_data in result:
358 if isinstance(license_data["license"], list):
412 if isinstance(license_data["license"], list):
359 for lic_data in license_data["license"]:
413 for lic_data in license_data["license"]:
360 assert 'UNKNOWN' not in lic_data["fullName"]
414 assert 'UNKNOWN' not in lic_data["fullName"]
361 else:
415 else:
362 full_name = license_data.get("fullName") or license_data
416 full_name = license_data.get("fullName") or license_data
363 assert 'UNKNOWN' not in full_name
417 assert 'UNKNOWN' not in full_name
364
418
365
419
366 class TestMakeDbConfig(object):
420 class TestMakeDbConfig(object):
367 def test_data_from_config_data_from_db_returned(self):
421 def test_data_from_config_data_from_db_returned(self):
368 test_data = [
422 test_data = [
369 ('section1', 'option1', 'value1'),
423 ('section1', 'option1', 'value1'),
370 ('section2', 'option2', 'value2'),
424 ('section2', 'option2', 'value2'),
371 ('section3', 'option3', 'value3'),
425 ('section3', 'option3', 'value3'),
372 ]
426 ]
373 with mock.patch.object(utils, 'prepare_config_data') as config_mock:
427 with mock.patch.object(utils, 'prepare_config_data') as config_mock:
374 config_mock.return_value = test_data
428 config_mock.return_value = test_data
375 kwargs = {'clear_session': False, 'repo': 'test_repo'}
429 kwargs = {'clear_session': False, 'repo': 'test_repo'}
376 result = utils.make_db_config(**kwargs)
430 result = utils.make_db_config(**kwargs)
377 config_mock.assert_called_once_with(**kwargs)
431 config_mock.assert_called_once_with(**kwargs)
378 for section, option, expected_value in test_data:
432 for section, option, expected_value in test_data:
379 value = result.get(section, option)
433 value = result.get(section, option)
380 assert value == expected_value
434 assert value == expected_value
381
435
382
436
383 class TestPrepareConfigData(object):
437 class TestPrepareConfigData(object):
384 def test_prepare_config_data_returns_active_settings(self):
438 def test_prepare_config_data_returns_active_settings(self):
385 test_data = [
439 test_data = [
386 UiSetting('section1', 'option1', 'value1', True),
440 UiSetting('section1', 'option1', 'value1', True),
387 UiSetting('section2', 'option2', 'value2', True),
441 UiSetting('section2', 'option2', 'value2', True),
388 UiSetting('section3', 'option3', 'value3', False),
442 UiSetting('section3', 'option3', 'value3', False),
389 ]
443 ]
390 repo_name = 'test_repo'
444 repo_name = 'test_repo'
391
445
392 model_patch = mock.patch.object(settings, 'VcsSettingsModel')
446 model_patch = mock.patch.object(settings, 'VcsSettingsModel')
393 hooks_patch = mock.patch.object(
447 hooks_patch = mock.patch.object(
394 utils, 'get_enabled_hook_classes',
448 utils, 'get_enabled_hook_classes',
395 return_value=['pull', 'push', 'repo_size'])
449 return_value=['pull', 'push', 'repo_size'])
396 with model_patch as model_mock, hooks_patch:
450 with model_patch as model_mock, hooks_patch:
397 instance_mock = mock.Mock()
451 instance_mock = mock.Mock()
398 model_mock.return_value = instance_mock
452 model_mock.return_value = instance_mock
399 instance_mock.get_ui_settings.return_value = test_data
453 instance_mock.get_ui_settings.return_value = test_data
400 result = utils.prepare_config_data(
454 result = utils.prepare_config_data(
401 clear_session=False, repo=repo_name)
455 clear_session=False, repo=repo_name)
402
456
403 self._assert_repo_name_passed(model_mock, repo_name)
457 self._assert_repo_name_passed(model_mock, repo_name)
404
458
405 assert ('section1', 'option1', 'value1') in result
459 assert ('section1', 'option1', 'value1') in result
406 assert ('section2', 'option2', 'value2') in result
460 assert ('section2', 'option2', 'value2') in result
407 assert ('section3', 'option3', 'value3') not in result
461 assert ('section3', 'option3', 'value3') not in result
408
462
409 def _assert_repo_name_passed(self, model_mock, repo_name):
463 def _assert_repo_name_passed(self, model_mock, repo_name):
410 assert model_mock.call_count == 1
464 assert model_mock.call_count == 1
411 call_args, call_kwargs = model_mock.call_args
465 call_args, call_kwargs = model_mock.call_args
412 assert call_kwargs['repo'] == repo_name
466 assert call_kwargs['repo'] == repo_name
413
467
414
468
415 class TestIsDirWritable(object):
469 class TestIsDirWritable(object):
416 def test_returns_false_when_not_writable(self):
470 def test_returns_false_when_not_writable(self):
417 with mock.patch('builtins.open', side_effect=OSError):
471 with mock.patch('builtins.open', side_effect=OSError):
418 assert not utils._is_dir_writable('/stub-path')
472 assert not utils._is_dir_writable('/stub-path')
419
473
420 def test_returns_true_when_writable(self, tmpdir):
474 def test_returns_true_when_writable(self, tmpdir):
421 assert utils._is_dir_writable(str(tmpdir))
475 assert utils._is_dir_writable(str(tmpdir))
422
476
423 def test_is_safe_against_race_conditions(self, tmpdir):
477 def test_is_safe_against_race_conditions(self, tmpdir):
424 workers = multiprocessing.Pool()
478 workers = multiprocessing.Pool()
425 directories = [str(tmpdir)] * 10
479 directories = [str(tmpdir)] * 10
426 workers.map(utils._is_dir_writable, directories)
480 workers.map(utils._is_dir_writable, directories)
427
481
428
482
429 class TestGetEnabledHooks(object):
483 class TestGetEnabledHooks(object):
430 def test_only_active_hooks_are_enabled(self):
484 def test_only_active_hooks_are_enabled(self):
431 ui_settings = [
485 ui_settings = [
432 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
486 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
433 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
487 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
434 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False)
488 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False)
435 ]
489 ]
436 result = utils.get_enabled_hook_classes(ui_settings)
490 result = utils.get_enabled_hook_classes(ui_settings)
437 assert result == ['push', 'repo_size']
491 assert result == ['push', 'repo_size']
438
492
439 def test_all_hooks_are_enabled(self):
493 def test_all_hooks_are_enabled(self):
440 ui_settings = [
494 ui_settings = [
441 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
495 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
442 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
496 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
443 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True)
497 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True)
444 ]
498 ]
445 result = utils.get_enabled_hook_classes(ui_settings)
499 result = utils.get_enabled_hook_classes(ui_settings)
446 assert result == ['push', 'repo_size', 'pull']
500 assert result == ['push', 'repo_size', 'pull']
447
501
448 def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
502 def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
449 ui_settings = []
503 ui_settings = []
450 result = utils.get_enabled_hook_classes(ui_settings)
504 result = utils.get_enabled_hook_classes(ui_settings)
451 assert result == []
505 assert result == []
452
506
453
507
454 def test_obfuscate_url_pw():
508 def test_obfuscate_url_pw():
455 from rhodecode.lib.utils2 import obfuscate_url_pw
509 from rhodecode.lib.utils2 import obfuscate_url_pw
456 engine = u'/home/repos/malmö'
510 engine = '/home/repos/malmö'
457 assert obfuscate_url_pw(engine)
511 assert obfuscate_url_pw(engine)
458
512
459
513
460 @pytest.mark.parametrize("test_ua, expected", [
514 @pytest.mark.parametrize("test_ua, expected", [
461 ("", ""),
515 ("", ""),
462 ('"quoted"', 'quoted'),
516 ('"quoted"', 'quoted'),
463 ('internal-merge', 'internal-merge'),
517 ('internal-merge', 'internal-merge'),
464 ('hg/internal-merge', 'hg/internal-merge'),
518 ('hg/internal-merge', 'hg/internal-merge'),
465 ('git/internal-merge', 'git/internal-merge'),
519 ('git/internal-merge', 'git/internal-merge'),
466
520
467 # git
521 # git
468 ('git/2.10.1 (Apple Git-78)', 'git/2.10.1'),
522 ('git/2.10.1 (Apple Git-78)', 'git/2.10.1'),
469 ('GiT/2.37.2.windows.2', 'git/2.37.2'),
523 ('GiT/2.37.2.windows.2', 'git/2.37.2'),
470 ('git/2.35.1 (Microsoft Windows NT 10.0.19044.0; Win32NT x64) CLR/4.0.30319 VS16/16.0.0', 'git/2.35.1'),
524 ('git/2.35.1 (Microsoft Windows NT 10.0.19044.0; Win32NT x64) CLR/4.0.30319 VS16/16.0.0', 'git/2.35.1'),
471 ('ssh-user-agent', 'ssh-user-agent'),
525 ('ssh-user-agent', 'ssh-user-agent'),
472 ('git/ssh-user-agent', 'git/ssh-user-agent'),
526 ('git/ssh-user-agent', 'git/ssh-user-agent'),
473
527
474
528
475 # hg
529 # hg
476 ('mercurial/proto-1.0 (Mercurial 4.2)', 'mercurial/4.2'),
530 ('mercurial/proto-1.0 (Mercurial 4.2)', 'mercurial/4.2'),
477 ('mercurial/proto-1.0', ''),
531 ('mercurial/proto-1.0', ''),
478 ('mercurial/proto-1.0 (Mercurial 3.9.2)', 'mercurial/3.9.2'),
532 ('mercurial/proto-1.0 (Mercurial 3.9.2)', 'mercurial/3.9.2'),
479 ('mercurial/ssh-user-agent', 'mercurial/ssh-user-agent'),
533 ('mercurial/ssh-user-agent', 'mercurial/ssh-user-agent'),
480 ('mercurial/proto-1.0 (Mercurial 5.8rc0)', 'mercurial/5.8rc0'),
534 ('mercurial/proto-1.0 (Mercurial 5.8rc0)', 'mercurial/5.8rc0'),
481
535
482
536
483 ])
537 ])
484 def test_user_agent_normalizer(test_ua, expected):
538 def test_user_agent_normalizer(test_ua, expected):
485 from rhodecode.lib.utils2 import user_agent_normalizer
539 from rhodecode.lib.utils2 import user_agent_normalizer
486 assert user_agent_normalizer(test_ua, safe=False) == expected
540 assert user_agent_normalizer(test_ua, safe=False) == expected
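
Taken together, the remap/cleanup tests above assume the two helpers are now split and report their work back to the caller: repo2db_mapper(repo_list) returns the repositories it added plus any errors, while repo2db_cleanup() (optionally with skip_repos) removes database entries whose storage directories are gone. A minimal caller sketch, using only the call signatures exercised by these tests (everything else is illustrative):

    from rhodecode.lib import utils
    from rhodecode.model.scm import ScmModel

    # Scan the storage path and register anything missing from the database.
    repo_list = ScmModel().repo_scan()
    added, errors = utils.repo2db_mapper(repo_list)

    # Separately, drop database entries whose backing directories no longer exist.
    removed, cleanup_errors = utils.repo2db_cleanup(skip_repos=[])
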
@@ -1,323 +1,324 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 def get_url_defs():
20 def get_url_defs():
21 from rhodecode.apps._base import ADMIN_PREFIX
21 from rhodecode.apps._base import ADMIN_PREFIX
22
22
23 return {
23 return {
24 "home": "/",
24 "home": "/",
25 "main_page_repos_data": "/_home_repos",
25 "main_page_repos_data": "/_home_repos",
26 "main_page_repo_groups_data": "/_home_repo_groups",
26 "main_page_repo_groups_data": "/_home_repo_groups",
27 "repo_group_home": "/{repo_group_name}",
27 "repo_group_home": "/{repo_group_name}",
28 "user_autocomplete_data": "/_users",
28 "user_autocomplete_data": "/_users",
29 "user_group_autocomplete_data": "/_user_groups",
29 "user_group_autocomplete_data": "/_user_groups",
30 "repo_list_data": "/_repos",
30 "repo_list_data": "/_repos",
31 "goto_switcher_data": "/_goto_data",
31 "goto_switcher_data": "/_goto_data",
32 "admin_home": ADMIN_PREFIX + "",
32 "admin_home": ADMIN_PREFIX + "",
33 "admin_audit_logs": ADMIN_PREFIX + "/audit_logs",
33 "admin_audit_logs": ADMIN_PREFIX + "/audit_logs",
34 "admin_defaults_repositories": ADMIN_PREFIX + "/defaults/repositories",
34 "admin_defaults_repositories": ADMIN_PREFIX + "/defaults/repositories",
35 "admin_defaults_repositories_update": ADMIN_PREFIX
35 "admin_defaults_repositories_update": ADMIN_PREFIX
36 + "/defaults/repositories/update",
36 + "/defaults/repositories/update",
37 "search": ADMIN_PREFIX + "/search",
37 "search": ADMIN_PREFIX + "/search",
38 "search_repo": "/{repo_name}/search",
38 "search_repo": "/{repo_name}/search",
39 "my_account_auth_tokens": ADMIN_PREFIX + "/my_account/auth_tokens",
39 "my_account_auth_tokens": ADMIN_PREFIX + "/my_account/auth_tokens",
40 "my_account_auth_tokens_add": ADMIN_PREFIX + "/my_account/auth_tokens/new",
40 "my_account_auth_tokens_add": ADMIN_PREFIX + "/my_account/auth_tokens/new",
41 "my_account_auth_tokens_delete": ADMIN_PREFIX
41 "my_account_auth_tokens_delete": ADMIN_PREFIX
42 + "/my_account/auth_tokens/delete",
42 + "/my_account/auth_tokens/delete",
43 "repos": ADMIN_PREFIX + "/repos",
43 "repos": ADMIN_PREFIX + "/repos",
44 "repos_data": ADMIN_PREFIX + "/repos_data",
44 "repos_data": ADMIN_PREFIX + "/repos_data",
45 "repo_groups": ADMIN_PREFIX + "/repo_groups",
45 "repo_groups": ADMIN_PREFIX + "/repo_groups",
46 "repo_groups_data": ADMIN_PREFIX + "/repo_groups_data",
46 "repo_groups_data": ADMIN_PREFIX + "/repo_groups_data",
47 "user_groups": ADMIN_PREFIX + "/user_groups",
47 "user_groups": ADMIN_PREFIX + "/user_groups",
48 "user_groups_data": ADMIN_PREFIX + "/user_groups_data",
48 "user_groups_data": ADMIN_PREFIX + "/user_groups_data",
49 "user_profile": "/_profiles/{username}",
49 "user_profile": "/_profiles/{username}",
50 "profile_user_group": "/_profile_user_group/{user_group_name}",
50 "profile_user_group": "/_profile_user_group/{user_group_name}",
51 "repo_summary": "/{repo_name}",
51 "repo_summary": "/{repo_name}",
52 "repo_creating_check": "/{repo_name}/repo_creating_check",
52 "repo_creating_check": "/{repo_name}/repo_creating_check",
53 "edit_repo": "/{repo_name}/settings",
53 "edit_repo": "/{repo_name}/settings",
54 "edit_repo_vcs": "/{repo_name}/settings/vcs",
54 "edit_repo_vcs": "/{repo_name}/settings/vcs",
55 "edit_repo_vcs_update": "/{repo_name}/settings/vcs/update",
55 "edit_repo_vcs_update": "/{repo_name}/settings/vcs/update",
56 "edit_repo_vcs_svn_pattern_delete": "/{repo_name}/settings/vcs/svn_pattern/delete",
56 "edit_repo_vcs_svn_pattern_delete": "/{repo_name}/settings/vcs/svn_pattern/delete",
57 "repo_archivefile": "/{repo_name}/archive/{fname}",
57 "repo_archivefile": "/{repo_name}/archive/{fname}",
58 "repo_files_diff": "/{repo_name}/diff/{f_path}",
58 "repo_files_diff": "/{repo_name}/diff/{f_path}",
59 "repo_files_diff_2way_redirect": "/{repo_name}/diff-2way/{f_path}",
59 "repo_files_diff_2way_redirect": "/{repo_name}/diff-2way/{f_path}",
60 "repo_files": "/{repo_name}/files/{commit_id}/{f_path}",
60 "repo_files": "/{repo_name}/files/{commit_id}/{f_path}",
61 "repo_files:default_path": "/{repo_name}/files/{commit_id}/",
61 "repo_files:default_path": "/{repo_name}/files/{commit_id}/",
62 "repo_files:default_commit": "/{repo_name}/files",
62 "repo_files:default_commit": "/{repo_name}/files",
63 "repo_files:rendered": "/{repo_name}/render/{commit_id}/{f_path}",
63 "repo_files:rendered": "/{repo_name}/render/{commit_id}/{f_path}",
64 "repo_files:annotated": "/{repo_name}/annotate/{commit_id}/{f_path}",
64 "repo_files:annotated": "/{repo_name}/annotate/{commit_id}/{f_path}",
65 "repo_files:annotated_previous": "/{repo_name}/annotate-previous/{commit_id}/{f_path}",
65 "repo_files:annotated_previous": "/{repo_name}/annotate-previous/{commit_id}/{f_path}",
66 "repo_files_nodelist": "/{repo_name}/nodelist/{commit_id}/{f_path}",
66 "repo_files_nodelist": "/{repo_name}/nodelist/{commit_id}/{f_path}",
67 "repo_file_raw": "/{repo_name}/raw/{commit_id}/{f_path}",
67 "repo_file_raw": "/{repo_name}/raw/{commit_id}/{f_path}",
68 "repo_file_download": "/{repo_name}/download/{commit_id}/{f_path}",
68 "repo_file_download": "/{repo_name}/download/{commit_id}/{f_path}",
69 "repo_file_history": "/{repo_name}/history/{commit_id}/{f_path}",
69 "repo_file_history": "/{repo_name}/history/{commit_id}/{f_path}",
70 "repo_file_authors": "/{repo_name}/authors/{commit_id}/{f_path}",
70 "repo_file_authors": "/{repo_name}/authors/{commit_id}/{f_path}",
71 "repo_files_remove_file": "/{repo_name}/remove_file/{commit_id}/{f_path}",
71 "repo_files_remove_file": "/{repo_name}/remove_file/{commit_id}/{f_path}",
72 "repo_files_delete_file": "/{repo_name}/delete_file/{commit_id}/{f_path}",
72 "repo_files_delete_file": "/{repo_name}/delete_file/{commit_id}/{f_path}",
73 "repo_files_edit_file": "/{repo_name}/edit_file/{commit_id}/{f_path}",
73 "repo_files_edit_file": "/{repo_name}/edit_file/{commit_id}/{f_path}",
74 "repo_files_update_file": "/{repo_name}/update_file/{commit_id}/{f_path}",
74 "repo_files_update_file": "/{repo_name}/update_file/{commit_id}/{f_path}",
75 "repo_files_add_file": "/{repo_name}/add_file/{commit_id}/{f_path}",
75 "repo_files_add_file": "/{repo_name}/add_file/{commit_id}/{f_path}",
76 "repo_files_upload_file": "/{repo_name}/upload_file/{commit_id}/{f_path}",
76 "repo_files_upload_file": "/{repo_name}/upload_file/{commit_id}/{f_path}",
77 "repo_files_create_file": "/{repo_name}/create_file/{commit_id}/{f_path}",
77 "repo_files_create_file": "/{repo_name}/create_file/{commit_id}/{f_path}",
78 "repo_files_replace_binary": "/{repo_name}/replace_binary/{commit_id}/{f_path}",
78 "repo_files_replace_binary": "/{repo_name}/replace_binary/{commit_id}/{f_path}",
79 "repo_nodetree_full": "/{repo_name}/nodetree_full/{commit_id}/{f_path}",
79 "repo_nodetree_full": "/{repo_name}/nodetree_full/{commit_id}/{f_path}",
80 "repo_nodetree_full:default_path": "/{repo_name}/nodetree_full/{commit_id}/",
80 "repo_nodetree_full:default_path": "/{repo_name}/nodetree_full/{commit_id}/",
81 "journal": ADMIN_PREFIX + "/journal",
81 "journal": ADMIN_PREFIX + "/journal",
82 "journal_rss": ADMIN_PREFIX + "/journal/rss",
82 "journal_rss": ADMIN_PREFIX + "/journal/rss",
83 "journal_atom": ADMIN_PREFIX + "/journal/atom",
83 "journal_atom": ADMIN_PREFIX + "/journal/atom",
84 "journal_public": ADMIN_PREFIX + "/public_journal",
84 "journal_public": ADMIN_PREFIX + "/public_journal",
85 "journal_public_atom": ADMIN_PREFIX + "/public_journal/atom",
85 "journal_public_atom": ADMIN_PREFIX + "/public_journal/atom",
86 "journal_public_atom_old": ADMIN_PREFIX + "/public_journal_atom",
86 "journal_public_atom_old": ADMIN_PREFIX + "/public_journal_atom",
87 "journal_public_rss": ADMIN_PREFIX + "/public_journal/rss",
87 "journal_public_rss": ADMIN_PREFIX + "/public_journal/rss",
88 "journal_public_rss_old": ADMIN_PREFIX + "/public_journal_rss",
88 "journal_public_rss_old": ADMIN_PREFIX + "/public_journal_rss",
89 "toggle_following": ADMIN_PREFIX + "/toggle_following",
89 "toggle_following": ADMIN_PREFIX + "/toggle_following",
90 "upload_file": "/_file_store/upload",
90 "upload_file": "/_file_store/upload",
91 "download_file": "/_file_store/download/{fid}",
91 "download_file": "/_file_store/download/{fid}",
92 "download_file_by_token": "/_file_store/token-download/{_auth_token}/{fid}",
92 "download_file_by_token": "/_file_store/token-download/{_auth_token}/{fid}",
93 "gists_show": ADMIN_PREFIX + "/gists",
93 "gists_show": ADMIN_PREFIX + "/gists",
94 "gists_new": ADMIN_PREFIX + "/gists/new",
94 "gists_new": ADMIN_PREFIX + "/gists/new",
95 "gists_create": ADMIN_PREFIX + "/gists/create",
95 "gists_create": ADMIN_PREFIX + "/gists/create",
96 "gist_show": ADMIN_PREFIX + "/gists/{gist_id}",
96 "gist_show": ADMIN_PREFIX + "/gists/{gist_id}",
97 "gist_delete": ADMIN_PREFIX + "/gists/{gist_id}/delete",
97 "gist_delete": ADMIN_PREFIX + "/gists/{gist_id}/delete",
98 "gist_edit": ADMIN_PREFIX + "/gists/{gist_id}/edit",
98 "gist_edit": ADMIN_PREFIX + "/gists/{gist_id}/edit",
99 "gist_edit_check_revision": ADMIN_PREFIX
99 "gist_edit_check_revision": ADMIN_PREFIX
100 + "/gists/{gist_id}/edit/check_revision",
100 + "/gists/{gist_id}/edit/check_revision",
101 "gist_update": ADMIN_PREFIX + "/gists/{gist_id}/update",
101 "gist_update": ADMIN_PREFIX + "/gists/{gist_id}/update",
102 "gist_show_rev": ADMIN_PREFIX + "/gists/{gist_id}/rev/{revision}",
102 "gist_show_rev": ADMIN_PREFIX + "/gists/{gist_id}/rev/{revision}",
103 "gist_show_formatted": ADMIN_PREFIX
103 "gist_show_formatted": ADMIN_PREFIX
104 + "/gists/{gist_id}/rev/{revision}/{format}",
104 + "/gists/{gist_id}/rev/{revision}/{format}",
105 "gist_show_formatted_path": ADMIN_PREFIX
105 "gist_show_formatted_path": ADMIN_PREFIX
106 + "/gists/{gist_id}/rev/{revision}/{format}/{f_path}",
106 + "/gists/{gist_id}/rev/{revision}/{format}/{f_path}",
107 "login": ADMIN_PREFIX + "/login",
107 "login": ADMIN_PREFIX + "/login",
108 "logout": ADMIN_PREFIX + "/logout",
108 "logout": ADMIN_PREFIX + "/logout",
109 "setup_2fa": ADMIN_PREFIX + "/setup_2fa",
109 "setup_2fa": ADMIN_PREFIX + "/setup_2fa",
110 "check_2fa": ADMIN_PREFIX + "/check_2fa",
110 "check_2fa": ADMIN_PREFIX + "/check_2fa",
111 "register": ADMIN_PREFIX + "/register",
111 "register": ADMIN_PREFIX + "/register",
112 "reset_password": ADMIN_PREFIX + "/password_reset",
112 "reset_password": ADMIN_PREFIX + "/password_reset",
113 "reset_password_confirmation": ADMIN_PREFIX + "/password_reset_confirmation",
113 "reset_password_confirmation": ADMIN_PREFIX + "/password_reset_confirmation",
114 "admin_permissions_application": ADMIN_PREFIX + "/permissions/application",
114 "admin_permissions_application": ADMIN_PREFIX + "/permissions/application",
115 "admin_permissions_application_update": ADMIN_PREFIX
115 "admin_permissions_application_update": ADMIN_PREFIX
116 + "/permissions/application/update",
116 + "/permissions/application/update",
117 "repo_commit_raw": "/{repo_name}/changeset-diff/{commit_id}",
117 "repo_commit_raw": "/{repo_name}/changeset-diff/{commit_id}",
118 "user_group_members_data": ADMIN_PREFIX
118 "user_group_members_data": ADMIN_PREFIX
119 + "/user_groups/{user_group_id}/members",
119 + "/user_groups/{user_group_id}/members",
120 "user_groups_new": ADMIN_PREFIX + "/user_groups/new",
120 "user_groups_new": ADMIN_PREFIX + "/user_groups/new",
121 "user_groups_create": ADMIN_PREFIX + "/user_groups/create",
121 "user_groups_create": ADMIN_PREFIX + "/user_groups/create",
122 "edit_user_group": ADMIN_PREFIX + "/user_groups/{user_group_id}/edit",
122 "edit_user_group": ADMIN_PREFIX + "/user_groups/{user_group_id}/edit",
123 "edit_user_group_advanced_sync": ADMIN_PREFIX
123 "edit_user_group_advanced_sync": ADMIN_PREFIX
124 + "/user_groups/{user_group_id}/edit/advanced/sync",
124 + "/user_groups/{user_group_id}/edit/advanced/sync",
125 "edit_user_group_global_perms_update": ADMIN_PREFIX
125 "edit_user_group_global_perms_update": ADMIN_PREFIX
126 + "/user_groups/{user_group_id}/edit/global_permissions/update",
126 + "/user_groups/{user_group_id}/edit/global_permissions/update",
127 "user_groups_update": ADMIN_PREFIX + "/user_groups/{user_group_id}/update",
127 "user_groups_update": ADMIN_PREFIX + "/user_groups/{user_group_id}/update",
128 "user_groups_delete": ADMIN_PREFIX + "/user_groups/{user_group_id}/delete",
128 "user_groups_delete": ADMIN_PREFIX + "/user_groups/{user_group_id}/delete",
129 "edit_user_group_perms": ADMIN_PREFIX
129 "edit_user_group_perms": ADMIN_PREFIX
130 + "/user_groups/{user_group_id}/edit/permissions",
130 + "/user_groups/{user_group_id}/edit/permissions",
131 "edit_user_group_perms_update": ADMIN_PREFIX
131 "edit_user_group_perms_update": ADMIN_PREFIX
132 + "/user_groups/{user_group_id}/edit/permissions/update",
132 + "/user_groups/{user_group_id}/edit/permissions/update",
133 "edit_repo_group": "/{repo_group_name}/_edit",
133 "edit_repo_group": "/{repo_group_name}/_edit",
134 "edit_repo_group_perms": "/{repo_group_name:}/_settings/permissions",
134 "edit_repo_group_perms": "/{repo_group_name:}/_settings/permissions",
135 "edit_repo_group_perms_update": "/{repo_group_name}/_settings/permissions/update",
135 "edit_repo_group_perms_update": "/{repo_group_name}/_settings/permissions/update",
136 "edit_repo_group_advanced": "/{repo_group_name}/_settings/advanced",
136 "edit_repo_group_advanced": "/{repo_group_name}/_settings/advanced",
137 "edit_repo_group_advanced_delete": "/{repo_group_name}/_settings/advanced/delete",
137 "edit_repo_group_advanced_delete": "/{repo_group_name}/_settings/advanced/delete",
138 "edit_user_ssh_keys": ADMIN_PREFIX + "/users/{user_id}/edit/ssh_keys",
138 "edit_user_ssh_keys": ADMIN_PREFIX + "/users/{user_id}/edit/ssh_keys",
139 "edit_user_ssh_keys_generate_keypair": ADMIN_PREFIX
139 "edit_user_ssh_keys_generate_keypair": ADMIN_PREFIX
140 + "/users/{user_id}/edit/ssh_keys/generate",
140 + "/users/{user_id}/edit/ssh_keys/generate",
141 "edit_user_ssh_keys_add": ADMIN_PREFIX + "/users/{user_id}/edit/ssh_keys/new",
141 "edit_user_ssh_keys_add": ADMIN_PREFIX + "/users/{user_id}/edit/ssh_keys/new",
142 "edit_user_ssh_keys_delete": ADMIN_PREFIX
142 "edit_user_ssh_keys_delete": ADMIN_PREFIX
143 + "/users/{user_id}/edit/ssh_keys/delete",
143 + "/users/{user_id}/edit/ssh_keys/delete",
144 "users": ADMIN_PREFIX + "/users",
144 "users": ADMIN_PREFIX + "/users",
145 "users_data": ADMIN_PREFIX + "/users_data",
145 "users_data": ADMIN_PREFIX + "/users_data",
146 "users_create": ADMIN_PREFIX + "/users/create",
146 "users_create": ADMIN_PREFIX + "/users/create",
147 "users_new": ADMIN_PREFIX + "/users/new",
147 "users_new": ADMIN_PREFIX + "/users/new",
148 "user_edit": ADMIN_PREFIX + "/users/{user_id}/edit",
148 "user_edit": ADMIN_PREFIX + "/users/{user_id}/edit",
149 "user_edit_advanced": ADMIN_PREFIX + "/users/{user_id}/edit/advanced",
149 "user_edit_advanced": ADMIN_PREFIX + "/users/{user_id}/edit/advanced",
150 "user_edit_global_perms": ADMIN_PREFIX
150 "user_edit_global_perms": ADMIN_PREFIX
151 + "/users/{user_id}/edit/global_permissions",
151 + "/users/{user_id}/edit/global_permissions",
152 "user_edit_global_perms_update": ADMIN_PREFIX
152 "user_edit_global_perms_update": ADMIN_PREFIX
153 + "/users/{user_id}/edit/global_permissions/update",
153 + "/users/{user_id}/edit/global_permissions/update",
154 "user_update": ADMIN_PREFIX + "/users/{user_id}/update",
154 "user_update": ADMIN_PREFIX + "/users/{user_id}/update",
155 "user_delete": ADMIN_PREFIX + "/users/{user_id}/delete",
155 "user_delete": ADMIN_PREFIX + "/users/{user_id}/delete",
156 "user_create_personal_repo_group": ADMIN_PREFIX
156 "user_create_personal_repo_group": ADMIN_PREFIX
157 + "/users/{user_id}/create_repo_group",
157 + "/users/{user_id}/create_repo_group",
158 "edit_user_auth_tokens": ADMIN_PREFIX + "/users/{user_id}/edit/auth_tokens",
158 "edit_user_auth_tokens": ADMIN_PREFIX + "/users/{user_id}/edit/auth_tokens",
159 "edit_user_auth_tokens_add": ADMIN_PREFIX
159 "edit_user_auth_tokens_add": ADMIN_PREFIX
160 + "/users/{user_id}/edit/auth_tokens/new",
160 + "/users/{user_id}/edit/auth_tokens/new",
161 "edit_user_auth_tokens_delete": ADMIN_PREFIX
161 "edit_user_auth_tokens_delete": ADMIN_PREFIX
162 + "/users/{user_id}/edit/auth_tokens/delete",
162 + "/users/{user_id}/edit/auth_tokens/delete",
163 "edit_user_emails": ADMIN_PREFIX + "/users/{user_id}/edit/emails",
163 "edit_user_emails": ADMIN_PREFIX + "/users/{user_id}/edit/emails",
164 "edit_user_emails_add": ADMIN_PREFIX + "/users/{user_id}/edit/emails/new",
164 "edit_user_emails_add": ADMIN_PREFIX + "/users/{user_id}/edit/emails/new",
165 "edit_user_emails_delete": ADMIN_PREFIX + "/users/{user_id}/edit/emails/delete",
165 "edit_user_emails_delete": ADMIN_PREFIX + "/users/{user_id}/edit/emails/delete",
166 "edit_user_ips": ADMIN_PREFIX + "/users/{user_id}/edit/ips",
166 "edit_user_ips": ADMIN_PREFIX + "/users/{user_id}/edit/ips",
167 "edit_user_ips_add": ADMIN_PREFIX + "/users/{user_id}/edit/ips/new",
167 "edit_user_ips_add": ADMIN_PREFIX + "/users/{user_id}/edit/ips/new",
168 "edit_user_ips_delete": ADMIN_PREFIX + "/users/{user_id}/edit/ips/delete",
168 "edit_user_ips_delete": ADMIN_PREFIX + "/users/{user_id}/edit/ips/delete",
169 "edit_user_perms_summary": ADMIN_PREFIX
169 "edit_user_perms_summary": ADMIN_PREFIX
170 + "/users/{user_id}/edit/permissions_summary",
170 + "/users/{user_id}/edit/permissions_summary",
171 "edit_user_perms_summary_json": ADMIN_PREFIX
171 "edit_user_perms_summary_json": ADMIN_PREFIX
172 + "/users/{user_id}/edit/permissions_summary/json",
172 + "/users/{user_id}/edit/permissions_summary/json",
173 "edit_user_audit_logs": ADMIN_PREFIX + "/users/{user_id}/edit/audit",
173 "edit_user_audit_logs": ADMIN_PREFIX + "/users/{user_id}/edit/audit",
174 "edit_user_audit_logs_download": ADMIN_PREFIX
174 "edit_user_audit_logs_download": ADMIN_PREFIX
175 + "/users/{user_id}/edit/audit/download",
175 + "/users/{user_id}/edit/audit/download",
176 "admin_settings": ADMIN_PREFIX + "/settings",
176 "admin_settings": ADMIN_PREFIX + "/settings",
177 "admin_settings_update": ADMIN_PREFIX + "/settings/update",
177 "admin_settings_update": ADMIN_PREFIX + "/settings/update",
178 "admin_settings_global": ADMIN_PREFIX + "/settings/global",
178 "admin_settings_global": ADMIN_PREFIX + "/settings/global",
179 "admin_settings_global_update": ADMIN_PREFIX + "/settings/global/update",
179 "admin_settings_global_update": ADMIN_PREFIX + "/settings/global/update",
180 "admin_settings_vcs": ADMIN_PREFIX + "/settings/vcs",
180 "admin_settings_vcs": ADMIN_PREFIX + "/settings/vcs",
181 "admin_settings_vcs_update": ADMIN_PREFIX + "/settings/vcs/update",
181 "admin_settings_vcs_update": ADMIN_PREFIX + "/settings/vcs/update",
182 "admin_settings_vcs_svn_pattern_delete": ADMIN_PREFIX
182 "admin_settings_vcs_svn_pattern_delete": ADMIN_PREFIX
183 + "/settings/vcs/svn_pattern_delete",
183 + "/settings/vcs/svn_pattern_delete",
184 "admin_settings_mapping": ADMIN_PREFIX + "/settings/mapping",
184 "admin_settings_mapping": ADMIN_PREFIX + "/settings/mapping",
185 "admin_settings_mapping_update": ADMIN_PREFIX + "/settings/mapping/update",
185 "admin_settings_mapping_create": ADMIN_PREFIX + "/settings/mapping/create",
186 "admin_settings_mapping_cleanup": ADMIN_PREFIX + "/settings/mapping/cleanup",
186 "admin_settings_visual": ADMIN_PREFIX + "/settings/visual",
187 "admin_settings_visual": ADMIN_PREFIX + "/settings/visual",
187 "admin_settings_visual_update": ADMIN_PREFIX + "/settings/visual/update",
188 "admin_settings_visual_update": ADMIN_PREFIX + "/settings/visual/update",
188 "admin_settings_issuetracker": ADMIN_PREFIX + "/settings/issue-tracker",
189 "admin_settings_issuetracker": ADMIN_PREFIX + "/settings/issue-tracker",
189 "admin_settings_issuetracker_update": ADMIN_PREFIX
190 "admin_settings_issuetracker_update": ADMIN_PREFIX
190 + "/settings/issue-tracker/update",
191 + "/settings/issue-tracker/update",
191 "admin_settings_issuetracker_test": ADMIN_PREFIX
192 "admin_settings_issuetracker_test": ADMIN_PREFIX
192 + "/settings/issue-tracker/test",
193 + "/settings/issue-tracker/test",
193 "admin_settings_issuetracker_delete": ADMIN_PREFIX
194 "admin_settings_issuetracker_delete": ADMIN_PREFIX
194 + "/settings/issue-tracker/delete",
195 + "/settings/issue-tracker/delete",
195 "admin_settings_email": ADMIN_PREFIX + "/settings/email",
196 "admin_settings_email": ADMIN_PREFIX + "/settings/email",
196 "admin_settings_email_update": ADMIN_PREFIX + "/settings/email/update",
197 "admin_settings_email_update": ADMIN_PREFIX + "/settings/email/update",
197 "admin_settings_hooks": ADMIN_PREFIX + "/settings/hooks",
198 "admin_settings_hooks": ADMIN_PREFIX + "/settings/hooks",
198 "admin_settings_hooks_update": ADMIN_PREFIX + "/settings/hooks/update",
199 "admin_settings_hooks_update": ADMIN_PREFIX + "/settings/hooks/update",
199 "admin_settings_hooks_delete": ADMIN_PREFIX + "/settings/hooks/delete",
200 "admin_settings_hooks_delete": ADMIN_PREFIX + "/settings/hooks/delete",
200 "admin_settings_search": ADMIN_PREFIX + "/settings/search",
201 "admin_settings_search": ADMIN_PREFIX + "/settings/search",
201 "admin_settings_labs": ADMIN_PREFIX + "/settings/labs",
202 "admin_settings_labs": ADMIN_PREFIX + "/settings/labs",
202 "admin_settings_labs_update": ADMIN_PREFIX + "/settings/labs/update",
203 "admin_settings_labs_update": ADMIN_PREFIX + "/settings/labs/update",
203 "admin_settings_sessions": ADMIN_PREFIX + "/settings/sessions",
204 "admin_settings_sessions": ADMIN_PREFIX + "/settings/sessions",
204 "admin_settings_sessions_cleanup": ADMIN_PREFIX + "/settings/sessions/cleanup",
205 "admin_settings_sessions_cleanup": ADMIN_PREFIX + "/settings/sessions/cleanup",
205 "admin_settings_system": ADMIN_PREFIX + "/settings/system",
206 "admin_settings_system": ADMIN_PREFIX + "/settings/system",
206 "admin_settings_system_update": ADMIN_PREFIX + "/settings/system/updates",
207 "admin_settings_system_update": ADMIN_PREFIX + "/settings/system/updates",
207 "admin_settings_open_source": ADMIN_PREFIX + "/settings/open_source",
208 "admin_settings_open_source": ADMIN_PREFIX + "/settings/open_source",
208 "repo_group_new": ADMIN_PREFIX + "/repo_group/new",
209 "repo_group_new": ADMIN_PREFIX + "/repo_group/new",
209 "repo_group_create": ADMIN_PREFIX + "/repo_group/create",
210 "repo_group_create": ADMIN_PREFIX + "/repo_group/create",
210 "repo_new": ADMIN_PREFIX + "/repos/new",
211 "repo_new": ADMIN_PREFIX + "/repos/new",
211 "repo_create": ADMIN_PREFIX + "/repos/create",
212 "repo_create": ADMIN_PREFIX + "/repos/create",
212 "admin_permissions_global": ADMIN_PREFIX + "/permissions/global",
213 "admin_permissions_global": ADMIN_PREFIX + "/permissions/global",
213 "admin_permissions_global_update": ADMIN_PREFIX + "/permissions/global/update",
214 "admin_permissions_global_update": ADMIN_PREFIX + "/permissions/global/update",
214 "admin_permissions_object": ADMIN_PREFIX + "/permissions/object",
215 "admin_permissions_object": ADMIN_PREFIX + "/permissions/object",
215 "admin_permissions_object_update": ADMIN_PREFIX + "/permissions/object/update",
216 "admin_permissions_object_update": ADMIN_PREFIX + "/permissions/object/update",
216 "admin_permissions_ips": ADMIN_PREFIX + "/permissions/ips",
217 "admin_permissions_ips": ADMIN_PREFIX + "/permissions/ips",
217 "admin_permissions_overview": ADMIN_PREFIX + "/permissions/overview",
218 "admin_permissions_overview": ADMIN_PREFIX + "/permissions/overview",
218 "admin_permissions_ssh_keys": ADMIN_PREFIX + "/permissions/ssh_keys",
219 "admin_permissions_ssh_keys": ADMIN_PREFIX + "/permissions/ssh_keys",
219 "admin_permissions_ssh_keys_data": ADMIN_PREFIX + "/permissions/ssh_keys/data",
220 "admin_permissions_ssh_keys_data": ADMIN_PREFIX + "/permissions/ssh_keys/data",
220 "admin_permissions_ssh_keys_update": ADMIN_PREFIX
221 "admin_permissions_ssh_keys_update": ADMIN_PREFIX
221 + "/permissions/ssh_keys/update",
222 + "/permissions/ssh_keys/update",
222 "pullrequest_show": "/{repo_name}/pull-request/{pull_request_id}",
223 "pullrequest_show": "/{repo_name}/pull-request/{pull_request_id}",
223 "pull_requests_global": ADMIN_PREFIX + "/pull-request/{pull_request_id}",
224 "pull_requests_global": ADMIN_PREFIX + "/pull-request/{pull_request_id}",
224 "pull_requests_global_0": ADMIN_PREFIX + "/pull_requests/{pull_request_id}",
225 "pull_requests_global_0": ADMIN_PREFIX + "/pull_requests/{pull_request_id}",
225 "pull_requests_global_1": ADMIN_PREFIX + "/pull-requests/{pull_request_id}",
226 "pull_requests_global_1": ADMIN_PREFIX + "/pull-requests/{pull_request_id}",
226 "notifications_show_all": ADMIN_PREFIX + "/notifications",
227 "notifications_show_all": ADMIN_PREFIX + "/notifications",
227 "notifications_mark_all_read": ADMIN_PREFIX + "/notifications_mark_all_read",
228 "notifications_mark_all_read": ADMIN_PREFIX + "/notifications_mark_all_read",
228 "notifications_show": ADMIN_PREFIX + "/notifications/{notification_id}",
229 "notifications_show": ADMIN_PREFIX + "/notifications/{notification_id}",
229 "notifications_update": ADMIN_PREFIX
230 "notifications_update": ADMIN_PREFIX
230 + "/notifications/{notification_id}/update",
231 + "/notifications/{notification_id}/update",
231 "notifications_delete": ADMIN_PREFIX
232 "notifications_delete": ADMIN_PREFIX
232 + "/notifications/{notification_id}/delete",
233 + "/notifications/{notification_id}/delete",
233 "my_account": ADMIN_PREFIX + "/my_account/profile",
234 "my_account": ADMIN_PREFIX + "/my_account/profile",
234 "my_account_edit": ADMIN_PREFIX + "/my_account/edit",
235 "my_account_edit": ADMIN_PREFIX + "/my_account/edit",
235 "my_account_update": ADMIN_PREFIX + "/my_account/update",
236 "my_account_update": ADMIN_PREFIX + "/my_account/update",
236 "my_account_pullrequests": ADMIN_PREFIX + "/my_account/pull_requests",
237 "my_account_pullrequests": ADMIN_PREFIX + "/my_account/pull_requests",
237 "my_account_pullrequests_data": ADMIN_PREFIX + "/my_account/pull_requests/data",
238 "my_account_pullrequests_data": ADMIN_PREFIX + "/my_account/pull_requests/data",
238 "my_account_emails": ADMIN_PREFIX + "/my_account/emails",
239 "my_account_emails": ADMIN_PREFIX + "/my_account/emails",
239 "my_account_emails_add": ADMIN_PREFIX + "/my_account/emails/new",
240 "my_account_emails_add": ADMIN_PREFIX + "/my_account/emails/new",
240 "my_account_emails_delete": ADMIN_PREFIX + "/my_account/emails/delete",
241 "my_account_emails_delete": ADMIN_PREFIX + "/my_account/emails/delete",
241 "my_account_password": ADMIN_PREFIX + "/my_account/password",
242 "my_account_password": ADMIN_PREFIX + "/my_account/password",
242 "my_account_password_update": ADMIN_PREFIX + "/my_account/password/update",
243 "my_account_password_update": ADMIN_PREFIX + "/my_account/password/update",
243 "my_account_repos": ADMIN_PREFIX + "/my_account/repos",
244 "my_account_repos": ADMIN_PREFIX + "/my_account/repos",
244 "my_account_watched": ADMIN_PREFIX + "/my_account/watched",
245 "my_account_watched": ADMIN_PREFIX + "/my_account/watched",
245 "my_account_perms": ADMIN_PREFIX + "/my_account/perms",
246 "my_account_perms": ADMIN_PREFIX + "/my_account/perms",
246 "my_account_notifications": ADMIN_PREFIX + "/my_account/notifications",
247 "my_account_notifications": ADMIN_PREFIX + "/my_account/notifications",
247 "my_account_ssh_keys": ADMIN_PREFIX + "/my_account/ssh_keys",
248 "my_account_ssh_keys": ADMIN_PREFIX + "/my_account/ssh_keys",
248 "my_account_ssh_keys_generate": ADMIN_PREFIX + "/my_account/ssh_keys/generate",
249 "my_account_ssh_keys_generate": ADMIN_PREFIX + "/my_account/ssh_keys/generate",
249 "my_account_ssh_keys_add": ADMIN_PREFIX + "/my_account/ssh_keys/new",
250 "my_account_ssh_keys_add": ADMIN_PREFIX + "/my_account/ssh_keys/new",
250 "my_account_ssh_keys_delete": ADMIN_PREFIX + "/my_account/ssh_keys/delete",
251 "my_account_ssh_keys_delete": ADMIN_PREFIX + "/my_account/ssh_keys/delete",
251 "pullrequest_show_all": "/{repo_name}/pull-request",
252 "pullrequest_show_all": "/{repo_name}/pull-request",
252 "pullrequest_show_all_data": "/{repo_name}/pull-request-data",
253 "pullrequest_show_all_data": "/{repo_name}/pull-request-data",
253 "bookmarks_home": "/{repo_name}/bookmarks",
254 "bookmarks_home": "/{repo_name}/bookmarks",
254 "branches_home": "/{repo_name}/branches",
255 "branches_home": "/{repo_name}/branches",
255 "branch_remove": "/{repo_name}/branches/{branch_name}/remove",
256 "branch_remove": "/{repo_name}/branches/{branch_name}/remove",
256 "tags_home": "/{repo_name}/tags",
257 "tags_home": "/{repo_name}/tags",
257 "repo_changelog": "/{repo_name}/changelog",
258 "repo_changelog": "/{repo_name}/changelog",
258 "repo_commits": "/{repo_name}/commits",
259 "repo_commits": "/{repo_name}/commits",
259 "repo_commits_file": "/{repo_name}/commits/{commit_id}/{f_path}",
260 "repo_commits_file": "/{repo_name}/commits/{commit_id}/{f_path}",
260 "repo_commits_elements": "/{repo_name}/commits_elements",
261 "repo_commits_elements": "/{repo_name}/commits_elements",
261 "repo_commit": "/{repo_name}/changeset/{commit_id}",
262 "repo_commit": "/{repo_name}/changeset/{commit_id}",
262 "repo_commit_comment_create": "/{repo_name}/changeset/{commit_id}/comment/create",
263 "repo_commit_comment_create": "/{repo_name}/changeset/{commit_id}/comment/create",
263 "repo_commit_comment_preview": "/{repo_name}/changeset/{commit_id}/comment/preview",
264 "repo_commit_comment_preview": "/{repo_name}/changeset/{commit_id}/comment/preview",
264 "repo_commit_comment_delete": "/{repo_name}/changeset/{commit_id}/comment/{comment_id}/delete",
265 "repo_commit_comment_delete": "/{repo_name}/changeset/{commit_id}/comment/{comment_id}/delete",
265 "repo_commit_comment_edit": "/{repo_name}/changeset/{commit_id}/comment/{comment_id}/edit",
266 "repo_commit_comment_edit": "/{repo_name}/changeset/{commit_id}/comment/{comment_id}/edit",
266 "repo_commit_children": "/{repo_name}/changeset_children/{commit_id}",
267 "repo_commit_children": "/{repo_name}/changeset_children/{commit_id}",
267 "repo_commit_parents": "/{repo_name}/changeset_parents/{commit_id}",
268 "repo_commit_parents": "/{repo_name}/changeset_parents/{commit_id}",
268 "repo_commit_patch": "/{repo_name}/changeset-patch/{commit_id}",
269 "repo_commit_patch": "/{repo_name}/changeset-patch/{commit_id}",
269 "repo_commit_download": "/{repo_name}/changeset-download/{commit_id}",
270 "repo_commit_download": "/{repo_name}/changeset-download/{commit_id}",
270 "repo_commit_data": "/{repo_name}/changeset-data/{commit_id}",
271 "repo_commit_data": "/{repo_name}/changeset-data/{commit_id}",
271 "repo_compare": "/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}",
272 "repo_compare": "/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}",
272 "repo_compare_select": "/{repo_name}/compare",
273 "repo_compare_select": "/{repo_name}/compare",
273 "rss_feed_home": "/{repo_name}/feed-rss",
274 "rss_feed_home": "/{repo_name}/feed-rss",
274 "atom_feed_home": "/{repo_name}/feed-atom",
275 "atom_feed_home": "/{repo_name}/feed-atom",
275 "rss_feed_home_old": "/{repo_name}/feed/rss",
276 "rss_feed_home_old": "/{repo_name}/feed/rss",
276 "atom_feed_home_old": "/{repo_name}/feed/atom",
277 "atom_feed_home_old": "/{repo_name}/feed/atom",
277 "repo_fork_new": "/{repo_name}/fork",
278 "repo_fork_new": "/{repo_name}/fork",
278 "repo_fork_create": "/{repo_name}/fork/create",
279 "repo_fork_create": "/{repo_name}/fork/create",
279 "repo_forks_show_all": "/{repo_name}/forks",
280 "repo_forks_show_all": "/{repo_name}/forks",
280 "repo_forks_data": "/{repo_name}/forks/data",
281 "repo_forks_data": "/{repo_name}/forks/data",
281 "edit_repo_issuetracker": "/{repo_name}/settings/issue_trackers",
282 "edit_repo_issuetracker": "/{repo_name}/settings/issue_trackers",
282 "edit_repo_issuetracker_test": "/{repo_name}/settings/issue_trackers/test",
283 "edit_repo_issuetracker_test": "/{repo_name}/settings/issue_trackers/test",
283 "edit_repo_issuetracker_delete": "/{repo_name}/settings/issue_trackers/delete",
284 "edit_repo_issuetracker_delete": "/{repo_name}/settings/issue_trackers/delete",
284 "edit_repo_issuetracker_update": "/{repo_name}/settings/issue_trackers/update",
285 "edit_repo_issuetracker_update": "/{repo_name}/settings/issue_trackers/update",
285 "edit_repo_maintenance": "/{repo_name}/settings/maintenance",
286 "edit_repo_maintenance": "/{repo_name}/settings/maintenance",
286 "edit_repo_maintenance_execute": "/{repo_name}/settings/maintenance/execute",
287 "edit_repo_maintenance_execute": "/{repo_name}/settings/maintenance/execute",
287 "repo_changelog_file": "/{repo_name}/changelog/{commit_id}/{f_path}",
288 "repo_changelog_file": "/{repo_name}/changelog/{commit_id}/{f_path}",
288 "pullrequest_repo_refs": "/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}",
289 "pullrequest_repo_refs": "/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}",
289 "pullrequest_repo_targets": "/{repo_name}/pull-request/repo-destinations",
290 "pullrequest_repo_targets": "/{repo_name}/pull-request/repo-destinations",
290 "pullrequest_new": "/{repo_name}/pull-request/new",
291 "pullrequest_new": "/{repo_name}/pull-request/new",
291 "pullrequest_create": "/{repo_name}/pull-request/create",
292 "pullrequest_create": "/{repo_name}/pull-request/create",
292 "pullrequest_update": "/{repo_name}/pull-request/{pull_request_id}/update",
293 "pullrequest_update": "/{repo_name}/pull-request/{pull_request_id}/update",
293 "pullrequest_merge": "/{repo_name}/pull-request/{pull_request_id}/merge",
294 "pullrequest_merge": "/{repo_name}/pull-request/{pull_request_id}/merge",
294 "pullrequest_delete": "/{repo_name}/pull-request/{pull_request_id}/delete",
295 "pullrequest_delete": "/{repo_name}/pull-request/{pull_request_id}/delete",
295 "pullrequest_comment_create": "/{repo_name}/pull-request/{pull_request_id}/comment",
296 "pullrequest_comment_create": "/{repo_name}/pull-request/{pull_request_id}/comment",
296 "pullrequest_comment_delete": "/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete",
297 "pullrequest_comment_delete": "/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete",
297 "pullrequest_comment_edit": "/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/edit",
298 "pullrequest_comment_edit": "/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/edit",
298 "edit_repo_caches": "/{repo_name}/settings/caches",
299 "edit_repo_caches": "/{repo_name}/settings/caches",
299 "edit_repo_perms": "/{repo_name}/settings/permissions",
300 "edit_repo_perms": "/{repo_name}/settings/permissions",
300 "edit_repo_fields": "/{repo_name}/settings/fields",
301 "edit_repo_fields": "/{repo_name}/settings/fields",
301 "edit_repo_remote": "/{repo_name}/settings/remote",
302 "edit_repo_remote": "/{repo_name}/settings/remote",
302 "edit_repo_statistics": "/{repo_name}/settings/statistics",
303 "edit_repo_statistics": "/{repo_name}/settings/statistics",
303 "edit_repo_advanced": "/{repo_name}/settings/advanced",
304 "edit_repo_advanced": "/{repo_name}/settings/advanced",
304 "edit_repo_advanced_delete": "/{repo_name}/settings/advanced/delete",
305 "edit_repo_advanced_delete": "/{repo_name}/settings/advanced/delete",
305 "edit_repo_advanced_archive": "/{repo_name}/settings/advanced/archive",
306 "edit_repo_advanced_archive": "/{repo_name}/settings/advanced/archive",
306 "edit_repo_advanced_fork": "/{repo_name}/settings/advanced/fork",
307 "edit_repo_advanced_fork": "/{repo_name}/settings/advanced/fork",
307 "edit_repo_advanced_locking": "/{repo_name}/settings/advanced/locking",
308 "edit_repo_advanced_locking": "/{repo_name}/settings/advanced/locking",
308 "edit_repo_advanced_journal": "/{repo_name}/settings/advanced/journal",
309 "edit_repo_advanced_journal": "/{repo_name}/settings/advanced/journal",
309 "repo_stats": "/{repo_name}/repo_stats/{commit_id}",
310 "repo_stats": "/{repo_name}/repo_stats/{commit_id}",
310 "repo_refs_data": "/{repo_name}/refs-data",
311 "repo_refs_data": "/{repo_name}/refs-data",
311 "repo_refs_changelog_data": "/{repo_name}/refs-data-changelog",
312 "repo_refs_changelog_data": "/{repo_name}/refs-data-changelog",
312 "repo_artifacts_stream_store": "/_file_store/stream-upload",
313 "repo_artifacts_stream_store": "/_file_store/stream-upload",
313 }
314 }
314
315
315
316
316 def route_path(name, params=None, **kwargs):
317 def route_path(name, params=None, **kwargs):
317 import urllib.parse
318 import urllib.parse
318
319
319 base_url = get_url_defs()[name].format(**kwargs)
320 base_url = get_url_defs()[name].format(**kwargs)
320
321
321 if params:
322 if params:
322 base_url = f"{base_url}?{urllib.parse.urlencode(params)}"
323 base_url = f"{base_url}?{urllib.parse.urlencode(params)}"
323 return base_url
324 return base_url
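
For illustration, two hypothetical lookups against the route definitions above (repository, commit and file values are made up):

    # -> '/my-repo/changeset/abc123'
    route_path('repo_commit', repo_name='my-repo', commit_id='abc123')

    # -> '/my-repo/files/tip/README.rst?at=branch%3Adefault'
    route_path('repo_files', params={'at': 'branch:default'},
               repo_name='my-repo', commit_id='tip', f_path='README.rst')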