##// END OF EJS Templates
feat(remap and rescan): added a more resilient remap and removal option, and also split the logic into either add or cleanup
super-admin -
r5619:c9e499e7 default
parent child Browse files
Show More
@@ -0,0 +1,44 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import mock
20 import pytest
21
22 from rhodecode.model.scm import ScmModel
23 from rhodecode.api.tests.utils import (
24 build_data, api_call, assert_ok, assert_error, crash)
25
26
27 @pytest.mark.usefixtures("testuser_api", "app")
28 class TestCleanupRepos(object):
29 def test_api_cleanup_repos(self):
30 id_, params = build_data(self.apikey, 'cleanup_repos')
31 response = api_call(self.app, params)
32
33 expected = {'removed': [], 'errors': []}
34 assert_ok(id_, expected, given=response.body)
35
36 def test_api_cleanup_repos_error(self):
37
38 id_, params = build_data(self.apikey, 'cleanup_repos', )
39
40 with mock.patch('rhodecode.lib.utils.repo2db_cleanup', side_effect=crash):
41 response = api_call(self.app, params)
42
43 expected = 'Error occurred during repo storage cleanup action'
44 assert_error(id_, expected, given=response.body)
@@ -1,42 +1,42 b''
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import mock
20 20 import pytest
21 21
22 from rhodecode.model.scm import ScmModel
23 22 from rhodecode.api.tests.utils import (
24 23 build_data, api_call, assert_ok, assert_error, crash)
25 24
26 25
27 26 @pytest.mark.usefixtures("testuser_api", "app")
28 27 class TestRescanRepos(object):
29 28 def test_api_rescan_repos(self):
30 29 id_, params = build_data(self.apikey, 'rescan_repos')
31 30 response = api_call(self.app, params)
32 31
33 expected = {'added': [], 'removed': []}
32 expected = {'added': [], 'errors': []}
34 33 assert_ok(id_, expected, given=response.body)
35 34
36 @mock.patch.object(ScmModel, 'repo_scan', crash)
37 def test_api_rescann_error(self):
35 def test_api_rescan_repos_error(self):
38 36 id_, params = build_data(self.apikey, 'rescan_repos', )
37
38 with mock.patch('rhodecode.lib.utils.repo2db_mapper', side_effect=crash):
39 39 response = api_call(self.app, params)
40 40
41 41 expected = 'Error occurred during rescan repositories action'
42 42 assert_error(id_, expected, given=response.body)
@@ -1,479 +1,463 b''
1 1 # Copyright (C) 2011-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20 import itertools
21 import base64
22 21
23 22 from rhodecode.api import (
24 23 jsonrpc_method, JSONRPCError, JSONRPCForbidden, find_methods)
25 24
26 25 from rhodecode.api.utils import (
27 26 Optional, OAttr, has_superadmin_permission, get_user_or_error)
28 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path
27 from rhodecode.lib.utils import get_rhodecode_repo_store_path
29 28 from rhodecode.lib import system_info
30 29 from rhodecode.lib import user_sessions
31 30 from rhodecode.lib import exc_tracking
32 31 from rhodecode.lib.ext_json import json
33 32 from rhodecode.lib.utils2 import safe_int
34 33 from rhodecode.model.db import UserIpMap
35 34 from rhodecode.model.scm import ScmModel
36 from rhodecode.apps.file_store import utils as store_utils
37 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
38 FileOverSizeException
35
39 36
40 37 log = logging.getLogger(__name__)
41 38
42 39
43 40 @jsonrpc_method()
44 41 def get_server_info(request, apiuser):
45 42 """
46 43 Returns the |RCE| server information.
47 44
48 45 This includes the running version of |RCE| and all installed
49 46 packages. This command takes the following options:
50 47
51 48 :param apiuser: This is filled automatically from the |authtoken|.
52 49 :type apiuser: AuthUser
53 50
54 51 Example output:
55 52
56 53 .. code-block:: bash
57 54
58 55 id : <id_given_in_input>
59 56 result : {
60 57 'modules': [<module name>,...]
61 58 'py_version': <python version>,
62 59 'platform': <platform type>,
63 60 'rhodecode_version': <rhodecode version>
64 61 }
65 62 error : null
66 63 """
67 64
68 65 if not has_superadmin_permission(apiuser):
69 66 raise JSONRPCForbidden()
70 67
71 68 server_info = ScmModel().get_server_info(request.environ)
72 69 # rhodecode-index requires those
73 70
74 71 server_info['index_storage'] = server_info['search']['value']['location']
75 72 server_info['storage'] = server_info['storage']['value']['path']
76 73
77 74 return server_info
78 75
79 76
80 77 @jsonrpc_method()
81 78 def get_repo_store(request, apiuser):
82 79 """
83 80 Returns the |RCE| repository storage information.
84 81
85 82 :param apiuser: This is filled automatically from the |authtoken|.
86 83 :type apiuser: AuthUser
87 84
88 85 Example output:
89 86
90 87 .. code-block:: bash
91 88
92 89 id : <id_given_in_input>
93 90 result : {
94 91 'modules': [<module name>,...]
95 92 'py_version': <python version>,
96 93 'platform': <platform type>,
97 94 'rhodecode_version': <rhodecode version>
98 95 }
99 96 error : null
100 97 """
101 98
102 99 if not has_superadmin_permission(apiuser):
103 100 raise JSONRPCForbidden()
104 101
105 102 path = get_rhodecode_repo_store_path()
106 103 return {"path": path}
107 104
108 105
109 106 @jsonrpc_method()
110 107 def get_ip(request, apiuser, userid=Optional(OAttr('apiuser'))):
111 108 """
112 109 Displays the IP Address as seen from the |RCE| server.
113 110
114 111 * This command displays the IP Address, as well as all the defined IP
115 112 addresses for the specified user. If the ``userid`` is not set, the
116 113 data returned is for the user calling the method.
117 114
118 115 This command can only be run using an |authtoken| with admin rights to
119 116 the specified repository.
120 117
121 118 This command takes the following options:
122 119
123 120 :param apiuser: This is filled automatically from |authtoken|.
124 121 :type apiuser: AuthUser
125 122 :param userid: Sets the userid for which associated IP Address data
126 123 is returned.
127 124 :type userid: Optional(str or int)
128 125
129 126 Example output:
130 127
131 128 .. code-block:: bash
132 129
133 130 id : <id_given_in_input>
134 131 result : {
135 132 "server_ip_addr": "<ip_from_clien>",
136 133 "user_ips": [
137 134 {
138 135 "ip_addr": "<ip_with_mask>",
139 136 "ip_range": ["<start_ip>", "<end_ip>"],
140 137 },
141 138 ...
142 139 ]
143 140 }
144 141
145 142 """
146 143 if not has_superadmin_permission(apiuser):
147 144 raise JSONRPCForbidden()
148 145
149 146 userid = Optional.extract(userid, evaluate_locals=locals())
150 147 userid = getattr(userid, 'user_id', userid)
151 148
152 149 user = get_user_or_error(userid)
153 150 ips = UserIpMap.query().filter(UserIpMap.user == user).all()
154 151 return {
155 152 'server_ip_addr': request.rpc_ip_addr,
156 153 'user_ips': ips
157 154 }
158 155
159 156
160 157 @jsonrpc_method()
161 def rescan_repos(request, apiuser, remove_obsolete=Optional(False)):
158 def rescan_repos(request, apiuser):
162 159 """
163 160 Triggers a rescan of the specified repositories.
164
165 * If the ``remove_obsolete`` option is set, it also deletes repositories
166 that are found in the database but not on the file system, so called
167 "clean zombies".
161 It returns list of added repositories, and errors during scan.
168 162
169 163 This command can only be run using an |authtoken| with admin rights to
170 164 the specified repository.
171 165
172 166 This command takes the following options:
173 167
174 168 :param apiuser: This is filled automatically from the |authtoken|.
175 169 :type apiuser: AuthUser
176 :param remove_obsolete: Deletes repositories from the database that
177 are not found on the filesystem.
178 :type remove_obsolete: Optional(``True`` | ``False``)
179 170
180 171 Example output:
181 172
182 173 .. code-block:: bash
183 174
184 175 id : <id_given_in_input>
185 176 result : {
186 177 'added': [<added repository name>,...]
187 'removed': [<removed repository name>,...]
178 'errors': [<error_list>,...]
188 179 }
189 180 error : null
190 181
191 182 Example error output:
192 183
193 184 .. code-block:: bash
194 185
195 186 id : <id_given_in_input>
196 187 result : null
197 188 error : {
198 189 'Error occurred during rescan repositories action'
199 190 }
200 191
201 192 """
193 from rhodecode.lib.utils import repo2db_mapper # re-import for testing patches
194
202 195 if not has_superadmin_permission(apiuser):
203 196 raise JSONRPCForbidden()
204 197
205 198 try:
206 rm_obsolete = Optional.extract(remove_obsolete)
207 added, removed = repo2db_mapper(ScmModel().repo_scan(),
208 remove_obsolete=rm_obsolete, force_hooks_rebuild=True)
209 return {'added': added, 'removed': removed}
199 added, errors = repo2db_mapper(ScmModel().repo_scan(), force_hooks_rebuild=True)
200 return {'added': added, 'errors': errors}
210 201 except Exception:
211 log.exception('Failed to run repo rescann')
202 log.exception('Failed to run repo rescan')
212 203 raise JSONRPCError(
213 204 'Error occurred during rescan repositories action'
214 205 )
215 206
216 207 @jsonrpc_method()
217 def cleanup_repos(request, apiuser, remove_obsolete=Optional(False)):
208 def cleanup_repos(request, apiuser):
218 209 """
219 Triggers a rescan of the specified repositories.
220
221 * If the ``remove_obsolete`` option is set, it also deletes repositories
222 that are found in the database but not on the file system, so called
223 "clean zombies".
210 Triggers a cleanup of non-existing repositories or repository groups in filesystem.
224 211
225 212 This command can only be run using an |authtoken| with admin rights to
226 213 the specified repository.
227 214
228 215 This command takes the following options:
229 216
230 217 :param apiuser: This is filled automatically from the |authtoken|.
231 218 :type apiuser: AuthUser
232 :param remove_obsolete: Deletes repositories from the database that
233 are not found on the filesystem.
234 :type remove_obsolete: Optional(``True`` | ``False``)
235 219
236 220 Example output:
237 221
238 222 .. code-block:: bash
239 223
240 224 id : <id_given_in_input>
241 225 result : {
242 'added': [<added repository name>,...]
243 'removed': [<removed repository name>,...]
226 'removed': [<removed repository name or repository group name>,...]
227 'errors': [<error list of failures to remove>,...]
244 228 }
245 229 error : null
246 230
247 231 Example error output:
248 232
249 233 .. code-block:: bash
250 234
251 235 id : <id_given_in_input>
252 236 result : null
253 237 error : {
254 'Error occurred during rescan repositories action'
238 'Error occurred during repo storage cleanup action'
255 239 }
256 240
257 241 """
242 from rhodecode.lib.utils import repo2db_cleanup # re-import for testing patches
243
258 244 if not has_superadmin_permission(apiuser):
259 245 raise JSONRPCForbidden()
260 246
261 247 try:
262 rm_obsolete = Optional.extract(remove_obsolete)
263 added, removed = repo2db_mapper(ScmModel().repo_scan(),
264 remove_obsolete=rm_obsolete, force_hooks_rebuild=True)
265 return {'added': added, 'removed': removed}
248 removed, errors = repo2db_cleanup()
249 return {'removed': removed, 'errors': errors}
266 250 except Exception:
267 log.exception('Failed to run repo rescann')
251 log.exception('Failed to run repo storage cleanup')
268 252 raise JSONRPCError(
269 'Error occurred during rescan repositories action'
253 'Error occurred during repo storage cleanup action'
270 254 )
271 255
272 256
273 257 @jsonrpc_method()
274 258 def cleanup_sessions(request, apiuser, older_then=Optional(60)):
275 259 """
276 260 Triggers a session cleanup action.
277 261
278 262 If the ``older_then`` option is set, only sessions that hasn't been
279 263 accessed in the given number of days will be removed.
280 264
281 265 This command can only be run using an |authtoken| with admin rights to
282 266 the specified repository.
283 267
284 268 This command takes the following options:
285 269
286 270 :param apiuser: This is filled automatically from the |authtoken|.
287 271 :type apiuser: AuthUser
288 272 :param older_then: Deletes session that hasn't been accessed
289 273 in given number of days.
290 274 :type older_then: Optional(int)
291 275
292 276 Example output:
293 277
294 278 .. code-block:: bash
295 279
296 280 id : <id_given_in_input>
297 281 result: {
298 282 "backend": "<type of backend>",
299 283 "sessions_removed": <number_of_removed_sessions>
300 284 }
301 285 error : null
302 286
303 287 Example error output:
304 288
305 289 .. code-block:: bash
306 290
307 291 id : <id_given_in_input>
308 292 result : null
309 293 error : {
310 294 'Error occurred during session cleanup'
311 295 }
312 296
313 297 """
314 298 if not has_superadmin_permission(apiuser):
315 299 raise JSONRPCForbidden()
316 300
317 301 older_then = safe_int(Optional.extract(older_then)) or 60
318 302 older_than_seconds = 60 * 60 * 24 * older_then
319 303
320 304 config = system_info.rhodecode_config().get_value()['value']['config']
321 305 session_model = user_sessions.get_session_handler(
322 306 config.get('beaker.session.type', 'memory'))(config)
323 307
324 308 backend = session_model.SESSION_TYPE
325 309 try:
326 310 cleaned = session_model.clean_sessions(
327 311 older_than_seconds=older_than_seconds)
328 312 return {'sessions_removed': cleaned, 'backend': backend}
329 313 except user_sessions.CleanupCommand as msg:
330 314 return {'cleanup_command': str(msg), 'backend': backend}
331 315 except Exception as e:
332 316 log.exception('Failed session cleanup')
333 317 raise JSONRPCError(
334 318 'Error occurred during session cleanup'
335 319 )
336 320
337 321
338 322 @jsonrpc_method()
339 323 def get_method(request, apiuser, pattern=Optional('*')):
340 324 """
341 325 Returns list of all available API methods. By default match pattern
342 326 os "*" but any other pattern can be specified. eg *comment* will return
343 327 all methods with comment inside them. If just single method is matched
344 328 returned data will also include method specification
345 329
346 330 This command can only be run using an |authtoken| with admin rights to
347 331 the specified repository.
348 332
349 333 This command takes the following options:
350 334
351 335 :param apiuser: This is filled automatically from the |authtoken|.
352 336 :type apiuser: AuthUser
353 337 :param pattern: pattern to match method names against
354 338 :type pattern: Optional("*")
355 339
356 340 Example output:
357 341
358 342 .. code-block:: bash
359 343
360 344 id : <id_given_in_input>
361 345 "result": [
362 346 "changeset_comment",
363 347 "comment_pull_request",
364 348 "comment_commit"
365 349 ]
366 350 error : null
367 351
368 352 .. code-block:: bash
369 353
370 354 id : <id_given_in_input>
371 355 "result": [
372 356 "comment_commit",
373 357 {
374 358 "apiuser": "<RequiredType>",
375 359 "comment_type": "<Optional:u'note'>",
376 360 "commit_id": "<RequiredType>",
377 361 "message": "<RequiredType>",
378 362 "repoid": "<RequiredType>",
379 363 "request": "<RequiredType>",
380 364 "resolves_comment_id": "<Optional:None>",
381 365 "status": "<Optional:None>",
382 366 "userid": "<Optional:<OptionalAttr:apiuser>>"
383 367 }
384 368 ]
385 369 error : null
386 370 """
387 371 from rhodecode.config import patches
388 372 inspect = patches.inspect_getargspec()
389 373
390 374 if not has_superadmin_permission(apiuser):
391 375 raise JSONRPCForbidden()
392 376
393 377 pattern = Optional.extract(pattern)
394 378
395 379 matches = find_methods(request.registry.jsonrpc_methods, pattern)
396 380
397 381 args_desc = []
398 382 matches_keys = list(matches.keys())
399 383 if len(matches_keys) == 1:
400 384 func = matches[matches_keys[0]]
401 385
402 386 argspec = inspect.getargspec(func)
403 387 arglist = argspec[0]
404 388 defaults = list(map(repr, argspec[3] or []))
405 389
406 390 default_empty = '<RequiredType>'
407 391
408 392 # kw arguments required by this method
409 393 func_kwargs = dict(itertools.zip_longest(
410 394 reversed(arglist), reversed(defaults), fillvalue=default_empty))
411 395 args_desc.append(func_kwargs)
412 396
413 397 return matches_keys + args_desc
414 398
415 399
416 400 @jsonrpc_method()
417 401 def store_exception(request, apiuser, exc_data_json, prefix=Optional('rhodecode')):
418 402 """
419 403 Stores sent exception inside the built-in exception tracker in |RCE| server.
420 404
421 405 This command can only be run using an |authtoken| with admin rights to
422 406 the specified repository.
423 407
424 408 This command takes the following options:
425 409
426 410 :param apiuser: This is filled automatically from the |authtoken|.
427 411 :type apiuser: AuthUser
428 412
429 413 :param exc_data_json: JSON data with exception e.g
430 414 {"exc_traceback": "Value `1` is not allowed", "exc_type_name": "ValueError"}
431 415 :type exc_data_json: JSON data
432 416
433 417 :param prefix: prefix for error type, e.g 'rhodecode', 'vcsserver', 'rhodecode-tools'
434 418 :type prefix: Optional("rhodecode")
435 419
436 420 Example output:
437 421
438 422 .. code-block:: bash
439 423
440 424 id : <id_given_in_input>
441 425 "result": {
442 426 "exc_id": 139718459226384,
443 427 "exc_url": "http://localhost:8080/_admin/settings/exceptions/139718459226384"
444 428 }
445 429 error : null
446 430 """
447 431 if not has_superadmin_permission(apiuser):
448 432 raise JSONRPCForbidden()
449 433
450 434 prefix = Optional.extract(prefix)
451 435 exc_id = exc_tracking.generate_id()
452 436
453 437 try:
454 438 exc_data = json.loads(exc_data_json)
455 439 except Exception:
456 440 log.error('Failed to parse JSON: %r', exc_data_json)
457 441 raise JSONRPCError('Failed to parse JSON data from exc_data_json field. '
458 442 'Please make sure it contains a valid JSON.')
459 443
460 444 try:
461 445 exc_traceback = exc_data['exc_traceback']
462 446 exc_type_name = exc_data['exc_type_name']
463 447 exc_value = ''
464 448 except KeyError as err:
465 449 raise JSONRPCError(
466 450 f'Missing exc_traceback, or exc_type_name '
467 451 f'in exc_data_json field. Missing: {err}')
468 452
469 453 class ExcType:
470 454 __name__ = exc_type_name
471 455
472 456 exc_info = (ExcType(), exc_value, exc_traceback)
473 457
474 458 exc_tracking._store_exception(
475 459 exc_id=exc_id, exc_info=exc_info, prefix=prefix)
476 460
477 461 exc_url = request.route_url(
478 462 'admin_settings_exception_tracker_show', exception_id=exc_id)
479 463 return {'exc_id': exc_id, 'exc_url': exc_url}
@@ -1,1124 +1,1133 b''
1 1 # Copyright (C) 2016-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 from rhodecode.apps._base import ADMIN_PREFIX
21 21 from rhodecode.apps._base.navigation import includeme as nav_includeme
22 22 from rhodecode.apps.admin.views.main_views import AdminMainView
23 23
24 24
25 25 def admin_routes(config):
26 26 """
27 27 Admin prefixed routes
28 28 """
29 29 from rhodecode.apps.admin.views.audit_logs import AdminAuditLogsView
30 30 from rhodecode.apps.admin.views.artifacts import AdminArtifactsView
31 31 from rhodecode.apps.admin.views.automation import AdminAutomationView
32 32 from rhodecode.apps.admin.views.scheduler import AdminSchedulerView
33 33 from rhodecode.apps.admin.views.defaults import AdminDefaultSettingsView
34 34 from rhodecode.apps.admin.views.exception_tracker import ExceptionsTrackerView
35 35 from rhodecode.apps.admin.views.open_source_licenses import OpenSourceLicensesAdminSettingsView
36 36 from rhodecode.apps.admin.views.permissions import AdminPermissionsView
37 37 from rhodecode.apps.admin.views.process_management import AdminProcessManagementView
38 38 from rhodecode.apps.admin.views.repo_groups import AdminRepoGroupsView
39 39 from rhodecode.apps.admin.views.repositories import AdminReposView
40 40 from rhodecode.apps.admin.views.sessions import AdminSessionSettingsView
41 41 from rhodecode.apps.admin.views.settings import AdminSettingsView
42 42 from rhodecode.apps.admin.views.svn_config import AdminSvnConfigView
43 43 from rhodecode.apps.admin.views.system_info import AdminSystemInfoSettingsView
44 44 from rhodecode.apps.admin.views.user_groups import AdminUserGroupsView
45 45 from rhodecode.apps.admin.views.users import AdminUsersView, UsersView
46 46 from rhodecode.apps.admin.views.security import AdminSecurityView
47 47
48 48 # Security EE feature
49 49
50 50 config.add_route(
51 51 'admin_security',
52 52 pattern='/security')
53 53 config.add_view(
54 54 AdminSecurityView,
55 55 attr='security',
56 56 route_name='admin_security', request_method='GET',
57 57 renderer='rhodecode:templates/admin/security/security.mako')
58 58
59 59 config.add_route(
60 60 name='admin_security_update',
61 61 pattern='/security/update')
62 62 config.add_view(
63 63 AdminSecurityView,
64 64 attr='security_update',
65 65 route_name='admin_security_update', request_method='POST',
66 66 renderer='rhodecode:templates/admin/security/security.mako')
67 67
68 68 config.add_route(
69 69 name='admin_security_modify_allowed_vcs_client_versions',
70 70 pattern=ADMIN_PREFIX + '/security/modify/allowed_vcs_client_versions')
71 71 config.add_view(
72 72 AdminSecurityView,
73 73 attr='vcs_whitelisted_client_versions_edit',
74 74 route_name='admin_security_modify_allowed_vcs_client_versions', request_method=('GET', 'POST'),
75 75 renderer='rhodecode:templates/admin/security/edit_allowed_vcs_client_versions.mako')
76 76
77 77
78 78 config.add_route(
79 79 name='admin_audit_logs',
80 80 pattern='/audit_logs')
81 81 config.add_view(
82 82 AdminAuditLogsView,
83 83 attr='admin_audit_logs',
84 84 route_name='admin_audit_logs', request_method='GET',
85 85 renderer='rhodecode:templates/admin/admin_audit_logs.mako')
86 86
87 87 config.add_route(
88 88 name='admin_audit_log_entry',
89 89 pattern='/audit_logs/{audit_log_id}')
90 90 config.add_view(
91 91 AdminAuditLogsView,
92 92 attr='admin_audit_log_entry',
93 93 route_name='admin_audit_log_entry', request_method='GET',
94 94 renderer='rhodecode:templates/admin/admin_audit_log_entry.mako')
95 95
96 96 # Artifacts EE feature
97 97 config.add_route(
98 98 'admin_artifacts',
99 99 pattern=ADMIN_PREFIX + '/artifacts')
100 100 config.add_route(
101 101 'admin_artifacts_show_all',
102 102 pattern=ADMIN_PREFIX + '/artifacts')
103 103 config.add_view(
104 104 AdminArtifactsView,
105 105 attr='artifacts',
106 106 route_name='admin_artifacts', request_method='GET',
107 107 renderer='rhodecode:templates/admin/artifacts/artifacts.mako')
108 108 config.add_view(
109 109 AdminArtifactsView,
110 110 attr='artifacts',
111 111 route_name='admin_artifacts_show_all', request_method='GET',
112 112 renderer='rhodecode:templates/admin/artifacts/artifacts.mako')
113 113
114 114 # EE views
115 115 config.add_route(
116 116 name='admin_artifacts_show_info',
117 117 pattern=ADMIN_PREFIX + '/artifacts/{uid}')
118 118 config.add_route(
119 119 name='admin_artifacts_delete',
120 120 pattern=ADMIN_PREFIX + '/artifacts/{uid}/delete')
121 121 config.add_route(
122 122 name='admin_artifacts_update',
123 123 pattern=ADMIN_PREFIX + '/artifacts/{uid}/update')
124 124
125 125 # Automation EE feature
126 126 config.add_route(
127 127 'admin_automation',
128 128 pattern=ADMIN_PREFIX + '/automation')
129 129 config.add_view(
130 130 AdminAutomationView,
131 131 attr='automation',
132 132 route_name='admin_automation', request_method='GET',
133 133 renderer='rhodecode:templates/admin/automation/automation.mako')
134 134
135 135 # Scheduler EE feature
136 136 config.add_route(
137 137 'admin_scheduler',
138 138 pattern=ADMIN_PREFIX + '/scheduler')
139 139 config.add_view(
140 140 AdminSchedulerView,
141 141 attr='scheduler',
142 142 route_name='admin_scheduler', request_method='GET',
143 143 renderer='rhodecode:templates/admin/scheduler/scheduler.mako')
144 144
145 145 config.add_route(
146 146 name='admin_settings_open_source',
147 147 pattern='/settings/open_source')
148 148 config.add_view(
149 149 OpenSourceLicensesAdminSettingsView,
150 150 attr='open_source_licenses',
151 151 route_name='admin_settings_open_source', request_method='GET',
152 152 renderer='rhodecode:templates/admin/settings/settings.mako')
153 153
154 154 config.add_route(
155 155 name='admin_settings_vcs_svn_generate_cfg',
156 156 pattern='/settings/vcs/svn_generate_cfg')
157 157 config.add_view(
158 158 AdminSvnConfigView,
159 159 attr='vcs_svn_generate_config',
160 160 route_name='admin_settings_vcs_svn_generate_cfg',
161 161 request_method='POST', renderer='json')
162 162
163 163 config.add_route(
164 164 name='admin_settings_system',
165 165 pattern='/settings/system')
166 166 config.add_view(
167 167 AdminSystemInfoSettingsView,
168 168 attr='settings_system_info',
169 169 route_name='admin_settings_system', request_method='GET',
170 170 renderer='rhodecode:templates/admin/settings/settings.mako')
171 171
172 172 config.add_route(
173 173 name='admin_settings_system_update',
174 174 pattern='/settings/system/updates')
175 175 config.add_view(
176 176 AdminSystemInfoSettingsView,
177 177 attr='settings_system_info_check_update',
178 178 route_name='admin_settings_system_update', request_method='GET',
179 179 renderer='rhodecode:templates/admin/settings/settings_system_update.mako')
180 180
181 181 config.add_route(
182 182 name='admin_settings_exception_tracker',
183 183 pattern='/settings/exceptions')
184 184 config.add_view(
185 185 ExceptionsTrackerView,
186 186 attr='browse_exceptions',
187 187 route_name='admin_settings_exception_tracker', request_method='GET',
188 188 renderer='rhodecode:templates/admin/settings/settings.mako')
189 189
190 190 config.add_route(
191 191 name='admin_settings_exception_tracker_delete_all',
192 192 pattern='/settings/exceptions_delete_all')
193 193 config.add_view(
194 194 ExceptionsTrackerView,
195 195 attr='exception_delete_all',
196 196 route_name='admin_settings_exception_tracker_delete_all', request_method='POST',
197 197 renderer='rhodecode:templates/admin/settings/settings.mako')
198 198
199 199 config.add_route(
200 200 name='admin_settings_exception_tracker_show',
201 201 pattern='/settings/exceptions/{exception_id}')
202 202 config.add_view(
203 203 ExceptionsTrackerView,
204 204 attr='exception_show',
205 205 route_name='admin_settings_exception_tracker_show', request_method='GET',
206 206 renderer='rhodecode:templates/admin/settings/settings.mako')
207 207
208 208 config.add_route(
209 209 name='admin_settings_exception_tracker_delete',
210 210 pattern='/settings/exceptions/{exception_id}/delete')
211 211 config.add_view(
212 212 ExceptionsTrackerView,
213 213 attr='exception_delete',
214 214 route_name='admin_settings_exception_tracker_delete', request_method='POST',
215 215 renderer='rhodecode:templates/admin/settings/settings.mako')
216 216
217 217 config.add_route(
218 218 name='admin_settings_sessions',
219 219 pattern='/settings/sessions')
220 220 config.add_view(
221 221 AdminSessionSettingsView,
222 222 attr='settings_sessions',
223 223 route_name='admin_settings_sessions', request_method='GET',
224 224 renderer='rhodecode:templates/admin/settings/settings.mako')
225 225
226 226 config.add_route(
227 227 name='admin_settings_sessions_cleanup',
228 228 pattern='/settings/sessions/cleanup')
229 229 config.add_view(
230 230 AdminSessionSettingsView,
231 231 attr='settings_sessions_cleanup',
232 232 route_name='admin_settings_sessions_cleanup', request_method='POST')
233 233
234 234 config.add_route(
235 235 name='admin_settings_process_management',
236 236 pattern='/settings/process_management')
237 237 config.add_view(
238 238 AdminProcessManagementView,
239 239 attr='process_management',
240 240 route_name='admin_settings_process_management', request_method='GET',
241 241 renderer='rhodecode:templates/admin/settings/settings.mako')
242 242
243 243 config.add_route(
244 244 name='admin_settings_process_management_data',
245 245 pattern='/settings/process_management/data')
246 246 config.add_view(
247 247 AdminProcessManagementView,
248 248 attr='process_management_data',
249 249 route_name='admin_settings_process_management_data', request_method='GET',
250 250 renderer='rhodecode:templates/admin/settings/settings_process_management_data.mako')
251 251
252 252 config.add_route(
253 253 name='admin_settings_process_management_signal',
254 254 pattern='/settings/process_management/signal')
255 255 config.add_view(
256 256 AdminProcessManagementView,
257 257 attr='process_management_signal',
258 258 route_name='admin_settings_process_management_signal',
259 259 request_method='POST', renderer='json_ext')
260 260
261 261 config.add_route(
262 262 name='admin_settings_process_management_master_signal',
263 263 pattern='/settings/process_management/master_signal')
264 264 config.add_view(
265 265 AdminProcessManagementView,
266 266 attr='process_management_master_signal',
267 267 route_name='admin_settings_process_management_master_signal',
268 268 request_method='POST', renderer='json_ext')
269 269
270 270 # default settings
271 271 config.add_route(
272 272 name='admin_defaults_repositories',
273 273 pattern='/defaults/repositories')
274 274 config.add_view(
275 275 AdminDefaultSettingsView,
276 276 attr='defaults_repository_show',
277 277 route_name='admin_defaults_repositories', request_method='GET',
278 278 renderer='rhodecode:templates/admin/defaults/defaults.mako')
279 279
280 280 config.add_route(
281 281 name='admin_defaults_repositories_update',
282 282 pattern='/defaults/repositories/update')
283 283 config.add_view(
284 284 AdminDefaultSettingsView,
285 285 attr='defaults_repository_update',
286 286 route_name='admin_defaults_repositories_update', request_method='POST',
287 287 renderer='rhodecode:templates/admin/defaults/defaults.mako')
288 288
289 289 # admin settings
290 290
291 291 config.add_route(
292 292 name='admin_settings',
293 293 pattern='/settings')
294 294 config.add_view(
295 295 AdminSettingsView,
296 296 attr='settings_global',
297 297 route_name='admin_settings', request_method='GET',
298 298 renderer='rhodecode:templates/admin/settings/settings.mako')
299 299
300 300 config.add_route(
301 301 name='admin_settings_update',
302 302 pattern='/settings/update')
303 303 config.add_view(
304 304 AdminSettingsView,
305 305 attr='settings_global_update',
306 306 route_name='admin_settings_update', request_method='POST',
307 307 renderer='rhodecode:templates/admin/settings/settings.mako')
308 308
309 309 config.add_route(
310 310 name='admin_settings_global',
311 311 pattern='/settings/global')
312 312 config.add_view(
313 313 AdminSettingsView,
314 314 attr='settings_global',
315 315 route_name='admin_settings_global', request_method='GET',
316 316 renderer='rhodecode:templates/admin/settings/settings.mako')
317 317
318 318 config.add_route(
319 319 name='admin_settings_global_update',
320 320 pattern='/settings/global/update')
321 321 config.add_view(
322 322 AdminSettingsView,
323 323 attr='settings_global_update',
324 324 route_name='admin_settings_global_update', request_method='POST',
325 325 renderer='rhodecode:templates/admin/settings/settings.mako')
326 326
327 327 config.add_route(
328 328 name='admin_settings_vcs',
329 329 pattern='/settings/vcs')
330 330 config.add_view(
331 331 AdminSettingsView,
332 332 attr='settings_vcs',
333 333 route_name='admin_settings_vcs', request_method='GET',
334 334 renderer='rhodecode:templates/admin/settings/settings.mako')
335 335
336 336 config.add_route(
337 337 name='admin_settings_vcs_update',
338 338 pattern='/settings/vcs/update')
339 339 config.add_view(
340 340 AdminSettingsView,
341 341 attr='settings_vcs_update',
342 342 route_name='admin_settings_vcs_update', request_method='POST',
343 343 renderer='rhodecode:templates/admin/settings/settings.mako')
344 344
345 345 config.add_route(
346 346 name='admin_settings_vcs_svn_pattern_delete',
347 347 pattern='/settings/vcs/svn_pattern_delete')
348 348 config.add_view(
349 349 AdminSettingsView,
350 350 attr='settings_vcs_delete_svn_pattern',
351 351 route_name='admin_settings_vcs_svn_pattern_delete', request_method='POST',
352 352 renderer='json_ext', xhr=True)
353 353
354 354 config.add_route(
355 355 name='admin_settings_mapping',
356 356 pattern='/settings/mapping')
357 357 config.add_view(
358 358 AdminSettingsView,
359 359 attr='settings_mapping',
360 360 route_name='admin_settings_mapping', request_method='GET',
361 361 renderer='rhodecode:templates/admin/settings/settings.mako')
362 362
363 363 config.add_route(
364 name='admin_settings_mapping_update',
365 pattern='/settings/mapping/update')
364 name='admin_settings_mapping_create',
365 pattern='/settings/mapping/create')
366 366 config.add_view(
367 367 AdminSettingsView,
368 attr='settings_mapping_update',
369 route_name='admin_settings_mapping_update', request_method='POST',
368 attr='settings_mapping_create',
369 route_name='admin_settings_mapping_create', request_method='POST',
370 renderer='rhodecode:templates/admin/settings/settings.mako')
371
372 config.add_route(
373 name='admin_settings_mapping_cleanup',
374 pattern='/settings/mapping/cleanup')
375 config.add_view(
376 AdminSettingsView,
377 attr='settings_mapping_cleanup',
378 route_name='admin_settings_mapping_cleanup', request_method='POST',
370 379 renderer='rhodecode:templates/admin/settings/settings.mako')
371 380
372 381 config.add_route(
373 382 name='admin_settings_visual',
374 383 pattern='/settings/visual')
375 384 config.add_view(
376 385 AdminSettingsView,
377 386 attr='settings_visual',
378 387 route_name='admin_settings_visual', request_method='GET',
379 388 renderer='rhodecode:templates/admin/settings/settings.mako')
380 389
381 390 config.add_route(
382 391 name='admin_settings_visual_update',
383 392 pattern='/settings/visual/update')
384 393 config.add_view(
385 394 AdminSettingsView,
386 395 attr='settings_visual_update',
387 396 route_name='admin_settings_visual_update', request_method='POST',
388 397 renderer='rhodecode:templates/admin/settings/settings.mako')
389 398
390 399 config.add_route(
391 400 name='admin_settings_issuetracker',
392 401 pattern='/settings/issue-tracker')
393 402 config.add_view(
394 403 AdminSettingsView,
395 404 attr='settings_issuetracker',
396 405 route_name='admin_settings_issuetracker', request_method='GET',
397 406 renderer='rhodecode:templates/admin/settings/settings.mako')
398 407
399 408 config.add_route(
400 409 name='admin_settings_issuetracker_update',
401 410 pattern='/settings/issue-tracker/update')
402 411 config.add_view(
403 412 AdminSettingsView,
404 413 attr='settings_issuetracker_update',
405 414 route_name='admin_settings_issuetracker_update', request_method='POST',
406 415 renderer='rhodecode:templates/admin/settings/settings.mako')
407 416
408 417 config.add_route(
409 418 name='admin_settings_issuetracker_test',
410 419 pattern='/settings/issue-tracker/test')
411 420 config.add_view(
412 421 AdminSettingsView,
413 422 attr='settings_issuetracker_test',
414 423 route_name='admin_settings_issuetracker_test', request_method='POST',
415 424 renderer='string', xhr=True)
416 425
417 426 config.add_route(
418 427 name='admin_settings_issuetracker_delete',
419 428 pattern='/settings/issue-tracker/delete')
420 429 config.add_view(
421 430 AdminSettingsView,
422 431 attr='settings_issuetracker_delete',
423 432 route_name='admin_settings_issuetracker_delete', request_method='POST',
424 433 renderer='json_ext', xhr=True)
425 434
426 435 config.add_route(
427 436 name='admin_settings_email',
428 437 pattern='/settings/email')
429 438 config.add_view(
430 439 AdminSettingsView,
431 440 attr='settings_email',
432 441 route_name='admin_settings_email', request_method='GET',
433 442 renderer='rhodecode:templates/admin/settings/settings.mako')
434 443
435 444 config.add_route(
436 445 name='admin_settings_email_update',
437 446 pattern='/settings/email/update')
438 447 config.add_view(
439 448 AdminSettingsView,
440 449 attr='settings_email_update',
441 450 route_name='admin_settings_email_update', request_method='POST',
442 451 renderer='rhodecode:templates/admin/settings/settings.mako')
443 452
444 453 config.add_route(
445 454 name='admin_settings_hooks',
446 455 pattern='/settings/hooks')
447 456 config.add_view(
448 457 AdminSettingsView,
449 458 attr='settings_hooks',
450 459 route_name='admin_settings_hooks', request_method='GET',
451 460 renderer='rhodecode:templates/admin/settings/settings.mako')
452 461
453 462 config.add_route(
454 463 name='admin_settings_hooks_update',
455 464 pattern='/settings/hooks/update')
456 465 config.add_view(
457 466 AdminSettingsView,
458 467 attr='settings_hooks_update',
459 468 route_name='admin_settings_hooks_update', request_method='POST',
460 469 renderer='rhodecode:templates/admin/settings/settings.mako')
461 470
462 471 config.add_route(
463 472 name='admin_settings_hooks_delete',
464 473 pattern='/settings/hooks/delete')
465 474 config.add_view(
466 475 AdminSettingsView,
467 476 attr='settings_hooks_update',
468 477 route_name='admin_settings_hooks_delete', request_method='POST',
469 478 renderer='rhodecode:templates/admin/settings/settings.mako')
470 479
471 480 config.add_route(
472 481 name='admin_settings_search',
473 482 pattern='/settings/search')
474 483 config.add_view(
475 484 AdminSettingsView,
476 485 attr='settings_search',
477 486 route_name='admin_settings_search', request_method='GET',
478 487 renderer='rhodecode:templates/admin/settings/settings.mako')
479 488
480 489 config.add_route(
481 490 name='admin_settings_labs',
482 491 pattern='/settings/labs')
483 492 config.add_view(
484 493 AdminSettingsView,
485 494 attr='settings_labs',
486 495 route_name='admin_settings_labs', request_method='GET',
487 496 renderer='rhodecode:templates/admin/settings/settings.mako')
488 497
489 498 config.add_route(
490 499 name='admin_settings_labs_update',
491 500 pattern='/settings/labs/update')
492 501 config.add_view(
493 502 AdminSettingsView,
494 503 attr='settings_labs_update',
495 504 route_name='admin_settings_labs_update', request_method='POST',
496 505 renderer='rhodecode:templates/admin/settings/settings.mako')
497 506
498 507 # global permissions
499 508
500 509 config.add_route(
501 510 name='admin_permissions_application',
502 511 pattern='/permissions/application')
503 512 config.add_view(
504 513 AdminPermissionsView,
505 514 attr='permissions_application',
506 515 route_name='admin_permissions_application', request_method='GET',
507 516 renderer='rhodecode:templates/admin/permissions/permissions.mako')
508 517
509 518 config.add_route(
510 519 name='admin_permissions_application_update',
511 520 pattern='/permissions/application/update')
512 521 config.add_view(
513 522 AdminPermissionsView,
514 523 attr='permissions_application_update',
515 524 route_name='admin_permissions_application_update', request_method='POST',
516 525 renderer='rhodecode:templates/admin/permissions/permissions.mako')
517 526
518 527 config.add_route(
519 528 name='admin_permissions_global',
520 529 pattern='/permissions/global')
521 530 config.add_view(
522 531 AdminPermissionsView,
523 532 attr='permissions_global',
524 533 route_name='admin_permissions_global', request_method='GET',
525 534 renderer='rhodecode:templates/admin/permissions/permissions.mako')
526 535
527 536 config.add_route(
528 537 name='admin_permissions_global_update',
529 538 pattern='/permissions/global/update')
530 539 config.add_view(
531 540 AdminPermissionsView,
532 541 attr='permissions_global_update',
533 542 route_name='admin_permissions_global_update', request_method='POST',
534 543 renderer='rhodecode:templates/admin/permissions/permissions.mako')
535 544
536 545 config.add_route(
537 546 name='admin_permissions_object',
538 547 pattern='/permissions/object')
539 548 config.add_view(
540 549 AdminPermissionsView,
541 550 attr='permissions_objects',
542 551 route_name='admin_permissions_object', request_method='GET',
543 552 renderer='rhodecode:templates/admin/permissions/permissions.mako')
544 553
545 554 config.add_route(
546 555 name='admin_permissions_object_update',
547 556 pattern='/permissions/object/update')
548 557 config.add_view(
549 558 AdminPermissionsView,
550 559 attr='permissions_objects_update',
551 560 route_name='admin_permissions_object_update', request_method='POST',
552 561 renderer='rhodecode:templates/admin/permissions/permissions.mako')
553 562
554 563 # Branch perms EE feature
555 564 config.add_route(
556 565 name='admin_permissions_branch',
557 566 pattern='/permissions/branch')
558 567 config.add_view(
559 568 AdminPermissionsView,
560 569 attr='permissions_branch',
561 570 route_name='admin_permissions_branch', request_method='GET',
562 571 renderer='rhodecode:templates/admin/permissions/permissions.mako')
563 572
564 573 config.add_route(
565 574 name='admin_permissions_ips',
566 575 pattern='/permissions/ips')
567 576 config.add_view(
568 577 AdminPermissionsView,
569 578 attr='permissions_ips',
570 579 route_name='admin_permissions_ips', request_method='GET',
571 580 renderer='rhodecode:templates/admin/permissions/permissions.mako')
572 581
573 582 config.add_route(
574 583 name='admin_permissions_overview',
575 584 pattern='/permissions/overview')
576 585 config.add_view(
577 586 AdminPermissionsView,
578 587 attr='permissions_overview',
579 588 route_name='admin_permissions_overview', request_method='GET',
580 589 renderer='rhodecode:templates/admin/permissions/permissions.mako')
581 590
582 591 config.add_route(
583 592 name='admin_permissions_auth_token_access',
584 593 pattern='/permissions/auth_token_access')
585 594 config.add_view(
586 595 AdminPermissionsView,
587 596 attr='auth_token_access',
588 597 route_name='admin_permissions_auth_token_access', request_method='GET',
589 598 renderer='rhodecode:templates/admin/permissions/permissions.mako')
590 599
591 600 config.add_route(
592 601 name='admin_permissions_ssh_keys',
593 602 pattern='/permissions/ssh_keys')
594 603 config.add_view(
595 604 AdminPermissionsView,
596 605 attr='ssh_keys',
597 606 route_name='admin_permissions_ssh_keys', request_method='GET',
598 607 renderer='rhodecode:templates/admin/permissions/permissions.mako')
599 608
600 609 config.add_route(
601 610 name='admin_permissions_ssh_keys_data',
602 611 pattern='/permissions/ssh_keys/data')
603 612 config.add_view(
604 613 AdminPermissionsView,
605 614 attr='ssh_keys_data',
606 615 route_name='admin_permissions_ssh_keys_data', request_method='GET',
607 616 renderer='json_ext', xhr=True)
608 617
609 618 config.add_route(
610 619 name='admin_permissions_ssh_keys_update',
611 620 pattern='/permissions/ssh_keys/update')
612 621 config.add_view(
613 622 AdminPermissionsView,
614 623 attr='ssh_keys_update',
615 624 route_name='admin_permissions_ssh_keys_update', request_method='POST',
616 625 renderer='rhodecode:templates/admin/permissions/permissions.mako')
617 626
618 627 # users admin
619 628 config.add_route(
620 629 name='users',
621 630 pattern='/users')
622 631 config.add_view(
623 632 AdminUsersView,
624 633 attr='users_list',
625 634 route_name='users', request_method='GET',
626 635 renderer='rhodecode:templates/admin/users/users.mako')
627 636
628 637 config.add_route(
629 638 name='users_data',
630 639 pattern='/users_data')
631 640 config.add_view(
632 641 AdminUsersView,
633 642 attr='users_list_data',
634 643 # renderer defined below
635 644 route_name='users_data', request_method='GET',
636 645 renderer='json_ext', xhr=True)
637 646
638 647 config.add_route(
639 648 name='users_create',
640 649 pattern='/users/create')
641 650 config.add_view(
642 651 AdminUsersView,
643 652 attr='users_create',
644 653 route_name='users_create', request_method='POST',
645 654 renderer='rhodecode:templates/admin/users/user_add.mako')
646 655
647 656 config.add_route(
648 657 name='users_new',
649 658 pattern='/users/new')
650 659 config.add_view(
651 660 AdminUsersView,
652 661 attr='users_new',
653 662 route_name='users_new', request_method='GET',
654 663 renderer='rhodecode:templates/admin/users/user_add.mako')
655 664
656 665 # user management
657 666 config.add_route(
658 667 name='user_edit',
659 668 pattern=r'/users/{user_id:\d+}/edit',
660 669 user_route=True)
661 670 config.add_view(
662 671 UsersView,
663 672 attr='user_edit',
664 673 route_name='user_edit', request_method='GET',
665 674 renderer='rhodecode:templates/admin/users/user_edit.mako')
666 675
667 676 config.add_route(
668 677 name='user_edit_advanced',
669 678 pattern=r'/users/{user_id:\d+}/edit/advanced',
670 679 user_route=True)
671 680 config.add_view(
672 681 UsersView,
673 682 attr='user_edit_advanced',
674 683 route_name='user_edit_advanced', request_method='GET',
675 684 renderer='rhodecode:templates/admin/users/user_edit.mako')
676 685
677 686 config.add_route(
678 687 name='user_edit_global_perms',
679 688 pattern=r'/users/{user_id:\d+}/edit/global_permissions',
680 689 user_route=True)
681 690 config.add_view(
682 691 UsersView,
683 692 attr='user_edit_global_perms',
684 693 route_name='user_edit_global_perms', request_method='GET',
685 694 renderer='rhodecode:templates/admin/users/user_edit.mako')
686 695
687 696 config.add_route(
688 697 name='user_edit_global_perms_update',
689 698 pattern=r'/users/{user_id:\d+}/edit/global_permissions/update',
690 699 user_route=True)
691 700 config.add_view(
692 701 UsersView,
693 702 attr='user_edit_global_perms_update',
694 703 route_name='user_edit_global_perms_update', request_method='POST',
695 704 renderer='rhodecode:templates/admin/users/user_edit.mako')
696 705
697 706 config.add_route(
698 707 name='user_update',
699 708 pattern=r'/users/{user_id:\d+}/update',
700 709 user_route=True)
701 710 config.add_view(
702 711 UsersView,
703 712 attr='user_update',
704 713 route_name='user_update', request_method='POST',
705 714 renderer='rhodecode:templates/admin/users/user_edit.mako')
706 715
707 716 config.add_route(
708 717 name='user_delete',
709 718 pattern=r'/users/{user_id:\d+}/delete',
710 719 user_route=True)
711 720 config.add_view(
712 721 UsersView,
713 722 attr='user_delete',
714 723 route_name='user_delete', request_method='POST',
715 724 renderer='rhodecode:templates/admin/users/user_edit.mako')
716 725
717 726 config.add_route(
718 727 name='user_enable_force_password_reset',
719 728 pattern=r'/users/{user_id:\d+}/password_reset_enable',
720 729 user_route=True)
721 730 config.add_view(
722 731 UsersView,
723 732 attr='user_enable_force_password_reset',
724 733 route_name='user_enable_force_password_reset', request_method='POST',
725 734 renderer='rhodecode:templates/admin/users/user_edit.mako')
726 735
727 736 config.add_route(
728 737 name='user_disable_force_password_reset',
729 738 pattern=r'/users/{user_id:\d+}/password_reset_disable',
730 739 user_route=True)
731 740 config.add_view(
732 741 UsersView,
733 742 attr='user_disable_force_password_reset',
734 743 route_name='user_disable_force_password_reset', request_method='POST',
735 744 renderer='rhodecode:templates/admin/users/user_edit.mako')
736 745
737 746 config.add_route(
738 747 name='user_create_personal_repo_group',
739 748 pattern=r'/users/{user_id:\d+}/create_repo_group',
740 749 user_route=True)
741 750 config.add_view(
742 751 UsersView,
743 752 attr='user_create_personal_repo_group',
744 753 route_name='user_create_personal_repo_group', request_method='POST',
745 754 renderer='rhodecode:templates/admin/users/user_edit.mako')
746 755
747 756 # user notice
748 757 config.add_route(
749 758 name='user_notice_dismiss',
750 759 pattern=r'/users/{user_id:\d+}/notice_dismiss',
751 760 user_route=True)
752 761 config.add_view(
753 762 UsersView,
754 763 attr='user_notice_dismiss',
755 764 route_name='user_notice_dismiss', request_method='POST',
756 765 renderer='json_ext', xhr=True)
757 766
758 767 # user auth tokens
759 768 config.add_route(
760 769 name='edit_user_auth_tokens',
761 770 pattern=r'/users/{user_id:\d+}/edit/auth_tokens',
762 771 user_route=True)
763 772 config.add_view(
764 773 UsersView,
765 774 attr='auth_tokens',
766 775 route_name='edit_user_auth_tokens', request_method='GET',
767 776 renderer='rhodecode:templates/admin/users/user_edit.mako')
768 777
769 778 config.add_route(
770 779 name='edit_user_auth_tokens_view',
771 780 pattern=r'/users/{user_id:\d+}/edit/auth_tokens/view',
772 781 user_route=True)
773 782 config.add_view(
774 783 UsersView,
775 784 attr='auth_tokens_view',
776 785 route_name='edit_user_auth_tokens_view', request_method='POST',
777 786 renderer='json_ext', xhr=True)
778 787
779 788 config.add_route(
780 789 name='edit_user_auth_tokens_add',
781 790 pattern=r'/users/{user_id:\d+}/edit/auth_tokens/new',
782 791 user_route=True)
783 792 config.add_view(
784 793 UsersView,
785 794 attr='auth_tokens_add',
786 795 route_name='edit_user_auth_tokens_add', request_method='POST')
787 796
788 797 config.add_route(
789 798 name='edit_user_auth_tokens_delete',
790 799 pattern=r'/users/{user_id:\d+}/edit/auth_tokens/delete',
791 800 user_route=True)
792 801 config.add_view(
793 802 UsersView,
794 803 attr='auth_tokens_delete',
795 804 route_name='edit_user_auth_tokens_delete', request_method='POST')
796 805
797 806 # user ssh keys
798 807 config.add_route(
799 808 name='edit_user_ssh_keys',
800 809 pattern=r'/users/{user_id:\d+}/edit/ssh_keys',
801 810 user_route=True)
802 811 config.add_view(
803 812 UsersView,
804 813 attr='ssh_keys',
805 814 route_name='edit_user_ssh_keys', request_method='GET',
806 815 renderer='rhodecode:templates/admin/users/user_edit.mako')
807 816
808 817 config.add_route(
809 818 name='edit_user_ssh_keys_generate_keypair',
810 819 pattern=r'/users/{user_id:\d+}/edit/ssh_keys/generate',
811 820 user_route=True)
812 821 config.add_view(
813 822 UsersView,
814 823 attr='ssh_keys_generate_keypair',
815 824 route_name='edit_user_ssh_keys_generate_keypair', request_method='GET',
816 825 renderer='rhodecode:templates/admin/users/user_edit.mako')
817 826
818 827 config.add_route(
819 828 name='edit_user_ssh_keys_add',
820 829 pattern=r'/users/{user_id:\d+}/edit/ssh_keys/new',
821 830 user_route=True)
822 831 config.add_view(
823 832 UsersView,
824 833 attr='ssh_keys_add',
825 834 route_name='edit_user_ssh_keys_add', request_method='POST')
826 835
827 836 config.add_route(
828 837 name='edit_user_ssh_keys_delete',
829 838 pattern=r'/users/{user_id:\d+}/edit/ssh_keys/delete',
830 839 user_route=True)
831 840 config.add_view(
832 841 UsersView,
833 842 attr='ssh_keys_delete',
834 843 route_name='edit_user_ssh_keys_delete', request_method='POST')
835 844
836 845 # user emails
837 846 config.add_route(
838 847 name='edit_user_emails',
839 848 pattern=r'/users/{user_id:\d+}/edit/emails',
840 849 user_route=True)
841 850 config.add_view(
842 851 UsersView,
843 852 attr='emails',
844 853 route_name='edit_user_emails', request_method='GET',
845 854 renderer='rhodecode:templates/admin/users/user_edit.mako')
846 855
847 856 config.add_route(
848 857 name='edit_user_emails_add',
849 858 pattern=r'/users/{user_id:\d+}/edit/emails/new',
850 859 user_route=True)
851 860 config.add_view(
852 861 UsersView,
853 862 attr='emails_add',
854 863 route_name='edit_user_emails_add', request_method='POST')
855 864
856 865 config.add_route(
857 866 name='edit_user_emails_delete',
858 867 pattern=r'/users/{user_id:\d+}/edit/emails/delete',
859 868 user_route=True)
860 869 config.add_view(
861 870 UsersView,
862 871 attr='emails_delete',
863 872 route_name='edit_user_emails_delete', request_method='POST')
864 873
865 874 # user IPs
866 875 config.add_route(
867 876 name='edit_user_ips',
868 877 pattern=r'/users/{user_id:\d+}/edit/ips',
869 878 user_route=True)
870 879 config.add_view(
871 880 UsersView,
872 881 attr='ips',
873 882 route_name='edit_user_ips', request_method='GET',
874 883 renderer='rhodecode:templates/admin/users/user_edit.mako')
875 884
876 885 config.add_route(
877 886 name='edit_user_ips_add',
878 887 pattern=r'/users/{user_id:\d+}/edit/ips/new',
879 888 user_route_with_default=True) # enabled for default user too
880 889 config.add_view(
881 890 UsersView,
882 891 attr='ips_add',
883 892 route_name='edit_user_ips_add', request_method='POST')
884 893
885 894 config.add_route(
886 895 name='edit_user_ips_delete',
887 896 pattern=r'/users/{user_id:\d+}/edit/ips/delete',
888 897 user_route_with_default=True) # enabled for default user too
889 898 config.add_view(
890 899 UsersView,
891 900 attr='ips_delete',
892 901 route_name='edit_user_ips_delete', request_method='POST')
893 902
894 903 # user perms
895 904 config.add_route(
896 905 name='edit_user_perms_summary',
897 906 pattern=r'/users/{user_id:\d+}/edit/permissions_summary',
898 907 user_route=True)
899 908 config.add_view(
900 909 UsersView,
901 910 attr='user_perms_summary',
902 911 route_name='edit_user_perms_summary', request_method='GET',
903 912 renderer='rhodecode:templates/admin/users/user_edit.mako')
904 913
905 914 config.add_route(
906 915 name='edit_user_perms_summary_json',
907 916 pattern=r'/users/{user_id:\d+}/edit/permissions_summary/json',
908 917 user_route=True)
909 918 config.add_view(
910 919 UsersView,
911 920 attr='user_perms_summary_json',
912 921 route_name='edit_user_perms_summary_json', request_method='GET',
913 922 renderer='json_ext')
914 923
915 924 # user user groups management
916 925 config.add_route(
917 926 name='edit_user_groups_management',
918 927 pattern=r'/users/{user_id:\d+}/edit/groups_management',
919 928 user_route=True)
920 929 config.add_view(
921 930 UsersView,
922 931 attr='groups_management',
923 932 route_name='edit_user_groups_management', request_method='GET',
924 933 renderer='rhodecode:templates/admin/users/user_edit.mako')
925 934
926 935 config.add_route(
927 936 name='edit_user_groups_management_updates',
928 937 pattern=r'/users/{user_id:\d+}/edit/edit_user_groups_management/updates',
929 938 user_route=True)
930 939 config.add_view(
931 940 UsersView,
932 941 attr='groups_management_updates',
933 942 route_name='edit_user_groups_management_updates', request_method='POST')
934 943
935 944 # user audit logs
936 945 config.add_route(
937 946 name='edit_user_audit_logs',
938 947 pattern=r'/users/{user_id:\d+}/edit/audit', user_route=True)
939 948 config.add_view(
940 949 UsersView,
941 950 attr='user_audit_logs',
942 951 route_name='edit_user_audit_logs', request_method='GET',
943 952 renderer='rhodecode:templates/admin/users/user_edit.mako')
944 953
945 954 config.add_route(
946 955 name='edit_user_audit_logs_download',
947 956 pattern=r'/users/{user_id:\d+}/edit/audit/download', user_route=True)
948 957 config.add_view(
949 958 UsersView,
950 959 attr='user_audit_logs_download',
951 960 route_name='edit_user_audit_logs_download', request_method='GET',
952 961 renderer='string')
953 962
954 963 # user caches
955 964 config.add_route(
956 965 name='edit_user_caches',
957 966 pattern=r'/users/{user_id:\d+}/edit/caches',
958 967 user_route=True)
959 968 config.add_view(
960 969 UsersView,
961 970 attr='user_caches',
962 971 route_name='edit_user_caches', request_method='GET',
963 972 renderer='rhodecode:templates/admin/users/user_edit.mako')
964 973
965 974 config.add_route(
966 975 name='edit_user_caches_update',
967 976 pattern=r'/users/{user_id:\d+}/edit/caches/update',
968 977 user_route=True)
969 978 config.add_view(
970 979 UsersView,
971 980 attr='user_caches_update',
972 981 route_name='edit_user_caches_update', request_method='POST')
973 982
974 983 # user-groups admin
975 984 config.add_route(
976 985 name='user_groups',
977 986 pattern='/user_groups')
978 987 config.add_view(
979 988 AdminUserGroupsView,
980 989 attr='user_groups_list',
981 990 route_name='user_groups', request_method='GET',
982 991 renderer='rhodecode:templates/admin/user_groups/user_groups.mako')
983 992
984 993 config.add_route(
985 994 name='user_groups_data',
986 995 pattern='/user_groups_data')
987 996 config.add_view(
988 997 AdminUserGroupsView,
989 998 attr='user_groups_list_data',
990 999 route_name='user_groups_data', request_method='GET',
991 1000 renderer='json_ext', xhr=True)
992 1001
993 1002 config.add_route(
994 1003 name='user_groups_new',
995 1004 pattern='/user_groups/new')
996 1005 config.add_view(
997 1006 AdminUserGroupsView,
998 1007 attr='user_groups_new',
999 1008 route_name='user_groups_new', request_method='GET',
1000 1009 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
1001 1010
1002 1011 config.add_route(
1003 1012 name='user_groups_create',
1004 1013 pattern='/user_groups/create')
1005 1014 config.add_view(
1006 1015 AdminUserGroupsView,
1007 1016 attr='user_groups_create',
1008 1017 route_name='user_groups_create', request_method='POST',
1009 1018 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
1010 1019
1011 1020 # repos admin
1012 1021 config.add_route(
1013 1022 name='repos',
1014 1023 pattern='/repos')
1015 1024 config.add_view(
1016 1025 AdminReposView,
1017 1026 attr='repository_list',
1018 1027 route_name='repos', request_method='GET',
1019 1028 renderer='rhodecode:templates/admin/repos/repos.mako')
1020 1029
1021 1030 config.add_route(
1022 1031 name='repos_data',
1023 1032 pattern='/repos_data')
1024 1033 config.add_view(
1025 1034 AdminReposView,
1026 1035 attr='repository_list_data',
1027 1036 route_name='repos_data', request_method='GET',
1028 1037 renderer='json_ext', xhr=True)
1029 1038
1030 1039 config.add_route(
1031 1040 name='repo_new',
1032 1041 pattern='/repos/new')
1033 1042 config.add_view(
1034 1043 AdminReposView,
1035 1044 attr='repository_new',
1036 1045 route_name='repo_new', request_method='GET',
1037 1046 renderer='rhodecode:templates/admin/repos/repo_add.mako')
1038 1047
1039 1048 config.add_route(
1040 1049 name='repo_create',
1041 1050 pattern='/repos/create')
1042 1051 config.add_view(
1043 1052 AdminReposView,
1044 1053 attr='repository_create',
1045 1054 route_name='repo_create', request_method='POST',
1046 1055 renderer='rhodecode:templates/admin/repos/repos.mako')
1047 1056
1048 1057 # repo groups admin
1049 1058 config.add_route(
1050 1059 name='repo_groups',
1051 1060 pattern='/repo_groups')
1052 1061 config.add_view(
1053 1062 AdminRepoGroupsView,
1054 1063 attr='repo_group_list',
1055 1064 route_name='repo_groups', request_method='GET',
1056 1065 renderer='rhodecode:templates/admin/repo_groups/repo_groups.mako')
1057 1066
1058 1067 config.add_route(
1059 1068 name='repo_groups_data',
1060 1069 pattern='/repo_groups_data')
1061 1070 config.add_view(
1062 1071 AdminRepoGroupsView,
1063 1072 attr='repo_group_list_data',
1064 1073 route_name='repo_groups_data', request_method='GET',
1065 1074 renderer='json_ext', xhr=True)
1066 1075
1067 1076 config.add_route(
1068 1077 name='repo_group_new',
1069 1078 pattern='/repo_group/new')
1070 1079 config.add_view(
1071 1080 AdminRepoGroupsView,
1072 1081 attr='repo_group_new',
1073 1082 route_name='repo_group_new', request_method='GET',
1074 1083 renderer='rhodecode:templates/admin/repo_groups/repo_group_add.mako')
1075 1084
1076 1085 config.add_route(
1077 1086 name='repo_group_create',
1078 1087 pattern='/repo_group/create')
1079 1088 config.add_view(
1080 1089 AdminRepoGroupsView,
1081 1090 attr='repo_group_create',
1082 1091 route_name='repo_group_create', request_method='POST',
1083 1092 renderer='rhodecode:templates/admin/repo_groups/repo_group_add.mako')
1084 1093
1085 1094
1086 1095 def includeme(config):
# Pyramid extension entry point: registers admin navigation, the admin
# landing page, and backward-compatible global pull-request redirect routes,
# then mounts all admin sub-routes under ADMIN_PREFIX.
1087 1096 # Create admin navigation registry and add it to the pyramid registry.
1088 1097 nav_includeme(config)
1089 1098
1090 1099 # main admin routes
1091 1100 config.add_route(
1092 1101 name='admin_home', pattern=ADMIN_PREFIX)
1093 1102 config.add_view(
1094 1103 AdminMainView,
1095 1104 attr='admin_main',
1096 1105 route_name='admin_home', request_method='GET',
1097 1106 renderer='rhodecode:templates/admin/main.mako')
1098 1107
1099 1108 # pr global redirect
# NOTE: three route variants are kept alive so old bookmarked URLs
# (/pull_requests/<id> and /pull-requests/<id>) still resolve; all of them
# dispatch to the same AdminMainView.pull_requests handler.
1100 1109 config.add_route(
1101 1110 name='pull_requests_global_0', # backward compat
1102 1111 pattern=ADMIN_PREFIX + r'/pull_requests/{pull_request_id:\d+}')
1103 1112 config.add_view(
1104 1113 AdminMainView,
1105 1114 attr='pull_requests',
1106 1115 route_name='pull_requests_global_0', request_method='GET')
1107 1116
1108 1117 config.add_route(
1109 1118 name='pull_requests_global_1', # backward compat
1110 1119 pattern=ADMIN_PREFIX + r'/pull-requests/{pull_request_id:\d+}')
1111 1120 config.add_view(
1112 1121 AdminMainView,
1113 1122 attr='pull_requests',
1114 1123 route_name='pull_requests_global_1', request_method='GET')
1115 1124
1116 1125 config.add_route(
1117 1126 name='pull_requests_global',
1118 1127 pattern=ADMIN_PREFIX + r'/pull-request/{pull_request_id:\d+}')
1119 1128 config.add_view(
1120 1129 AdminMainView,
1121 1130 attr='pull_requests',
1122 1131 route_name='pull_requests_global', request_method='GET')
1123 1132
# Mount every route defined in admin_routes() under the /_admin prefix.
1124 1133 config.include(admin_routes, route_prefix=ADMIN_PREFIX)
@@ -1,496 +1,498 b''
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import urllib.request
20 20 import urllib.parse
21 21 import urllib.error
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.apps._base import ADMIN_PREFIX
27 27 from rhodecode.lib import auth
28 28 from rhodecode.lib.utils2 import safe_str
29 29 from rhodecode.lib import helpers as h
30 30 from rhodecode.model.db import (
31 31 Repository, RepoGroup, UserRepoToPerm, User, Permission)
32 32 from rhodecode.model.meta import Session
33 33 from rhodecode.model.repo import RepoModel
34 34 from rhodecode.model.repo_group import RepoGroupModel
35 35 from rhodecode.model.user import UserModel
36 36 from rhodecode.tests import (
37 37 login_user_session, assert_session_flash, TEST_USER_ADMIN_LOGIN,
38 38 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
39 39 from rhodecode.tests.fixtures.rc_fixture import Fixture, error_function
40 40 from rhodecode.tests.utils import repo_on_filesystem
41 41 from rhodecode.tests.routes import route_path
42 42
43 43 fixture = Fixture()
44 44
45 45
46 46 def _get_permission_for_user(user, repo):
47 47 perm = UserRepoToPerm.query()\
48 48 .filter(UserRepoToPerm.repository ==
49 49 Repository.get_by_repo_name(repo))\
50 50 .filter(UserRepoToPerm.user == User.get_by_username(user))\
51 51 .all()
52 52 return perm
53 53
54 54
55 55 @pytest.mark.usefixtures("app")
56 56 class TestAdminRepos(object):
57 57
58 58 def test_repo_list(self, autologin_user, user_util, xhr_header):
59 59 repo = user_util.create_repo()
60 60 repo_name = repo.repo_name
61 61 response = self.app.get(
62 62 route_path('repos_data'), status=200,
63 63 extra_environ=xhr_header)
64 64
65 65 response.mustcontain(repo_name)
66 66
67 67 def test_create_page_restricted_to_single_backend(self, autologin_user, backend):
68 68 with mock.patch('rhodecode.BACKENDS', {'git': 'git'}):
69 69 response = self.app.get(route_path('repo_new'), status=200)
70 70 assert_response = response.assert_response()
71 71 element = assert_response.get_element('[name=repo_type]')
72 72 assert element.get('value') == 'git'
73 73
74 74 def test_create_page_non_restricted_backends(self, autologin_user, backend):
75 75 response = self.app.get(route_path('repo_new'), status=200)
76 76 assert_response = response.assert_response()
77 77 assert ['hg', 'git', 'svn'] == [x.get('value') for x in assert_response.get_elements('[name=repo_type]')]
78 78
79 79 @pytest.mark.parametrize(
80 80 "suffix", ['', 'xxa'], ids=['', 'non-ascii'])
81 81 def test_create(self, autologin_user, backend, suffix, csrf_token):
82 82 repo_name_unicode = backend.new_repo_name(suffix=suffix)
83 83 repo_name = repo_name_unicode
84 84
85 85 description_unicode = 'description for newly created repo' + suffix
86 86 description = description_unicode
87 87
88 88 response = self.app.post(
89 89 route_path('repo_create'),
90 90 fixture._get_repo_create_params(
91 91 repo_private=False,
92 92 repo_name=repo_name,
93 93 repo_type=backend.alias,
94 94 repo_description=description,
95 95 csrf_token=csrf_token),
96 96 status=302)
97 97
98 98 self.assert_repository_is_created_correctly(
99 99 repo_name, description, backend)
100 100
101 101 def test_create_numeric_name(self, autologin_user, backend, csrf_token):
102 102 numeric_repo = '1234'
103 103 repo_name = numeric_repo
104 104 description = 'description for newly created repo' + numeric_repo
105 105 self.app.post(
106 106 route_path('repo_create'),
107 107 fixture._get_repo_create_params(
108 108 repo_private=False,
109 109 repo_name=repo_name,
110 110 repo_type=backend.alias,
111 111 repo_description=description,
112 112 csrf_token=csrf_token))
113
114 self.assert_repository_is_created_correctly(
115 repo_name, description, backend)
113 try:
114 self.assert_repository_is_created_correctly(repo_name, description, backend)
115 finally:
116 RepoModel().delete(numeric_repo)
117 Session().commit()
116 118
117 119 @pytest.mark.parametrize("suffix", ['', '_Δ…Δ‡Δ™'], ids=['', 'non-ascii'])
118 120 def test_create_in_group(
119 121 self, autologin_user, backend, suffix, csrf_token):
120 122 # create GROUP
121 123 group_name = f'sometest_{backend.alias}'
122 124 gr = RepoGroupModel().create(group_name=group_name,
123 125 group_description='test',
124 126 owner=TEST_USER_ADMIN_LOGIN)
125 127 Session().commit()
126 128
127 129 repo_name = f'ingroup{suffix}'
128 130 repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
129 131 description = 'description for newly created repo'
130 132
131 133 self.app.post(
132 134 route_path('repo_create'),
133 135 fixture._get_repo_create_params(
134 136 repo_private=False,
135 137 repo_name=safe_str(repo_name),
136 138 repo_type=backend.alias,
137 139 repo_description=description,
138 140 repo_group=gr.group_id,
139 141 csrf_token=csrf_token))
140 142
141 143 # TODO: johbo: Cleanup work to fixture
142 144 try:
143 145 self.assert_repository_is_created_correctly(
144 146 repo_name_full, description, backend)
145 147
146 148 new_repo = RepoModel().get_by_repo_name(repo_name_full)
147 149 inherited_perms = UserRepoToPerm.query().filter(
148 150 UserRepoToPerm.repository_id == new_repo.repo_id).all()
149 151 assert len(inherited_perms) == 1
150 152 finally:
151 153 RepoModel().delete(repo_name_full)
152 154 RepoGroupModel().delete(group_name)
153 155 Session().commit()
154 156
155 157 def test_create_in_group_numeric_name(
156 158 self, autologin_user, backend, csrf_token):
157 159 # create GROUP
158 160 group_name = 'sometest_%s' % backend.alias
159 161 gr = RepoGroupModel().create(group_name=group_name,
160 162 group_description='test',
161 163 owner=TEST_USER_ADMIN_LOGIN)
162 164 Session().commit()
163 165
164 166 repo_name = '12345'
165 167 repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
166 168 description = 'description for newly created repo'
167 169 self.app.post(
168 170 route_path('repo_create'),
169 171 fixture._get_repo_create_params(
170 172 repo_private=False,
171 173 repo_name=repo_name,
172 174 repo_type=backend.alias,
173 175 repo_description=description,
174 176 repo_group=gr.group_id,
175 177 csrf_token=csrf_token))
176 178
177 179 # TODO: johbo: Cleanup work to fixture
178 180 try:
179 181 self.assert_repository_is_created_correctly(
180 182 repo_name_full, description, backend)
181 183
182 184 new_repo = RepoModel().get_by_repo_name(repo_name_full)
183 185 inherited_perms = UserRepoToPerm.query()\
184 186 .filter(UserRepoToPerm.repository_id == new_repo.repo_id).all()
185 187 assert len(inherited_perms) == 1
186 188 finally:
187 189 RepoModel().delete(repo_name_full)
188 190 RepoGroupModel().delete(group_name)
189 191 Session().commit()
190 192
191 193 def test_create_in_group_without_needed_permissions(self, backend):
192 194 session = login_user_session(
193 195 self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
194 196 csrf_token = auth.get_csrf_token(session)
195 197 # revoke
196 198 user_model = UserModel()
197 199 # disable fork and create on default user
198 200 user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository')
199 201 user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none')
200 202 user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository')
201 203 user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none')
202 204
203 205 # disable on regular user
204 206 user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository')
205 207 user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none')
206 208 user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository')
207 209 user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none')
208 210 Session().commit()
209 211
210 212 # create GROUP
211 213 group_name = 'reg_sometest_%s' % backend.alias
212 214 gr = RepoGroupModel().create(group_name=group_name,
213 215 group_description='test',
214 216 owner=TEST_USER_ADMIN_LOGIN)
215 217 Session().commit()
216 218 repo_group_id = gr.group_id
217 219
218 220 group_name_allowed = 'reg_sometest_allowed_%s' % backend.alias
219 221 gr_allowed = RepoGroupModel().create(
220 222 group_name=group_name_allowed,
221 223 group_description='test',
222 224 owner=TEST_USER_REGULAR_LOGIN)
223 225 allowed_repo_group_id = gr_allowed.group_id
224 226 Session().commit()
225 227
226 228 repo_name = 'ingroup'
227 229 description = 'description for newly created repo'
228 230 response = self.app.post(
229 231 route_path('repo_create'),
230 232 fixture._get_repo_create_params(
231 233 repo_private=False,
232 234 repo_name=repo_name,
233 235 repo_type=backend.alias,
234 236 repo_description=description,
235 237 repo_group=repo_group_id,
236 238 csrf_token=csrf_token))
237 239
238 240 response.mustcontain('Invalid value')
239 241
240 242 # user is allowed to create in this group
241 243 repo_name = 'ingroup'
242 244 repo_name_full = RepoGroup.url_sep().join(
243 245 [group_name_allowed, repo_name])
244 246 description = 'description for newly created repo'
245 247 response = self.app.post(
246 248 route_path('repo_create'),
247 249 fixture._get_repo_create_params(
248 250 repo_private=False,
249 251 repo_name=repo_name,
250 252 repo_type=backend.alias,
251 253 repo_description=description,
252 254 repo_group=allowed_repo_group_id,
253 255 csrf_token=csrf_token))
254 256
255 257 # TODO: johbo: Cleanup in pytest fixture
256 258 try:
257 259 self.assert_repository_is_created_correctly(
258 260 repo_name_full, description, backend)
259 261
260 262 new_repo = RepoModel().get_by_repo_name(repo_name_full)
261 263 inherited_perms = UserRepoToPerm.query().filter(
262 264 UserRepoToPerm.repository_id == new_repo.repo_id).all()
263 265 assert len(inherited_perms) == 1
264 266
265 267 assert repo_on_filesystem(repo_name_full)
266 268 finally:
267 269 RepoModel().delete(repo_name_full)
268 270 RepoGroupModel().delete(group_name)
269 271 RepoGroupModel().delete(group_name_allowed)
270 272 Session().commit()
271 273
272 274 def test_create_in_group_inherit_permissions(self, autologin_user, backend,
273 275 csrf_token):
274 276 # create GROUP
275 277 group_name = 'sometest_%s' % backend.alias
276 278 gr = RepoGroupModel().create(group_name=group_name,
277 279 group_description='test',
278 280 owner=TEST_USER_ADMIN_LOGIN)
279 281 perm = Permission.get_by_key('repository.write')
280 282 RepoGroupModel().grant_user_permission(
281 283 gr, TEST_USER_REGULAR_LOGIN, perm)
282 284
283 285 # add repo permissions
284 286 Session().commit()
285 287 repo_group_id = gr.group_id
286 288 repo_name = 'ingroup_inherited_%s' % backend.alias
287 289 repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
288 290 description = 'description for newly created repo'
289 291 self.app.post(
290 292 route_path('repo_create'),
291 293 fixture._get_repo_create_params(
292 294 repo_private=False,
293 295 repo_name=repo_name,
294 296 repo_type=backend.alias,
295 297 repo_description=description,
296 298 repo_group=repo_group_id,
297 299 repo_copy_permissions=True,
298 300 csrf_token=csrf_token))
299 301
300 302 # TODO: johbo: Cleanup to pytest fixture
301 303 try:
302 304 self.assert_repository_is_created_correctly(
303 305 repo_name_full, description, backend)
304 306 except Exception:
305 307 RepoGroupModel().delete(group_name)
306 308 Session().commit()
307 309 raise
308 310
309 311 # check if inherited permissions are applied
310 312 new_repo = RepoModel().get_by_repo_name(repo_name_full)
311 313 inherited_perms = UserRepoToPerm.query().filter(
312 314 UserRepoToPerm.repository_id == new_repo.repo_id).all()
313 315 assert len(inherited_perms) == 2
314 316
315 317 assert TEST_USER_REGULAR_LOGIN in [
316 318 x.user.username for x in inherited_perms]
317 319 assert 'repository.write' in [
318 320 x.permission.permission_name for x in inherited_perms]
319 321
320 322 RepoModel().delete(repo_name_full)
321 323 RepoGroupModel().delete(group_name)
322 324 Session().commit()
323 325
324 326 @pytest.mark.xfail_backends(
325 327 "git", "hg", reason="Missing reposerver support")
326 328 def test_create_with_clone_uri(self, autologin_user, backend, reposerver,
327 329 csrf_token):
328 330 source_repo = backend.create_repo(number_of_commits=2)
329 331 source_repo_name = source_repo.repo_name
330 332 reposerver.serve(source_repo.scm_instance())
331 333
332 334 repo_name = backend.new_repo_name()
333 335 response = self.app.post(
334 336 route_path('repo_create'),
335 337 fixture._get_repo_create_params(
336 338 repo_private=False,
337 339 repo_name=repo_name,
338 340 repo_type=backend.alias,
339 341 repo_description='',
340 342 clone_uri=reposerver.url,
341 343 csrf_token=csrf_token),
342 344 status=302)
343 345
344 346 # Should be redirected to the creating page
345 347 response.mustcontain('repo_creating')
346 348
347 349 # Expecting that both repositories have same history
348 350 source_repo = RepoModel().get_by_repo_name(source_repo_name)
349 351 source_vcs = source_repo.scm_instance()
350 352 repo = RepoModel().get_by_repo_name(repo_name)
351 353 repo_vcs = repo.scm_instance()
352 354 assert source_vcs[0].message == repo_vcs[0].message
353 355 assert source_vcs.count() == repo_vcs.count()
354 356 assert source_vcs.commit_ids == repo_vcs.commit_ids
355 357
356 358 @pytest.mark.xfail_backends("svn", reason="Depends on import support")
357 359 def test_create_remote_repo_wrong_clone_uri(self, autologin_user, backend,
358 360 csrf_token):
359 361 repo_name = backend.new_repo_name()
360 362 description = 'description for newly created repo'
361 363 response = self.app.post(
362 364 route_path('repo_create'),
363 365 fixture._get_repo_create_params(
364 366 repo_private=False,
365 367 repo_name=repo_name,
366 368 repo_type=backend.alias,
367 369 repo_description=description,
368 370 clone_uri='http://repo.invalid/repo',
369 371 csrf_token=csrf_token))
370 372 response.mustcontain('invalid clone url')
371 373
372 374 @pytest.mark.xfail_backends("svn", reason="Depends on import support")
373 375 def test_create_remote_repo_wrong_clone_uri_hg_svn(
374 376 self, autologin_user, backend, csrf_token):
375 377 repo_name = backend.new_repo_name()
376 378 description = 'description for newly created repo'
377 379 response = self.app.post(
378 380 route_path('repo_create'),
379 381 fixture._get_repo_create_params(
380 382 repo_private=False,
381 383 repo_name=repo_name,
382 384 repo_type=backend.alias,
383 385 repo_description=description,
384 386 clone_uri='svn+http://svn.invalid/repo',
385 387 csrf_token=csrf_token))
386 388 response.mustcontain('invalid clone url')
387 389
388 390 def test_create_with_git_suffix(
389 391 self, autologin_user, backend, csrf_token):
390 392 repo_name = backend.new_repo_name() + ".git"
391 393 description = 'description for newly created repo'
392 394 response = self.app.post(
393 395 route_path('repo_create'),
394 396 fixture._get_repo_create_params(
395 397 repo_private=False,
396 398 repo_name=repo_name,
397 399 repo_type=backend.alias,
398 400 repo_description=description,
399 401 csrf_token=csrf_token))
400 402 response.mustcontain('Repository name cannot end with .git')
401 403
402 404 def test_default_user_cannot_access_private_repo_in_a_group(
403 405 self, autologin_user, user_util, backend):
404 406
405 407 group = user_util.create_repo_group()
406 408
407 409 repo = backend.create_repo(
408 410 repo_private=True, repo_group=group, repo_copy_permissions=True)
409 411
410 412 permissions = _get_permission_for_user(
411 413 user='default', repo=repo.repo_name)
412 414 assert len(permissions) == 1
413 415 assert permissions[0].permission.permission_name == 'repository.none'
414 416 assert permissions[0].repository.private is True
415 417
416 418 def test_create_on_top_level_without_permissions(self, backend):
417 419 session = login_user_session(
418 420 self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
419 421 csrf_token = auth.get_csrf_token(session)
420 422
421 423 # revoke
422 424 user_model = UserModel()
423 425 # disable fork and create on default user
424 426 user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository')
425 427 user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none')
426 428 user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository')
427 429 user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none')
428 430
429 431 # disable on regular user
430 432 user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository')
431 433 user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none')
432 434 user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository')
433 435 user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none')
434 436 Session().commit()
435 437
436 438 repo_name = backend.new_repo_name()
437 439 description = 'description for newly created repo'
438 440 response = self.app.post(
439 441 route_path('repo_create'),
440 442 fixture._get_repo_create_params(
441 443 repo_private=False,
442 444 repo_name=repo_name,
443 445 repo_type=backend.alias,
444 446 repo_description=description,
445 447 csrf_token=csrf_token))
446 448
447 449 response.mustcontain(
448 450 "You do not have the permission to store repositories in "
449 451 "the root location.")
450 452
451 453 @mock.patch.object(RepoModel, '_create_filesystem_repo', error_function)
452 454 def test_create_repo_when_filesystem_op_fails(
453 455 self, autologin_user, backend, csrf_token):
454 456 repo_name = backend.new_repo_name()
455 457 description = 'description for newly created repo'
456 458
457 459 response = self.app.post(
458 460 route_path('repo_create'),
459 461 fixture._get_repo_create_params(
460 462 repo_private=False,
461 463 repo_name=repo_name,
462 464 repo_type=backend.alias,
463 465 repo_description=description,
464 466 csrf_token=csrf_token))
465 467
466 468 assert_session_flash(
467 469 response, 'Error creating repository %s' % repo_name)
468 470 # repo must not be in db
469 471 assert backend.repo is None
470 472 # repo must not be in filesystem !
471 473 assert not repo_on_filesystem(repo_name)
472 474
473 475 def assert_repository_is_created_correctly(self, repo_name, description, backend):
474 476 url_quoted_repo_name = urllib.parse.quote(repo_name)
475 477
476 478 # run the check page that triggers the flash message
477 479 response = self.app.get(
478 480 route_path('repo_creating_check', repo_name=repo_name))
479 481 assert response.json == {'result': True}
480 482
481 483 flash_msg = 'Created repository <a href="/{}">{}</a>'.format(url_quoted_repo_name, repo_name)
482 484 assert_session_flash(response, flash_msg)
483 485
484 486 # test if the repo was created in the database
485 487 new_repo = RepoModel().get_by_repo_name(repo_name)
486 488
487 489 assert new_repo.repo_name == repo_name
488 490 assert new_repo.description == description
489 491
490 492 # test if the repository is visible in the list ?
491 493 response = self.app.get(
492 494 h.route_path('repo_summary', repo_name=repo_name))
493 495 response.mustcontain(repo_name)
494 496 response.mustcontain(backend.alias)
495 497
496 498 assert repo_on_filesystem(repo_name)
@@ -1,716 +1,733 b''
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 import logging
21 21 import collections
22 22
23 23 import datetime
24 24 import formencode
25 25 import formencode.htmlfill
26 26
27 27 import rhodecode
28 28
29 29 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
30 30 from pyramid.renderers import render
31 31 from pyramid.response import Response
32 32
33 33 from rhodecode.apps._base import BaseAppView
34 34 from rhodecode.apps._base.navigation import navigation_list
35 35 from rhodecode.apps.svn_support import config_keys
36 36 from rhodecode.lib import helpers as h
37 37 from rhodecode.lib.auth import (
38 38 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
39 39 from rhodecode.lib.celerylib import tasks, run_task
40 40 from rhodecode.lib.str_utils import safe_str
41 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path
41 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path, repo2db_cleanup
42 42 from rhodecode.lib.utils2 import str2bool, AttributeDict
43 43 from rhodecode.lib.index import searcher_from_config
44 44
45 45 from rhodecode.model.db import RhodeCodeUi, Repository
46 46 from rhodecode.model.forms import (ApplicationSettingsForm,
47 47 ApplicationUiSettingsForm, ApplicationVisualisationForm,
48 48 LabsSettingsForm, IssueTrackerPatternsForm)
49 49 from rhodecode.model.permission import PermissionModel
50 50 from rhodecode.model.repo_group import RepoGroupModel
51 51
52 52 from rhodecode.model.scm import ScmModel
53 53 from rhodecode.model.notification import EmailNotificationModel
54 54 from rhodecode.model.meta import Session
55 55 from rhodecode.model.settings import (
56 56 IssueTrackerSettingsModel, VcsSettingsModel, SettingNotFound,
57 57 SettingsModel)
58 58
59 59
60 60 log = logging.getLogger(__name__)
61 61
62 62
63 63 class AdminSettingsView(BaseAppView):
64 64
65 65 def load_default_context(self):
66 66 c = self._get_local_tmpl_context()
67 67 c.labs_active = str2bool(
68 68 rhodecode.CONFIG.get('labs_settings_active', 'true'))
69 69 c.navlist = navigation_list(self.request)
70 70 return c
71 71
72 72 @classmethod
73 73 def _get_ui_settings(cls):
74 74 ret = RhodeCodeUi.query().all()
75 75
76 76 if not ret:
77 77 raise Exception('Could not get application ui settings !')
78 78 settings = {
79 79 # legacy param that needs to be kept
80 80 'web_push_ssl': False,
81 81 'extensions_hgsubversion': False
82 82 }
83 83 for each in ret:
84 84 k = each.ui_key
85 85 v = each.ui_value
86 86 section = each.ui_section
87 87
88 88 # skip some options if they are defined
89 89 if f"{section}_{k}" in ['web_push_ssl', 'extensions_hgsubversion']:
90 90 continue
91 91
92 92 if k == '/':
93 93 k = 'root_path'
94 94
95 95 if k in ['publish', 'enabled']:
96 96 v = str2bool(v)
97 97
98 98 if k.find('.') != -1:
99 99 k = k.replace('.', '_')
100 100
101 101 if each.ui_section in ['hooks', 'extensions']:
102 102 v = each.ui_active
103 103
104 104 settings[section + '_' + k] = v
105 105
106 106 return settings
107 107
108 108 @classmethod
109 109 def _form_defaults(cls):
110 110 defaults = SettingsModel().get_all_settings()
111 111 defaults.update(cls._get_ui_settings())
112 112
113 113 defaults.update({
114 114 'new_svn_branch': '',
115 115 'new_svn_tag': '',
116 116 })
117 117 return defaults
118 118
119 119 @LoginRequired()
120 120 @HasPermissionAllDecorator('hg.admin')
121 121 def settings_vcs(self):
122 122 c = self.load_default_context()
123 123 c.active = 'vcs'
124 124 model = VcsSettingsModel()
125 125 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
126 126 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
127 127 c.svn_generate_config = rhodecode.ConfigGet().get_bool(config_keys.generate_config)
128 128 c.svn_config_path = rhodecode.ConfigGet().get_str(config_keys.config_file_path)
129 129 defaults = self._form_defaults()
130 130
131 131 data = render('rhodecode:templates/admin/settings/settings.mako',
132 132 self._get_template_context(c), self.request)
133 133 html = formencode.htmlfill.render(
134 134 data,
135 135 defaults=defaults,
136 136 encoding="UTF-8",
137 137 force_defaults=False
138 138 )
139 139 return Response(html)
140 140
141 141 @LoginRequired()
142 142 @HasPermissionAllDecorator('hg.admin')
143 143 @CSRFRequired()
144 144 def settings_vcs_update(self):
145 145 _ = self.request.translate
146 146 c = self.load_default_context()
147 147 c.active = 'vcs'
148 148
149 149 model = VcsSettingsModel()
150 150 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
151 151 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
152 152
153 153 c.svn_generate_config = rhodecode.ConfigGet().get_bool(config_keys.generate_config)
154 154 c.svn_config_path = rhodecode.ConfigGet().get_str(config_keys.config_file_path)
155 155 application_form = ApplicationUiSettingsForm(self.request.translate)()
156 156
157 157 try:
158 158 form_result = application_form.to_python(dict(self.request.POST))
159 159 except formencode.Invalid as errors:
160 160 h.flash(
161 161 _("Some form inputs contain invalid data."),
162 162 category='error')
163 163 data = render('rhodecode:templates/admin/settings/settings.mako',
164 164 self._get_template_context(c), self.request)
165 165 html = formencode.htmlfill.render(
166 166 data,
167 167 defaults=errors.value,
168 168 errors=errors.unpack_errors() or {},
169 169 prefix_error=False,
170 170 encoding="UTF-8",
171 171 force_defaults=False
172 172 )
173 173 return Response(html)
174 174
175 175 try:
176 176 model.update_global_hook_settings(form_result)
177 177
178 178 model.create_or_update_global_svn_settings(form_result)
179 179 model.create_or_update_global_hg_settings(form_result)
180 180 model.create_or_update_global_git_settings(form_result)
181 181 model.create_or_update_global_pr_settings(form_result)
182 182 except Exception:
183 183 log.exception("Exception while updating settings")
184 184 h.flash(_('Error occurred during updating '
185 185 'application settings'), category='error')
186 186 else:
187 187 Session().commit()
188 188 h.flash(_('Updated VCS settings'), category='success')
189 189 raise HTTPFound(h.route_path('admin_settings_vcs'))
190 190
191 191 data = render('rhodecode:templates/admin/settings/settings.mako',
192 192 self._get_template_context(c), self.request)
193 193 html = formencode.htmlfill.render(
194 194 data,
195 195 defaults=self._form_defaults(),
196 196 encoding="UTF-8",
197 197 force_defaults=False
198 198 )
199 199 return Response(html)
200 200
201 201 @LoginRequired()
202 202 @HasPermissionAllDecorator('hg.admin')
203 203 @CSRFRequired()
204 204 def settings_vcs_delete_svn_pattern(self):
205 205 delete_pattern_id = self.request.POST.get('delete_svn_pattern')
206 206 model = VcsSettingsModel()
207 207 try:
208 208 model.delete_global_svn_pattern(delete_pattern_id)
209 209 except SettingNotFound:
210 210 log.exception(
211 211 'Failed to delete svn_pattern with id %s', delete_pattern_id)
212 212 raise HTTPNotFound()
213 213
214 214 Session().commit()
215 215 return True
216 216
217 217 @LoginRequired()
218 218 @HasPermissionAllDecorator('hg.admin')
219 219 def settings_mapping(self):
220 220 c = self.load_default_context()
221 221 c.active = 'mapping'
222 222 c.storage_path = get_rhodecode_repo_store_path()
223 223 data = render('rhodecode:templates/admin/settings/settings.mako',
224 224 self._get_template_context(c), self.request)
225 225 html = formencode.htmlfill.render(
226 226 data,
227 227 defaults=self._form_defaults(),
228 228 encoding="UTF-8",
229 229 force_defaults=False
230 230 )
231 231 return Response(html)
232 232
233 233 @LoginRequired()
234 234 @HasPermissionAllDecorator('hg.admin')
235 235 @CSRFRequired()
236 def settings_mapping_update(self):
236 def settings_mapping_create(self):
237 237 _ = self.request.translate
238 238 c = self.load_default_context()
239 239 c.active = 'mapping'
240 rm_obsolete = self.request.POST.get('destroy', False)
241 240 invalidate_cache = self.request.POST.get('invalidate', False)
242 log.debug('rescanning repo location with destroy obsolete=%s', rm_obsolete)
241 log.debug('rescanning repo location')
243 242
244 243 if invalidate_cache:
245 244 log.debug('invalidating all repositories cache')
246 245 for repo in Repository.get_all():
247 246 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
248 247
249 248 filesystem_repos = ScmModel().repo_scan()
250 added, removed = repo2db_mapper(filesystem_repos, rm_obsolete, force_hooks_rebuild=True)
249 added, errors = repo2db_mapper(filesystem_repos, force_hooks_rebuild=True)
251 250 PermissionModel().trigger_permission_flush()
252 251
253 252 def _repr(rm_repo):
254 253 return ', '.join(map(safe_str, rm_repo)) or '-'
255 254
256 h.flash(_('Repositories successfully '
257 'rescanned added: %s ; removed: %s') %
258 (_repr(added), _repr(removed)),
259 category='success')
255 if errors:
256 h.flash(_('Errors during scan: {}').format(_repr(errors), ), category='error')
257
258 h.flash(_('Repositories successfully scanned: Added: {}').format(_repr(added)), category='success')
259 raise HTTPFound(h.route_path('admin_settings_mapping'))
260
261 @LoginRequired()
262 @HasPermissionAllDecorator('hg.admin')
263 @CSRFRequired()
264 def settings_mapping_cleanup(self):
265 _ = self.request.translate
266 c = self.load_default_context()
267 c.active = 'mapping'
268 log.debug('rescanning repo location')
269
270 removed, errors = repo2db_cleanup()
271 PermissionModel().trigger_permission_flush()
272
273 def _repr(rm_repo):
274 return ', '.join(map(safe_str, rm_repo)) or '-'
275
276 h.flash(_('Repositories successfully scanned: Errors: {}, Added: {}').format(errors, _repr(removed)), category='success')
260 277 raise HTTPFound(h.route_path('admin_settings_mapping'))
261 278
262 279 @LoginRequired()
263 280 @HasPermissionAllDecorator('hg.admin')
264 281 def settings_global(self):
265 282 c = self.load_default_context()
266 283 c.active = 'global'
267 284 c.personal_repo_group_default_pattern = RepoGroupModel()\
268 285 .get_personal_group_name_pattern()
269 286
270 287 data = render('rhodecode:templates/admin/settings/settings.mako',
271 288 self._get_template_context(c), self.request)
272 289 html = formencode.htmlfill.render(
273 290 data,
274 291 defaults=self._form_defaults(),
275 292 encoding="UTF-8",
276 293 force_defaults=False
277 294 )
278 295 return Response(html)
279 296
    @LoginRequired()
    @HasPermissionAllDecorator('hg.admin')
    @CSRFRequired()
    def settings_global_update(self):
        """
        Persist the global application settings form.

        On validation failure the settings page is re-rendered with the
        submitted values and inline errors; on success each field is stored
        as a RhodeCode setting, the settings cache is invalidated, and the
        user is redirected back to the global settings page.
        """
        _ = self.request.translate
        c = self.load_default_context()
        c.active = 'global'
        c.personal_repo_group_default_pattern = RepoGroupModel()\
            .get_personal_group_name_pattern()
        application_form = ApplicationSettingsForm(self.request.translate)()
        try:
            form_result = application_form.to_python(dict(self.request.POST))
        except formencode.Invalid as errors:
            # re-render the form with the submitted values and error markup
            h.flash(
                _("Some form inputs contain invalid data."),
                category='error')
            data = render('rhodecode:templates/admin/settings/settings.mako',
                          self._get_template_context(c), self.request)
            html = formencode.htmlfill.render(
                data,
                defaults=errors.value,
                errors=errors.unpack_errors() or {},
                prefix_error=False,
                encoding="UTF-8",
                force_defaults=False
            )
            return Response(html)

        # (setting name, form field key, value type) triplets to persist
        settings = [
            ('title', 'rhodecode_title', 'unicode'),
            ('realm', 'rhodecode_realm', 'unicode'),
            ('pre_code', 'rhodecode_pre_code', 'unicode'),
            ('post_code', 'rhodecode_post_code', 'unicode'),
            ('captcha_public_key', 'rhodecode_captcha_public_key', 'unicode'),
            ('captcha_private_key', 'rhodecode_captcha_private_key', 'unicode'),
            ('create_personal_repo_group', 'rhodecode_create_personal_repo_group', 'bool'),
            ('personal_repo_group_pattern', 'rhodecode_personal_repo_group_pattern', 'unicode'),
        ]

        try:
            for setting, form_key, type_ in settings:
                sett = SettingsModel().create_or_update_setting(
                    setting, form_result[form_key], type_)
                Session().add(sett)

            Session().commit()
            # make sure subsequent reads see the fresh values
            SettingsModel().invalidate_settings_cache()
            h.flash(_('Updated application settings'), category='success')
        except Exception:
            log.exception("Exception while updating application settings")
            h.flash(
                _('Error occurred during updating application settings'),
                category='error')

        raise HTTPFound(h.route_path('admin_settings_global'))
335 352
336 353 @LoginRequired()
337 354 @HasPermissionAllDecorator('hg.admin')
338 355 def settings_visual(self):
339 356 c = self.load_default_context()
340 357 c.active = 'visual'
341 358
342 359 data = render('rhodecode:templates/admin/settings/settings.mako',
343 360 self._get_template_context(c), self.request)
344 361 html = formencode.htmlfill.render(
345 362 data,
346 363 defaults=self._form_defaults(),
347 364 encoding="UTF-8",
348 365 force_defaults=False
349 366 )
350 367 return Response(html)
351 368
    @LoginRequired()
    @HasPermissionAllDecorator('hg.admin')
    @CSRFRequired()
    def settings_visual_update(self):
        """
        Persist the visualisation settings form.

        On validation failure the settings page is re-rendered with the
        submitted values and inline errors; on success each field is stored
        as a RhodeCode setting, the settings cache is invalidated, and the
        user is redirected back to the visual settings page.
        """
        _ = self.request.translate
        c = self.load_default_context()
        c.active = 'visual'
        application_form = ApplicationVisualisationForm(self.request.translate)()
        try:
            form_result = application_form.to_python(dict(self.request.POST))
        except formencode.Invalid as errors:
            # re-render the form with the submitted values and error markup
            h.flash(
                _("Some form inputs contain invalid data."),
                category='error')
            data = render('rhodecode:templates/admin/settings/settings.mako',
                          self._get_template_context(c), self.request)
            html = formencode.htmlfill.render(
                data,
                defaults=errors.value,
                errors=errors.unpack_errors() or {},
                prefix_error=False,
                encoding="UTF-8",
                force_defaults=False
            )
            return Response(html)

        try:
            # (setting name, form field key, value type) triplets to persist
            settings = [
                ('show_public_icon', 'rhodecode_show_public_icon', 'bool'),
                ('show_private_icon', 'rhodecode_show_private_icon', 'bool'),
                ('stylify_metatags', 'rhodecode_stylify_metatags', 'bool'),
                ('repository_fields', 'rhodecode_repository_fields', 'bool'),
                ('dashboard_items', 'rhodecode_dashboard_items', 'int'),
                ('admin_grid_items', 'rhodecode_admin_grid_items', 'int'),
                ('show_version', 'rhodecode_show_version', 'bool'),
                ('use_gravatar', 'rhodecode_use_gravatar', 'bool'),
                ('markup_renderer', 'rhodecode_markup_renderer', 'unicode'),
                ('gravatar_url', 'rhodecode_gravatar_url', 'unicode'),
                ('clone_uri_tmpl', 'rhodecode_clone_uri_tmpl', 'unicode'),
                ('clone_uri_id_tmpl', 'rhodecode_clone_uri_id_tmpl', 'unicode'),
                ('clone_uri_ssh_tmpl', 'rhodecode_clone_uri_ssh_tmpl', 'unicode'),
                ('support_url', 'rhodecode_support_url', 'unicode'),
                ('show_revision_number', 'rhodecode_show_revision_number', 'bool'),
                ('show_sha_length', 'rhodecode_show_sha_length', 'int'),
            ]
            for setting, form_key, type_ in settings:
                sett = SettingsModel().create_or_update_setting(
                    setting, form_result[form_key], type_)
                Session().add(sett)

            Session().commit()
            # make sure subsequent reads see the fresh values
            SettingsModel().invalidate_settings_cache()
            h.flash(_('Updated visualisation settings'), category='success')
        except Exception:
            log.exception("Exception updating visualization settings")
            h.flash(_('Error occurred during updating '
                      'visualisation settings'),
                    category='error')

        raise HTTPFound(h.route_path('admin_settings_visual'))
412 429
413 430 @LoginRequired()
414 431 @HasPermissionAllDecorator('hg.admin')
415 432 def settings_issuetracker(self):
416 433 c = self.load_default_context()
417 434 c.active = 'issuetracker'
418 435 defaults = c.rc_config
419 436
420 437 entry_key = 'rhodecode_issuetracker_pat_'
421 438
422 439 c.issuetracker_entries = {}
423 440 for k, v in defaults.items():
424 441 if k.startswith(entry_key):
425 442 uid = k[len(entry_key):]
426 443 c.issuetracker_entries[uid] = None
427 444
428 445 for uid in c.issuetracker_entries:
429 446 c.issuetracker_entries[uid] = AttributeDict({
430 447 'pat': defaults.get('rhodecode_issuetracker_pat_' + uid),
431 448 'url': defaults.get('rhodecode_issuetracker_url_' + uid),
432 449 'pref': defaults.get('rhodecode_issuetracker_pref_' + uid),
433 450 'desc': defaults.get('rhodecode_issuetracker_desc_' + uid),
434 451 })
435 452
436 453 return self._get_template_context(c)
437 454
438 455 @LoginRequired()
439 456 @HasPermissionAllDecorator('hg.admin')
440 457 @CSRFRequired()
441 458 def settings_issuetracker_test(self):
442 459 error_container = []
443 460
444 461 urlified_commit = h.urlify_commit_message(
445 462 self.request.POST.get('test_text', ''),
446 463 'repo_group/test_repo1', error_container=error_container)
447 464 if error_container:
448 465 def converter(inp):
449 466 return h.html_escape(inp)
450 467
451 468 return 'ERRORS: ' + '\n'.join(map(converter, error_container))
452 469
453 470 return urlified_commit
454 471
    @LoginRequired()
    @HasPermissionAllDecorator('hg.admin')
    @CSRFRequired()
    def settings_issuetracker_update(self):
        """
        Persist issue tracker patterns from the POSTed form.

        Deletes entries listed under ``delete_patterns``, then stores each
        submitted pattern's (setting, value, type) triplets. Invalid form
        data flashes an error and redirects without changes.
        """
        _ = self.request.translate
        self.load_default_context()
        settings_model = IssueTrackerSettingsModel()

        try:
            form = IssueTrackerPatternsForm(self.request.translate)()
            data = form.to_python(self.request.POST)
        except formencode.Invalid as errors:
            log.exception('Failed to add new pattern')
            error = errors
            h.flash(_(f'Invalid issue tracker pattern: {error}'),
                    category='error')
            raise HTTPFound(h.route_path('admin_settings_issuetracker'))

        if data:
            # deletions first, then create/update of the remaining patterns
            for uid in data.get('delete_patterns', []):
                settings_model.delete_entries(uid)

            for pattern in data.get('patterns', []):
                for setting, value, type_ in pattern:
                    sett = settings_model.create_or_update_setting(
                        setting, value, type_)
                    Session().add(sett)

            Session().commit()

        # make sure subsequent reads see the fresh values
        SettingsModel().invalidate_settings_cache()
        h.flash(_('Updated issue tracker entries'), category='success')
        raise HTTPFound(h.route_path('admin_settings_issuetracker'))
488 505
489 506 @LoginRequired()
490 507 @HasPermissionAllDecorator('hg.admin')
491 508 @CSRFRequired()
492 509 def settings_issuetracker_delete(self):
493 510 _ = self.request.translate
494 511 self.load_default_context()
495 512 uid = self.request.POST.get('uid')
496 513 try:
497 514 IssueTrackerSettingsModel().delete_entries(uid)
498 515 except Exception:
499 516 log.exception('Failed to delete issue tracker setting %s', uid)
500 517 raise HTTPNotFound()
501 518
502 519 SettingsModel().invalidate_settings_cache()
503 520 h.flash(_('Removed issue tracker entry.'), category='success')
504 521
505 522 return {'deleted': uid}
506 523
507 524 @LoginRequired()
508 525 @HasPermissionAllDecorator('hg.admin')
509 526 def settings_email(self):
510 527 c = self.load_default_context()
511 528 c.active = 'email'
512 529 c.rhodecode_ini = rhodecode.CONFIG
513 530
514 531 data = render('rhodecode:templates/admin/settings/settings.mako',
515 532 self._get_template_context(c), self.request)
516 533 html = formencode.htmlfill.render(
517 534 data,
518 535 defaults=self._form_defaults(),
519 536 encoding="UTF-8",
520 537 force_defaults=False
521 538 )
522 539 return Response(html)
523 540
524 541 @LoginRequired()
525 542 @HasPermissionAllDecorator('hg.admin')
526 543 @CSRFRequired()
527 544 def settings_email_update(self):
528 545 _ = self.request.translate
529 546 c = self.load_default_context()
530 547 c.active = 'email'
531 548
532 549 test_email = self.request.POST.get('test_email')
533 550
534 551 if not test_email:
535 552 h.flash(_('Please enter email address'), category='error')
536 553 raise HTTPFound(h.route_path('admin_settings_email'))
537 554
538 555 email_kwargs = {
539 556 'date': datetime.datetime.now(),
540 557 'user': self._rhodecode_db_user
541 558 }
542 559
543 560 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
544 561 EmailNotificationModel.TYPE_EMAIL_TEST, **email_kwargs)
545 562
546 563 recipients = [test_email] if test_email else None
547 564
548 565 run_task(tasks.send_email, recipients, subject,
549 566 email_body_plaintext, email_body)
550 567
551 568 h.flash(_('Send email task created'), category='success')
552 569 raise HTTPFound(h.route_path('admin_settings_email'))
553 570
554 571 @LoginRequired()
555 572 @HasPermissionAllDecorator('hg.admin')
556 573 def settings_hooks(self):
557 574 c = self.load_default_context()
558 575 c.active = 'hooks'
559 576
560 577 model = SettingsModel()
561 578 c.hooks = model.get_builtin_hooks()
562 579 c.custom_hooks = model.get_custom_hooks()
563 580
564 581 data = render('rhodecode:templates/admin/settings/settings.mako',
565 582 self._get_template_context(c), self.request)
566 583 html = formencode.htmlfill.render(
567 584 data,
568 585 defaults=self._form_defaults(),
569 586 encoding="UTF-8",
570 587 force_defaults=False
571 588 )
572 589 return Response(html)
573 590
    @LoginRequired()
    @HasPermissionAllDecorator('hg.admin')
    @CSRFRequired()
    def settings_hooks_update(self):
        """
        Create, delete, or edit custom hooks from the POSTed form.

        Three operations share this endpoint: adding a new hook (when both
        new key and value are posted), deleting one by ``hook_id``, and
        bulk-editing existing hooks via the parallel ``hook_ui_key`` /
        ``hook_ui_value_new`` lists. All of them are no-ops unless custom
        hook settings are enabled. Always redirects back to the hooks page.
        """
        _ = self.request.translate
        c = self.load_default_context()
        c.active = 'hooks'
        if c.visual.allow_custom_hooks_settings:
            ui_key = self.request.POST.get('new_hook_ui_key')
            ui_value = self.request.POST.get('new_hook_ui_value')

            hook_id = self.request.POST.get('hook_id')
            # tracks whether a brand-new hook was added, to avoid flashing
            # both "Added" and "Updated" in one request
            new_hook = False

            model = SettingsModel()
            try:
                if ui_value and ui_key:
                    model.create_or_update_hook(ui_key, ui_value)
                    h.flash(_('Added new hook'), category='success')
                    new_hook = True
                elif hook_id:
                    RhodeCodeUi.delete(hook_id)
                    Session().commit()

                # check for edits
                update = False
                _d = self.request.POST.dict_of_lists()
                for k, v in zip(_d.get('hook_ui_key', []),
                                _d.get('hook_ui_value_new', [])):
                    model.create_or_update_hook(k, v)
                    update = True

                if update and not new_hook:
                    h.flash(_('Updated hooks'), category='success')
                Session().commit()
            except Exception:
                log.exception("Exception during hook creation")
                h.flash(_('Error occurred during hook creation'),
                        category='error')

        raise HTTPFound(h.route_path('admin_settings_hooks'))
615 632
616 633 @LoginRequired()
617 634 @HasPermissionAllDecorator('hg.admin')
618 635 def settings_search(self):
619 636 c = self.load_default_context()
620 637 c.active = 'search'
621 638
622 639 c.searcher = searcher_from_config(self.request.registry.settings)
623 640 c.statistics = c.searcher.statistics(self.request.translate)
624 641
625 642 return self._get_template_context(c)
626 643
627 644 @LoginRequired()
628 645 @HasPermissionAllDecorator('hg.admin')
629 646 def settings_labs(self):
630 647 c = self.load_default_context()
631 648 if not c.labs_active:
632 649 raise HTTPFound(h.route_path('admin_settings'))
633 650
634 651 c.active = 'labs'
635 652 c.lab_settings = _LAB_SETTINGS
636 653
637 654 data = render('rhodecode:templates/admin/settings/settings.mako',
638 655 self._get_template_context(c), self.request)
639 656 html = formencode.htmlfill.render(
640 657 data,
641 658 defaults=self._form_defaults(),
642 659 encoding="UTF-8",
643 660 force_defaults=False
644 661 )
645 662 return Response(html)
646 663
    @LoginRequired()
    @HasPermissionAllDecorator('hg.admin')
    @CSRFRequired()
    def settings_labs_update(self):
        """
        Persist the labs settings form (fields are driven by _LAB_SETTINGS).

        On validation failure or a storage error, the settings page is
        re-rendered; on success the values are committed, the settings
        cache is invalidated and the user is redirected back to the labs
        settings page.
        """
        _ = self.request.translate
        c = self.load_default_context()
        c.active = 'labs'

        application_form = LabsSettingsForm(self.request.translate)()
        try:
            form_result = application_form.to_python(dict(self.request.POST))
        except formencode.Invalid as errors:
            # re-render the form with the submitted values and error markup
            h.flash(
                _("Some form inputs contain invalid data."),
                category='error')
            data = render('rhodecode:templates/admin/settings/settings.mako',
                          self._get_template_context(c), self.request)
            html = formencode.htmlfill.render(
                data,
                defaults=errors.value,
                errors=errors.unpack_errors() or {},
                prefix_error=False,
                encoding="UTF-8",
                force_defaults=False
            )
            return Response(html)

        try:
            session = Session()
            for setting in _LAB_SETTINGS:
                # settings are stored without the 'rhodecode_' prefix
                setting_name = setting.key[len('rhodecode_'):]
                sett = SettingsModel().create_or_update_setting(
                    setting_name, form_result[setting.key], setting.type)
                session.add(sett)

        except Exception:
            log.exception('Exception while updating lab settings')
            h.flash(_('Error occurred during updating labs settings'),
                    category='error')
        else:
            # success path commits and redirects; the fall-through below
            # only renders when the try block raised
            Session().commit()
            SettingsModel().invalidate_settings_cache()
            h.flash(_('Updated Labs settings'), category='success')
            raise HTTPFound(h.route_path('admin_settings_labs'))

        data = render('rhodecode:templates/admin/settings/settings.mako',
                      self._get_template_context(c), self.request)
        html = formencode.htmlfill.render(
            data,
            defaults=self._form_defaults(),
            encoding="UTF-8",
            force_defaults=False
        )
        return Response(html)
701 718
702 719
# Record describing one labs setting rendered on the labs settings page.
# :param key: name of the setting including the 'rhodecode_' prefix
# :param type: the RhodeCodeSetting type to use.
# :param group: the i18ned group in which we should display this setting
# :param label: the i18ned label we should display for this setting
# :param help: the i18ned help we should display for this setting
LabSetting = collections.namedtuple(
    'LabSetting', ('key', 'type', 'group', 'label', 'help'))


# This list has to be kept in sync with the form
# rhodecode.model.forms.LabsSettingsForm. Currently empty: no labs
# settings are exposed.
_LAB_SETTINGS = [

]
@@ -1,895 +1,928 b''
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Utilities library for RhodeCode
21 21 """
22 22
23 23 import datetime
24 24 import importlib
25 25
26 26 import decorator
27 27 import logging
28 28 import os
29 29 import re
30 30 import sys
31 31 import shutil
32 32 import socket
33 33 import tempfile
34 34 import traceback
35 35 import tarfile
36 36
37 37 from functools import wraps
38 38 from os.path import join as jn
39 39
40 40 import paste
41 41 import pkg_resources
42 42 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
43 43
44 44 from mako import exceptions
45 45
46 46 import rhodecode
47 47 from rhodecode import ConfigGet
48 48 from rhodecode.lib.exceptions import HTTPBranchProtected, HTTPLockedRepo, ClientNotSupported
49 49 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
50 50 from rhodecode.lib.type_utils import AttributeDict
51 51 from rhodecode.lib.str_utils import safe_bytes, safe_str
52 52 from rhodecode.lib.vcs.backends.base import Config
53 53 from rhodecode.lib.vcs.exceptions import VCSError
54 54 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
55 55 from rhodecode.lib.ext_json import sjson as json
56 56 from rhodecode.model import meta
57 57 from rhodecode.model.db import (
58 58 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
59 59 from rhodecode.model.meta import Session
60 60
61 61
log = logging.getLogger(__name__)

# Matches directories renamed by repo deletion:
# rm__YYYYMMDD_HHMMSS_ffffff__<original name>
REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')

# String which contains characters that are not allowed in slug names for
# repositories or repository groups. It is properly escaped to use it in
# regular expressions.
SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')

# Regex that matches forbidden characters in repo/group slugs.
SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))

# Regex that matches allowed characters in repo/group slugs.
SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))

# Regex that matches whole repo/group slugs.
SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))

# Module-level cache for license data; initialized empty here and
# populated elsewhere (not visible in this chunk).
_license_cache = None
82 82
def adopt_for_celery(func):
    """
    Decorator designed to adopt hooks (from rhodecode.lib.hooks_base)
    for further usage as a celery tasks.

    The wrapped hook receives a plain dict of extras (converted to an
    AttributeDict) and must return an object exposing ``to_json()``.
    Exceptions are converted into JSON-serializable result dicts instead
    of propagating out of the task.
    """
    @wraps(func)
    def wrapper(extras):
        extras = AttributeDict(extras)

        try:
            # HooksResponse implements to_json method which must be used there.
            return func(extras).to_json()
        except (HTTPBranchProtected, ClientNotSupported, HTTPLockedRepo) as error:
            # These special cases don't need error reporting: locked repo,
            # protected branch or unsupported client. The three exception
            # types were previously handled by three byte-identical blocks;
            # they share one handler now.
            return {
                'status': error.code,
                'output': error.explanation,
                'exception': type(error).__name__,
                'exception_args': error.args,
                'exception_traceback': '',
            }
        except Exception as e:
            # unexpected failure: report status 128 with the full traceback
            return {
                'status': 128,
                'output': '',
                'exception': type(e).__name__,
                'exception_args': e.args,
                'exception_traceback': traceback.format_exc(),
            }
    return wrapper
137 137
138 138
def repo_name_slug(value):
    """
    Turn *value* into a slug usable as a repository name.

    This function is called on each creation/modification of a repository
    to prevent bad names.
    """
    separator = '-'

    # strip markup, then normalize accented and misc HTML entities
    cleaned = convert_misc_entities(
        convert_accented_entities(strip_tags(value)))

    # drop forbidden characters and squash whitespace runs into dashes
    cleaned = SLUG_BAD_CHAR_RE.sub('', cleaned)
    cleaned = re.sub(r'[\s]+', '-', cleaned)

    return collapse(cleaned, separator)
157 157
158 158
159 159 #==============================================================================
160 160 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
161 161 #==============================================================================
def get_repo_slug(request):
    """
    Extract the repository name from *request*.

    Prefers the DB-backed name set on the request (this translates the
    example.com/_<id> form into the proper repo name) and falls back to
    the pyramid matchdict. Trailing slashes are stripped.
    """
    _MISSING = object()

    repo_name = getattr(request, 'db_repo_name', _MISSING)
    if repo_name is _MISSING:
        matchdict = getattr(request, 'matchdict', None)
        repo_name = matchdict.get('repo_name') if matchdict else ''

    return repo_name.rstrip('/') if repo_name else repo_name
176 176
177 177
def get_repo_group_slug(request):
    """
    Extract the repository group name from *request*.

    Prefers the DB-backed group set on the request (this translates the
    example.com/_<id> form into the proper group name) and falls back to
    the pyramid matchdict. Trailing slashes are stripped.
    """
    if hasattr(request, 'db_repo_group'):
        group_name = request.db_repo_group.group_name
    else:
        matchdict = getattr(request, 'matchdict', None)
        group_name = matchdict.get('repo_group_name') if matchdict else ''

    return group_name.rstrip('/') if group_name else group_name
191 191
192 192
def get_user_group_slug(request):
    """
    Extract the user group name from *request*.

    Uses the DB-backed group set on the request when available; otherwise
    resolves it from the pyramid matchdict by id or by name via the
    UserGroup model. Returns None when that lookup fails.
    """
    if hasattr(request, 'db_user_group'):
        return request.db_user_group.users_group_name

    matchdict = getattr(request, 'matchdict', None)
    if not matchdict:
        return ''

    # pyramid route params: resolve by id first, then by name
    group_id = matchdict.get('user_group_id')
    group_name = matchdict.get('user_group_name')
    result = group_id
    try:
        if group_id:
            result = UserGroup.get(group_id)
        elif group_name:
            result = UserGroup.get_by_group_name(group_name)

        if result:
            result = result.users_group_name
    except Exception:
        log.exception('Failed to get user group by id and name')
        # catch all failures here
        return None

    return result
216 216
217 217
def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans given path for repos and return (name,(type,path)) tuple

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    :param skip_removed_repos: skip directories renamed by the delete
        operation (matching REMOVED_REPO_PAT)
    :return: generator of (relative_name, scm_info) tuples
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...', path, recursive)

    def _get_repos(p):
        # generator yielding repos found directly under `p`; with
        # `recursive` set, non-repo subdirectories are scanned as well
        dirpaths = get_dirpaths(p)
        if not _is_dir_writable(p):
            log.warning('repo path without write access: %s', p)

        for dirpath in dirpaths:
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)

            # skip removed repos
            if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
                continue

            # skip .<something> dirs
            if dirpath.startswith('.'):
                continue

            try:
                scm_info = get_scm(cur_path)
                # yield the repo path relative to the scan root
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                # check if this dir contains other repos for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    yield from _get_repos(rec_path)

    return _get_repos(path)
260 260
261 261
def get_dirpaths(p: str) -> list:
    """
    List the entries of directory *p*.

    Returns [] when the directory cannot be read. Entries whose type does
    not match the type of *p* (names os.listdir could not decode) are
    dropped with an error log, until a solid solution for path handling
    has been built.
    """
    try:
        # OS-independent way of checking for (at least) read access
        entries = os.listdir(p)
    except OSError:
        log.warning('ignoring repo path without read access: %s', p)
        return []

    expected_type = type(p)

    def _keep(item):
        # os.listdir may return raw (undecodable) entries of a different
        # type than the argument; skip and report those
        if type(item) is not expected_type:
            log.error(
                "Ignoring path %s since it cannot be decoded into str.",
                # Using "repr" to make sure that we see the byte value in case
                # of support.
                repr(item))
            return False
        return True

    return [item for item in entries if _keep(item)]
292 292
293 293
294 294 def _is_dir_writable(path):
295 295 """
296 296 Probe if `path` is writable.
297 297
298 298 Due to trouble on Cygwin / Windows, this is actually probing if it is
299 299 possible to create a file inside of `path`, stat does not produce reliable
300 300 results in this case.
301 301 """
302 302 try:
303 303 with tempfile.TemporaryFile(dir=path):
304 304 pass
305 305 except OSError:
306 306 return False
307 307 return True
308 308
309 309
def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
    """
    Returns True if given path is a valid repository False otherwise.
    If expect_scm param is given also, compare if given scm is the same
    as expected from scm parameter. If explicit_scm is given don't try to
    detect the scm, just use the given one to check if repo is valid

    :param repo_name: repository name/path relative to ``base_path``
    :param base_path: storage root the name is joined onto
    :param expect_scm: optional scm alias to compare the detected scm against
    :param explicit_scm: optional scm alias; instantiate this backend
        directly instead of running detection
    :param config: optional vcs config passed to the explicit backend

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
    log.debug('Checking if `%s` is a valid path for repository. '
              'Explicit type: %s', repo_name, explicit_scm)

    try:
        if explicit_scm:
            # trust the caller: use the requested backend's alias rather
            # than running scm detection on the path
            detected_scms = [get_scm_backend(explicit_scm)(
                full_path, config=config).alias]
        else:
            detected_scms = get_scm(full_path)

        if expect_scm:
            return detected_scms[0] == expect_scm
        log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
        return True
    except VCSError:
        # scm detection failed -> not a repository
        log.debug('path: %s is not a valid repo !', full_path)
        return False
343 343
344 344
def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
    """
    Returns True if a given path is a repository group, False otherwise

    :param repo_group_name: group name/path relative to ``base_path``
    :param base_path: storage root the name is joined onto
    :param skip_path_check: when True, skip the final on-disk existence check
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
    log.debug('Checking if `%s` is a valid path for repository group',
              repo_group_name)

    # check if it's not a repo
    if is_valid_repo(repo_group_name, base_path):
        log.debug('Repo called %s exist, it is not a valid repo group', repo_group_name)
        return False

    try:
        # we need to check bare git repos at higher level
        # since we might match branches/hooks/info/objects or possible
        # other things inside bare git repo
        maybe_repo = os.path.dirname(full_path)
        if maybe_repo == base_path:
            # skip root level repo check; we know root location CANNOT BE a repo group
            return False

        scm_ = get_scm(maybe_repo)
        log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
        return False
    except VCSError:
        # parent is not a repo -> candidate may still be a group
        pass

    # check if it's a valid path
    if skip_path_check or os.path.isdir(full_path):
        log.debug('path: %s is a valid repo group !', full_path)
        return True

    log.debug('path: %s is not a valid repo group !', full_path)
    return False
383 383
384 384
def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
    """
    Interactively ask a yes/no question on stdin.

    Re-prompts with *complaint* on unrecognized input; raises OSError once
    *retries* additional attempts are exhausted.
    """
    while True:
        answer = input(prompt).lower()
        if answer in ('y', 'ye', 'yes'):
            return True
        if answer in ('n', 'no', 'nop', 'nope'):
            return False
        retries -= 1
        if retries < 0:
            raise OSError
        print(complaint)
396 396
# propagated from mercurial documentation: config section names that may
# appear in hgrc-style ui settings
ui_sections = [
    'alias', 'auth',
    'decode/encode', 'defaults',
    'diff', 'email',
    'extensions', 'format',
    'merge-patterns', 'merge-tools',
    'hooks', 'http_proxy',
    'smtp', 'patch',
    'paths', 'profiling',
    'server', 'trusted',
    'ui', 'web', ]
409 409
410 410
def prepare_config_data(clear_session=True, repo=None):
    """
    Read the configuration data from the database, *.ini files and return configuration
    tuples.

    :param clear_session: remove the SQLAlchemy session after reading
    :param repo: optional repository to scope the vcs settings to
    :return: list of (section, option, value) tuples
    """
    from rhodecode.model.settings import VcsSettingsModel

    sa = meta.Session()
    settings_model = VcsSettingsModel(repo=repo, sa=sa)

    ui_settings = settings_model.get_ui_settings()

    # ui_data collects active entries purely for the debug log below
    ui_data = []
    config = [
        ('web', 'push_ssl', 'false'),
    ]
    for setting in ui_settings:
        # skip certain deprecated keys that might be still in DB
        if f"{setting.section}_{setting.key}" in ['extensions_hgsubversion']:
            continue

        # Todo: remove this section once transition to *.ini files will be completed
        if setting.section in ('largefiles', 'vcs_git_lfs'):
            if setting.key != 'enabled':
                continue
        if setting.active:
            ui_data.append((setting.section, setting.key, setting.value))
        config.append((
            safe_str(setting.section), safe_str(setting.key),
            safe_str(setting.value)))
        if setting.key == 'push_ssl':
            # force set push_ssl requirement to False this is deprecated, and we must force it to False
            # NOTE(review): this appends a second push_ssl entry with boolean
            # False (vs the string 'false' default above) — confirm which
            # entry consumers of this list honor.
            config.append((
                safe_str(setting.section), safe_str(setting.key), False))
    # storage locations always come from the *.ini config, not the DB
    config_getter = ConfigGet()
    config.append(('vcs_git_lfs', 'store_location', config_getter.get_str('vcs.git.lfs.storage_location')))
    config.append(('largefiles', 'usercache', config_getter.get_str('vcs.hg.largefiles.storage_location')))
    log.debug(
        'settings ui from db@repo[%s]: %s',
        repo,
        ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
    if clear_session:
        meta.Session.remove()

    # TODO: mikhail: probably it makes no sense to re-read hooks information.
    # It's already there and activated/deactivated
    skip_entries = []
    enabled_hook_classes = get_enabled_hook_classes(ui_settings)
    if 'pull' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
    if 'push' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))

    # drop entries for hooks that are disabled
    config = [entry for entry in config if entry[:2] not in skip_entries]

    return config
469 469
470 470
def make_db_config(clear_session=True, repo=None):
    """
    Build a :class:`Config` instance populated from the database settings.

    :param clear_session: remove the SQLAlchemy session after reading settings
    :param repo: optional repository to scope the settings to
    """
    db_config = Config()
    for section, option, value in prepare_config_data(clear_session=clear_session, repo=repo):
        db_config.set(section, option, value)
    return db_config
480 480
481 481
def get_enabled_hook_classes(ui_settings):
    """
    Return the enabled hook classes.

    :param ui_settings: List of ui_settings as returned
        by :meth:`VcsSettingsModel.get_ui_settings`

    :return: a list with the enabled hook classes. The order is not guaranteed.
    :rtype: list
    """
    # mapping of known hook keys to their short class names
    hook_names = {
        RhodeCodeUi.HOOK_PUSH: 'push',
        RhodeCodeUi.HOOK_PULL: 'pull',
        RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size',
    }
    # only active entries from the 'hooks' section count; unknown keys are ignored
    return [
        hook_names[key]
        for section, key, value, active in ui_settings
        if section == 'hooks' and active and key in hook_names
    ]
509 509
510 510
def set_rhodecode_config(config):
    """
    Updates pyramid config with new settings from database

    :param config: mutable mapping (pyramid registry settings) to update in place
    """
    from rhodecode.model.settings import SettingsModel

    for key, value in SettingsModel().get_all_settings().items():
        config[key] = value
522 522
523 523
def get_rhodecode_realm():
    """
    Return the rhodecode realm from database.
    """
    from rhodecode.model.settings import SettingsModel

    realm_setting = SettingsModel().get_setting_by_name('realm')
    return safe_str(realm_setting.app_settings_value)
531 531
532 532
def get_rhodecode_repo_store_path():
    """
    Returns the base path. The base path is the filesystem path which points
    to the repository store.
    """
    import rhodecode

    # resolved from the runtime config loaded at application startup
    return rhodecode.CONFIG['repo_store.path']
541 541
542 542
def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param path: full path to repository
    :return: the deepest (direct parent) RepoGroup, or None for a root-level repo
    """
    from rhodecode.model.repo_group import RepoGroupModel
    sa = meta.Session()
    groups = path.split(Repository.NAME_SEP)
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = RepoGroupModel(sa)
    owner = User.get_first_super_admin()
    for lvl, group_name in enumerate(groups):
        # rebuild the full group path up to this nesting level
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s',
                      lvl, group_name)
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            group.user = owner
            sa.add(group)
            perm_obj = rgm._create_default_perms(group)
            sa.add(perm_obj)
            # flush (not commit) so the next level can reference this group
            sa.flush()

        parent = group
    return group
583 583
584 584
def repo2db_mapper(initial_repo_list, force_hooks_rebuild=False):
    """
    maps all repos given in initial_repo_list, non-existing repositories
    are created

    :param initial_repo_list: mapping of repo name -> scanned vcs repo object
        (as produced by the repo-storage scanning helpers)
    :param force_hooks_rebuild: when True, re-install vcs hooks even if already present
    :return: tuple ``(added, errors)`` — names of newly created repos, and
        human-readable error strings for repos where a sync step failed
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.settings import SettingsModel

    sa = meta.Session()
    repo_model = RepoModel()
    user = User.get_first_super_admin()
    added = []
    errors = []

    # creation defaults
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in list(initial_repo_list.items()):
        # ensure all parent repo groups of this repo exist in the database
        group = map_groups(name)
        str_name = safe_str(name)
        db_repo = repo_model.get_by_repo_name(str_name)

        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository `%s` not found in the database, creating now', name)
            added.append(name)
            desc = repo.description if repo.description != 'unknown' else f'{name} repository'

            db_repo = repo_model._create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repo_group=getattr(group, 'group_id', None),
                owner=user,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private,
                state=Repository.STATE_CREATED
            )
            sa.commit()

        # each sync step below is isolated: on failure the error is recorded
        # and we continue with the next repo instead of aborting the whole scan
        try:
            config = db_repo._config
            config.set('extensions', 'largefiles', '')
            scm_repo = db_repo.scm_instance(config=config)
        except Exception:
            log.error(traceback.format_exc())
            errors.append(f'getting vcs instance for {name} failed')
            continue

        try:
            db_repo.update_commit_cache(recursive=False)
        except Exception:
            log.error(traceback.format_exc())
            errors.append(f'update_commit_cache for {name} failed')
            continue

        try:
            scm_repo.install_hooks(force=force_hooks_rebuild)
        except Exception:
            log.error(traceback.format_exc())
            errors.append(f'install_hooks for {name} failed')
            continue

        try:
            # we added that repo just now, and make sure we updated server info
            if db_repo.repo_type == 'git':
                # update repository server-info
                log.debug('Running update server info')
                scm_repo._update_server_info(force=True)
        except Exception:
            log.error(traceback.format_exc())
            errors.append(f'update_server_info for {name} failed')
            continue

    return added, errors
666
def repo2db_cleanup(skip_repos=None, skip_groups=None):
    """
    Remove database entries for repositories and repository groups that no
    longer exist on the filesystem. Only db records are deleted; repository
    data on disk is never touched (fs_remove=False).

    :param skip_repos: optional collection of repo names to leave untouched
    :param skip_groups: optional collection of repo group names to leave untouched
    :return: tuple ``(removed, errors)`` — names removed, and names whose
        removal failed
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.repo_group import RepoGroupModel

    sa = meta.Session()
    removed = []
    errors = []

    all_repos = Repository.execute(
        Repository.select(Repository)\
        .order_by(Repository.repo_name)
    ).scalars()

    # remove from database those repositories that are not in the filesystem
    for db_repo in all_repos:
        db_repo_name = db_repo.repo_name
        if skip_repos and db_repo_name in skip_repos:
            log.debug('Skipping repo `%s`', db_repo_name)
            continue
        # a repo whose vcs backend cannot be instantiated is treated as gone from disk
        try:
            instance = db_repo.scm_instance()
        except Exception:
            instance = None

        if not instance:
            log.debug("Removing non-existing repository found in db `%s`", db_repo_name)
            try:
                RepoModel(sa).delete(db_repo, forks='detach', fs_remove=False, call_events=False)
                sa.commit()
                removed.append(db_repo_name)
            except Exception:
                # don't hold further removals on error
                log.error(traceback.format_exc())
                sa.rollback()
                errors.append(db_repo_name)

    # remove from database those repository groups that are not in the
    # filesystem due to parent child relationships we need to delete them
    # in a specific order of most nested first
    all_groups = RepoGroup.execute(
        RepoGroup.select(RepoGroup.group_name)\
        .order_by(RepoGroup.group_name)
    ).scalars().all()

    def nested_sort(gr):
        # deeper paths (more '/' separators) sort first so children are deleted before parents
        return len(gr.split('/'))

    for group_name in sorted(all_groups, key=nested_sort, reverse=True):
        if skip_groups and group_name in skip_groups:
            log.debug('Skipping repo group `%s`', group_name)
            continue

        repo_group = RepoGroup.get_by_group_name(group_name)

        # skip groups that still have children, or when the filesystem check
        # fails — presumably the path still exists on disk; confirm against
        # RepoGroupModel.check_exist_filesystem semantics
        if repo_group.children.all() or not RepoGroupModel().check_exist_filesystem(group_name=group_name, exc_on_failure=False):
            continue

        log.info('Removing non-existing repository group found in db `%s`', group_name)

        try:
            RepoGroupModel(sa).delete(group_name, fs_remove=False, call_events=False)
            sa.commit()
            removed.append(group_name)
        except Exception:
            # don't hold further removals on error
            log.exception('Unable to remove repository group `%s`',group_name)
            sa.rollback()
            errors.append(group_name)

    return removed, errors
738
706 739
def deep_reload_package(package_name):
    """
    Deeply reload a package: evict it and every submodule from the
    ``sys.modules`` cache, then import it again from scratch.

    :param package_name: dotted name of the package to reload
    :return: the freshly imported package module
    """
    prefix = package_name + "."
    # snapshot first; mutating sys.modules while iterating it is unsafe
    stale = [mod for mod in sys.modules if mod == package_name or mod.startswith(prefix)]
    for mod in stale:
        del sys.modules[mod]
        log.debug(f"Removed module from cache: {mod}")

    # Re-import the package
    reloaded = importlib.import_module(package_name)
    log.debug(f"Re-imported package: {package_name}")

    return reloaded
723 756
def load_rcextensions(root_path):
    """
    Import the optional ``rcextensions`` package from *root_path* and register
    it as ``rhodecode.EXTENSIONS``; also merge its extra lexer mappings.

    :param root_path: directory expected to contain an ``rcextensions`` package
    """
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path)
    # if the path was already on sys.path a stale 'rcextensions' module may be
    # cached — in that case we deep-reload it after importing below
    deep_reload = path in sys.path
    sys.path.insert(0, path)

    try:
        rcextensions = __import__('rcextensions', fromlist=[''])
    except ImportError:
        # only warn when the package directory exists but failed to import
        if os.path.isdir(os.path.join(path, 'rcextensions')):
            log.warning('Unable to load rcextensions from %s', path)
        rcextensions = None

    if rcextensions:
        if deep_reload:
            rcextensions = deep_reload_package('rcextensions')
        log.info('Loaded rcextensions from %s...', rcextensions)
        rhodecode.EXTENSIONS = rcextensions

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(
            getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
748 781
749 782
def get_custom_lexer(extension):
    """
    Return a custom pygments lexer for *extension* if one is defined in the
    rcextensions module, otherwise None.
    """
    import rhodecode
    from pygments import lexers

    # custom override made by RhodeCode
    if extension == 'mako':
        return lexers.get_lexer_by_name('html+mako')

    # check if we didn't define this extension as other lexer
    extra_lexers = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
    if extra_lexers and extension in extra_lexers:
        return lexers.get_lexer_by_name(extra_lexers[extension])
767 800
768 801
769 802 #==============================================================================
770 803 # TEST FUNCTIONS AND CREATORS
771 804 #==============================================================================
def create_test_index(repo_location, config):
    """
    Extract the default search index used by the test suite next to the
    configured ``search.location``.
    """
    try:
        import rc_testdata
    except ImportError:
        raise ImportError('Failed to import rc_testdata, '
                          'please make sure this package is installed from requirements_test.txt')

    index_location = os.path.dirname(config['search.location'])
    rc_testdata.extract_search_index('vcs_search_index', index_location)
784 817
785 818
def create_test_directory(test_path):
    """
    Create test directory if it doesn't exist.
    """
    if os.path.isdir(test_path):
        return
    log.debug('Creating testdir %s', test_path)
    os.makedirs(test_path)
793 826
794 827
def create_test_database(test_path, config):
    """
    Makes a fresh database.

    :param test_path: root directory the test setup uses for generated content
    :param config: app config mapping providing db url and encryption key
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.lib.utils2 import get_encryption_key

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    enc_key = get_encryption_key(config)

    log.debug('making test db %s', dbconf)

    # force_ask bypasses interactive confirmation prompts during test setup
    dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
                        tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
    dbmanage.create_tables(override=True)
    dbmanage.set_db_version()
    # for tests dynamically set new root paths based on generated content
    dbmanage.create_settings(dbmanage.config_prompt(test_path))
    dbmanage.create_default_user()
    dbmanage.create_test_admin_and_users()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()
819 852
820 853
def create_test_repositories(test_path, config):
    """
    Creates test repositories in the temporary directory. Repositories are
    extracted from archives within the rc_testdata package.
    """
    try:
        import rc_testdata
    except ImportError:
        raise ImportError('Failed to import rc_testdata, '
                          'please make sure this package is installed from requirements_test.txt')

    from rhodecode.bootstrap import HG_REPO, GIT_REPO, SVN_REPO

    log.debug('making test vcs repositories at %s', test_path)

    rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
    rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))

    # Note: Subversion is in the process of being integrated with the system,
    # until we have a properly packed version of the test svn repository, this
    # tries to copy over the repo from a package "rc_testdata"
    svn_repo_path = rc_testdata.get_svn_repo_archive()
    with tarfile.open(svn_repo_path) as tar:
        # trusted, bundled test-data archive; extractall() without a member
        # filter would be unsafe on untrusted tarballs
        tar.extractall(jn(test_path, SVN_REPO))
845 878
846 879
def password_changed(auth_user, session):
    """
    Return True when the password hash stored in the session no longer
    matches the current password of *auth_user*.
    """
    # Never report password change in case of default user or anonymous user.
    if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
        return False

    current_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
    stored_hash = session.get('rhodecode_user', {}).get('password', '')
    return current_hash != stored_hash
856 889
857 890
def read_opensource_licenses():
    """
    Return the opensource licenses mapping, loading it from the packaged
    ``config/licenses.json`` on first use and caching it module-wide.
    """
    global _license_cache

    if not _license_cache:
        raw_licenses = pkg_resources.resource_string(
            'rhodecode', 'config/licenses.json')
        _license_cache = json.loads(raw_licenses)

    return _license_cache
867 900
868 901
def generate_platform_uuid():
    """
    Generates platform UUID based on its name.
    Falls back to the literal 'UNDEFINED' if platform detection fails.
    """
    import platform

    try:
        return sha256_safe(':'.join([platform.platform()]))
    except Exception as e:
        log.error('Failed to generate host uuid: %s', e)
        return 'UNDEFINED'
881 914
882 915
def send_test_email(recipients, email_body='TEST EMAIL'):
    """
    Simple code for generating test emails.
    Usage::

        from rhodecode.lib import utils
        utils.send_test_email()

    :param recipients: list of recipient addresses
    :param email_body: body used for both the plaintext and html parts
    """
    # NOTE: previously also imported unused `run_task`; dropped
    from rhodecode.lib.celerylib import tasks

    subject = f'SUBJECT FROM: {socket.gethostname()}'
    # same text is sent as both the plaintext and the html body
    tasks.send_email(recipients, subject, email_body, email_body)
@@ -1,93 +1,94 b''
1 1 # Copyright (C) 2014-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 VCS Backends module
21 21 """
22 22
23 23 import os
24 24 import logging
25 25
26 26 from rhodecode import typing
27 27
28 28 from rhodecode.lib.vcs.conf import settings
29 29 from rhodecode.lib.vcs.exceptions import VCSError
30 30 from rhodecode.lib.vcs.utils.helpers import get_scm
31 31 from rhodecode.lib.vcs.utils.imports import import_class
32 32
33 33
34 34 log = logging.getLogger(__name__)
35 35
36 36
def get_vcs_instance(repo_path, *args, **kwargs) -> typing.VCSRepo | None:
    """
    Given a path to a repository an instance of the corresponding vcs backend
    repository class is created and returned. If no repository can be found
    for the path it returns None. Arguments and keyword arguments are passed
    to the vcs backend repository class.

    The special kwarg ``_vcs_alias`` forces a backend instead of detecting it
    via :func:`get_scm`.
    """
    from rhodecode.lib.utils2 import safe_str

    explicit_vcs_alias = kwargs.pop('_vcs_alias', None)
    try:
        vcs_alias = safe_str(explicit_vcs_alias or get_scm(repo_path)[0])
        log.debug(
            'Creating instance of %s repository from %s', vcs_alias,
            safe_str(repo_path))
        backend = get_backend(vcs_alias)

        if explicit_vcs_alias:
            # do final verification of existence of the path, this does the
            # same as get_scm() call which we skip in explicit_vcs_alias
            if not os.path.isdir(repo_path):
                raise VCSError(f"Given path {repo_path} is not a directory")
    except VCSError:
        # fix: the two concatenated literals previously rendered as
        # "...filesystem.Run cleanup..." — missing a separating space
        log.exception(
            'Perhaps this repository is in db and not in filesystem. '
            'Run cleanup filesystem option from admin settings under Remap and rescan'
        )
        return None

    return backend(repo_path=repo_path, *args, **kwargs)
67 68
68 69
def get_backend(alias) -> typing.VCSRepoClass:
    """
    Returns ``Repository`` class identified by the given alias or raises
    VCSError if alias is not recognized or backend class cannot be imported.
    """
    if alias not in settings.BACKENDS:
        raise VCSError(
            f"Given alias '{alias}' is not recognized! "
            f"Allowed aliases:{settings.BACKENDS.keys()}")
    return import_class(settings.BACKENDS[alias])
81 82
82 83
def get_supported_backends():
    """
    Returns list of aliases of supported backends.
    """
    # view over the configured backend registry, e.g. 'hg', 'git', 'svn'
    return settings.BACKENDS.keys()
88 89
89 90
def get_vcsserver_service_data():
    # thin proxy to the connected vcsserver's service-data endpoint
    from rhodecode.lib.vcs import connection
    return connection.Service.get_vcsserver_service_data()
93 94
@@ -1,1212 +1,1219 b''
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import re
21 21 import shutil
22 22 import time
23 23 import logging
24 24 import traceback
25 25 import datetime
26 26
27 27 from pyramid.threadlocal import get_current_request
28 28 from sqlalchemy.orm import aliased
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode import events
32 32 from rhodecode.lib.auth import HasUserGroupPermissionAny
33 33 from rhodecode.lib.caching_query import FromCache
34 34 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError, AttachedArtifactsError
35 35 from rhodecode.lib import hooks_base
36 36 from rhodecode.lib.user_log_filter import user_log_filter
37 37 from rhodecode.lib.utils import make_db_config
38 38 from rhodecode.lib.utils2 import (
39 39 safe_str, remove_prefix, obfuscate_url_pw,
40 40 get_current_rhodecode_user, safe_int, action_logger_generic)
41 41 from rhodecode.lib.vcs.backends import get_backend
42 42 from rhodecode.lib.vcs.nodes import NodeKind
43 43 from rhodecode.model import BaseModel
44 44 from rhodecode.model.db import (
45 45 _hash_key, func, case, joinedload, or_, in_filter_generator,
46 46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 49 from rhodecode.model.permission import PermissionModel
50 50 from rhodecode.model.settings import VcsSettingsModel
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
55 55 class RepoModel(BaseModel):
56 56
57 57 cls = Repository
58 58
59 59 def _get_user_group(self, users_group):
60 60 return self._get_instance(UserGroup, users_group,
61 61 callback=UserGroup.get_by_group_name)
62 62
63 63 def _get_repo_group(self, repo_group):
64 64 return self._get_instance(RepoGroup, repo_group,
65 65 callback=RepoGroup.get_by_group_name)
66 66
    def _create_default_perms(self, repository, private):
        """
        Build (without persisting) the default UserRepoToPerm row for the
        default user on *repository*; private repos always get 'repository.none'.
        """
        # create default permission
        default = 'repository.read'
        def_user = User.get_default_user()
        # the default user's own repository.* permission overrides the baseline
        for p in def_user.user_perms:
            if p.permission.permission_name.startswith('repository.'):
                default = p.permission.permission_name
                break

        default_perm = 'repository.none' if private else default

        repo_to_perm = UserRepoToPerm()
        repo_to_perm.permission = Permission.get_by_key(default_perm)

        repo_to_perm.repository = repository
        repo_to_perm.user = def_user

        # caller is responsible for adding this object to the session
        return repo_to_perm
85 85
86 86 def get(self, repo_id):
87 87 repo = self.sa.query(Repository) \
88 88 .filter(Repository.repo_id == repo_id)
89 89
90 90 return repo.scalar()
91 91
    def get_repo(self, repository):
        # thin wrapper; accepts a Repository instance, id or name
        return self._get_repo(repository)
94 94
95 95 def get_by_repo_name(self, repo_name, cache=False):
96 96 repo = self.sa.query(Repository) \
97 97 .filter(Repository.repo_name == repo_name)
98 98
99 99 if cache:
100 100 name_key = _hash_key(repo_name)
101 101 repo = repo.options(
102 102 FromCache("sql_cache_short", f"get_repo_{name_key}"))
103 103 return repo.scalar()
104 104
105 105 def _extract_id_from_repo_name(self, repo_name):
106 106 if repo_name.startswith('/'):
107 107 repo_name = repo_name.lstrip('/')
108 108 by_id_match = re.match(r'^_(\d+)', repo_name)
109 109 if by_id_match:
110 110 return by_id_match.groups()[0]
111 111
    def get_repo_by_id(self, repo_name):
        """
        Extracts repo_name by id from special urls.
        Example url is _11/repo_name

        :param repo_name:
        :return: repo object if matched else None
        """
        _repo_id = None
        try:
            _repo_id = self._extract_id_from_repo_name(repo_name)
            if _repo_id:
                return self.get(_repo_id)
        except Exception:
            log.exception('Failed to extract repo_name from URL')
            # roll back only if the failure happened after a db lookup started
            if _repo_id:
                Session().rollback()

        return None
131 131
132 132 def get_repos_for_root(self, root, traverse=False):
133 133 if traverse:
134 134 like_expression = u'{}%'.format(safe_str(root))
135 135 repos = Repository.query().filter(
136 136 Repository.repo_name.like(like_expression)).all()
137 137 else:
138 138 if root and not isinstance(root, RepoGroup):
139 139 raise ValueError(
140 140 'Root must be an instance '
141 141 'of RepoGroup, got:{} instead'.format(type(root)))
142 142 repos = Repository.query().filter(Repository.group == root).all()
143 143 return repos
144 144
145 145 def get_url(self, repo, request=None, permalink=False):
146 146 if not request:
147 147 request = get_current_request()
148 148
149 149 if not request:
150 150 return
151 151
152 152 if permalink:
153 153 return request.route_url(
154 154 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
155 155 else:
156 156 return request.route_url(
157 157 'repo_summary', repo_name=safe_str(repo.repo_name))
158 158
159 159 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
160 160 if not request:
161 161 request = get_current_request()
162 162
163 163 if not request:
164 164 return
165 165
166 166 if permalink:
167 167 return request.route_url(
168 168 'repo_commit', repo_name=safe_str(repo.repo_id),
169 169 commit_id=commit_id)
170 170
171 171 else:
172 172 return request.route_url(
173 173 'repo_commit', repo_name=safe_str(repo.repo_name),
174 174 commit_id=commit_id)
175 175
    def get_repo_log(self, repo, filter_term):
        """
        Return a UserLog query for *repo* (matched by id or by recorded name),
        newest entries first, narrowed by *filter_term*.
        """
        repo_log = UserLog.query()\
            .filter(or_(UserLog.repository_id == repo.repo_id,
                        UserLog.repository_name == repo.repo_name))\
            .options(joinedload(UserLog.user))\
            .options(joinedload(UserLog.repository))\
            .order_by(UserLog.action_date.desc())

        repo_log = user_log_filter(repo_log, filter_term)
        return repo_log
186 186
187 187 @classmethod
188 188 def update_commit_cache(cls, repositories=None):
189 189 if not repositories:
190 190 repositories = Repository.getAll()
191 191 for repo in repositories:
192 192 repo.update_commit_cache()
193 193
    def get_repos_as_dict(self, repo_list=None, admin=False,
                          super_user_actions=False, short_name=None):
        """
        Render *repo_list* rows into dicts for the repositories data table.

        :param repo_list: iterable of repo rows (raw columns incl. _changeset_cache)
        :param admin: adds the admin 'action' cell and affects name/date rendering
        :param super_user_actions: forwarded to the 'repo_actions' renderer
        :param short_name: force short/long repo name rendering; None derives it
            from *admin*
        :return: list of row dicts of rendered HTML fragments
        """
        _render = get_current_request().get_partial_renderer(
            'rhodecode:templates/data_table/_dt_elements.mako')
        c = _render.get_call_context()
        h = _render.get_helpers()

        # each helper below delegates to a named macro in _dt_elements.mako
        def quick_menu(repo_name):
            return _render('quick_menu', repo_name)

        def repo_lnk(name, rtype, rstate, private, archived, fork_repo_name):
            if short_name is not None:
                short_name_var = short_name
            else:
                short_name_var = not admin
            return _render('repo_name', name, rtype, rstate, private, archived, fork_repo_name,
                           short_name=short_name_var, admin=False)

        def last_change(last_change):
            # for admin views, shift naive datetimes by the local UTC offset
            if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
                ts = time.time()
                utc_offset = (datetime.datetime.fromtimestamp(ts)
                              - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
                last_change = last_change + datetime.timedelta(seconds=utc_offset)

            return _render("last_change", last_change)

        def rss_lnk(repo_name):
            return _render("rss", repo_name)

        def atom_lnk(repo_name):
            return _render("atom", repo_name)

        def last_rev(repo_name, cs_cache):
            return _render('revision', repo_name, cs_cache.get('revision'),
                           cs_cache.get('raw_id'), cs_cache.get('author'),
                           cs_cache.get('message'), cs_cache.get('date'))

        def desc(desc):
            return _render('repo_desc', desc, c.visual.stylify_metatags)

        def state(repo_state):
            return _render("repo_state", repo_state)

        def repo_actions(repo_name):
            return _render('repo_actions', repo_name, super_user_actions)

        def user_profile(username):
            return _render('user_profile', username)

        repos_data = []
        for repo in repo_list:
            # NOTE(marcink): because we use only raw column we need to load it like that
            changeset_cache = Repository._load_changeset_cache(
                repo.repo_id, repo._changeset_cache)

            row = {
                "menu": quick_menu(repo.repo_name),

                "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
                                 repo.private, repo.archived, repo.fork_repo_name),

                "desc": desc(h.escape(repo.description)),

                "last_change": last_change(repo.updated_on),

                "last_changeset": last_rev(repo.repo_name, changeset_cache),
                "last_changeset_raw": changeset_cache.get('revision'),

                "owner": user_profile(repo.owner_username),

                "state": state(repo.repo_state),
                "rss": rss_lnk(repo.repo_name),
                "atom": atom_lnk(repo.repo_name),
            }
            if admin:
                row.update({
                    "action": repo_actions(repo.repo_name),
                })
            repos_data.append(row)

        return repos_data
277 277
278 278 def get_repos_data_table(
279 279 self, draw, start, limit,
280 280 search_q, order_by, order_dir,
281 281 auth_user, repo_group_id):
282 282 from rhodecode.model.scm import RepoList
283 283
284 284 _perms = ['repository.read', 'repository.write', 'repository.admin']
285 285
286 286 repos = Repository.query() \
287 287 .filter(Repository.group_id == repo_group_id) \
288 288 .all()
289 289 auth_repo_list = RepoList(
290 290 repos, perm_set=_perms,
291 291 extra_kwargs=dict(user=auth_user))
292 292
293 293 allowed_ids = [-1]
294 294 for repo in auth_repo_list:
295 295 allowed_ids.append(repo.repo_id)
296 296
297 297 repos_data_total_count = Repository.query() \
298 298 .filter(Repository.group_id == repo_group_id) \
299 299 .filter(or_(
300 300 # generate multiple IN to fix limitation problems
301 301 *in_filter_generator(Repository.repo_id, allowed_ids))
302 302 ) \
303 303 .count()
304 304
305 305 RepoFork = aliased(Repository)
306 306 OwnerUser = aliased(User)
307 307 base_q = Session.query(
308 308 Repository.repo_id,
309 309 Repository.repo_name,
310 310 Repository.description,
311 311 Repository.repo_type,
312 312 Repository.repo_state,
313 313 Repository.private,
314 314 Repository.archived,
315 315 Repository.updated_on,
316 316 Repository._changeset_cache,
317 317 RepoFork.repo_name.label('fork_repo_name'),
318 318 OwnerUser.username.label('owner_username'),
319 319 ) \
320 320 .filter(Repository.group_id == repo_group_id) \
321 321 .filter(or_(
322 322 # generate multiple IN to fix limitation problems
323 323 *in_filter_generator(Repository.repo_id, allowed_ids))
324 324 ) \
325 325 .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id) \
326 326 .join(OwnerUser, Repository.user_id == OwnerUser.user_id)
327 327
328 328 repos_data_total_filtered_count = base_q.count()
329 329
330 330 sort_defined = False
331 331 if order_by == 'repo_name':
332 332 sort_col = func.lower(Repository.repo_name)
333 333 sort_defined = True
334 334 elif order_by == 'user_username':
335 335 sort_col = User.username
336 336 else:
337 337 sort_col = getattr(Repository, order_by, None)
338 338
339 339 if sort_defined or sort_col:
340 340 if order_dir == 'asc':
341 341 sort_col = sort_col.asc()
342 342 else:
343 343 sort_col = sort_col.desc()
344 344
345 345 base_q = base_q.order_by(sort_col)
346 346 base_q = base_q.offset(start).limit(limit)
347 347
348 348 repos_list = base_q.all()
349 349
350 350 repos_data = RepoModel().get_repos_as_dict(
351 351 repo_list=repos_list, admin=False)
352 352
353 353 data = ({
354 354 'draw': draw,
355 355 'data': repos_data,
356 356 'recordsTotal': repos_data_total_count,
357 357 'recordsFiltered': repos_data_total_filtered_count,
358 358 })
359 359 return data
360 360
361 361 def _get_defaults(self, repo_name):
362 362 """
363 363 Gets information about repository, and returns a dict for
364 364 usage in forms
365 365
366 366 :param repo_name:
367 367 """
368 368
369 369 repo_info = Repository.get_by_repo_name(repo_name)
370 370
371 371 if repo_info is None:
372 372 return None
373 373
374 374 defaults = repo_info.get_dict()
375 375 defaults['repo_name'] = repo_info.just_name
376 376
377 377 groups = repo_info.groups_with_parents
378 378 parent_group = groups[-1] if groups else None
379 379
380 380 # we use -1 as this is how in HTML, we mark an empty group
381 381 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
382 382
383 383 keys_to_process = (
384 384 {'k': 'repo_type', 'strip': False},
385 385 {'k': 'repo_enable_downloads', 'strip': True},
386 386 {'k': 'repo_description', 'strip': True},
387 387 {'k': 'repo_enable_locking', 'strip': True},
388 388 {'k': 'repo_landing_rev', 'strip': True},
389 389 {'k': 'clone_uri', 'strip': False},
390 390 {'k': 'push_uri', 'strip': False},
391 391 {'k': 'repo_private', 'strip': True},
392 392 {'k': 'repo_enable_statistics', 'strip': True}
393 393 )
394 394
395 395 for item in keys_to_process:
396 396 attr = item['k']
397 397 if item['strip']:
398 398 attr = remove_prefix(item['k'], 'repo_')
399 399
400 400 val = defaults[attr]
401 401 if item['k'] == 'repo_landing_rev':
402 402 val = ':'.join(defaults[attr])
403 403 defaults[item['k']] = val
404 404 if item['k'] == 'clone_uri':
405 405 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
406 406 if item['k'] == 'push_uri':
407 407 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
408 408
409 409 # fill owner
410 410 if repo_info.user:
411 411 defaults.update({'user': repo_info.user.username})
412 412 else:
413 413 replacement_user = User.get_first_super_admin().username
414 414 defaults.update({'user': replacement_user})
415 415
416 416 return defaults
417 417
    def update(self, repo, **kwargs):
        """
        Update repository attributes from keyword arguments and persist them.

        Handles owner change, group move, rename (including filesystem
        rename), private-flag handling, landing rev, and extra fields.
        Triggers a permission-cache flush for affected users.

        :param repo: repo object, repository name or repository id
        :param kwargs: form-style keys, e.g. ``repo_description``, ``user``,
            ``repo_group``, ``repo_name``, ``repo_private``, plus prefixed
            extra fields
        :returns: the updated Repository instance
        :raises Exception: re-raises any error after logging it
        """
        try:
            cur_repo = self._get_repo(repo)
            source_repo_name = cur_repo.repo_name

            affected_user_ids = []
            if 'user' in kwargs:
                old_owner_id = cur_repo.user.user_id
                new_owner = User.get_by_username(kwargs['user'])
                cur_repo.user = new_owner

                if old_owner_id != new_owner.user_id:
                    affected_user_ids = [new_owner.user_id, old_owner_id]

            if 'repo_group' in kwargs:
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)

            # (strip_flag, form_key): strip_flag=1 means the model attribute
            # is the key without the `repo_` prefix
            update_keys = [
                (1, 'repo_description'),
                (1, 'repo_landing_rev'),
                (1, 'repo_private'),
                (1, 'repo_enable_downloads'),
                (1, 'repo_enable_locking'),
                (1, 'repo_enable_statistics'),
                (0, 'clone_uri'),
                (0, 'push_uri'),
                (0, 'fork_id')
            ]
            for strip, k in update_keys:
                if k in kwargs:
                    val = kwargs[k]
                    if strip:
                        k = remove_prefix(k, 'repo_')

                    setattr(cur_repo, k, val)

            new_name = source_repo_name
            if 'repo_name' in kwargs:
                new_name = cur_repo.get_new_name(kwargs['repo_name'])
                cur_repo.repo_name = new_name

            if 'repo_private' in kwargs:
                # if private flag is set to True, reset default permission to NONE
                set_private_to = kwargs.get('repo_private')
                if set_private_to:
                    EMPTY_PERM = 'repository.none'
                    RepoModel().grant_user_permission(
                        repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
                    )
                # NOTE(review): `cur_repo.private` was already assigned from
                # kwargs in the loop above, so this only differs when the
                # model setter coerces the raw form value (e.g. str -> bool)
                # — TODO confirm this detects a real change as intended.
                if set_private_to != cur_repo.private:
                    # NOTE(dan): when the private mode changes, every user's
                    # permission cache must be flushed
                    affected_user_ids = User.get_all_user_ids()

            if kwargs.get('repo_landing_rev'):
                landing_rev_val = kwargs['repo_landing_rev']
                RepoModel().set_landing_rev(cur_repo, landing_rev_val)

            # handle extra fields (only pre-existing fields are updated)
            for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
                k = RepositoryField.un_prefix_key(field)
                ex_field = RepositoryField.get_by_key_name(
                    key=k, repo=cur_repo)
                if ex_field:
                    ex_field.field_value = kwargs[field]
                    self.sa.add(ex_field)

            self.sa.add(cur_repo)

            if source_repo_name != new_name:
                # rename repository on the filesystem to match the new db name
                self._rename_filesystem_repo(
                    old=source_repo_name, new=new_name)

            if affected_user_ids:
                PermissionModel().trigger_permission_flush(affected_user_ids)

            return cur_repo
        except Exception:
            log.error(traceback.format_exc())
            raise
500 500
    def _create_repo(self, repo_name, repo_type, description, owner,
                     private=False, clone_uri=None, repo_group=None,
                     landing_rev=None, fork_of=None,
                     copy_fork_permissions=False, enable_statistics=False,
                     enable_locking=False, enable_downloads=False,
                     copy_group_permissions=False,
                     state=Repository.STATE_PENDING):
        """
        Create repository inside database with PENDING state, this should be
        only executed by create() repo. With exception of importing existing
        repos
        """
        from rhodecode.model.scm import ScmModel

        owner = self._get_user(owner)
        fork_of = self._get_repo(fork_of)
        repo_group = self._get_repo_group(safe_int(repo_group))
        default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
        landing_rev = landing_rev or default_landing_ref

        try:
            repo_name = safe_str(repo_name)
            description = safe_str(description)
            # repo name is just a name of repository
            # while repo_name_full is a full qualified name that is combined
            # with name and path of group
            repo_name_full = repo_name
            repo_name = repo_name.split(Repository.NAME_SEP)[-1]

            new_repo = Repository()
            new_repo.repo_state = state
            new_repo.enable_statistics = False
            new_repo.repo_name = repo_name_full
            new_repo.repo_type = repo_type
            new_repo.user = owner
            new_repo.group = repo_group
            new_repo.description = description or repo_name
            new_repo.private = private
            new_repo.archived = False
            new_repo.clone_uri = clone_uri
            new_repo.landing_rev = landing_rev

            new_repo.enable_statistics = enable_statistics
            new_repo.enable_locking = enable_locking
            new_repo.enable_downloads = enable_downloads

            # locking policy is inherited from the parent group, if any
            if repo_group:
                new_repo.enable_locking = repo_group.enable_locking

            if fork_of:
                parent_repo = fork_of
                new_repo.fork = parent_repo

            events.trigger(events.RepoPreCreateEvent(new_repo))

            self.sa.add(new_repo)

            EMPTY_PERM = 'repository.none'
            if fork_of and copy_fork_permissions:
                # clone user and user-group permissions from the parent fork
                repo = fork_of
                user_perms = UserRepoToPerm.query() \
                    .filter(UserRepoToPerm.repository == repo).all()
                group_perms = UserGroupRepoToPerm.query() \
                    .filter(UserGroupRepoToPerm.repository == repo).all()

                for perm in user_perms:
                    UserRepoToPerm.create(
                        perm.user, new_repo, perm.permission)

                for perm in group_perms:
                    UserGroupRepoToPerm.create(
                        perm.users_group, new_repo, perm.permission)
                # in case we copy permissions and also set this repo to private
                # override the default user permission to make it a private repo
                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            elif repo_group and copy_group_permissions:
                # translate group-level permissions into repository-level ones
                user_perms = UserRepoGroupToPerm.query() \
                    .filter(UserRepoGroupToPerm.group == repo_group).all()

                group_perms = UserGroupRepoGroupToPerm.query() \
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()

                for perm in user_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)

                for perm in group_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)

                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            else:
                # no permission source to copy from: apply the defaults
                perm_obj = self._create_default_perms(new_repo, private)
                self.sa.add(perm_obj)

            # now automatically start following this repository as owner
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)

            # we need to flush here, in order to check if database won't
            # throw any exceptions, create filesystem dirs at the very end
            self.sa.flush()
            events.trigger(events.RepoCreateEvent(new_repo, actor=owner))
            return new_repo

        except Exception:
            log.error(traceback.format_exc())
            raise
618 618
619 619 def create(self, form_data, cur_user):
620 620 """
621 621 Create repository using celery tasks
622 622
623 623 :param form_data:
624 624 :param cur_user:
625 625 """
626 626 from rhodecode.lib.celerylib import tasks, run_task
627 627 return run_task(tasks.create_repo, form_data, cur_user)
628 628
    def update_permissions(self, repo, perm_additions=None, perm_updates=None,
                           perm_deletions=None, check_perms=True,
                           cur_user=None):
        """
        Apply permission changes (add/update/delete) on a repository.

        Each of `perm_additions`/`perm_updates`/`perm_deletions` is an
        iterable of ``(member_id, perm, member_type)`` tuples where
        ``member_type`` is 'user' or 'user_group'.

        :param repo: repo object, repository name or repository id
        :param perm_additions: permissions to add
        :param perm_updates: permissions to update
        :param perm_deletions: permissions to remove
        :param check_perms: when True, verify `cur_user` may alter the
            referenced user groups
        :param cur_user: acting user (used for the user-group checks)
        :returns: dict describing 'added'/'updated'/'deleted' entries and
            whether the default user's permission changed
        :raises ValueError: on an unknown ``member_type``
        """
        if not perm_additions:
            perm_additions = []
        if not perm_updates:
            perm_updates = []
        if not perm_deletions:
            perm_deletions = []

        # permissions required on a user group before we may (un)assign it
        req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

        changes = {
            'added': [],
            'updated': [],
            'deleted': [],
            'default_user_changed': None
        }

        repo = self._get_repo(repo)

        # update permissions
        for member_id, perm, member_type in perm_updates:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                if member_name == User.DEFAULT_USER:
                    # NOTE(dan): detect if we changed permissions for default user
                    perm_obj = self.sa.query(UserRepoToPerm) \
                        .filter(UserRepoToPerm.user_id == member_id) \
                        .filter(UserRepoToPerm.repository == repo) \
                        .scalar()
                    if perm_obj and perm_obj.permission.permission_name != perm:
                        changes['default_user_changed'] = True

                # this updates also current one if found
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))
            changes['updated'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})

        # set new permissions
        for member_id, perm, member_type in perm_additions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))

            changes['added'].append({'type': member_type, 'id': member_id,
                                     'name': member_name, 'new_perm': perm})
        # delete permissions
        for member_id, perm, member_type in perm_deletions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.revoke_user_permission(repo=repo, user=member_id)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.revoke_user_group_permission(
                        repo=repo, group_name=member_id)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))

            changes['deleted'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})
        return changes
720 720
721 721 def create_fork(self, form_data, cur_user):
722 722 """
723 723 Simple wrapper into executing celery task for fork creation
724 724
725 725 :param form_data:
726 726 :param cur_user:
727 727 """
728 728 from rhodecode.lib.celerylib import tasks, run_task
729 729 return run_task(tasks.create_repo_fork, form_data, cur_user)
730 730
731 731 def archive(self, repo):
732 732 """
733 733 Archive given repository. Set archive flag.
734 734
735 735 :param repo:
736 736 """
737 737 repo = self._get_repo(repo)
738 738 if repo:
739 739
740 740 try:
741 741 repo.archived = True
742 742 self.sa.add(repo)
743 743 self.sa.commit()
744 744 except Exception:
745 745 log.error(traceback.format_exc())
746 746 raise
747 747
748 def delete(self, repo, forks=None, pull_requests=None, artifacts=None, fs_remove=True, cur_user=None):
748 def delete(self, repo, forks=None, pull_requests=None, artifacts=None, fs_remove=True, cur_user=None, call_events=True):
749 749 """
750 750 Delete given repository, forks parameter defines what do do with
751 751 attached forks. Throws AttachedForksError if deleted repo has attached
752 752 forks
753 753
754 754 :param repo:
755 755 :param forks: str 'delete' or 'detach'
756 756 :param pull_requests: str 'delete' or None
757 757 :param artifacts: str 'delete' or None
758 758 :param fs_remove: remove(archive) repo from filesystem
759 759 """
760 760 if not cur_user:
761 761 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
762 762 repo = self._get_repo(repo)
763 if repo:
763 if not repo:
764 return False
765
764 766 if forks == 'detach':
765 767 for r in repo.forks:
766 768 r.fork = None
767 769 self.sa.add(r)
768 770 elif forks == 'delete':
769 771 for r in repo.forks:
770 772 self.delete(r, forks='delete')
771 773 elif [f for f in repo.forks]:
772 774 raise AttachedForksError()
773 775
774 776 # check for pull requests
775 777 pr_sources = repo.pull_requests_source
776 778 pr_targets = repo.pull_requests_target
777 779 if pull_requests != 'delete' and (pr_sources or pr_targets):
778 780 raise AttachedPullRequestsError()
779 781
780 782 artifacts_objs = repo.artifacts
781 783 if artifacts == 'delete':
782 784 for a in artifacts_objs:
783 785 self.sa.delete(a)
784 786 elif [a for a in artifacts_objs]:
785 787 raise AttachedArtifactsError()
786 788
787 789 old_repo_dict = repo.get_dict()
790 if call_events:
788 791 events.trigger(events.RepoPreDeleteEvent(repo))
792
789 793 try:
790 794 self.sa.delete(repo)
791 795 if fs_remove:
792 796 self._delete_filesystem_repo(repo)
793 797 else:
794 798 log.debug('skipping removal from filesystem')
795 799 old_repo_dict.update({
796 800 'deleted_by': cur_user,
797 801 'deleted_on': time.time(),
798 802 })
803 if call_events:
799 804 hooks_base.delete_repository(**old_repo_dict)
800 805 events.trigger(events.RepoDeleteEvent(repo))
801 806 except Exception:
802 807 log.error(traceback.format_exc())
803 808 raise
804 809
810 return True
811
805 812 def grant_user_permission(self, repo, user, perm):
806 813 """
807 814 Grant permission for user on given repository, or update existing one
808 815 if found
809 816
810 817 :param repo: Instance of Repository, repository_id, or repository name
811 818 :param user: Instance of User, user_id or username
812 819 :param perm: Instance of Permission, or permission_name
813 820 """
814 821 user = self._get_user(user)
815 822 repo = self._get_repo(repo)
816 823 permission = self._get_perm(perm)
817 824
818 825 # check if we have that permission already
819 826 obj = self.sa.query(UserRepoToPerm) \
820 827 .filter(UserRepoToPerm.user == user) \
821 828 .filter(UserRepoToPerm.repository == repo) \
822 829 .scalar()
823 830 if obj is None:
824 831 # create new !
825 832 obj = UserRepoToPerm()
826 833 obj.repository = repo
827 834 obj.user = user
828 835 obj.permission = permission
829 836 self.sa.add(obj)
830 837 log.debug('Granted perm %s to %s on %s', perm, user, repo)
831 838 action_logger_generic(
832 839 'granted permission: {} to user: {} on repo: {}'.format(
833 840 perm, user, repo), namespace='security.repo')
834 841 return obj
835 842
836 843 def revoke_user_permission(self, repo, user):
837 844 """
838 845 Revoke permission for user on given repository
839 846
840 847 :param repo: Instance of Repository, repository_id, or repository name
841 848 :param user: Instance of User, user_id or username
842 849 """
843 850
844 851 user = self._get_user(user)
845 852 repo = self._get_repo(repo)
846 853
847 854 obj = self.sa.query(UserRepoToPerm) \
848 855 .filter(UserRepoToPerm.repository == repo) \
849 856 .filter(UserRepoToPerm.user == user) \
850 857 .scalar()
851 858 if obj:
852 859 self.sa.delete(obj)
853 860 log.debug('Revoked perm on %s on %s', repo, user)
854 861 action_logger_generic(
855 862 'revoked permission from user: {} on repo: {}'.format(
856 863 user, repo), namespace='security.repo')
857 864
858 865 def grant_user_group_permission(self, repo, group_name, perm):
859 866 """
860 867 Grant permission for user group on given repository, or update
861 868 existing one if found
862 869
863 870 :param repo: Instance of Repository, repository_id, or repository name
864 871 :param group_name: Instance of UserGroup, users_group_id,
865 872 or user group name
866 873 :param perm: Instance of Permission, or permission_name
867 874 """
868 875 repo = self._get_repo(repo)
869 876 group_name = self._get_user_group(group_name)
870 877 permission = self._get_perm(perm)
871 878
872 879 # check if we have that permission already
873 880 obj = self.sa.query(UserGroupRepoToPerm) \
874 881 .filter(UserGroupRepoToPerm.users_group == group_name) \
875 882 .filter(UserGroupRepoToPerm.repository == repo) \
876 883 .scalar()
877 884
878 885 if obj is None:
879 886 # create new
880 887 obj = UserGroupRepoToPerm()
881 888
882 889 obj.repository = repo
883 890 obj.users_group = group_name
884 891 obj.permission = permission
885 892 self.sa.add(obj)
886 893 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
887 894 action_logger_generic(
888 895 'granted permission: {} to usergroup: {} on repo: {}'.format(
889 896 perm, group_name, repo), namespace='security.repo')
890 897
891 898 return obj
892 899
893 900 def revoke_user_group_permission(self, repo, group_name):
894 901 """
895 902 Revoke permission for user group on given repository
896 903
897 904 :param repo: Instance of Repository, repository_id, or repository name
898 905 :param group_name: Instance of UserGroup, users_group_id,
899 906 or user group name
900 907 """
901 908 repo = self._get_repo(repo)
902 909 group_name = self._get_user_group(group_name)
903 910
904 911 obj = self.sa.query(UserGroupRepoToPerm) \
905 912 .filter(UserGroupRepoToPerm.repository == repo) \
906 913 .filter(UserGroupRepoToPerm.users_group == group_name) \
907 914 .scalar()
908 915 if obj:
909 916 self.sa.delete(obj)
910 917 log.debug('Revoked perm to %s on %s', repo, group_name)
911 918 action_logger_generic(
912 919 'revoked permission from usergroup: {} on repo: {}'.format(
913 920 group_name, repo), namespace='security.repo')
914 921
915 922 def delete_stats(self, repo_name):
916 923 """
917 924 removes stats for given repo
918 925
919 926 :param repo_name:
920 927 """
921 928 repo = self._get_repo(repo_name)
922 929 try:
923 930 obj = self.sa.query(Statistics) \
924 931 .filter(Statistics.repository == repo).scalar()
925 932 if obj:
926 933 self.sa.delete(obj)
927 934 except Exception:
928 935 log.error(traceback.format_exc())
929 936 raise
930 937
931 938 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
932 939 field_type='str', field_desc=''):
933 940
934 941 repo = self._get_repo(repo_name)
935 942
936 943 new_field = RepositoryField()
937 944 new_field.repository = repo
938 945 new_field.field_key = field_key
939 946 new_field.field_type = field_type # python type
940 947 new_field.field_value = field_value
941 948 new_field.field_desc = field_desc
942 949 new_field.field_label = field_label
943 950 self.sa.add(new_field)
944 951 return new_field
945 952
946 953 def delete_repo_field(self, repo_name, field_key):
947 954 repo = self._get_repo(repo_name)
948 955 field = RepositoryField.get_by_key_name(field_key, repo)
949 956 if field:
950 957 self.sa.delete(field)
951 958
952 959 def set_landing_rev(self, repo, landing_rev_name):
953 960 if landing_rev_name.startswith('branch:'):
954 961 landing_rev_name = landing_rev_name.split('branch:')[-1]
955 962 scm_instance = repo.scm_instance()
956 963 if scm_instance:
957 964 return scm_instance._remote.set_head_ref(landing_rev_name)
958 965
    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
                                clone_uri=None, repo_store_location=None,
                                use_global_config=False, install_hooks=True):
        """
        makes repository on filesystem. It's group aware means it'll create
        a repository within a group, and alter the paths accordingly of
        group location

        :param repo_name: plain repository name, must not contain group parts
        :param repo_type: backend alias, e.g. 'git' or 'hg'
        :param repo_group: RepoGroup instance or group path string
        :param clone_uri: optional remote URL to clone from
        :param repo_store_location: override for the target path; when set,
            the group/name path composition is skipped
        :param use_global_config: when True, do not scope the db config to
            this repository
        :param install_hooks: install RhodeCode VCS hooks into the new repo
        :returns: the created backend repository object
        :raises ValueError: when `repo_name` contains group separators
        :raises Exception: when the target path is already a repo or a group
        """
        from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
        from rhodecode.model.scm import ScmModel

        if Repository.NAME_SEP in repo_name:
            raise ValueError(
                'repo_name must not contain groups got `%s`' % repo_name)

        if isinstance(repo_group, RepoGroup):
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
        else:
            new_parent_path = repo_group or ''

        if repo_store_location:
            _paths = [repo_store_location]
        else:
            _paths = [self.repos_path, new_parent_path, repo_name]
        # we need to make it str for mercurial
        repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))

        # check if this path is not a repository
        if is_valid_repo(repo_path, self.repos_path):
            raise Exception(f'This path {repo_path} is a valid repository')

        # check if this path is a group
        if is_valid_repo_group(repo_path, self.repos_path):
            raise Exception(f'This path {repo_path} is a valid group')

        log.info('creating repo %s in %s from url: `%s`',
                 repo_name, safe_str(repo_path),
                 obfuscate_url_pw(clone_uri))

        backend = get_backend(repo_type)

        # scope the db config to the repo unless global config was requested
        config_repo = None if use_global_config else repo_name
        if config_repo and new_parent_path:
            config_repo = Repository.NAME_SEP.join(
                (new_parent_path, config_repo))
        config = make_db_config(clear_session=False, repo=config_repo)
        config.set('extensions', 'largefiles', '')

        # patch and reset hooks section of UI config to not run any
        # hooks on creating remote repo
        config.clear_section('hooks')

        # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
        if repo_type == 'git':
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri, bare=True,
                with_wire={"cache": False})
        else:
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri,
                with_wire={"cache": False})

        if install_hooks:
            repo.install_hooks()

        log.debug('Created repo %s with %s backend',
                  safe_str(repo_name), safe_str(repo_type))
        return repo
1033 1040
1034 1041 def _rename_filesystem_repo(self, old, new):
1035 1042 """
1036 1043 renames repository on filesystem
1037 1044
1038 1045 :param old: old name
1039 1046 :param new: new name
1040 1047 """
1041 1048 log.info('renaming repo from %s to %s', old, new)
1042 1049
1043 1050 old_path = os.path.join(self.repos_path, old)
1044 1051 new_path = os.path.join(self.repos_path, new)
1045 1052 if os.path.isdir(new_path):
1046 1053 raise Exception(
1047 1054 'Was trying to rename to already existing dir %s' % new_path
1048 1055 )
1049 1056 shutil.move(old_path, new_path)
1050 1057
    def _delete_filesystem_repo(self, repo):
        """
        removes repo from filesystem, the removal is actually made by
        added rm__ prefix into dir, and rename internal .hg/.git dirs so this
        repository is no longer valid for rhodecode, can be undeleted later on
        by reverting the renames on this repository

        :param repo: repo object
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        repo_group = repo.group
        log.info("delete_filesystem_repo: removing repository %s", rm_path)
        # disable hg/git internal that it doesn't get detected as repo
        alias = repo.repo_type

        config = make_db_config(clear_session=False)
        config.set('extensions', 'largefiles', '')
        bare = getattr(repo.scm_instance(config=config), 'bare', False)

        # skip this for bare git repos
        if not bare:
            # disable VCS repo by renaming the control dir (.hg/.git) so the
            # directory no longer registers as a valid repository
            vcs_path = os.path.join(rm_path, '.%s' % alias)
            if os.path.exists(vcs_path):
                shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

        # timestamped rm__ prefix makes removal paths unique and allows
        # manual undelete later
        _now = datetime.datetime.now()
        _ms = str(_now.microsecond).rjust(6, '0')
        _d = 'rm__{}__{}'.format(_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                                 repo.just_name)
        if repo_group:
            # if repository is in group, prefix the removal path with the group
            args = repo_group.full_path_splitted + [_d]
            _d = os.path.join(*args)

        if os.path.isdir(rm_path):
            shutil.move(rm_path, os.path.join(self.repos_path, _d))

        # finally cleanup diff-cache if it exists
        cached_diffs_dir = repo.cached_diffs_dir
        if os.path.isdir(cached_diffs_dir):
            shutil.rmtree(cached_diffs_dir)
1093 1100
1094 1101
class ReadmeFinder:
    """
    Utility which knows how to find a readme for a specific commit.

    The main idea is that this is a configurable algorithm. When creating an
    instance you can define parameters, currently only the `default_renderer`.
    Based on this configuration the method :meth:`search` behaves slightly
    different.
    """

    readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
    path_re = re.compile(r'^docs?', re.IGNORECASE)

    default_priorities = {
        None: 0,
        '.rst': 1,
        '.md': 1,
        '.rest': 2,
        '.mkdn': 2,
        '.text': 2,
        '.txt': 3,
        '.mdown': 3,
        '.markdown': 4,
    }

    path_priority = {
        'doc': 0,
        'docs': 1,
    }

    FALLBACK_PRIORITY = 99

    RENDERER_TO_EXTENSION = {
        'rst': ['.rst', '.rest'],
        'markdown': ['.md', 'mkdn', '.mdown', '.markdown'],
    }

    def __init__(self, default_renderer=None):
        self._default_renderer = default_renderer
        # extensions the preferred renderer understands rank above all others
        self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
            default_renderer, [])

    def search(self, commit, path='/'):
        """
        Return the best readme node under `path` in `commit`, or None.
        """
        # only directories can contain a readme
        if commit.get_node(path).kind != NodeKind.DIR:
            return None

        nodes = commit.get_nodes(path)
        readme_matches = self._sort_according_to_priority(
            self._match_readmes(nodes))
        if readme_matches:
            return readme_matches[0].node

        # nothing here; recurse into doc-like subdirectories, best first
        for candidate in self._sort_paths_according_to_priority(
                self._match_paths(nodes)):
            found = self.search(commit, path=candidate)
            if found:
                return found

        return None

    def _match_readmes(self, nodes):
        # yield a ReadmeMatch for every file whose basename looks like a readme
        for node in nodes:
            if not node.is_file():
                continue
            basename = node.path.rsplit('/', 1)[-1]
            m = self.readme_re.match(basename)
            if m is not None:
                yield ReadmeMatch(node, m, self._priority(m.group(1)))

    def _match_paths(self, nodes):
        # paths of directories whose name looks like a docs folder
        return (node.path for node in nodes
                if node.is_dir() and self.path_re.match(node.path))

    def _priority(self, extension):
        # renderer-known extensions win, then the static extension ranking
        renderer_rank = 0 if extension in self._renderer_extensions else 1
        extension_rank = self.default_priorities.get(
            extension, self.FALLBACK_PRIORITY)
        return (renderer_rank, extension_rank)

    def _sort_according_to_priority(self, matches):
        # stable order: priority tuple first, path as tie-breaker
        return sorted(matches, key=lambda match: (match.priority, match.path))

    def _sort_paths_according_to_priority(self, paths):
        return sorted(
            paths,
            key=lambda p: (self.path_priority.get(p, self.FALLBACK_PRIORITY), p))
1198 1205
1199 1206
class ReadmeMatch:
    """A readme candidate node together with its computed sort priority."""

    def __init__(self, node, match, priority):
        self.node = node
        self._match = match
        self.priority = priority

    @property
    def path(self):
        # path of the underlying file node
        return self.node.path

    def __repr__(self):
        # FIX: the closing ">" was missing from the original repr string
        return f'<ReadmeMatch {self.path} priority={self.priority}>'
@@ -1,889 +1,892 b''
1 1 # Copyright (C) 2011-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 """
21 21 repo group model for RhodeCode
22 22 """
23 23
24 24 import os
25 25 import datetime
26 26 import itertools
27 27 import logging
28 28 import shutil
29 29 import time
30 30 import traceback
31 31 import string
32 32
33 33
34 34 from rhodecode import events
35 35 from rhodecode.model import BaseModel
36 36 from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator,
37 37 Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
38 38 UserGroup, Repository)
39 39 from rhodecode.model.permission import PermissionModel
40 40 from rhodecode.model.settings import SettingsModel
41 41 from rhodecode.lib.caching_query import FromCache
42 42 from rhodecode.lib.utils2 import action_logger_generic
43 43
44 44 log = logging.getLogger(__name__)
45 45
46 46
class RepoGroupModel(BaseModel):
    """Database and filesystem operations for repository groups."""

    # model class this BaseModel subclass manages
    cls = RepoGroup
    # description template used when auto-creating personal repo groups
    PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
    # default name template for personal groups; overridable via settings
    PERSONAL_GROUP_PATTERN = '${username}'  # default
52 52
    def _get_user_group(self, users_group):
        # resolve instance / id / group-name input to a UserGroup instance
        return self._get_instance(UserGroup, users_group,
                                  callback=UserGroup.get_by_group_name)

    def _get_repo_group(self, repo_group):
        # resolve instance / id / group-name input to a RepoGroup instance
        return self._get_instance(RepoGroup, repo_group,
                                  callback=RepoGroup.get_by_group_name)

    def get_repo_group(self, repo_group):
        # public alias of _get_repo_group
        return self._get_repo_group(repo_group)
63 63
64 64 def get_by_group_name(self, repo_group_name, cache=None):
65 65 repo = self.sa.query(RepoGroup) \
66 66 .filter(RepoGroup.group_name == repo_group_name)
67 67
68 68 if cache:
69 69 name_key = _hash_key(repo_group_name)
70 70 repo = repo.options(
71 71 FromCache("sql_cache_short", f"get_repo_group_{name_key}"))
72 72 return repo.scalar()
73 73
74 74 def get_default_create_personal_repo_group(self):
75 75 value = SettingsModel().get_setting_by_name(
76 76 'create_personal_repo_group')
77 77 return value.app_settings_value if value else None or False
78 78
79 79 def get_personal_group_name_pattern(self):
80 80 value = SettingsModel().get_setting_by_name(
81 81 'personal_repo_group_pattern')
82 82 val = value.app_settings_value if value else None
83 83 group_template = val or self.PERSONAL_GROUP_PATTERN
84 84
85 85 group_template = group_template.lstrip('/')
86 86 return group_template
87 87
88 88 def get_personal_group_name(self, user):
89 89 template = self.get_personal_group_name_pattern()
90 90 return string.Template(template).safe_substitute(
91 91 username=user.username,
92 92 user_id=user.user_id,
93 93 first_name=user.first_name,
94 94 last_name=user.last_name,
95 95 )
96 96
    def create_personal_repo_group(self, user, commit_early=True):
        """
        Create the personal repository group for ``user``.

        The group name is rendered from the configured personal-group
        template; the user becomes the owner and the group is flagged
        as personal.

        :param user: User instance the group belongs to
        :param commit_early: forwarded to :meth:`create`
        """
        desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
        personal_repo_group_name = self.get_personal_group_name(user)

        # create a new one
        RepoGroupModel().create(
            group_name=personal_repo_group_name,
            group_description=desc,
            owner=user.username,
            personal=True,
            commit_early=commit_early)
108 108
    def _create_default_perms(self, new_group):
        """Build (but do not persist) the default-user permission row for ``new_group``."""
        # create default permission
        default_perm = 'group.read'
        def_user = User.get_default_user()
        # the default user may carry a customized group permission; prefer the
        # first `group.*` permission found over the hardcoded default
        for p in def_user.user_perms:
            if p.permission.permission_name.startswith('group.'):
                default_perm = p.permission.permission_name
                break

        repo_group_to_perm = UserRepoGroupToPerm()
        repo_group_to_perm.permission = Permission.get_by_key(default_perm)

        repo_group_to_perm.group = new_group
        repo_group_to_perm.user = def_user
        # NOTE: returned unsaved — callers add it to the session themselves
        return repo_group_to_perm
124 124
125 125 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
126 126 get_object=False):
127 127 """
128 128 Get's the group name and a parent group name from given group name.
129 129 If repo_in_path is set to truth, we asume the full path also includes
130 130 repo name, in such case we clean the last element.
131 131
132 132 :param group_name_full:
133 133 """
134 134 split_paths = 1
135 135 if repo_in_path:
136 136 split_paths = 2
137 137 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
138 138
139 139 if repo_in_path and len(_parts) > 1:
140 140 # such case last element is the repo_name
141 141 _parts.pop(-1)
142 142 group_name_cleaned = _parts[-1] # just the group name
143 143 parent_repo_group_name = None
144 144
145 145 if len(_parts) > 1:
146 146 parent_repo_group_name = _parts[0]
147 147
148 148 parent_group = None
149 149 if parent_repo_group_name:
150 150 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
151 151
152 152 if get_object:
153 153 return group_name_cleaned, parent_repo_group_name, parent_group
154 154
155 155 return group_name_cleaned, parent_repo_group_name
156 156
157 157 def check_exist_filesystem(self, group_name, exc_on_failure=True):
158 158 create_path = os.path.join(self.repos_path, group_name)
159 log.debug('creating new group in %s', create_path)
159 log.debug('checking FS presence for repo group in %s', create_path)
160 160
161 161 if os.path.isdir(create_path):
162 162 if exc_on_failure:
163 163 abs_create_path = os.path.abspath(create_path)
164 164 raise Exception(f'Directory `{abs_create_path}` already exists !')
165 165 return False
166 166 return True
167 167
168 168 def _create_group(self, group_name):
169 169 """
170 170 makes repository group on filesystem
171 171
172 172 :param repo_name:
173 173 :param parent_id:
174 174 """
175 175
176 176 self.check_exist_filesystem(group_name)
177 177 create_path = os.path.join(self.repos_path, group_name)
178 178 log.debug('creating new group in %s', create_path)
179 179 os.makedirs(create_path, mode=0o755)
180 180 log.debug('created group in %s', create_path)
181 181
182 182 def _rename_group(self, old, new):
183 183 """
184 184 Renames a group on filesystem
185 185
186 186 :param group_name:
187 187 """
188 188
189 189 if old == new:
190 190 log.debug('skipping group rename')
191 191 return
192 192
193 193 log.debug('renaming repository group from %s to %s', old, new)
194 194
195 195 old_path = os.path.join(self.repos_path, old)
196 196 new_path = os.path.join(self.repos_path, new)
197 197
198 198 log.debug('renaming repos paths from %s to %s', old_path, new_path)
199 199
200 200 if os.path.isdir(new_path):
201 201 raise Exception('Was trying to rename to already '
202 202 'existing dir %s' % new_path)
203 203 shutil.move(old_path, new_path)
204 204
    def _delete_filesystem_group(self, group, force_delete=False):
        """
        Deletes a group from a filesystem

        :param group: instance of group from database
        :param force_delete: use shutil rmtree to remove all objects
        """
        # full_path uses the URL separator; translate to OS path separators
        paths = group.full_path.split(RepoGroup.url_sep())
        paths = os.sep.join(paths)

        rm_path = os.path.join(self.repos_path, paths)
        log.info("Removing group %s", rm_path)
        # delete only if that path really exists
        if os.path.isdir(rm_path):
            if force_delete:
                shutil.rmtree(rm_path)
            else:
                # archive that group`
                # soft-delete: rename to a timestamped `rm__...` directory so a
                # later cleanup job (or an admin) can remove or restore it
                _now = datetime.datetime.now()
                _ms = str(_now.microsecond).rjust(6, '0')
                _d = 'rm__{}_GROUP_{}'.format(
                    _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
                shutil.move(rm_path, os.path.join(self.repos_path, _d))
228 228
229 229 def create(self, group_name, group_description, owner, just_db=False,
230 230 copy_permissions=False, personal=None, commit_early=True):
231 231
232 232 (group_name_cleaned,
233 233 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)
234 234
235 235 parent_group = None
236 236 if parent_group_name:
237 237 parent_group = self._get_repo_group(parent_group_name)
238 238 if not parent_group:
239 239 # we tried to create a nested group, but the parent is not
240 240 # existing
241 241 raise ValueError(
242 242 'Parent group `%s` given in `%s` group name '
243 243 'is not yet existing.' % (parent_group_name, group_name))
244 244
245 245 # because we are doing a cleanup, we need to check if such directory
246 246 # already exists. If we don't do that we can accidentally delete
247 247 # existing directory via cleanup that can cause data issues, since
248 248 # delete does a folder rename to special syntax later cleanup
249 249 # functions can delete this
250 250 cleanup_group = self.check_exist_filesystem(group_name,
251 251 exc_on_failure=False)
252 252 user = self._get_user(owner)
253 253 if not user:
254 254 raise ValueError('Owner %s not found as rhodecode user', owner)
255 255
256 256 try:
257 257 new_repo_group = RepoGroup()
258 258 new_repo_group.user = user
259 259 new_repo_group.group_description = group_description or group_name
260 260 new_repo_group.parent_group = parent_group
261 261 new_repo_group.group_name = group_name
262 262 new_repo_group.personal = personal
263 263
264 264 self.sa.add(new_repo_group)
265 265
266 266 # create an ADMIN permission for owner except if we're super admin,
267 267 # later owner should go into the owner field of groups
268 268 if not user.is_admin:
269 269 self.grant_user_permission(repo_group=new_repo_group,
270 270 user=owner, perm='group.admin')
271 271
272 272 if parent_group and copy_permissions:
273 273 # copy permissions from parent
274 274 user_perms = UserRepoGroupToPerm.query() \
275 275 .filter(UserRepoGroupToPerm.group == parent_group).all()
276 276
277 277 group_perms = UserGroupRepoGroupToPerm.query() \
278 278 .filter(UserGroupRepoGroupToPerm.group == parent_group).all()
279 279
280 280 for perm in user_perms:
281 281 # don't copy over the permission for user who is creating
282 282 # this group, if he is not super admin he get's admin
283 283 # permission set above
284 284 if perm.user != user or user.is_admin:
285 285 UserRepoGroupToPerm.create(
286 286 perm.user, new_repo_group, perm.permission)
287 287
288 288 for perm in group_perms:
289 289 UserGroupRepoGroupToPerm.create(
290 290 perm.users_group, new_repo_group, perm.permission)
291 291 else:
292 292 perm_obj = self._create_default_perms(new_repo_group)
293 293 self.sa.add(perm_obj)
294 294
295 295 # now commit the changes, earlier so we are sure everything is in
296 296 # the database.
297 297 if commit_early:
298 298 self.sa.commit()
299 299 if not just_db:
300 300 self._create_group(new_repo_group.group_name)
301 301
302 302 # trigger the post hook
303 303 from rhodecode.lib import hooks_base
304 304 repo_group = RepoGroup.get_by_group_name(group_name)
305 305
306 306 # update repo group commit caches initially
307 307 repo_group.update_commit_cache()
308 308
309 309 hooks_base.create_repository_group(
310 310 created_by=user.username, **repo_group.get_dict())
311 311
312 312 # Trigger create event.
313 313 events.trigger(events.RepoGroupCreateEvent(repo_group))
314 314
315 315 return new_repo_group
316 316 except Exception:
317 317 self.sa.rollback()
318 318 log.exception('Exception occurred when creating repository group, '
319 319 'doing cleanup...')
320 320 # rollback things manually !
321 321 repo_group = RepoGroup.get_by_group_name(group_name)
322 322 if repo_group:
323 323 RepoGroup.delete(repo_group.group_id)
324 324 self.sa.commit()
325 325 if cleanup_group:
326 326 RepoGroupModel()._delete_filesystem_group(repo_group)
327 327 raise
328 328
    def update_permissions(
            self, repo_group, perm_additions=None, perm_updates=None,
            perm_deletions=None, recursive=None, check_perms=True,
            cur_user=None):
        """
        Apply permission additions, updates and deletions on a repo group,
        optionally recursing into child groups and/or repositories.

        :param repo_group: target RepoGroup
        :param perm_additions: iterable of (member_id, perm, member_type)
        :param perm_updates: iterable of (member_id, perm, member_type)
        :param perm_deletions: iterable of (member_id, perm, member_type)
            where member_type is 'user' or 'user_group'
        :param recursive: 'none' (default), 'repos', 'groups' or 'all'
        :param check_perms: verify ``cur_user`` may alter the touched
            user groups
        :returns: dict with 'added'/'updated'/'deleted' change records and
            'default_user_changed' flag
        :raises ValueError: on an unknown member_type
        """
        from rhodecode.model.repo import RepoModel
        from rhodecode.lib.auth import HasUserGroupPermissionAny

        if not perm_additions:
            perm_additions = []
        if not perm_updates:
            perm_updates = []
        if not perm_deletions:
            perm_deletions = []

        req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

        changes = {
            'added': [],
            'updated': [],
            'deleted': [],
            'default_user_changed': None
        }

        def _set_perm_user(_obj: RepoGroup | Repository, _user_obj: User, _perm):

            if isinstance(_obj, RepoGroup):
                self.grant_user_permission(repo_group=_obj, user=_user_obj, perm=_perm)
            elif isinstance(_obj, Repository):
                # private repos will not allow to change the default
                # permissions using recursive mode
                if _obj.private and _user_obj.username == User.DEFAULT_USER:
                    log.debug('Skipping private repo %s for user %s', _obj, _user_obj)
                    return

                # we set group permission, we have to switch to repo permission definition
                new_perm = _perm.replace('group.', 'repository.')
                RepoModel().grant_user_permission(repo=_obj, user=_user_obj, perm=new_perm)

        def _set_perm_group(_obj: RepoGroup | Repository, users_group: UserGroup, _perm):
            if isinstance(_obj, RepoGroup):
                self.grant_user_group_permission(repo_group=_obj, group_name=users_group, perm=_perm)
            elif isinstance(_obj, Repository):
                # we set group permission, we have to switch to repo permission definition
                new_perm = _perm.replace('group.', 'repository.')
                RepoModel().grant_user_group_permission(repo=_obj, group_name=users_group, perm=new_perm)

        def _revoke_perm_user(_obj: RepoGroup | Repository, _user_obj: User):
            if isinstance(_obj, RepoGroup):
                self.revoke_user_permission(repo_group=_obj, user=_user_obj)
            elif isinstance(_obj, Repository):
                # private repos will not allow to change the default
                # permissions using recursive mode, also there's no revocation fo default user, just update
                if _user_obj.username == User.DEFAULT_USER:
                    log.debug('Skipping private repo %s for user %s', _obj, _user_obj)
                    return
                RepoModel().revoke_user_permission(repo=_obj, user=_user_obj)

        def _revoke_perm_group(_obj: RepoGroup | Repository, user_group: UserGroup):
            if isinstance(_obj, RepoGroup):
                self.revoke_user_group_permission(repo_group=_obj, group_name=user_group)
            elif isinstance(_obj, Repository):
                RepoModel().revoke_user_group_permission(repo=_obj, group_name=user_group)

        # start updates
        log.debug('Now updating permissions for %s in recursive mode:%s',
                  repo_group, recursive)

        # initialize check function, we'll call that multiple times
        has_group_perm = HasUserGroupPermissionAny(*req_perms)

        for obj in repo_group.recursive_groups_and_repos():
            # iterated obj is an instance of a repos group or repository in
            # that group, recursive option can be: none, repos, groups, all
            if recursive == 'all':
                obj = obj
            elif recursive == 'repos':
                # skip groups, other than this one
                if isinstance(obj, RepoGroup) and not obj == repo_group:
                    continue
            elif recursive == 'groups':
                # skip repos
                if isinstance(obj, Repository):
                    continue
            else:  # recursive == 'none':
                # DEFAULT option - don't apply to iterated objects
                # also we do a break at the end of this loop. if we are not
                # in recursive mode
                obj = repo_group

            change_obj = obj.get_api_data()

            # update permissions
            for member_id, perm, member_type in perm_updates:
                member_id = int(member_id)
                if member_type == 'user':
                    member_obj = User.get(member_id)
                    member_name = member_obj.username
                    if isinstance(obj, RepoGroup) and obj == repo_group and member_name == User.DEFAULT_USER:
                        # NOTE(dan): detect if we changed permissions for default user
                        perm_obj = self.sa.query(UserRepoGroupToPerm) \
                            .filter(UserRepoGroupToPerm.user_id == member_id) \
                            .filter(UserRepoGroupToPerm.group == repo_group) \
                            .scalar()
                        if perm_obj and perm_obj.permission.permission_name != perm:
                            changes['default_user_changed'] = True

                    # this updates also current one if found
                    _set_perm_user(obj, member_obj, perm)
                elif member_type == 'user_group':
                    member_obj = UserGroup.get(member_id)
                    member_name = member_obj.users_group_name
                    if not check_perms or has_group_perm(member_name, user=cur_user):
                        _set_perm_group(obj, member_obj, perm)
                else:
                    raise ValueError(
                        f"member_type must be 'user' or 'user_group' got {member_type} instead"
                    )

                changes['updated'].append(
                    {'change_obj': change_obj, 'type': member_type,
                     'id': member_id, 'name': member_name, 'new_perm': perm})

            # set new permissions
            for member_id, perm, member_type in perm_additions:
                member_id = int(member_id)
                if member_type == 'user':
                    member_obj = User.get(member_id)
                    member_name = member_obj.username
                    _set_perm_user(obj, member_obj, perm)
                elif member_type == 'user_group':
                    # check if we have permissions to alter this usergroup
                    member_obj = UserGroup.get(member_id)
                    member_name = member_obj.users_group_name
                    if not check_perms or has_group_perm(member_name, user=cur_user):
                        _set_perm_group(obj, member_obj, perm)
                else:
                    raise ValueError(
                        f"member_type must be 'user' or 'user_group' got {member_type} instead"
                    )

                changes['added'].append(
                    {'change_obj': change_obj, 'type': member_type,
                     'id': member_id, 'name': member_name, 'new_perm': perm})

            # delete permissions
            for member_id, perm, member_type in perm_deletions:
                member_id = int(member_id)
                if member_type == 'user':
                    member_obj = User.get(member_id)
                    member_name = member_obj.username
                    _revoke_perm_user(obj, member_obj)
                elif member_type == 'user_group':
                    # check if we have permissions to alter this usergroup
                    member_obj = UserGroup.get(member_id)
                    member_name = member_obj.users_group_name
                    if not check_perms or has_group_perm(member_name, user=cur_user):
                        _revoke_perm_group(obj, member_obj)
                else:
                    raise ValueError(
                        f"member_type must be 'user' or 'user_group' got {member_type} instead"
                    )
                changes['deleted'].append(
                    {'change_obj': change_obj, 'type': member_type,
                     'id': member_id, 'name': member_name, 'new_perm': perm})

            # if it's not recursive call for all,repos,groups
            # break the loop and don't proceed with other changes
            if recursive not in ['all', 'repos', 'groups']:
                break

        return changes
500 500
    def update(self, repo_group, form_data):
        """
        Update repo group properties from ``form_data`` and propagate
        renames and locking state to all nested groups and repositories.

        :param repo_group: RepoGroup instance, id or group name
        :param form_data: dict that may contain 'group_description',
            'enable_locking', 'group_parent_id', 'group_name' and 'user'
        :returns: the updated RepoGroup
        """
        try:
            repo_group = self._get_repo_group(repo_group)
            old_path = repo_group.full_path

            # change properties
            if 'group_description' in form_data:
                repo_group.group_description = form_data['group_description']

            if 'enable_locking' in form_data:
                repo_group.enable_locking = form_data['enable_locking']

            if 'group_parent_id' in form_data:
                parent_group = (
                    self._get_repo_group(form_data['group_parent_id']))
                repo_group.group_parent_id = (
                    parent_group.group_id if parent_group else None)
                repo_group.parent_group = parent_group

            # mikhail: to update the full_path, we have to explicitly
            # update group_name
            group_name = form_data.get('group_name', repo_group.name)
            repo_group.group_name = repo_group.get_new_name(group_name)

            new_path = repo_group.full_path

            affected_user_ids = []
            if 'user' in form_data:
                old_owner_id = repo_group.user.user_id
                new_owner = User.get_by_username(form_data['user'])
                repo_group.user = new_owner

                if old_owner_id != new_owner.user_id:
                    # permission caches of both owners need flushing later
                    affected_user_ids = [new_owner.user_id, old_owner_id]

            self.sa.add(repo_group)

            # iterate over all members of this groups and do fixes
            # set locking if given
            # if obj is a repoGroup also fix the name of the group according
            # to the parent
            # if obj is a Repo fix it's name
            # this can be potentially heavy operation
            for obj in repo_group.recursive_groups_and_repos():
                # set the value from it's parent
                obj.enable_locking = repo_group.enable_locking
                if isinstance(obj, RepoGroup):
                    new_name = obj.get_new_name(obj.name)
                    log.debug('Fixing group %s to new name %s',
                              obj.group_name, new_name)
                    obj.group_name = new_name

                elif isinstance(obj, Repository):
                    # we need to get all repositories from this new group and
                    # rename them accordingly to new group path
                    new_name = obj.get_new_name(obj.just_name)
                    log.debug('Fixing repo %s to new name %s',
                              obj.repo_name, new_name)
                    obj.repo_name = new_name

                self.sa.add(obj)

            self._rename_group(old_path, new_path)

            # Trigger update event.
            events.trigger(events.RepoGroupUpdateEvent(repo_group))

            if affected_user_ids:
                PermissionModel().trigger_permission_flush(affected_user_ids)

            return repo_group
        except Exception:
            log.error(traceback.format_exc())
            raise
575 575
    def delete(self, repo_group, force_delete=False, fs_remove=True, call_events=True):
        """
        Delete a repository group from the database and optionally from disk.

        :param repo_group: RepoGroup instance, id or group name
        :param force_delete: hard-remove the directory instead of archiving it
        :param fs_remove: when False, keep the directory on the filesystem
        :param call_events: when False, skip triggering RepoGroupDeleteEvent
            (used by bulk cleanup operations)
        :returns: True on success, False when the group does not exist
        """
        repo_group = self._get_repo_group(repo_group)
        if not repo_group:
            return False
        # capture the name before the DB row is deleted, for error reporting
        repo_group_name = repo_group.group_name
        try:
            self.sa.delete(repo_group)
            if fs_remove:
                self._delete_filesystem_group(repo_group, force_delete)
            else:
                log.debug('skipping removal from filesystem')

            # Trigger delete event.
            if call_events:
                events.trigger(events.RepoGroupDeleteEvent(repo_group))

        except Exception:
            log.error('Error removing repo_group %s', repo_group_name)
            raise

        return True
597
595 598 def grant_user_permission(self, repo_group, user, perm):
596 599 """
597 600 Grant permission for user on given repository group, or update
598 601 existing one if found
599 602
600 603 :param repo_group: Instance of RepoGroup, repositories_group_id,
601 604 or repositories_group name
602 605 :param user: Instance of User, user_id or username
603 606 :param perm: Instance of Permission, or permission_name
604 607 """
605 608
606 609 repo_group = self._get_repo_group(repo_group)
607 610 user = self._get_user(user)
608 611 permission = self._get_perm(perm)
609 612
610 613 # check if we have that permission already
611 614 obj = self.sa.query(UserRepoGroupToPerm)\
612 615 .filter(UserRepoGroupToPerm.user == user)\
613 616 .filter(UserRepoGroupToPerm.group == repo_group)\
614 617 .scalar()
615 618 if obj is None:
616 619 # create new !
617 620 obj = UserRepoGroupToPerm()
618 621 obj.group = repo_group
619 622 obj.user = user
620 623 obj.permission = permission
621 624 self.sa.add(obj)
622 625 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
623 626 action_logger_generic(
624 627 'granted permission: {} to user: {} on repogroup: {}'.format(
625 628 perm, user, repo_group), namespace='security.repogroup')
626 629 return obj
627 630
628 631 def revoke_user_permission(self, repo_group, user):
629 632 """
630 633 Revoke permission for user on given repository group
631 634
632 635 :param repo_group: Instance of RepoGroup, repositories_group_id,
633 636 or repositories_group name
634 637 :param user: Instance of User, user_id or username
635 638 """
636 639
637 640 repo_group = self._get_repo_group(repo_group)
638 641 user = self._get_user(user)
639 642
640 643 obj = self.sa.query(UserRepoGroupToPerm)\
641 644 .filter(UserRepoGroupToPerm.user == user)\
642 645 .filter(UserRepoGroupToPerm.group == repo_group)\
643 646 .scalar()
644 647 if obj:
645 648 self.sa.delete(obj)
646 649 log.debug('Revoked perm on %s on %s', repo_group, user)
647 650 action_logger_generic(
648 651 'revoked permission from user: {} on repogroup: {}'.format(
649 652 user, repo_group), namespace='security.repogroup')
650 653
651 654 def grant_user_group_permission(self, repo_group, group_name, perm):
652 655 """
653 656 Grant permission for user group on given repository group, or update
654 657 existing one if found
655 658
656 659 :param repo_group: Instance of RepoGroup, repositories_group_id,
657 660 or repositories_group name
658 661 :param group_name: Instance of UserGroup, users_group_id,
659 662 or user group name
660 663 :param perm: Instance of Permission, or permission_name
661 664 """
662 665 repo_group = self._get_repo_group(repo_group)
663 666 group_name = self._get_user_group(group_name)
664 667 permission = self._get_perm(perm)
665 668
666 669 # check if we have that permission already
667 670 obj = self.sa.query(UserGroupRepoGroupToPerm)\
668 671 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
669 672 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
670 673 .scalar()
671 674
672 675 if obj is None:
673 676 # create new
674 677 obj = UserGroupRepoGroupToPerm()
675 678
676 679 obj.group = repo_group
677 680 obj.users_group = group_name
678 681 obj.permission = permission
679 682 self.sa.add(obj)
680 683 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
681 684 action_logger_generic(
682 685 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
683 686 perm, group_name, repo_group), namespace='security.repogroup')
684 687 return obj
685 688
686 689 def revoke_user_group_permission(self, repo_group, group_name):
687 690 """
688 691 Revoke permission for user group on given repository group
689 692
690 693 :param repo_group: Instance of RepoGroup, repositories_group_id,
691 694 or repositories_group name
692 695 :param group_name: Instance of UserGroup, users_group_id,
693 696 or user group name
694 697 """
695 698 repo_group = self._get_repo_group(repo_group)
696 699 group_name = self._get_user_group(group_name)
697 700
698 701 obj = self.sa.query(UserGroupRepoGroupToPerm)\
699 702 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
700 703 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
701 704 .scalar()
702 705 if obj:
703 706 self.sa.delete(obj)
704 707 log.debug('Revoked perm to %s on %s', repo_group, group_name)
705 708 action_logger_generic(
706 709 'revoked permission from usergroup: {} on repogroup: {}'.format(
707 710 group_name, repo_group), namespace='security.repogroup')
708 711
709 712 @classmethod
710 713 def update_commit_cache(cls, repo_groups=None):
711 714 if not repo_groups:
712 715 repo_groups = RepoGroup.getAll()
713 716 for repo_group in repo_groups:
714 717 repo_group.update_commit_cache()
715 718
    def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
                                super_user_actions=False):
        """
        Render the given repo groups into row dicts for the datagrid
        templates.

        :param repo_group_list: iterable of RepoGroup rows (see
            :meth:`get_repo_groups_data_table` for the query shape)
        :param admin: include admin-only columns (actions, repo counts,
            breadcrumb names)
        :param super_user_actions: NOTE(review): unused in this method —
            TODO confirm whether callers still need it
        """
        from pyramid.threadlocal import get_current_request
        _render = get_current_request().get_partial_renderer(
            'rhodecode:templates/data_table/_dt_elements.mako')
        c = _render.get_call_context()
        h = _render.get_helpers()

        def quick_menu(repo_group_name):
            # quick-access dropdown cell
            return _render('quick_repo_group_menu', repo_group_name)

        def repo_group_lnk(repo_group_name):
            # plain link cell to the group
            return _render('repo_group_name', repo_group_name)

        def last_change(last_change):
            # naive datetimes are shifted by the local UTC offset so the
            # template renders a consistent timestamp
            if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
                ts = time.time()
                utc_offset = (datetime.datetime.fromtimestamp(ts)
                              - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
                last_change = last_change + datetime.timedelta(seconds=utc_offset)
            return _render("last_change", last_change)

        def desc(desc, personal):
            return _render(
                'repo_group_desc', desc, personal, c.visual.stylify_metatags)

        def repo_group_actions(repo_group_id, repo_group_name, gr_count):
            return _render(
                'repo_group_actions', repo_group_id, repo_group_name, gr_count)

        def repo_group_name(repo_group_name, children_groups):
            return _render("repo_group_name", repo_group_name, children_groups)

        def user_profile(username):
            return _render('user_profile', username)

        repo_group_data = []
        for group in repo_group_list:
            # NOTE(marcink): because we use only raw column we need to load it like that
            changeset_cache = RepoGroup._load_changeset_cache(
                '', group._changeset_cache)
            last_commit_change = RepoGroup._load_commit_change(changeset_cache)
            row = {
                "menu": quick_menu(group.group_name),
                "name": repo_group_lnk(group.group_name),
                "name_raw": group.group_name,

                "last_change": last_change(last_commit_change),

                "last_changeset": "",
                "last_changeset_raw": "",

                "desc": desc(h.escape(group.group_description), group.personal),
                "top_level_repos": 0,
                "owner": user_profile(group.User.username)
            }
            if admin:
                repo_count = group.repositories.count()
                # breadcrumb: parent group names plus this group's own name
                children_groups = list(map(
                    h.safe_str,
                    itertools.chain((g.name for g in group.parents),
                                    (x.name for x in [group]))))
                row.update({
                    "action": repo_group_actions(
                        group.group_id, group.group_name, repo_count),
                    "top_level_repos": repo_count,
                    "name": repo_group_name(group.group_name, children_groups),

                })
            repo_group_data.append(row)

        return repo_group_data
789 792
    def get_repo_groups_data_table(
            self, draw, start, limit,
            search_q, order_by, order_dir,
            auth_user, repo_group_id):
        """
        Build the server-side datatable payload of the child repo groups of
        ``repo_group_id`` that ``auth_user`` is permitted to read.

        :param draw: datatable draw counter, echoed back to the client
        :param start: pagination offset
        :param limit: pagination page size
        :param search_q: NOTE(review): not applied in this method — TODO
            confirm whether filtering is expected here
        :param order_by: sort column name ('group_name', 'user_username' or
            a RepoGroup attribute)
        :param order_dir: 'asc' or 'desc'
        :param auth_user: user whose permissions filter the result
        :param repo_group_id: parent group id to list children of
        """
        from rhodecode.model.scm import RepoGroupList

        _perms = ['group.read', 'group.write', 'group.admin']
        repo_groups = RepoGroup.query() \
            .filter(RepoGroup.group_parent_id == repo_group_id) \
            .all()
        auth_repo_group_list = RepoGroupList(
            repo_groups, perm_set=_perms,
            extra_kwargs=dict(user=auth_user))

        # -1 keeps the IN() filters valid even when nothing is allowed
        allowed_ids = [-1]
        for repo_group in auth_repo_group_list:
            allowed_ids.append(repo_group.group_id)

        repo_groups_data_total_count = RepoGroup.query() \
            .filter(RepoGroup.group_parent_id == repo_group_id) \
            .filter(or_(
                # generate multiple IN to fix limitation problems
                *in_filter_generator(RepoGroup.group_id, allowed_ids))
            ) \
            .count()

        base_q = Session.query(
            RepoGroup.group_name,
            RepoGroup.group_name_hash,
            RepoGroup.group_description,
            RepoGroup.group_id,
            RepoGroup.personal,
            RepoGroup.updated_on,
            RepoGroup._changeset_cache,
            User,
            ) \
            .filter(RepoGroup.group_parent_id == repo_group_id) \
            .filter(or_(
                # generate multiple IN to fix limitation problems
                *in_filter_generator(RepoGroup.group_id, allowed_ids))
            ) \
            .join(User, User.user_id == RepoGroup.user_id) \
            .group_by(RepoGroup, User)

        repo_groups_data_total_filtered_count = base_q.count()

        sort_defined = False

        if order_by == 'group_name':
            sort_col = func.lower(RepoGroup.group_name)
            sort_defined = True
        elif order_by == 'user_username':
            sort_col = User.username
        else:
            # fall back to a raw RepoGroup column; may be None for unknown names
            sort_col = getattr(RepoGroup, order_by, None)

        if sort_defined or sort_col:
            if order_dir == 'asc':
                sort_col = sort_col.asc()
            else:
                sort_col = sort_col.desc()

            base_q = base_q.order_by(sort_col)
        base_q = base_q.offset(start).limit(limit)

        repo_group_list = base_q.all()

        repo_groups_data = RepoGroupModel().get_repo_groups_as_dict(
            repo_group_list=repo_group_list, admin=False)

        data = ({
            'draw': draw,
            'data': repo_groups_data,
            'recordsTotal': repo_groups_data_total_count,
            'recordsFiltered': repo_groups_data_total_filtered_count,
        })
        return data
867 870
868 871 def _get_defaults(self, repo_group_name):
869 872 repo_group = RepoGroup.get_by_group_name(repo_group_name)
870 873
871 874 if repo_group is None:
872 875 return None
873 876
874 877 defaults = repo_group.get_dict()
875 878 defaults['repo_group_name'] = repo_group.name
876 879 defaults['repo_group_description'] = repo_group.group_description
877 880 defaults['repo_group_enable_locking'] = repo_group.enable_locking
878 881
879 882 # we use -1 as this is how in HTML, we mark an empty group
880 883 defaults['repo_group'] = defaults['group_parent_id'] or -1
881 884
882 885 # fill owner
883 886 if repo_group.user:
884 887 defaults.update({'user': repo_group.user.username})
885 888 else:
886 889 replacement_user = User.get_first_super_admin().username
887 890 defaults.update({'user': replacement_user})
888 891
889 892 return defaults
@@ -1,422 +1,422 b''
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18 import io
19 19 import shlex
20 20
21 21 import math
22 22 import re
23 23 import os
24 24 import datetime
25 25 import logging
26 26 import queue
27 27 import subprocess
28 28
29 29
30 30 from dateutil.parser import parse
31 31 from pyramid.interfaces import IRoutesMapper
32 32 from pyramid.settings import asbool
33 33 from pyramid.path import AssetResolver
34 34 from threading import Thread
35 35
36 36 from rhodecode.config.jsroutes import generate_jsroutes_content
37 37 from rhodecode.lib.base import get_auth_user
38 38 from rhodecode.lib.celerylib.loader import set_celery_conf
39 39
40 40 import rhodecode
41 41
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
46 46 def add_renderer_globals(event):
47 47 from rhodecode.lib import helpers
48 48
49 49 # TODO: When executed in pyramid view context the request is not available
50 50 # in the event. Find a better solution to get the request.
51 51 from pyramid.threadlocal import get_current_request
52 52 request = event['request'] or get_current_request()
53 53
54 54 # Add Pyramid translation as '_' to context
55 55 event['_'] = request.translate
56 56 event['_ungettext'] = request.plularize
57 57 event['h'] = helpers
58 58
59 59
60 60 def set_user_lang(event):
61 61 request = event.request
62 62 cur_user = getattr(request, 'user', None)
63 63
64 64 if cur_user:
65 65 user_lang = cur_user.get_instance().user_data.get('language')
66 66 if user_lang:
67 67 log.debug('lang: setting current user:%s language to: %s', cur_user, user_lang)
68 68 event.request._LOCALE_ = user_lang
69 69
70 70
71 71 def update_celery_conf(event):
72 72 log.debug('Setting celery config from new request')
73 73 set_celery_conf(request=event.request, registry=event.request.registry)
74 74
75 75
76 76 def add_request_user_context(event):
77 77 """
78 78 Adds auth user into request context
79 79 """
80 80
81 81 request = event.request
82 82 # access req_id as soon as possible
83 83 req_id = request.req_id
84 84
85 85 if hasattr(request, 'vcs_call'):
86 86 # skip vcs calls
87 87 return
88 88
89 89 if hasattr(request, 'rpc_method'):
90 90 # skip api calls
91 91 return
92 92
93 93 auth_user, auth_token = get_auth_user(request)
94 94 request.user = auth_user
95 95 request.user_auth_token = auth_token
96 96 request.environ['rc_auth_user'] = auth_user
97 97 request.environ['rc_auth_user_id'] = str(auth_user.user_id)
98 98 request.environ['rc_req_id'] = req_id
99 99
100 100
101 101 def reset_log_bucket(event):
102 102 """
103 103 reset the log bucket on new request
104 104 """
105 105 request = event.request
106 106 request.req_id_records_init()
107 107
108 108
109 109 def scan_repositories_if_enabled(event):
110 110 """
111 111 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
112 112 does a repository scan if enabled in the settings.
113 113 """
114 114
115 115 settings = event.app.registry.settings
116 116 vcs_server_enabled = settings['vcs.server.enable']
117 117 import_on_startup = settings['startup.import_repos']
118 118
119 119 if vcs_server_enabled and import_on_startup:
120 120 from rhodecode.model.scm import ScmModel
121 121 from rhodecode.lib.utils import repo2db_mapper
122 122 scm = ScmModel()
123 123 repositories = scm.repo_scan(scm.repos_path)
124 repo2db_mapper(repositories, remove_obsolete=False)
124 repo2db_mapper(repositories)
125 125
126 126
127 127 def write_metadata_if_needed(event):
128 128 """
129 129 Writes upgrade metadata
130 130 """
131 131 import rhodecode
132 132 from rhodecode.lib import system_info
133 133 from rhodecode.lib import ext_json
134 134
135 135 fname = '.rcmetadata.json'
136 136 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
137 137 metadata_destination = os.path.join(ini_loc, fname)
138 138
139 139 def get_update_age():
140 140 now = datetime.datetime.utcnow()
141 141
142 142 with open(metadata_destination, 'rb') as f:
143 143 data = ext_json.json.loads(f.read())
144 144 if 'created_on' in data:
145 145 update_date = parse(data['created_on'])
146 146 diff = now - update_date
147 147 return diff.total_seconds() / 60.0
148 148
149 149 return 0
150 150
151 151 def write():
152 152 configuration = system_info.SysInfo(
153 153 system_info.rhodecode_config)()['value']
154 154 license_token = configuration['config']['license_token']
155 155
156 156 setup = dict(
157 157 workers=configuration['config']['server:main'].get(
158 158 'workers', '?'),
159 159 worker_type=configuration['config']['server:main'].get(
160 160 'worker_class', 'sync'),
161 161 )
162 162 dbinfo = system_info.SysInfo(system_info.database_info)()['value']
163 163 del dbinfo['url']
164 164
165 165 metadata = dict(
166 166 desc='upgrade metadata info',
167 167 license_token=license_token,
168 168 created_on=datetime.datetime.utcnow().isoformat(),
169 169 usage=system_info.SysInfo(system_info.usage_info)()['value'],
170 170 platform=system_info.SysInfo(system_info.platform_type)()['value'],
171 171 database=dbinfo,
172 172 cpu=system_info.SysInfo(system_info.cpu)()['value'],
173 173 memory=system_info.SysInfo(system_info.memory)()['value'],
174 174 setup=setup
175 175 )
176 176
177 177 with open(metadata_destination, 'wb') as f:
178 178 f.write(ext_json.json.dumps(metadata))
179 179
180 180 settings = event.app.registry.settings
181 181 if settings.get('metadata.skip'):
182 182 return
183 183
184 184 # only write this every 24h, workers restart caused unwanted delays
185 185 try:
186 186 age_in_min = get_update_age()
187 187 except Exception:
188 188 age_in_min = 0
189 189
190 190 if age_in_min > 60 * 60 * 24:
191 191 return
192 192
193 193 try:
194 194 write()
195 195 except Exception:
196 196 pass
197 197
198 198
199 199 def write_usage_data(event):
200 200 import rhodecode
201 201 from rhodecode.lib import system_info
202 202 from rhodecode.lib import ext_json
203 203
204 204 settings = event.app.registry.settings
205 205 instance_tag = settings.get('metadata.write_usage_tag')
206 206 if not settings.get('metadata.write_usage'):
207 207 return
208 208
209 209 def get_update_age(dest_file):
210 210 now = datetime.datetime.now(datetime.UTC)
211 211
212 212 with open(dest_file, 'rb') as f:
213 213 data = ext_json.json.loads(f.read())
214 214 if 'created_on' in data:
215 215 update_date = parse(data['created_on'])
216 216 diff = now - update_date
217 217 return math.ceil(diff.total_seconds() / 60.0)
218 218
219 219 return 0
220 220
221 221 utc_date = datetime.datetime.now(datetime.UTC)
222 222 hour_quarter = int(math.ceil((utc_date.hour + utc_date.minute/60.0) / 6.))
223 223 fname = f'.rc_usage_{utc_date.year}{utc_date.month:02d}{utc_date.day:02d}_{hour_quarter}.json'
224 224 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
225 225
226 226 usage_dir = os.path.join(ini_loc, '.rcusage')
227 227 if not os.path.isdir(usage_dir):
228 228 os.makedirs(usage_dir)
229 229 usage_metadata_destination = os.path.join(usage_dir, fname)
230 230
231 231 try:
232 232 age_in_min = get_update_age(usage_metadata_destination)
233 233 except Exception:
234 234 age_in_min = 0
235 235
236 236 # write every 6th hour
237 237 if age_in_min and age_in_min < 60 * 6:
238 238 log.debug('Usage file created %s minutes ago, skipping (threshold: %s minutes)...',
239 239 age_in_min, 60 * 6)
240 240 return
241 241
242 242 def write(dest_file):
243 243 configuration = system_info.SysInfo(system_info.rhodecode_config)()['value']
244 244 license_token = configuration['config']['license_token']
245 245
246 246 metadata = dict(
247 247 desc='Usage data',
248 248 instance_tag=instance_tag,
249 249 license_token=license_token,
250 250 created_on=datetime.datetime.utcnow().isoformat(),
251 251 usage=system_info.SysInfo(system_info.usage_info)()['value'],
252 252 )
253 253
254 254 with open(dest_file, 'wb') as f:
255 255 f.write(ext_json.formatted_json(metadata))
256 256
257 257 try:
258 258 log.debug('Writing usage file at: %s', usage_metadata_destination)
259 259 write(usage_metadata_destination)
260 260 except Exception:
261 261 pass
262 262
263 263
264 264 def write_js_routes_if_enabled(event):
265 265 registry = event.app.registry
266 266
267 267 mapper = registry.queryUtility(IRoutesMapper)
268 268 _argument_prog = re.compile(r'\{(.*?)\}|:\((.*)\)')
269 269
270 270 def _extract_route_information(route):
271 271 """
272 272 Convert a route into tuple(name, path, args), eg:
273 273 ('show_user', '/profile/%(username)s', ['username'])
274 274 """
275 275
276 276 route_path = route.pattern
277 277 pattern = route.pattern
278 278
279 279 def replace(matchobj):
280 280 if matchobj.group(1):
281 281 return "%%(%s)s" % matchobj.group(1).split(':')[0]
282 282 else:
283 283 return "%%(%s)s" % matchobj.group(2)
284 284
285 285 route_path = _argument_prog.sub(replace, route_path)
286 286
287 287 if not route_path.startswith('/'):
288 288 route_path = f'/{route_path}'
289 289
290 290 return (
291 291 route.name,
292 292 route_path,
293 293 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
294 294 for arg in _argument_prog.findall(pattern)]
295 295 )
296 296
297 297 def get_routes():
298 298 # pyramid routes
299 299 for route in mapper.get_routes():
300 300 if not route.name.startswith('__'):
301 301 yield _extract_route_information(route)
302 302
303 303 if asbool(registry.settings.get('generate_js_files', 'false')):
304 304 static_path = AssetResolver().resolve('rhodecode:public').abspath()
305 305 jsroutes = get_routes()
306 306 jsroutes_file_content = generate_jsroutes_content(jsroutes)
307 307 jsroutes_file_path = os.path.join(
308 308 static_path, 'js', 'rhodecode', 'routes.js')
309 309
310 310 try:
311 311 with open(jsroutes_file_path, 'w', encoding='utf-8') as f:
312 312 f.write(jsroutes_file_content)
313 313 log.debug('generated JS files in %s', jsroutes_file_path)
314 314 except Exception:
315 315 log.exception('Failed to write routes.js into %s', jsroutes_file_path)
316 316
317 317
318 318 def import_license_if_present(event):
319 319 """
320 320 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
 321 321 imports a license key based on the presence of the license file.
322 322 """
323 323 settings = event.app.registry.settings
324 324
325 325 rhodecode_edition_id = settings.get('rhodecode.edition_id')
326 326 license_file_path = settings.get('license.import_path')
327 327 force = settings.get('license.import_path_mode') == 'force'
328 328
329 329 if license_file_path and rhodecode_edition_id == 'EE':
330 330 log.debug('license.import_path= is set importing license from %s', license_file_path)
331 331 from rhodecode.model.meta import Session
332 332 from rhodecode.model.license import apply_license_from_file
333 333 try:
334 334 apply_license_from_file(license_file_path, force=force)
335 335 Session().commit()
336 336 except OSError:
337 337 log.exception('Failed to import license from %s, make sure this file exists', license_file_path)
338 338
339 339
340 340 class Subscriber(object):
341 341 """
342 342 Base class for subscribers to the pyramid event system.
343 343 """
344 344 def __call__(self, event):
345 345 self.run(event)
346 346
347 347 def run(self, event):
348 348 raise NotImplementedError('Subclass has to implement this.')
349 349
350 350
351 351 class AsyncSubscriber(Subscriber):
352 352 """
353 353 Subscriber that handles the execution of events in a separate task to not
354 354 block the execution of the code which triggers the event. It puts the
355 355 received events into a queue from which the worker process takes them in
356 356 order.
357 357 """
358 358 def __init__(self):
359 359 self._stop = False
360 360 self._eventq = queue.Queue()
361 361 self._worker = self.create_worker()
362 362 self._worker.start()
363 363
364 364 def __call__(self, event):
365 365 self._eventq.put(event)
366 366
367 367 def create_worker(self):
368 368 worker = Thread(target=self.do_work)
369 369 worker.daemon = True
370 370 return worker
371 371
372 372 def stop_worker(self):
373 373 self._stop = False
374 374 self._eventq.put(None)
375 375 self._worker.join()
376 376
377 377 def do_work(self):
378 378 while not self._stop:
379 379 event = self._eventq.get()
380 380 if event is not None:
381 381 self.run(event)
382 382
383 383
384 384 class AsyncSubprocessSubscriber(AsyncSubscriber):
385 385 """
386 386 Subscriber that uses the subprocess module to execute a command if an
387 387 event is received. Events are handled asynchronously::
388 388
389 389 subscriber = AsyncSubprocessSubscriber('ls -la', timeout=10)
390 390 subscriber(dummyEvent) # running __call__(event)
391 391
392 392 """
393 393
394 394 def __init__(self, cmd, timeout=None):
395 395 if not isinstance(cmd, (list, tuple)):
396 396 cmd = shlex.split(cmd)
397 397 super().__init__()
398 398 self._cmd = cmd
399 399 self._timeout = timeout
400 400
401 401 def run(self, event):
402 402 cmd = self._cmd
403 403 timeout = self._timeout
404 404 log.debug('Executing command %s.', cmd)
405 405
406 406 try:
407 407 output = subprocess.check_output(
408 408 cmd, timeout=timeout, stderr=subprocess.STDOUT)
409 409 log.debug('Command finished %s', cmd)
410 410 if output:
411 411 log.debug('Command output: %s', output)
412 412 except subprocess.TimeoutExpired as e:
413 413 log.exception('Timeout while executing command.')
414 414 if e.output:
415 415 log.error('Command output: %s', e.output)
416 416 except subprocess.CalledProcessError as e:
417 417 log.exception('Error while executing command.')
418 418 if e.output:
419 419 log.error('Command output: %s', e.output)
420 420 except Exception:
421 421 log.exception(
422 422 'Exception while executing command %s.', cmd)
@@ -1,33 +1,45 b''
1 ${h.secure_form(h.route_path('admin_settings_mapping_update'), request=request)}
1
2 2
3 3 <div class="panel panel-default">
4 4 <div class="panel-heading">
5 <h3 class="panel-title">${_('Import New Groups or Repositories')}</h3>
5 <h3 class="panel-title">${_('Import new repository groups and repositories')}</h3>
6 6 </div>
7 7 <div class="panel-body">
8
8 ${h.secure_form(h.route_path('admin_settings_mapping_create'), request=request)}
9 9 <p>
10 ${_('This function will scann all data under the current storage path location at')} <code>${c.storage_path}</code>
10 ${_('This function will scan all data under the current storage path location at')} <code>${c.storage_path}</code><br/>
 11 ${_('Each folder will be imported as a new repository group, and each repository found will also be imported to the root level or the corresponding repository group')}
11 12 </p>
12 13
13 14 <div class="checkbox">
14 ${h.checkbox('destroy',True)}
15 <label for="destroy">${_('Destroy old data')}</label>
16 </div>
17 <span class="help-block">${_('In case a repository or a group was deleted from the filesystem and it still exists in the database, check this option to remove obsolete data from the database.')}</span>
18
19 <div class="checkbox">
20 15 ${h.checkbox('invalidate',True)}
21 16 <label for="invalidate"> ${_('Invalidate cache for all repositories')}</label>
22 17 </div>
23 18 <span class="help-block">${_('Each cache data for repositories will be cleaned with this option selected. Use this to reload data and clear cache keys.')}</span>
24 19
25 20 <div class="buttons">
26 ${h.submit('rescan',_('Rescan Filesystem'),class_="btn")}
21 ${h.submit('rescan',_('Scan filesystem'),class_="btn")}
27 22 </div>
28
23 ${h.end_form()}
29 24 </div>
30 25 </div>
31 26
32 27
28 <div class="panel panel-default">
29 <div class="panel-heading">
 30 <h3 class="panel-title">${_('Cleanup removed repository groups and repositories')}</h3>
31 </div>
32 <div class="panel-body">
33 ${h.secure_form(h.route_path('admin_settings_mapping_cleanup'), request=request)}
34 <p>
35 ${_('This function will scan all data under the current storage path location at')} <code>${c.storage_path}</code>
36 ${_('Then it will remove all repository groups and repositories that are no longer present in the filesystem.')}
37 </p>
38
39 <div class="buttons">
40 ${h.submit('rescan',_('Cleanup filesystem'),class_="btn btn-danger")}
41 </div>
33 42 ${h.end_form()}
43 </div>
44 </div>
45
@@ -1,1695 +1,1697 b''
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import collections
20 20 import datetime
21 21 import os
22 22 import re
23 23 import pprint
24 24 import shutil
25 25 import socket
26 26 import subprocess
27 27 import time
28 28 import uuid
29 29 import dateutil.tz
30 30 import logging
31 31 import functools
32 32 import textwrap
33 33
34 34 import mock
35 35 import pyramid.testing
36 36 import pytest
37 37 import colander
38 38 import requests
39 39 import pyramid.paster
40 40
41 41 import rhodecode
42 42 import rhodecode.lib
43 43 from rhodecode.model.changeset_status import ChangesetStatusModel
44 44 from rhodecode.model.comment import CommentsModel
45 45 from rhodecode.model.db import (
46 46 PullRequest,
47 47 PullRequestReviewers,
48 48 Repository,
49 49 RhodeCodeSetting,
50 50 ChangesetStatus,
51 51 RepoGroup,
52 52 UserGroup,
53 53 RepoRhodeCodeUi,
54 54 RepoRhodeCodeSetting,
55 55 RhodeCodeUi,
56 56 )
57 57 from rhodecode.model.meta import Session
58 58 from rhodecode.model.pull_request import PullRequestModel
59 59 from rhodecode.model.repo import RepoModel
60 60 from rhodecode.model.repo_group import RepoGroupModel
61 61 from rhodecode.model.user import UserModel
62 62 from rhodecode.model.settings import VcsSettingsModel
63 63 from rhodecode.model.user_group import UserGroupModel
64 64 from rhodecode.model.integration import IntegrationModel
65 65 from rhodecode.integrations import integration_type_registry
66 66 from rhodecode.integrations.types.base import IntegrationTypeBase
67 67 from rhodecode.lib.utils import repo2db_mapper
68 68 from rhodecode.lib.str_utils import safe_bytes
69 69 from rhodecode.lib.hash_utils import sha1_safe
70 70 from rhodecode.lib.vcs.backends import get_backend
71 71 from rhodecode.lib.vcs.nodes import FileNode
72 72 from rhodecode.lib.base import bootstrap_config
73 73 from rhodecode.tests import (
74 74 login_user_session,
75 75 get_new_dir,
76 76 utils,
77 77 TESTS_TMP_PATH,
78 78 TEST_USER_ADMIN_LOGIN,
79 79 TEST_USER_REGULAR_LOGIN,
80 80 TEST_USER_REGULAR2_LOGIN,
81 81 TEST_USER_REGULAR_PASS,
82 82 console_printer,
83 83 )
84 84 from rhodecode.tests.utils import set_anonymous_access
85 85 from rhodecode.tests.fixtures.rc_fixture import Fixture
86 86 from rhodecode.config import utils as config_utils
87 87
88 88 log = logging.getLogger(__name__)
89 89
90 90
91 91 def cmp(a, b):
92 92 # backport cmp from python2 so we can still use it in the custom code in this module
93 93 return (a > b) - (a < b)
94 94
95 95
96 96 @pytest.fixture(scope="session")
97 97 def http_environ_session():
98 98 """
99 99 Allow to use "http_environ" in session scope.
100 100 """
101 101 return plain_http_environ()
102 102
103 103
104 104 def plain_http_host_stub():
105 105 """
106 106 Value of HTTP_HOST in the test run.
107 107 """
108 108 return "example.com:80"
109 109
110 110
111 111 def plain_config_stub(request, request_stub):
112 112 """
113 113 Set up pyramid.testing and return the Configurator.
114 114 """
115 115
116 116 config = bootstrap_config(request=request_stub)
117 117
118 118 @request.addfinalizer
119 119 def cleanup():
120 120 pyramid.testing.tearDown()
121 121
122 122 return config
123 123
124 124
125 125 def plain_request_stub():
126 126 """
127 127 Stub request object.
128 128 """
129 129 from rhodecode.lib.base import bootstrap_request
130 130
131 131 _request = bootstrap_request(scheme="https")
132 132 return _request
133 133
134 134
135 135 @pytest.fixture()
136 136 def http_host_stub():
137 137 """
138 138 Value of HTTP_HOST in the test run.
139 139 """
140 140 return plain_http_host_stub()
141 141
142 142
143 143 def plain_http_host_only_stub():
144 144 """
145 145 Value of HTTP_HOST in the test run.
146 146 """
147 147 return plain_http_host_stub().split(":")[0]
148 148
149 149
150 150 @pytest.fixture()
151 151 def http_host_only_stub():
152 152 """
153 153 Value of HTTP_HOST in the test run.
154 154 """
155 155 return plain_http_host_only_stub()
156 156
157 157
158 158 def plain_http_environ():
159 159 """
160 160 HTTP extra environ keys.
161 161
162 162 Used by the test application and as well for setting up the pylons
163 163 environment. In the case of the fixture "app" it should be possible
164 164 to override this for a specific test case.
165 165 """
166 166 return {
167 167 "SERVER_NAME": plain_http_host_only_stub(),
168 168 "SERVER_PORT": plain_http_host_stub().split(":")[1],
169 169 "HTTP_HOST": plain_http_host_stub(),
170 170 "HTTP_USER_AGENT": "rc-test-agent",
171 171 "REQUEST_METHOD": "GET",
172 172 }
173 173
174 174
175 175 @pytest.fixture(scope="session")
176 176 def baseapp(request, ini_config, http_environ_session, available_port_factory, vcsserver_factory, celery_factory):
177 177 from rhodecode.lib.config_utils import get_app_config
178 178 from rhodecode.config.middleware import make_pyramid_app
179 179
180 180 log.info("Using the RhodeCode configuration:%s", ini_config)
181 181 pyramid.paster.setup_logging(ini_config)
182 182
183 183 settings = get_app_config(ini_config)
184 184 store_dir = os.path.dirname(ini_config)
185 185
186 186 # start vcsserver
187 187 _vcsserver_port = available_port_factory()
188 188 vcsserver_instance = vcsserver_factory(
189 189 request,
190 190 store_dir=store_dir,
191 191 port=_vcsserver_port,
192 192 info_prefix="base-app-"
193 193 )
194 194
195 195 settings["vcs.server"] = vcsserver_instance.bind_addr
196 196
197 197 # we skip setting store_dir for baseapp, it's internally set via testing rhodecode.ini
198 198 # settings['repo_store.path'] = str(store_dir)
199 199 console_printer(f' :warning: [green]pytest-setup[/green] Starting base pyramid-app: {ini_config}')
200 200 pyramid_baseapp = make_pyramid_app({"__file__": ini_config}, **settings)
201 201
202 202 # start celery
203 203 celery_factory(
204 204 request,
205 205 store_dir=store_dir,
206 206 port=None,
207 207 info_prefix="base-app-",
208 208 overrides=(
209 209 {'handler_console': {'level': 'DEBUG'}},
210 210 {'app:main': {'vcs.server': vcsserver_instance.bind_addr}},
211 211 {'app:main': {'repo_store.path': store_dir}}
212 212 )
213 213 )
214 214
215 215 return pyramid_baseapp
216 216
217 217
218 218 @pytest.fixture(scope="session")
219 219 def app_settings(baseapp, ini_config):
220 220 """
221 221 Settings dictionary used to create the app.
222 222
223 223 Parses the ini file and passes the result through the sanitize and apply
224 224 defaults mechanism in `rhodecode.config.middleware`.
225 225 """
226 226 return baseapp.config.get_settings()
227 227
228 228
229 229 @pytest.fixture(scope="session")
230 230 def db_connection(ini_settings):
231 231 # Initialize the database connection.
232 232 config_utils.initialize_database(ini_settings)
233 233
234 234
235 235 LoginData = collections.namedtuple("LoginData", ("csrf_token", "user"))
236 236
237 237
238 238 def _autologin_user(app, *args):
239 239 session = login_user_session(app, *args)
240 240 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
241 241 return LoginData(csrf_token, session["rhodecode_user"])
242 242
243 243
244 244 @pytest.fixture()
245 245 def autologin_user(app):
246 246 """
247 247 Utility fixture which makes sure that the admin user is logged in
248 248 """
249 249 return _autologin_user(app)
250 250
251 251
252 252 @pytest.fixture()
253 253 def autologin_regular_user(app):
254 254 """
255 255 Utility fixture which makes sure that the regular user is logged in
256 256 """
257 257 return _autologin_user(app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
258 258
259 259
260 260 @pytest.fixture(scope="function")
261 261 def csrf_token(request, autologin_user):
262 262 return autologin_user.csrf_token
263 263
264 264
265 265 @pytest.fixture(scope="function")
266 266 def xhr_header(request):
267 267 return {"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"}
268 268
269 269
270 270 @pytest.fixture()
271 271 def real_crypto_backend(monkeypatch):
272 272 """
273 273 Switch the production crypto backend on for this test.
274 274
275 275 During the test run the crypto backend is replaced with a faster
276 276 implementation based on the MD5 algorithm.
277 277 """
278 278 monkeypatch.setattr(rhodecode, "is_test", False)
279 279
280 280
281 281 @pytest.fixture(scope="class")
282 282 def index_location(request, baseapp):
283 283 index_location = baseapp.config.get_settings()["search.location"]
284 284 if request.cls:
285 285 request.cls.index_location = index_location
286 286 return index_location
287 287
288 288
289 289 @pytest.fixture(scope="session", autouse=True)
290 290 def tests_tmp_path(request):
291 291 """
292 292 Create temporary directory to be used during the test session.
293 293 """
294 294 if not os.path.exists(TESTS_TMP_PATH):
295 295 os.makedirs(TESTS_TMP_PATH)
296 296
297 297 if not request.config.getoption("--keep-tmp-path"):
298 298
299 299 @request.addfinalizer
300 300 def remove_tmp_path():
301 301 shutil.rmtree(TESTS_TMP_PATH)
302 302
303 303 return TESTS_TMP_PATH
304 304
305 305
306 306 @pytest.fixture()
307 307 def test_repo_group(request):
308 308 """
309 309 Create a temporary repository group, and destroy it after
310 310 usage automatically
311 311 """
312 312 fixture = Fixture()
313 313 repogroupid = "test_repo_group_%s" % str(time.time()).replace(".", "")
314 314 repo_group = fixture.create_repo_group(repogroupid)
315 315
316 316 def _cleanup():
317 317 fixture.destroy_repo_group(repogroupid)
318 318
319 319 request.addfinalizer(_cleanup)
320 320 return repo_group
321 321
322 322
323 323 @pytest.fixture()
324 324 def test_user_group(request):
325 325 """
326 326 Create a temporary user group, and destroy it after
327 327 usage automatically
328 328 """
329 329 fixture = Fixture()
330 330 usergroupid = "test_user_group_%s" % str(time.time()).replace(".", "")
331 331 user_group = fixture.create_user_group(usergroupid)
332 332
333 333 def _cleanup():
334 334 fixture.destroy_user_group(user_group)
335 335
336 336 request.addfinalizer(_cleanup)
337 337 return user_group
338 338
339 339
340 340 @pytest.fixture(scope="session")
341 341 def test_repo(request):
342 342 container = TestRepoContainer()
343 343 request.addfinalizer(container._cleanup)
344 344 return container
345 345
346 346
347 347 class TestRepoContainer(object):
348 348 """
349 349 Container for test repositories which are used read only.
350 350
351 351 Repositories will be created on demand and re-used during the lifetime
352 352 of this object.
353 353
354 354 Usage to get the svn test repository "minimal"::
355 355
356 356 test_repo = TestContainer()
357 357 repo = test_repo('minimal', 'svn')
358 358
359 359 """
360 360
361 361 dump_extractors = {
362 362 "git": utils.extract_git_repo_from_dump,
363 363 "hg": utils.extract_hg_repo_from_dump,
364 364 "svn": utils.extract_svn_repo_from_dump,
365 365 }
366 366
367 367 def __init__(self):
368 368 self._cleanup_repos = []
369 369 self._fixture = Fixture()
370 370 self._repos = {}
371 371
372 372 def __call__(self, dump_name, backend_alias, config=None):
373 373 key = (dump_name, backend_alias)
374 374 if key not in self._repos:
375 375 repo = self._create_repo(dump_name, backend_alias, config)
376 376 self._repos[key] = repo.repo_id
377 377 return Repository.get(self._repos[key])
378 378
379 379 def _create_repo(self, dump_name, backend_alias, config):
380 380 repo_name = f"{backend_alias}-{dump_name}"
381 381 backend = get_backend(backend_alias)
382 382 dump_extractor = self.dump_extractors[backend_alias]
383 383 repo_path = dump_extractor(dump_name, repo_name)
384 384
385 385 vcs_repo = backend(repo_path, config=config)
386 386 repo2db_mapper({repo_name: vcs_repo})
387 387
388 388 repo = RepoModel().get_by_repo_name(repo_name)
389 389 self._cleanup_repos.append(repo_name)
390 390 return repo
391 391
392 392 def _cleanup(self):
393 393 for repo_name in reversed(self._cleanup_repos):
394 394 self._fixture.destroy_repo(repo_name)
395 395
396 396
397 397 def backend_base(request, backend_alias, test_repo):
398 398 if backend_alias not in request.config.getoption("--backends"):
399 399 pytest.skip(f"Backend {backend_alias} not selected.")
400 400
401 401 utils.check_xfail_backends(request.node, backend_alias)
402 402 utils.check_skip_backends(request.node, backend_alias)
403 403
404 404 repo_name = "vcs_test_%s" % (backend_alias,)
405 405 backend = Backend(
406 406 alias=backend_alias, repo_name=repo_name, test_name=request.node.name, test_repo_container=test_repo
407 407 )
408 408 request.addfinalizer(backend.cleanup)
409 409 return backend
410 410
411 411
412 412 @pytest.fixture()
413 413 def backend(request, backend_alias, baseapp, test_repo):
414 414 """
415 415 Parametrized fixture which represents a single backend implementation.
416 416
417 417 It respects the option `--backends` to focus the test run on specific
418 418 backend implementations.
419 419
420 420 It also supports `pytest.mark.xfail_backends` to mark tests as failing
421 421 for specific backends. This is intended as a utility for incremental
422 422 development of a new backend implementation.
423 423 """
424 424 return backend_base(request, backend_alias, test_repo)
425 425
426 426
427 427 @pytest.fixture()
428 428 def backend_git(request, baseapp, test_repo):
429 429 return backend_base(request, "git", test_repo)
430 430
431 431
432 432 @pytest.fixture()
433 433 def backend_hg(request, baseapp, test_repo):
434 434 return backend_base(request, "hg", test_repo)
435 435
436 436
437 437 @pytest.fixture()
438 438 def backend_svn(request, baseapp, test_repo):
439 439 return backend_base(request, "svn", test_repo)
440 440
441 441
442 442 @pytest.fixture()
443 443 def backend_random(backend_git):
444 444 """
 445 445 Use this to express that your tests need a backend.
446 446
447 447 A few of our tests need a backend, so that we can run the code. This
448 448 fixture is intended to be used for such cases. It will pick one of the
449 449 backends and run the tests.
450 450
451 451 The fixture `backend` would run the test multiple times for each
452 452 available backend which is a pure waste of time if the test is
453 453 independent of the backend type.
454 454 """
455 455 # TODO: johbo: Change this to pick a random backend
456 456 return backend_git
457 457
458 458
@pytest.fixture()
def backend_stub(backend_git):
    """
    Use this to express that your test only needs a backend stub.

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    stub = backend_git
    return stub
468 468
469 469
@pytest.fixture()
def repo_stub(backend_stub):
    """
    Use this to express that your test only needs a repository stub.
    """
    stub_repo = backend_stub.create_repo()
    return stub_repo
476 476
477 477
class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    # Matches any character not allowed in generated repo names; such
    # characters are replaced by "_" in `_next_repo_name`.
    invalid_repo_name = re.compile(r"[^0-9a-zA-Z]+")
    # Template repository created by `create_master_repo`; derived repos
    # pull selected heads from `_master_repo_path`.
    _master_repo = None
    _master_repo_path = ""
    # NOTE(review): class-level mutable default shared until an instance
    # overwrites it in `_add_commits_to_repo`.
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        # alias: backend type, e.g. "git", "hg" or "svn"
        # repo_name: name of the "current" repository (see `repo` property)
        # test_name: used to derive unique names for created repositories
        # test_repo_container: callable resolving (key, alias) to a repo
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        """Return the shared per-session test repository named `key`."""
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        """Return the test repository `key`, passing an explicit `config`."""
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo_id(self):
        # just fake some repo_id
        return self.repo.repo_id

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository

        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        # Default branch as defined by the vcs backend class (e.g. "master").
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = vcsrepo.DEFAULT_BRANCH_NAME or vcsrepo.commit_ids[-1]
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the list of commits for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        self._master_repo_path = self._master_repo.repo_full_path

        return self._commit_ids

    def create_repo(self, commits=None, number_of_commits=0, heads=None, name_suffix="", bare=False, **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of of commit
            names which shall be pulled in from the master repository.
        :param name_suffix: adds special suffix to generated repo name
        :param bare: set a repo as bare (no checkout)
        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        commits = commits or [{"message": f"Commit {x} of {self.repo_name}"} for x in range(number_of_commits)]
        vcs_repo = repo.scm_instance()
        vcs_repo.count()
        self._add_commits_to_repo(vcs_repo, commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads, do_fetch=False):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsrepo = repo.scm_instance()
        # Disable hooks so the pull does not trigger RhodeCode callbacks.
        vcsrepo.config.clear_section("hooks")
        commit_ids = [self._commit_ids[h] for h in heads]
        if do_fetch:
            vcsrepo.fetch(self._master_repo_path, commit_ids=commit_ids)
        vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)

    def create_fork(self):
        """Fork the "current" repository and make the fork current."""
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=""):
        """Reserve and return a fresh repository name (scheduled for cleanup)."""
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        # Unique name: sanitized test name + count of repos created so far.
        return "%s_%s" % (self.invalid_repo_name.sub("_", self._test_name), len(self._cleanup_repos))

    def ensure_file(self, filename, content=b"Test content\n"):
        """Commit `filename` with `content` into the "current" repository."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {
                "added": [
                    FileNode(filename, content=content),
                ]
            },
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        """Turn on the downloads flag of the "current" repository."""
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        # Destroy in reverse creation order so forks go before their origin.
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        """Create `commits` in `repo` and remember the message->raw_id map."""
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == "git":
            refs = {}
            for message in self._commit_ids:
                cleanup_message = message.replace(" ", "")
                ref_name = f"refs/test-refs/{cleanup_message}"
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        # refs: mapping of ref name -> commit id to register on the repo.
        for ref_name, ref_val in refs.items():
            repo.set_refs(ref_name, ref_val)
653 653
654 654
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.

    Unlike `Backend`, this operates purely on the vcs layer and does not
    involve database model instances.
    """

    # Matches any character not allowed in generated repo names; such
    # characters are replaced by "_" in `_next_repo_name`.
    invalid_repo_name = re.compile(r"[^0-9a-zA-Z]+")

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        # alias: backend type, e.g. "git", "hg" or "svn"
        # repo_path: filesystem path of the "current" repository
        # test_name: used to derive unique names for created repositories
        # test_repo_container: callable resolving (key, alias) to a repo
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        """Return the vcs instance of the shared test repository `key`."""
        return self._test_repo_container(key, self.alias).scm_instance()

    def __repr__(self):
        return f"{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})"

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None, bare=False):
        """Create a new vcs repository, optionally cloned and pre-populated."""
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        commits = commits or [{"message": "Commit %s of %s" % (x, repo_name)} for x in range(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        """Create a new repository as a clone of `repo`."""
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        # Remove the working directories of all repositories created here.
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        """Reserve and return a fresh repository path (not yet created)."""
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        # Unique name: sanitized test name + count of repos created so far.
        return "{}_{}".format(self.invalid_repo_name.sub("_", self._test_name), len(self._cleanup_repos))

    def add_file(self, repo, filename, content="Test content\n"):
        """Commit `filename` with `content` into `repo`."""
        imc = repo.in_memory_commit
        imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content)))
        imc.commit(message="Automatic commit from vcsbackend fixture", author="Automatic <automatic@rhodecode.com>")

    def ensure_file(self, filename, content="Test content\n"):
        """Commit `filename` into the "current" repository."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
728 728
729 729
def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend:
    """
    Build a `VcsBackend` for `backend_alias` and register its cleanup.

    Skips the test when the backend is not enabled via ``--backends`` and
    honours the ``xfail_backends`` / ``skip_backends`` markers.

    :param backend_alias: backend type, e.g. "git", "hg" or "svn"
    :param tests_tmp_path: base directory for the backend's repo path
    """
    if backend_alias not in request.config.getoption("--backends"):
        # f-string for consistency with `backend_base` above
        pytest.skip(f"Backend {backend_alias} not selected.")

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = f"vcs_test_{backend_alias}"
    repo_path = os.path.join(tests_tmp_path, repo_name)
    backend = VcsBackend(
        alias=backend_alias, repo_path=repo_path, test_name=request.node.name, test_repo_container=test_repo
    )
    request.addfinalizer(backend.cleanup)
    return backend
744 744
745 745
@pytest.fixture()
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    Mirrors the `backend` fixture, but on the vcs level: it does not provide
    database model instances. Parameters are generated dynamically, see
    :func:`pytest_generate_tests` for details.
    """
    vcs_backend = vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
    return vcs_backend
758 758
759 759
@pytest.fixture()
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    """Non-parametrized variant of the `vcsbackend` fixture, pinned to Git."""
    alias = "git"
    return vcsbackend_base(request, alias, tests_tmp_path, baseapp, test_repo)
763 763
764 764
@pytest.fixture()
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    """Non-parametrized variant of the `vcsbackend` fixture, pinned to Mercurial."""
    alias = "hg"
    return vcsbackend_base(request, alias, tests_tmp_path, baseapp, test_repo)
768 768
769 769
@pytest.fixture()
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    """Non-parametrized variant of the `vcsbackend` fixture, pinned to Subversion."""
    alias = "svn"
    return vcsbackend_base(request, alias, tests_tmp_path, baseapp, test_repo)
773 773
774 774
@pytest.fixture()
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    stub = vcsbackend_git
    return stub
783 783
784 784
785 785 def _add_commits_to_repo(vcs_repo, commits):
786 786 commit_ids = {}
787 787 if not commits:
788 788 return commit_ids
789 789
790 790 imc = vcs_repo.in_memory_commit
791 791
792 792 for idx, commit in enumerate(commits):
793 793 message = str(commit.get("message", f"Commit {idx}"))
794 794
795 795 for node in commit.get("added", []):
796 796 imc.add(FileNode(safe_bytes(node.path), content=node.content))
797 797 for node in commit.get("changed", []):
798 798 imc.change(FileNode(safe_bytes(node.path), content=node.content))
799 799 for node in commit.get("removed", []):
800 800 imc.remove(FileNode(safe_bytes(node.path)))
801 801
802 802 parents = [vcs_repo.get_commit(commit_id=commit_ids[p]) for p in commit.get("parents", [])]
803 803
804 804 operations = ("added", "changed", "removed")
805 805 if not any((commit.get(o) for o in operations)):
806 806 imc.add(FileNode(b"file_%b" % safe_bytes(str(idx)), content=safe_bytes(message)))
807 807
808 808 commit = imc.commit(
809 809 message=message,
810 810 author=str(commit.get("author", "Automatic <automatic@rhodecode.com>")),
811 811 date=commit.get("date"),
812 812 branch=commit.get("branch"),
813 813 parents=parents,
814 814 )
815 815
816 816 commit_ids[commit.message] = commit.raw_id
817 817
818 818 return commit_ids
819 819
820 820
@pytest.fixture()
def reposerver(request):
    """
    Allows to serve a backend repository for the duration of one test.
    """
    server = RepoServer()
    request.addfinalizer(server.cleanup)
    return server
830 830
831 831
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # URL of the last served repository; `None` until `serve` succeeds.
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """
        Start an ``svnserve`` daemon rooted at `vcsrepo.path`.

        :raises TypeError: when `vcsrepo` is not a Subversion repository.
        """
        if vcsrepo.alias != "svn":
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess.Popen(
            ["svnserve", "-d", "--foreground", "--listen-host", "localhost", "--root", vcsrepo.path]
        )
        self._cleanup_servers.append(proc)
        self.url = "svn://localhost"

    def cleanup(self):
        """Terminate all spawned server processes and reap them."""
        for proc in self._cleanup_servers:
            proc.terminate()
            # Reap the child; a bare terminate() would leave zombie
            # processes around for the rest of the test session.
            proc.wait()
857 857
858 858
@pytest.fixture()
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    Yields a :class:`PRTestUtility` bound to the parametrized `backend`,
    providing various utility methods around one pull request. Cleanup of
    the created pull request is registered as a finalizer.

    This fixture uses `backend` and inherits its parameterization.
    """
    pr_helper = PRTestUtility(backend)
    request.addfinalizer(pr_helper.cleanup)
    return pr_helper
874 874
875 875
class PRTestUtility(object):
    """
    Helper around a single pull request for model and functional tests.

    Creates a master repository plus derived source/target repositories and
    a pull request between them, and offers operations to mutate, comment
    on, version and clean up that pull request.
    """

    # Lazily created by `create_pull_request`.
    pull_request = None
    pull_request_id = None
    # Patchers started on demand; stopped again in `cleanup`.
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None
    # Maps commit message -> raw commit id of the master repository.
    commit_ids: dict

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
        self,
        commits=None,
        target_head=None,
        source_head=None,
        revisions=None,
        approved=False,
        author=None,
        mergeable=False,
        enable_notifications=True,
        name_suffix="",
        reviewers=None,
        observers=None,
        title="Test",
        description="Description",
    ):
        """
        Create (once) and return the pull request under test.

        On first call this builds the master/source/target repositories from
        `commits` (or a default three-commit series) and creates the pull
        request; subsequent calls reuse the existing one.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch("rhodecode.model.notification.NotificationModel.create")
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {"message": "c1"},
                    {"message": "c2"},
                    {"message": "c3"},
                ]
                target_head = "c1"
                source_head = "c2"
                revisions = ["c2"]

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                "created_by": self.author,
                "source_repo": self.source_repository.repo_name,
                "source_ref": self._default_branch_reference(source_head),
                "target_repo": self.target_repository.repo_name,
                "target_ref": self._default_branch_reference(target_head),
                "revisions": [self.commit_ids[r] for r in revisions],
                "reviewers": reviewers or self._get_reviewers(),
                "observers": observers or self._get_observers(),
                "title": title,
                "description": description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # A freshly created pull request must not have versions yet.
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        """Cast an "approved" status vote for every reviewer."""
        self.create_status_votes(ChangesetStatus.STATUS_APPROVED, *self.pull_request.reviewers)

    def close(self):
        """Close the pull request as its author."""
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message, branch: str = None) -> str:
        """Build a "branch:<name>:<commit_id>" reference for `commit_message`."""
        default_branch = branch or self.backend.default_branch_name
        message = self.commit_ids[commit_message]
        reference = f"branch:{default_branch}:{message}"

        return reference

    def _get_reviewers(self):
        # Default reviewer entries: (username, reasons, mandatory, role, rules)
        role = PullRequestReviewers.ROLE_REVIEWER
        return [
            (TEST_USER_REGULAR_LOGIN, ["default1"], False, role, []),
            (TEST_USER_REGULAR2_LOGIN, ["default2"], False, role, []),
        ]

    def _get_observers(self):
        return []

    def update_source_repository(self, head=None, do_fetch=False):
        """Pull `head` (default "c3") from the master into the source repo."""
        heads = [head or "c3"]
        self.backend.pull_heads(self.source_repository, heads=heads, do_fetch=do_fetch)

    def update_target_repository(self, head=None, do_fetch=False):
        """Pull `head` (default "c3") from the master into the target repo."""
        heads = [head or "c3"]
        self.backend.pull_heads(self.target_repository, heads=heads, do_fetch=do_fetch)

    def set_pr_target_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
        """Overwrite the pull request's target ref and return the new value."""
        full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
        self.pull_request.target_ref = full_ref
        return full_ref

    def set_pr_source_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
        """Overwrite the pull request's source ref and return the new value."""
        full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
        self.pull_request.source_ref = full_ref
        return full_ref

    def add_one_commit(self, head=None):
        """Add one commit to the source repo, update the PR, return its id."""
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        """Strip the source repo's tip, update the PR, return the removed id."""
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {"branch_name": self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """Create a general PR comment, optionally linked to a PR version."""
        comment = CommentsModel().create(
            text="Test comment", repo=self.target_repository.repo_name, user=self.author, pull_request=self.pull_request
        )
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(self, linked_to=None, line_no="n1", file_path="file_1"):
        """Create an inline PR comment, optionally linked to a PR version."""
        comment = CommentsModel().create(
            text="Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request,
        )
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        """Create the pull request and snapshot it into a new version."""
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        """Set `status` on the pull request for each of `reviewers`."""
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo, status=status, user=reviewer.user_id, pull_request=self.pull_request
            )

    def set_mergeable(self, value):
        """Patch the global PR-merge setting to `value` (patcher reused)."""
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(VcsSettingsModel, "get_general_settings")
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {"rhodecode_pr_merge_enabled": value}

    def cleanup(self):
        """Delete the pull request (if still present) and stop all patchers."""
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1075 1075
1076 1076
@pytest.fixture()
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1084 1084
1085 1085
@pytest.fixture()
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1093 1093
1094 1094
@pytest.fixture()
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    helper = UserUtility(test_name=request.node.name)
    request.addfinalizer(helper.cleanup)
    return helper
1103 1103
1104 1104
1105 1105 # TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):
    """
    Factory for users, groups, repositories and permissions in tests.

    Every created entity (unless `auto_cleanup=False`) and every granted
    permission is recorded so `cleanup` can tear everything down again in
    a dependency-safe order.
    """

    def __init__(self, test_name="test"):
        # test_name: used as prefix for all generated entity names.
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        # Ids of created entities, appended in creation order.
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        # Granted permissions as (target_id, grantee_id) tuples per kind.
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        # Replace brackets (from parametrized test ids) so names stay valid.
        for char in ["[", "]"]:
            name = name.replace(char, "_")
        return name

    def create_repo_group(self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        """Create a repo group with a generated unique name."""
        group_name = "{prefix}_repogroup_{count}".format(prefix=self._test_name, count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None, auto_cleanup=True, repo_type="hg", bare=False):
        """Create a repository with a generated unique name."""
        repo_name = "{prefix}_repository_{count}".format(prefix=self._test_name, count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare
        )
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        """Create a user with a generated unique name."""
        user_name = "{prefix}_user_{count}".format(prefix=self._test_name, count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_additional_user_email(self, user, email):
        """Attach an extra email address to `user`."""
        uem = self.fixture.create_additional_user_email(user=user, email=email)
        return uem

    def create_user_with_group(self):
        """Create a user plus a user group containing that user."""
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None, auto_cleanup=True, **kwargs):
        """Create a user group, optionally populated with `members`."""
        group_name = "{prefix}_usergroup_{count}".format(prefix=self._test_name, count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(group_name, cur_user=owner, **kwargs)

        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        """Record a global permission grant (applied via cleanup bookkeeping)."""
        self.inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(self, repo_group, user, permission_name):
        """Grant `user` a permission on `repo_group`, recorded for cleanup."""
        permission = RepoGroupModel().grant_user_permission(repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append((repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(self, repo_group, user_group, permission_name):
        """Grant `user_group` a permission on `repo_group`, recorded for cleanup."""
        permission = RepoGroupModel().grant_user_group_permission(repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append((repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(self, repo, user, permission_name):
        """Grant `user` a permission on `repo`, recorded for cleanup."""
        permission = RepoModel().grant_user_permission(repo, user, permission_name)
        self.user_repo_permission_ids.append((repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(self, repo, user_group, permission_name):
        """Grant `user_group` a permission on `repo`, recorded for cleanup."""
        permission = RepoModel().grant_user_group_permission(repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append((repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(self, target_user_group, user, permission_name):
        """Grant `user` a permission on `target_user_group`, recorded for cleanup."""
        permission = UserGroupModel().grant_user_permission(target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append((target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(self, target_user_group, user_group, permission_name):
        """Grant `user_group` a permission on `target_user_group`, recorded for cleanup."""
        permission = UserGroupModel().grant_user_group_permission(target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append((target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        """Revoke a global permission and re-enable default inheritance."""
        self.inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def inherit_default_user_permissions(self, user_name, value):
        """Toggle whether `user_name` inherits the default permissions."""
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        # Order matters: permissions first, then repos before their groups,
        # user groups before the users that may own them.
        self._cleanup_permissions()
        self._cleanup_repos()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        """Revoke every permission granted through this utility."""
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            # NOTE(review): `cmp` is not a Python 3 builtin — presumably a
            # compat helper imported at the top of this file; confirm.
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = len(first_group.group_name.split("/")) if first_group else 0
            second_group_parts = len(second_group.group_name.split("/")) if second_group else 0
            return cmp(second_group_parts, first_group_parts)

        # Destroy deepest-nested groups first so parents are empty when removed.
        sorted_repo_group_ids = sorted(self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_repos(self):
        """Destroy all repositories created through this utility."""
        sorted_repos_ids = sorted(self.repos_ids)
        for repo_id in sorted_repos_ids:
            self.fixture.destroy_repo(repo_id)

    def _cleanup_user_groups(self):
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = len(first_group.users_group_name.split("/")) if first_group else 0
            second_group_parts = len(second_group.users_group_name.split("/")) if second_group else 0
            return cmp(second_group_parts, first_group_parts)

        # Destroy deepest-nested groups first, mirroring repo group cleanup.
        sorted_user_group_ids = sorted(self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        """Destroy all users created through this utility."""
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)
1283 1283
1284 1284
1285 1285 @pytest.fixture(scope="session")
1286 1286 def testrun():
1287 1287 return {
1288 1288 "uuid": uuid.uuid4(),
1289 1289 "start": datetime.datetime.utcnow().isoformat(),
1290 1290 "timestamp": int(time.time()),
1291 1291 }
1292 1292
1293 1293
1294 1294 class AppenlightClient(object):
1295 1295 url_template = "{url}?protocol_version=0.5"
1296 1296
1297 1297 def __init__(self, url, api_key, add_server=True, add_timestamp=True, namespace=None, request=None, testrun=None):
1298 1298 self.url = self.url_template.format(url=url)
1299 1299 self.api_key = api_key
1300 1300 self.add_server = add_server
1301 1301 self.add_timestamp = add_timestamp
1302 1302 self.namespace = namespace
1303 1303 self.request = request
1304 1304 self.server = socket.getfqdn(socket.gethostname())
1305 1305 self.tags_before = {}
1306 1306 self.tags_after = {}
1307 1307 self.stats = []
1308 1308 self.testrun = testrun or {}
1309 1309
1310 1310 def tag_before(self, tag, value):
1311 1311 self.tags_before[tag] = value
1312 1312
1313 1313 def tag_after(self, tag, value):
1314 1314 self.tags_after[tag] = value
1315 1315
1316 1316 def collect(self, data):
1317 1317 if self.add_server:
1318 1318 data.setdefault("server", self.server)
1319 1319 if self.add_timestamp:
1320 1320 data.setdefault("date", datetime.datetime.utcnow().isoformat())
1321 1321 if self.namespace:
1322 1322 data.setdefault("namespace", self.namespace)
1323 1323 if self.request:
1324 1324 data.setdefault("request", self.request)
1325 1325 self.stats.append(data)
1326 1326
1327 1327 def send_stats(self):
1328 1328 tags = [
1329 1329 ("testrun", self.request),
1330 1330 ("testrun.start", self.testrun["start"]),
1331 1331 ("testrun.timestamp", self.testrun["timestamp"]),
1332 1332 ("test", self.namespace),
1333 1333 ]
1334 1334 for key, value in self.tags_before.items():
1335 1335 tags.append((key + ".before", value))
1336 1336 try:
1337 1337 delta = self.tags_after[key] - value
1338 1338 tags.append((key + ".delta", delta))
1339 1339 except Exception:
1340 1340 pass
1341 1341 for key, value in self.tags_after.items():
1342 1342 tags.append((key + ".after", value))
1343 1343 self.collect(
1344 1344 {
1345 1345 "message": "Collected tags",
1346 1346 "tags": tags,
1347 1347 }
1348 1348 )
1349 1349
1350 1350 response = requests.post(
1351 1351 self.url,
1352 1352 headers={"X-appenlight-api-key": self.api_key},
1353 1353 json=self.stats,
1354 1354 )
1355 1355
1356 1356 if not response.status_code == 200:
1357 1357 pprint.pprint(self.stats)
1358 1358 print(response.headers)
1359 1359 print(response.text)
1360 1360 raise Exception("Sending to appenlight failed")
1361 1361
1362 1362
1363 1363 @pytest.fixture()
1364 1364 def gist_util(request, db_connection):
1365 1365 """
1366 1366 Provides a wired instance of `GistUtility` with integrated cleanup.
1367 1367 """
1368 1368 utility = GistUtility()
1369 1369 request.addfinalizer(utility.cleanup)
1370 1370 return utility
1371 1371
1372 1372
1373 1373 class GistUtility(object):
1374 1374 def __init__(self):
1375 1375 self.fixture = Fixture()
1376 1376 self.gist_ids = []
1377 1377
1378 1378 def create_gist(self, **kwargs):
1379 1379 gist = self.fixture.create_gist(**kwargs)
1380 1380 self.gist_ids.append(gist.gist_id)
1381 1381 return gist
1382 1382
1383 1383 def cleanup(self):
1384 1384 for id_ in self.gist_ids:
1385 1385 self.fixture.destroy_gists(str(id_))
1386 1386
1387 1387
1388 1388 @pytest.fixture()
1389 1389 def enabled_backends(request):
1390 1390 backends = request.config.option.backends
1391 1391 return backends[:]
1392 1392
1393 1393
1394 1394 @pytest.fixture()
1395 1395 def settings_util(request, db_connection):
1396 1396 """
1397 1397 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1398 1398 """
1399 1399 utility = SettingsUtility()
1400 1400 request.addfinalizer(utility.cleanup)
1401 1401 return utility
1402 1402
1403 1403
1404 1404 class SettingsUtility(object):
1405 1405 def __init__(self):
1406 1406 self.rhodecode_ui_ids = []
1407 1407 self.rhodecode_setting_ids = []
1408 1408 self.repo_rhodecode_ui_ids = []
1409 1409 self.repo_rhodecode_setting_ids = []
1410 1410
1411 1411 def create_repo_rhodecode_ui(self, repo, section, value, key=None, active=True, cleanup=True):
1412 1412 key = key or sha1_safe(f"{section}{value}{repo.repo_id}")
1413 1413
1414 1414 setting = RepoRhodeCodeUi()
1415 1415 setting.repository_id = repo.repo_id
1416 1416 setting.ui_section = section
1417 1417 setting.ui_value = value
1418 1418 setting.ui_key = key
1419 1419 setting.ui_active = active
1420 1420 Session().add(setting)
1421 1421 Session().commit()
1422 1422
1423 1423 if cleanup:
1424 1424 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1425 1425 return setting
1426 1426
1427 1427 def create_rhodecode_ui(self, section, value, key=None, active=True, cleanup=True):
1428 1428 key = key or sha1_safe(f"{section}{value}")
1429 1429
1430 1430 setting = RhodeCodeUi()
1431 1431 setting.ui_section = section
1432 1432 setting.ui_value = value
1433 1433 setting.ui_key = key
1434 1434 setting.ui_active = active
1435 1435 Session().add(setting)
1436 1436 Session().commit()
1437 1437
1438 1438 if cleanup:
1439 1439 self.rhodecode_ui_ids.append(setting.ui_id)
1440 1440 return setting
1441 1441
1442 1442 def create_repo_rhodecode_setting(self, repo, name, value, type_, cleanup=True):
1443 1443 setting = RepoRhodeCodeSetting(repo.repo_id, key=name, val=value, type=type_)
1444 1444 Session().add(setting)
1445 1445 Session().commit()
1446 1446
1447 1447 if cleanup:
1448 1448 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1449 1449 return setting
1450 1450
1451 1451 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1452 1452 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1453 1453 Session().add(setting)
1454 1454 Session().commit()
1455 1455
1456 1456 if cleanup:
1457 1457 self.rhodecode_setting_ids.append(setting.app_settings_id)
1458 1458
1459 1459 return setting
1460 1460
1461 1461 def cleanup(self):
1462 1462 for id_ in self.rhodecode_ui_ids:
1463 1463 setting = RhodeCodeUi.get(id_)
1464 1464 Session().delete(setting)
1465 1465
1466 1466 for id_ in self.rhodecode_setting_ids:
1467 1467 setting = RhodeCodeSetting.get(id_)
1468 1468 Session().delete(setting)
1469 1469
1470 1470 for id_ in self.repo_rhodecode_ui_ids:
1471 1471 setting = RepoRhodeCodeUi.get(id_)
1472 1472 Session().delete(setting)
1473 1473
1474 1474 for id_ in self.repo_rhodecode_setting_ids:
1475 1475 setting = RepoRhodeCodeSetting.get(id_)
1476 1476 Session().delete(setting)
1477 1477
1478 1478 Session().commit()
1479 1479
1480 1480
1481 1481 @pytest.fixture()
1482 1482 def no_notifications(request):
1483 1483 notification_patcher = mock.patch("rhodecode.model.notification.NotificationModel.create")
1484 1484 notification_patcher.start()
1485 1485 request.addfinalizer(notification_patcher.stop)
1486 1486
1487 1487
1488 1488 @pytest.fixture(scope="session")
1489 1489 def repeat(request):
1490 1490 """
1491 1491 The number of repetitions is based on this fixture.
1492 1492
1493 1493 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1494 1494 tests are not too slow in our default test suite.
1495 1495 """
1496 1496 return request.config.getoption("--repeat")
1497 1497
1498 1498
1499 1499 @pytest.fixture()
1500 1500 def rhodecode_fixtures():
1501 1501 return Fixture()
1502 1502
1503 1503
1504 1504 @pytest.fixture()
1505 1505 def context_stub():
1506 1506 """
1507 1507 Stub context object.
1508 1508 """
1509 1509 context = pyramid.testing.DummyResource()
1510 1510 return context
1511 1511
1512 1512
1513 1513 @pytest.fixture()
1514 1514 def StubIntegrationType():
1515 1515 class _StubIntegrationType(IntegrationTypeBase):
1516 1516 """Test integration type class"""
1517 1517
1518 1518 key = "test"
1519 1519 display_name = "Test integration type"
1520 1520 description = "A test integration type for testing"
1521 1521
1522 1522 @classmethod
1523 1523 def icon(cls):
1524 1524 return "test_icon_html_image"
1525 1525
1526 1526 def __init__(self, settings):
1527 1527 super(_StubIntegrationType, self).__init__(settings)
1528 1528 self.sent_events = [] # for testing
1529 1529
1530 1530 def send_event(self, event):
1531 1531 self.sent_events.append(event)
1532 1532
1533 1533 def settings_schema(self):
1534 1534 class SettingsSchema(colander.Schema):
1535 1535 test_string_field = colander.SchemaNode(
1536 1536 colander.String(),
1537 1537 missing=colander.required,
1538 1538 title="test string field",
1539 1539 )
1540 1540 test_int_field = colander.SchemaNode(
1541 1541 colander.Int(),
1542 1542 title="some integer setting",
1543 1543 )
1544 1544
1545 1545 return SettingsSchema()
1546 1546
1547 1547 integration_type_registry.register_integration_type(_StubIntegrationType)
1548 1548 return _StubIntegrationType
1549 1549
1550 1550
1551 1551 @pytest.fixture()
1552 1552 def stub_integration_settings():
1553 1553 return {
1554 1554 "test_string_field": "some data",
1555 1555 "test_int_field": 100,
1556 1556 }
1557 1557
1558 1558
1559 1559 @pytest.fixture()
1560 1560 def repo_integration_stub(request, repo_stub, StubIntegrationType, stub_integration_settings):
1561 repo_id = repo_stub.repo_id
1561 1562 integration = IntegrationModel().create(
1562 1563 StubIntegrationType,
1563 1564 settings=stub_integration_settings,
1564 1565 enabled=True,
1565 1566 name="test repo integration",
1566 1567 repo=repo_stub,
1567 1568 repo_group=None,
1568 1569 child_repos_only=None,
1569 1570 )
1570 1571
1571 1572 @request.addfinalizer
1572 1573 def cleanup():
1573 1574 IntegrationModel().delete(integration)
1575 RepoModel().delete(repo_id)
1574 1576
1575 1577 return integration
1576 1578
1577 1579
1578 1580 @pytest.fixture()
1579 1581 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType, stub_integration_settings):
1580 1582 integration = IntegrationModel().create(
1581 1583 StubIntegrationType,
1582 1584 settings=stub_integration_settings,
1583 1585 enabled=True,
1584 1586 name="test repogroup integration",
1585 1587 repo=None,
1586 1588 repo_group=test_repo_group,
1587 1589 child_repos_only=True,
1588 1590 )
1589 1591
1590 1592 @request.addfinalizer
1591 1593 def cleanup():
1592 1594 IntegrationModel().delete(integration)
1593 1595
1594 1596 return integration
1595 1597
1596 1598
1597 1599 @pytest.fixture()
1598 1600 def repogroup_recursive_integration_stub(request, test_repo_group, StubIntegrationType, stub_integration_settings):
1599 1601 integration = IntegrationModel().create(
1600 1602 StubIntegrationType,
1601 1603 settings=stub_integration_settings,
1602 1604 enabled=True,
1603 1605 name="test recursive repogroup integration",
1604 1606 repo=None,
1605 1607 repo_group=test_repo_group,
1606 1608 child_repos_only=False,
1607 1609 )
1608 1610
1609 1611 @request.addfinalizer
1610 1612 def cleanup():
1611 1613 IntegrationModel().delete(integration)
1612 1614
1613 1615 return integration
1614 1616
1615 1617
1616 1618 @pytest.fixture()
1617 1619 def global_integration_stub(request, StubIntegrationType, stub_integration_settings):
1618 1620 integration = IntegrationModel().create(
1619 1621 StubIntegrationType,
1620 1622 settings=stub_integration_settings,
1621 1623 enabled=True,
1622 1624 name="test global integration",
1623 1625 repo=None,
1624 1626 repo_group=None,
1625 1627 child_repos_only=None,
1626 1628 )
1627 1629
1628 1630 @request.addfinalizer
1629 1631 def cleanup():
1630 1632 IntegrationModel().delete(integration)
1631 1633
1632 1634 return integration
1633 1635
1634 1636
1635 1637 @pytest.fixture()
1636 1638 def root_repos_integration_stub(request, StubIntegrationType, stub_integration_settings):
1637 1639 integration = IntegrationModel().create(
1638 1640 StubIntegrationType,
1639 1641 settings=stub_integration_settings,
1640 1642 enabled=True,
1641 1643 name="test global integration",
1642 1644 repo=None,
1643 1645 repo_group=None,
1644 1646 child_repos_only=True,
1645 1647 )
1646 1648
1647 1649 @request.addfinalizer
1648 1650 def cleanup():
1649 1651 IntegrationModel().delete(integration)
1650 1652
1651 1653 return integration
1652 1654
1653 1655
1654 1656 @pytest.fixture()
1655 1657 def local_dt_to_utc():
1656 1658 def _factory(dt):
1657 1659 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)
1658 1660
1659 1661 return _factory
1660 1662
1661 1663
1662 1664 @pytest.fixture()
1663 1665 def disable_anonymous_user(request, baseapp):
1664 1666 set_anonymous_access(False)
1665 1667
1666 1668 @request.addfinalizer
1667 1669 def cleanup():
1668 1670 set_anonymous_access(True)
1669 1671
1670 1672
1671 1673 @pytest.fixture(scope="module")
1672 1674 def rc_fixture(request):
1673 1675 return Fixture()
1674 1676
1675 1677
1676 1678 @pytest.fixture()
1677 1679 def repo_groups(request):
1678 1680 fixture = Fixture()
1679 1681
1680 1682 session = Session()
1681 1683 zombie_group = fixture.create_repo_group("zombie")
1682 1684 parent_group = fixture.create_repo_group("parent")
1683 1685 child_group = fixture.create_repo_group("parent/child")
1684 1686 groups_in_db = session.query(RepoGroup).all()
1685 1687 assert len(groups_in_db) == 3
1686 1688 assert child_group.group_parent_id == parent_group.group_id
1687 1689
1688 1690 @request.addfinalizer
1689 1691 def cleanup():
1690 1692 fixture.destroy_repo_group(zombie_group)
1691 1693 fixture.destroy_repo_group(child_group)
1692 1694 fixture.destroy_repo_group(parent_group)
1693 1695
1694 1696 return zombie_group, parent_group, child_group
1695 1697
@@ -1,223 +1,221 b''
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import time
20 20 import pytest
21 21
22 22 from rhodecode import events
23 from rhodecode.model.repo import RepoModel
23 24 from rhodecode.tests.fixtures.rc_fixture import Fixture
24 25 from rhodecode.model.db import Session, Integration
25 26 from rhodecode.model.integration import IntegrationModel
26 27
27 28
28 29 class TestDeleteScopesDeletesIntegrations(object):
29 def test_delete_repo_with_integration_deletes_integration(
30 self, repo_integration_stub):
31
32 Session().delete(repo_integration_stub.repo)
30 def test_delete_repo_with_integration_deletes_integration(self, repo_integration_stub):
31 RepoModel().delete(repo_integration_stub.repo)
33 32 Session().commit()
34 33 Session().expire_all()
35 34 integration = Integration.get(repo_integration_stub.integration_id)
36 35 assert integration is None
37 36
38 def test_delete_repo_group_with_integration_deletes_integration(
39 self, repogroup_integration_stub):
37 def test_delete_repo_group_with_integration_deletes_integration(self, repogroup_integration_stub):
40 38
41 39 Session().delete(repogroup_integration_stub.repo_group)
42 40 Session().commit()
43 41 Session().expire_all()
44 42 integration = Integration.get(repogroup_integration_stub.integration_id)
45 43 assert integration is None
46 44
47 45
48 46 count = 1
49 47
50 48
51 49 def counter():
52 50 global count
53 51 val = count
54 52 count += 1
55 return '{}_{}'.format(val, time.time())
53 return f'{val}_{time.time()}'
56 54
57 55
58 56 @pytest.fixture()
59 57 def integration_repos(request, StubIntegrationType, stub_integration_settings):
60 58 """
61 59 Create repositories and integrations for testing, and destroy them after
62 60
63 61 Structure:
64 62 root_repo
65 63 parent_group/
66 64 parent_repo
67 65 child_group/
68 66 child_repo
69 67 other_group/
70 68 other_repo
71 69 """
72 70 fixture = Fixture()
73 71
74 72 parent_group_id = 'int_test_parent_group_{}'.format(counter())
75 73 parent_group = fixture.create_repo_group(parent_group_id)
76 74
77 75 other_group_id = 'int_test_other_group_{}'.format(counter())
78 76 other_group = fixture.create_repo_group(other_group_id)
79 77
80 78 child_group_id = (
81 79 parent_group_id + '/' + 'int_test_child_group_{}'.format(counter()))
82 80 child_group = fixture.create_repo_group(child_group_id)
83 81
84 82 parent_repo_id = 'int_test_parent_repo_{}'.format(counter())
85 83 parent_repo = fixture.create_repo(parent_repo_id, repo_group=parent_group)
86 84
87 85 child_repo_id = 'int_test_child_repo_{}'.format(counter())
88 86 child_repo = fixture.create_repo(child_repo_id, repo_group=child_group)
89 87
90 88 other_repo_id = 'int_test_other_repo_{}'.format(counter())
91 89 other_repo = fixture.create_repo(other_repo_id, repo_group=other_group)
92 90
93 91 root_repo_id = 'int_test_repo_root_{}'.format(counter())
94 92 root_repo = fixture.create_repo(root_repo_id)
95 93
96 94 integrations = {}
97 95 for name, repo, repo_group, child_repos_only in [
98 96 ('global', None, None, None),
99 97 ('root_repos', None, None, True),
100 98 ('parent_repo', parent_repo, None, None),
101 99 ('child_repo', child_repo, None, None),
102 100 ('other_repo', other_repo, None, None),
103 101 ('root_repo', root_repo, None, None),
104 102 ('parent_group', None, parent_group, True),
105 103 ('parent_group_recursive', None, parent_group, False),
106 104 ('child_group', None, child_group, True),
107 105 ('child_group_recursive', None, child_group, False),
108 106 ('other_group', None, other_group, True),
109 107 ('other_group_recursive', None, other_group, False),
110 108 ]:
111 109 integrations[name] = IntegrationModel().create(
112 110 StubIntegrationType, settings=stub_integration_settings,
113 111 enabled=True, name='test %s integration' % name,
114 112 repo=repo, repo_group=repo_group, child_repos_only=child_repos_only)
115 113
116 114 Session().commit()
117 115
118 116 def _cleanup():
119 117 for integration in integrations.values():
120 118 Session.delete(integration)
121 119
122 120 fixture.destroy_repo(root_repo)
123 121 fixture.destroy_repo(child_repo)
124 122 fixture.destroy_repo(parent_repo)
125 123 fixture.destroy_repo(other_repo)
126 124 fixture.destroy_repo_group(child_group)
127 125 fixture.destroy_repo_group(parent_group)
128 126 fixture.destroy_repo_group(other_group)
129 127
130 128 request.addfinalizer(_cleanup)
131 129
132 130 return {
133 131 'integrations': integrations,
134 132 'repos': {
135 133 'root_repo': root_repo,
136 134 'other_repo': other_repo,
137 135 'parent_repo': parent_repo,
138 136 'child_repo': child_repo,
139 137 }
140 138 }
141 139
142 140
143 141 def test_enabled_integration_repo_scopes(integration_repos):
144 142 integrations = integration_repos['integrations']
145 143 repos = integration_repos['repos']
146 144
147 145 triggered_integrations = IntegrationModel().get_for_event(
148 146 events.RepoEvent(repos['root_repo']))
149 147
150 148 assert triggered_integrations == [
151 149 integrations['global'],
152 150 integrations['root_repos'],
153 151 integrations['root_repo'],
154 152 ]
155 153
156 154 triggered_integrations = IntegrationModel().get_for_event(
157 155 events.RepoEvent(repos['other_repo']))
158 156
159 157 assert triggered_integrations == [
160 158 integrations['global'],
161 159 integrations['other_group'],
162 160 integrations['other_group_recursive'],
163 161 integrations['other_repo'],
164 162 ]
165 163
166 164 triggered_integrations = IntegrationModel().get_for_event(
167 165 events.RepoEvent(repos['parent_repo']))
168 166
169 167 assert triggered_integrations == [
170 168 integrations['global'],
171 169 integrations['parent_group'],
172 170 integrations['parent_group_recursive'],
173 171 integrations['parent_repo'],
174 172 ]
175 173
176 174 triggered_integrations = IntegrationModel().get_for_event(
177 175 events.RepoEvent(repos['child_repo']))
178 176
179 177 assert triggered_integrations == [
180 178 integrations['global'],
181 179 integrations['child_group'],
182 180 integrations['parent_group_recursive'],
183 181 integrations['child_group_recursive'],
184 182 integrations['child_repo'],
185 183 ]
186 184
187 185
188 186 def test_disabled_integration_repo_scopes(integration_repos):
189 187 integrations = integration_repos['integrations']
190 188 repos = integration_repos['repos']
191 189
192 190 for integration in integrations.values():
193 191 integration.enabled = False
194 192 Session().commit()
195 193
196 194 triggered_integrations = IntegrationModel().get_for_event(
197 195 events.RepoEvent(repos['root_repo']))
198 196
199 197 assert triggered_integrations == []
200 198
201 199 triggered_integrations = IntegrationModel().get_for_event(
202 200 events.RepoEvent(repos['parent_repo']))
203 201
204 202 assert triggered_integrations == []
205 203
206 204 triggered_integrations = IntegrationModel().get_for_event(
207 205 events.RepoEvent(repos['child_repo']))
208 206
209 207 assert triggered_integrations == []
210 208
211 209 triggered_integrations = IntegrationModel().get_for_event(
212 210 events.RepoEvent(repos['other_repo']))
213 211
214 212 assert triggered_integrations == []
215 213
216 214
217 215 def test_enabled_non_repo_integrations(integration_repos):
218 216 integrations = integration_repos['integrations']
219 217
220 218 triggered_integrations = IntegrationModel().get_for_event(
221 219 events.UserPreCreate({}))
222 220
223 221 assert triggered_integrations == [integrations['global']]
@@ -1,486 +1,540 b''
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import multiprocessing
20 20 import os
21 import shutil
21 22
22 23 import mock
23 24 import py
24 25 import pytest
25 26
27 import rhodecode
26 28 from rhodecode.lib import caching_query
27 29 from rhodecode.lib import utils
28 30 from rhodecode.lib.str_utils import safe_bytes
29 31 from rhodecode.model import settings
30 32 from rhodecode.model import db
31 33 from rhodecode.model import meta
34 from rhodecode.model.meta import Session
32 35 from rhodecode.model.repo import RepoModel
33 36 from rhodecode.model.repo_group import RepoGroupModel
37 from rhodecode.model.scm import ScmModel
34 38 from rhodecode.model.settings import UiSetting, SettingsModel
39 from rhodecode.tests.fixtures.fixture_pyramid import rhodecode_factory
35 40 from rhodecode.tests.fixtures.rc_fixture import Fixture
36 41 from rhodecode_tools.lib.hash_utils import md5_safe
37 42 from rhodecode.lib.ext_json import json
38 43
39 44 fixture = Fixture()
40 45
41 46
42 47 def extract_hooks(config):
43 48 """Return a dictionary with the hook entries of the given config."""
44 49 hooks = {}
45 50 config_items = config.serialize()
46 51 for section, name, value in config_items:
47 52 if section != 'hooks':
48 53 continue
49 54 hooks[name] = value
50 55
51 56 return hooks
52 57
53 58
54 59 def disable_hooks(request, hooks):
55 60 """Disables the given hooks from the UI settings."""
56 61 session = meta.Session()
57 62
58 63 model = SettingsModel()
59 64 for hook_key in hooks:
60 65 sett = model.get_ui_by_key(hook_key)
61 66 sett.ui_active = False
62 67 session.add(sett)
63 68
64 69 # Invalidate cache
65 70 ui_settings = session.query(db.RhodeCodeUi).options(
66 71 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
67 72
68 73 meta.cache.invalidate(
69 74 ui_settings, {},
70 75 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
71 76
72 77 ui_settings = session.query(db.RhodeCodeUi).options(
73 78 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
74 79
75 80 meta.cache.invalidate(
76 81 ui_settings, {},
77 82 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
78 83
79 84 @request.addfinalizer
80 85 def rollback():
81 86 session.rollback()
82 87
83 88
84 89 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
85 90 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
86 91 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
87 92 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
88 93 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
89 94 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
90 95 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
91 96
92 97 HG_HOOKS = frozenset(
93 98 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
94 99 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
95 100
96 101
97 102 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
98 103 ([], HG_HOOKS),
99 104 (HG_HOOKS, []),
100 105
101 106 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
102 107
103 108 # When a pull/push hook is disabled, its pre-pull/push counterpart should
104 109 # be disabled too.
105 110 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
106 111 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
107 112 HOOK_PUSH_KEY]),
108 113 ])
109 114 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
110 115 expected_hooks):
111 116 disable_hooks(request, disabled_hooks)
112 117
113 118 config = utils.make_db_config()
114 119 hooks = extract_hooks(config)
115 120
116 121 assert set(hooks.keys()).intersection(HG_HOOKS) == set(expected_hooks)
117 122
118 123
119 124 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
120 125 ([], ['pull', 'push']),
121 126 ([HOOK_PUSH], ['pull']),
122 127 ([HOOK_PULL], ['push']),
123 128 ([HOOK_PULL, HOOK_PUSH], []),
124 129 ])
125 130 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
126 131 hook_keys = (HOOK_PUSH, HOOK_PULL)
127 132 ui_settings = [
128 133 ('hooks', key, 'some value', key not in disabled_hooks)
129 134 for key in hook_keys]
130 135
131 136 result = utils.get_enabled_hook_classes(ui_settings)
132 137 assert sorted(result) == expected_hooks
133 138
134 139
135 140 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
136 141 _stub_git_repo(tmpdir.ensure('repo', dir=True))
137 142 repos = list(utils.get_filesystem_repos(str(tmpdir)))
138 143 assert repos == [('repo', ('git', tmpdir.join('repo')))]
139 144
140 145
141 146 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
142 147 tmpdir.ensure('not-a-repo', dir=True)
143 148 repos = list(utils.get_filesystem_repos(str(tmpdir)))
144 149 assert repos == []
145 150
146 151
147 152 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
148 153 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
149 154 repos = list(utils.get_filesystem_repos(str(tmpdir)))
150 155 assert repos == []
151 156
152 157
153 158 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
154 159 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
155 160 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
156 161 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
157 162
158 163
159 164 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
160 165 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
161 166 repos = list(utils.get_filesystem_repos(str(tmpdir)))
162 167 assert repos == []
163 168
164 169
165 170 def test_get_filesystem_repos_skips_files(tmpdir):
166 171 tmpdir.ensure('test-file')
167 172 repos = list(utils.get_filesystem_repos(str(tmpdir)))
168 173 assert repos == []
169 174
170 175
171 176 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
172 177 removed_repo_name = 'rm__00000000_000000_000000__.stub'
173 178 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
174 179 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
175 180 repos = list(utils.get_filesystem_repos(str(tmpdir)))
176 181 assert repos == []
177 182
178 183
179 184 def _stub_git_repo(repo_path):
180 185 """
181 186 Make `repo_path` look like a Git repository.
182 187 """
183 188 repo_path.ensure('.git', dir=True)
184 189
185 190
186 191 def test_get_dirpaths_returns_all_paths_on_str(tmpdir):
187 192 tmpdir.ensure('test-file')
188 193 tmpdir.ensure('test-file-1')
189 194 tmp_path = str(tmpdir)
190 195 dirpaths = utils.get_dirpaths(tmp_path)
191 196 assert list(sorted(dirpaths)) == ['test-file', 'test-file-1']
192 197
193 198
194 199 def test_get_dirpaths_returns_all_paths_on_bytes(tmpdir):
195 200 tmpdir.ensure('test-file-bytes')
196 201 tmp_path = str(tmpdir)
197 202 dirpaths = utils.get_dirpaths(safe_bytes(tmp_path))
198 203 assert list(sorted(dirpaths)) == [b'test-file-bytes']
199 204
200 205
201 206 def test_get_dirpaths_returns_all_paths_bytes(
202 207 tmpdir, platform_encodes_filenames):
203 208 if platform_encodes_filenames:
204 209 pytest.skip("This platform seems to encode filenames.")
205 210 tmpdir.ensure('repo-a-umlaut-\xe4')
206 211 dirpaths = utils.get_dirpaths(str(tmpdir))
207 212 assert dirpaths == ['repo-a-umlaut-\xe4']
208 213
209 214
210 215 def test_get_dirpaths_skips_paths_it_cannot_decode(
211 216 tmpdir, platform_encodes_filenames):
212 217 if platform_encodes_filenames:
213 218 pytest.skip("This platform seems to encode filenames.")
214 219 path_with_latin1 = 'repo-a-umlaut-\xe4'
215 220 tmp_path = str(tmpdir.ensure(path_with_latin1))
216 221 dirpaths = utils.get_dirpaths(tmp_path)
217 222 assert dirpaths == []
218 223
219 224
@pytest.fixture(scope='session')
def platform_encodes_filenames():
    """
    True when the current platform rewrites filename encodings on disk,
    i.e. a written name is read back different from what was written.
    """
    probe_name = 'repo-a-umlaut-\xe4'
    scratch = py.path.local.mkdtemp()
    scratch.ensure(probe_name)
    observed = scratch.listdir()[0].basename
    scratch.remove()
    return probe_name != observed
231 236
232 237
def test_repo2db_cleaner_removes_zombie_groups(repo_groups):
    """repo2db_cleanup drops DB repo groups whose directory vanished.

    The `repo_groups` fixture provides three groups; the zombie one has its
    on-disk directory removed here, so only it should be cleaned from the DB.
    """
    session = meta.Session()
    zombie_group, parent_group, child_group = repo_groups
    zombie_path = os.path.join(
        RepoGroupModel().repos_path, zombie_group.full_path)
    os.rmdir(zombie_path)

    # Avoid removing test repos when calling repo2db_cleanup
    repo_list = [repo.repo_name for repo in session.query(db.Repository).all()]

    utils.repo2db_cleanup(skip_repos=repo_list)

    groups_in_db = session.query(db.RepoGroup).all()
    assert child_group in groups_in_db
    assert parent_group in groups_in_db
    # bugfix: the original asserted `zombie_path not in groups_in_db`, which
    # compared a filesystem path string against RepoGroup objects and was
    # therefore vacuously true; assert on the group object instead.
    assert zombie_group not in groups_in_db
250 254
251 255
@pytest.mark.backends("hg", "git", "svn")
def test_repo2db_cleaner_removes_zombie_repos(backend):
    """A repo whose storage directory is gone gets cleaned from the DB."""
    repo = backend.create_repo()
    # wipe the repository from disk so it becomes a zombie DB entry
    shutil.rmtree(repo.repo_full_path)

    removed, errors = utils.repo2db_cleanup()

    assert not errors
    assert len(removed) == 1
259 266
260 267
def test_repo2db_mapper_adds_new_repos(request, backend):
    """repo2db_mapper registers filesystem repos unknown to the database.

    Creates 5 top-level copies plus 5 copies nested inside new groups and
    expects all 10 to be reported as added.
    """
    repo = backend.create_repo()
    created_repo_names = []
    created_group_names = []

    # five sibling copies next to the source repo
    for idx in range(5):
        clone_path = f'{repo.repo_full_path}-{idx}'
        shutil.copytree(repo.repo_full_path, clone_path)
        created_repo_names.append(f'{repo.repo_name}-{idx}')

    # five copies, each inside a freshly created group directory
    base_dir = os.path.dirname(repo.repo_full_path)
    for idx in range(5):
        group_name = f'my_gr_{idx}'
        group_dir = os.path.join(base_dir, group_name)
        os.makedirs(group_dir, exist_ok=True)

        shutil.copytree(
            repo.repo_full_path, f'{group_dir}/{repo.repo_name}-{idx}')
        created_repo_names.append(f'{group_name}/{repo.repo_name}-{idx}')
        created_group_names.append(group_name)

    scanned = ScmModel().repo_scan()

    added, errors = utils.repo2db_mapper(scanned)
    Session().commit()
    assert not errors

    assert len(added) == 10

    @request.addfinalizer
    def cleanup():
        # drop the copies first, then their groups
        for repo_name in created_repo_names:
            del_result = RepoModel().delete(repo_name, call_events=False)
            Session().commit()
            assert del_result is True

        for group_name in created_group_names:
            del_result = RepoGroupModel().delete(
                group_name, force_delete=True, call_events=False)
            Session().commit()
            assert del_result is True
311
def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
    """Mapping an already-known repo (re)installs the version hooks."""
    repo = backend.create_repo()

    added, errors = utils.repo2db_mapper({repo.repo_name: 'test'})

    assert not errors
    expected_hooks = {
        'pre_version': rhodecode.__version__,
        'post_version': rhodecode.__version__,
    }
    assert repo.scm_instance().get_hooks_info() == expected_hooks
266 318
267 319
@pytest.mark.backends("git", "svn")
def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
    """A repo deleted from the DB (but kept on disk) is re-added by the mapper."""
    repo = backend.create_repo()
    RepoModel().delete(repo, fs_remove=False)
    meta.Session().commit()

    scan_result = {repo.repo_name: repo.scm_instance()}
    added, errors = utils.repo2db_mapper(scan_result)

    assert not errors
    assert len(added) == 1
275 329
276 330
class TestPasswordChanged(object):
    """Tests for utils.password_changed, which compares the hash of the
    current user password against the one cached in the session."""

    def setup_method(self):
        # session holds a fixed md5 hex digest that does not match
        # auth_user.password, so by default the password counts as changed
        self.session = {
            'rhodecode_user': {
                'password': '0cc175b9c0f1b6a831c399e269772661'
            }
        }
        self.auth_user = mock.Mock()
        # bugfix: was `self.auth_user.userame` (typo), which silently set a
        # dead attribute on the Mock instead of the username
        self.auth_user.username = 'test'
        self.auth_user.password = 'abc123'

    def test_returns_false_for_default_user(self):
        self.auth_user.username = db.User.DEFAULT_USER
        result = utils.password_changed(self.auth_user, self.session)
        assert result is False

    def test_returns_false_if_password_was_not_changed(self):
        self.session['rhodecode_user']['password'] = md5_safe(
            self.auth_user.password)
        result = utils.password_changed(self.auth_user, self.session)
        assert result is False

    def test_returns_true_if_password_was_changed(self):
        result = utils.password_changed(self.auth_user, self.session)
        assert result is True

    def test_returns_true_if_auth_user_password_is_empty(self):
        self.auth_user.password = None
        result = utils.password_changed(self.auth_user, self.session)
        assert result is True

    def test_returns_true_if_session_password_is_empty(self):
        self.session['rhodecode_user'].pop('password')
        result = utils.password_changed(self.auth_user, self.session)
        assert result is True
313 367
314 368
class TestReadOpenSourceLicenses(object):
    """Behavior of utils.read_opensource_licenses and its module-level cache."""

    def test_success(self):
        """A cold read parses the packaged JSON license data."""
        utils._license_cache = None
        json_data = '''
        {
            "python2.7-pytest-2.7.1": {"UNKNOWN": null},
            "python2.7-Markdown-2.6.2": {
                "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
            }
        }
        '''
        expected = json.loads(json_data)
        with mock.patch.object(
                utils.pkg_resources, 'resource_string', return_value=json_data):
            assert utils.read_opensource_licenses() == expected

    def test_caching(self):
        """A warm cache is served without touching pkg_resources or json."""
        utils._license_cache = {
            "python2.7-pytest-2.7.1": {
                "UNKNOWN": None
            },
            "python2.7-Markdown-2.6.2": {
                "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
            }
        }
        resource_patch = mock.patch.object(
            utils.pkg_resources, 'resource_string', side_effect=Exception)
        json_patch = mock.patch.object(
            utils.json, 'loads', side_effect=Exception)

        with resource_patch as resource_mock, json_patch as json_mock:
            result = utils.read_opensource_licenses()

        assert resource_mock.call_count == 0
        assert json_mock.call_count == 0
        assert result == utils._license_cache

    def test_licenses_file_contains_no_unknown_licenses(self):
        """The shipped license data must not contain UNKNOWN license names."""
        utils._license_cache = None
        entries = utils.read_opensource_licenses()

        for entry in entries:
            license_field = entry["license"]
            if isinstance(license_field, list):
                for lic in license_field:
                    assert 'UNKNOWN' not in lic["fullName"]
            else:
                name = entry.get("fullName") or entry
                assert 'UNKNOWN' not in name
364 418
365 419
class TestMakeDbConfig(object):
    def test_data_from_config_data_from_db_returned(self):
        """make_db_config forwards kwargs to prepare_config_data and exposes
        every (section, option, value) triple it returns."""
        db_triples = [
            ('section1', 'option1', 'value1'),
            ('section2', 'option2', 'value2'),
            ('section3', 'option3', 'value3'),
        ]
        call_kwargs = {'clear_session': False, 'repo': 'test_repo'}
        with mock.patch.object(utils, 'prepare_config_data') as prepare_mock:
            prepare_mock.return_value = db_triples
            config = utils.make_db_config(**call_kwargs)
            prepare_mock.assert_called_once_with(**call_kwargs)
            for section, option, expected in db_triples:
                assert config.get(section, option) == expected
381 435
382 436
class TestPrepareConfigData(object):
    def test_prepare_config_data_returns_active_settings(self):
        """Only ui settings flagged as active make it into the config data."""
        ui_rows = [
            UiSetting('section1', 'option1', 'value1', True),
            UiSetting('section2', 'option2', 'value2', True),
            UiSetting('section3', 'option3', 'value3', False),
        ]
        repo_name = 'test_repo'

        hooks_patch = mock.patch.object(
            utils, 'get_enabled_hook_classes',
            return_value=['pull', 'push', 'repo_size'])
        model_patch = mock.patch.object(settings, 'VcsSettingsModel')
        with model_patch as model_mock, hooks_patch:
            model_instance = mock.Mock()
            model_instance.get_ui_settings.return_value = ui_rows
            model_mock.return_value = model_instance
            result = utils.prepare_config_data(
                clear_session=False, repo=repo_name)

        self._assert_repo_name_passed(model_mock, repo_name)

        assert ('section1', 'option1', 'value1') in result
        assert ('section2', 'option2', 'value2') in result
        assert ('section3', 'option3', 'value3') not in result

    def _assert_repo_name_passed(self, model_mock, repo_name):
        # the settings model must be constructed exactly once, keyed by repo
        assert model_mock.call_count == 1
        _, call_kwargs = model_mock.call_args
        assert call_kwargs['repo'] == repo_name
413 467
414 468
class TestIsDirWritable(object):
    """Tests for utils._is_dir_writable probe behavior."""

    def test_returns_false_when_not_writable(self):
        # any OSError while opening the probe file means "not writable"
        with mock.patch('builtins.open', side_effect=OSError):
            assert not utils._is_dir_writable('/stub-path')

    def test_returns_true_when_writable(self, tmpdir):
        assert utils._is_dir_writable(str(tmpdir))

    def test_is_safe_against_race_conditions(self, tmpdir):
        # bugfix: the pool was never closed/joined, leaking worker processes;
        # the context manager terminates the pool deterministically
        with multiprocessing.Pool() as workers:
            directories = [str(tmpdir)] * 10
            workers.map(utils._is_dir_writable, directories)
427 481
428 482
class TestGetEnabledHooks(object):
    """get_enabled_hook_classes maps active hook ui settings to class names."""

    @staticmethod
    def _hook_rows(push_active, repo_size_active, pull_active):
        # build the three standard hook settings with the given active flags
        return [
            UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', push_active),
            UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', repo_size_active),
            UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', pull_active),
        ]

    def test_only_active_hooks_are_enabled(self):
        rows = self._hook_rows(True, True, False)
        assert utils.get_enabled_hook_classes(rows) == ['push', 'repo_size']

    def test_all_hooks_are_enabled(self):
        rows = self._hook_rows(True, True, True)
        assert utils.get_enabled_hook_classes(rows) == ['push', 'repo_size', 'pull']

    def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
        assert utils.get_enabled_hook_classes([]) == []
452 506
453 507
def test_obfuscate_url_pw():
    """obfuscate_url_pw returns a truthy value for a non-ascii path input."""
    from rhodecode.lib.utils2 import obfuscate_url_pw

    # NOTE(review): literal looks encoding-mangled in the rendered source;
    # presumably intended as 'malmö' — confirm against the repository file.
    engine = '/home/repos/malmΓΆ'
    assert obfuscate_url_pw(engine)
458 512
459 513
@pytest.mark.parametrize("test_ua, expected", [
    ("", ""),
    ('"quoted"', 'quoted'),
    ('internal-merge', 'internal-merge'),
    ('hg/internal-merge', 'hg/internal-merge'),
    ('git/internal-merge', 'git/internal-merge'),

    # git
    ('git/2.10.1 (Apple Git-78)', 'git/2.10.1'),
    ('GiT/2.37.2.windows.2', 'git/2.37.2'),
    ('git/2.35.1 (Microsoft Windows NT 10.0.19044.0; Win32NT x64) CLR/4.0.30319 VS16/16.0.0', 'git/2.35.1'),
    ('ssh-user-agent', 'ssh-user-agent'),
    ('git/ssh-user-agent', 'git/ssh-user-agent'),


    # hg
    ('mercurial/proto-1.0 (Mercurial 4.2)', 'mercurial/4.2'),
    ('mercurial/proto-1.0', ''),
    ('mercurial/proto-1.0 (Mercurial 3.9.2)', 'mercurial/3.9.2'),
    ('mercurial/ssh-user-agent', 'mercurial/ssh-user-agent'),
    ('mercurial/proto-1.0 (Mercurial 5.8rc0)', 'mercurial/5.8rc0'),


])
def test_user_agent_normalizer(test_ua, expected):
    """Normalizes VCS client user-agent strings to `tool/version` per the
    table above: lowercases the tool name, strips platform suffixes, and
    extracts the Mercurial version from the `proto-1.0 (...)` form."""
    from rhodecode.lib.utils2 import user_agent_normalizer
    assert user_agent_normalizer(test_ua, safe=False) == expected
@@ -1,323 +1,324 b''
1 1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
def get_url_defs():
    """Return the mapping of route names to URL path templates.

    Used by `route_path` below to build concrete URLs without a router;
    `{placeholder}` segments are filled in via ``str.format``. Admin-area
    routes are prefixed with ``ADMIN_PREFIX``.
    """
    from rhodecode.apps._base import ADMIN_PREFIX

    return {
        # landing / lookup endpoints
        "home": "/",
        "main_page_repos_data": "/_home_repos",
        "main_page_repo_groups_data": "/_home_repo_groups",
        "repo_group_home": "/{repo_group_name}",
        "user_autocomplete_data": "/_users",
        "user_group_autocomplete_data": "/_user_groups",
        "repo_list_data": "/_repos",
        "goto_switcher_data": "/_goto_data",
        "admin_home": ADMIN_PREFIX + "",
        "admin_audit_logs": ADMIN_PREFIX + "/audit_logs",
        "admin_defaults_repositories": ADMIN_PREFIX + "/defaults/repositories",
        "admin_defaults_repositories_update": ADMIN_PREFIX
        + "/defaults/repositories/update",
        "search": ADMIN_PREFIX + "/search",
        "search_repo": "/{repo_name}/search",
        "my_account_auth_tokens": ADMIN_PREFIX + "/my_account/auth_tokens",
        "my_account_auth_tokens_add": ADMIN_PREFIX + "/my_account/auth_tokens/new",
        "my_account_auth_tokens_delete": ADMIN_PREFIX
        + "/my_account/auth_tokens/delete",
        "repos": ADMIN_PREFIX + "/repos",
        "repos_data": ADMIN_PREFIX + "/repos_data",
        "repo_groups": ADMIN_PREFIX + "/repo_groups",
        "repo_groups_data": ADMIN_PREFIX + "/repo_groups_data",
        "user_groups": ADMIN_PREFIX + "/user_groups",
        "user_groups_data": ADMIN_PREFIX + "/user_groups_data",
        "user_profile": "/_profiles/{username}",
        "profile_user_group": "/_profile_user_group/{user_group_name}",
        # repository summary / settings / files
        "repo_summary": "/{repo_name}",
        "repo_creating_check": "/{repo_name}/repo_creating_check",
        "edit_repo": "/{repo_name}/settings",
        "edit_repo_vcs": "/{repo_name}/settings/vcs",
        "edit_repo_vcs_update": "/{repo_name}/settings/vcs/update",
        "edit_repo_vcs_svn_pattern_delete": "/{repo_name}/settings/vcs/svn_pattern/delete",
        "repo_archivefile": "/{repo_name}/archive/{fname}",
        "repo_files_diff": "/{repo_name}/diff/{f_path}",
        "repo_files_diff_2way_redirect": "/{repo_name}/diff-2way/{f_path}",
        "repo_files": "/{repo_name}/files/{commit_id}/{f_path}",
        "repo_files:default_path": "/{repo_name}/files/{commit_id}/",
        "repo_files:default_commit": "/{repo_name}/files",
        "repo_files:rendered": "/{repo_name}/render/{commit_id}/{f_path}",
        "repo_files:annotated": "/{repo_name}/annotate/{commit_id}/{f_path}",
        "repo_files:annotated_previous": "/{repo_name}/annotate-previous/{commit_id}/{f_path}",
        "repo_files_nodelist": "/{repo_name}/nodelist/{commit_id}/{f_path}",
        "repo_file_raw": "/{repo_name}/raw/{commit_id}/{f_path}",
        "repo_file_download": "/{repo_name}/download/{commit_id}/{f_path}",
        "repo_file_history": "/{repo_name}/history/{commit_id}/{f_path}",
        "repo_file_authors": "/{repo_name}/authors/{commit_id}/{f_path}",
        "repo_files_remove_file": "/{repo_name}/remove_file/{commit_id}/{f_path}",
        "repo_files_delete_file": "/{repo_name}/delete_file/{commit_id}/{f_path}",
        "repo_files_edit_file": "/{repo_name}/edit_file/{commit_id}/{f_path}",
        "repo_files_update_file": "/{repo_name}/update_file/{commit_id}/{f_path}",
        "repo_files_add_file": "/{repo_name}/add_file/{commit_id}/{f_path}",
        "repo_files_upload_file": "/{repo_name}/upload_file/{commit_id}/{f_path}",
        "repo_files_create_file": "/{repo_name}/create_file/{commit_id}/{f_path}",
        "repo_files_replace_binary": "/{repo_name}/replace_binary/{commit_id}/{f_path}",
        "repo_nodetree_full": "/{repo_name}/nodetree_full/{commit_id}/{f_path}",
        "repo_nodetree_full:default_path": "/{repo_name}/nodetree_full/{commit_id}/",
        # journal / feeds / file store
        "journal": ADMIN_PREFIX + "/journal",
        "journal_rss": ADMIN_PREFIX + "/journal/rss",
        "journal_atom": ADMIN_PREFIX + "/journal/atom",
        "journal_public": ADMIN_PREFIX + "/public_journal",
        "journal_public_atom": ADMIN_PREFIX + "/public_journal/atom",
        "journal_public_atom_old": ADMIN_PREFIX + "/public_journal_atom",
        "journal_public_rss": ADMIN_PREFIX + "/public_journal/rss",
        "journal_public_rss_old": ADMIN_PREFIX + "/public_journal_rss",
        "toggle_following": ADMIN_PREFIX + "/toggle_following",
        "upload_file": "/_file_store/upload",
        "download_file": "/_file_store/download/{fid}",
        "download_file_by_token": "/_file_store/token-download/{_auth_token}/{fid}",
        # gists
        "gists_show": ADMIN_PREFIX + "/gists",
        "gists_new": ADMIN_PREFIX + "/gists/new",
        "gists_create": ADMIN_PREFIX + "/gists/create",
        "gist_show": ADMIN_PREFIX + "/gists/{gist_id}",
        "gist_delete": ADMIN_PREFIX + "/gists/{gist_id}/delete",
        "gist_edit": ADMIN_PREFIX + "/gists/{gist_id}/edit",
        "gist_edit_check_revision": ADMIN_PREFIX
        + "/gists/{gist_id}/edit/check_revision",
        "gist_update": ADMIN_PREFIX + "/gists/{gist_id}/update",
        "gist_show_rev": ADMIN_PREFIX + "/gists/{gist_id}/rev/{revision}",
        "gist_show_formatted": ADMIN_PREFIX
        + "/gists/{gist_id}/rev/{revision}/{format}",
        "gist_show_formatted_path": ADMIN_PREFIX
        + "/gists/{gist_id}/rev/{revision}/{format}/{f_path}",
        # auth
        "login": ADMIN_PREFIX + "/login",
        "logout": ADMIN_PREFIX + "/logout",
        "setup_2fa": ADMIN_PREFIX + "/setup_2fa",
        "check_2fa": ADMIN_PREFIX + "/check_2fa",
        "register": ADMIN_PREFIX + "/register",
        "reset_password": ADMIN_PREFIX + "/password_reset",
        "reset_password_confirmation": ADMIN_PREFIX + "/password_reset_confirmation",
        "admin_permissions_application": ADMIN_PREFIX + "/permissions/application",
        "admin_permissions_application_update": ADMIN_PREFIX
        + "/permissions/application/update",
        "repo_commit_raw": "/{repo_name}/changeset-diff/{commit_id}",
        # user groups administration
        "user_group_members_data": ADMIN_PREFIX
        + "/user_groups/{user_group_id}/members",
        "user_groups_new": ADMIN_PREFIX + "/user_groups/new",
        "user_groups_create": ADMIN_PREFIX + "/user_groups/create",
        "edit_user_group": ADMIN_PREFIX + "/user_groups/{user_group_id}/edit",
        "edit_user_group_advanced_sync": ADMIN_PREFIX
        + "/user_groups/{user_group_id}/edit/advanced/sync",
        "edit_user_group_global_perms_update": ADMIN_PREFIX
        + "/user_groups/{user_group_id}/edit/global_permissions/update",
        "user_groups_update": ADMIN_PREFIX + "/user_groups/{user_group_id}/update",
        "user_groups_delete": ADMIN_PREFIX + "/user_groups/{user_group_id}/delete",
        "edit_user_group_perms": ADMIN_PREFIX
        + "/user_groups/{user_group_id}/edit/permissions",
        "edit_user_group_perms_update": ADMIN_PREFIX
        + "/user_groups/{user_group_id}/edit/permissions/update",
        # repo groups administration
        "edit_repo_group": "/{repo_group_name}/_edit",
        "edit_repo_group_perms": "/{repo_group_name:}/_settings/permissions",
        "edit_repo_group_perms_update": "/{repo_group_name}/_settings/permissions/update",
        "edit_repo_group_advanced": "/{repo_group_name}/_settings/advanced",
        "edit_repo_group_advanced_delete": "/{repo_group_name}/_settings/advanced/delete",
        # users administration
        "edit_user_ssh_keys": ADMIN_PREFIX + "/users/{user_id}/edit/ssh_keys",
        "edit_user_ssh_keys_generate_keypair": ADMIN_PREFIX
        + "/users/{user_id}/edit/ssh_keys/generate",
        "edit_user_ssh_keys_add": ADMIN_PREFIX + "/users/{user_id}/edit/ssh_keys/new",
        "edit_user_ssh_keys_delete": ADMIN_PREFIX
        + "/users/{user_id}/edit/ssh_keys/delete",
        "users": ADMIN_PREFIX + "/users",
        "users_data": ADMIN_PREFIX + "/users_data",
        "users_create": ADMIN_PREFIX + "/users/create",
        "users_new": ADMIN_PREFIX + "/users/new",
        "user_edit": ADMIN_PREFIX + "/users/{user_id}/edit",
        "user_edit_advanced": ADMIN_PREFIX + "/users/{user_id}/edit/advanced",
        "user_edit_global_perms": ADMIN_PREFIX
        + "/users/{user_id}/edit/global_permissions",
        "user_edit_global_perms_update": ADMIN_PREFIX
        + "/users/{user_id}/edit/global_permissions/update",
        "user_update": ADMIN_PREFIX + "/users/{user_id}/update",
        "user_delete": ADMIN_PREFIX + "/users/{user_id}/delete",
        "user_create_personal_repo_group": ADMIN_PREFIX
        + "/users/{user_id}/create_repo_group",
        "edit_user_auth_tokens": ADMIN_PREFIX + "/users/{user_id}/edit/auth_tokens",
        "edit_user_auth_tokens_add": ADMIN_PREFIX
        + "/users/{user_id}/edit/auth_tokens/new",
        "edit_user_auth_tokens_delete": ADMIN_PREFIX
        + "/users/{user_id}/edit/auth_tokens/delete",
        "edit_user_emails": ADMIN_PREFIX + "/users/{user_id}/edit/emails",
        "edit_user_emails_add": ADMIN_PREFIX + "/users/{user_id}/edit/emails/new",
        "edit_user_emails_delete": ADMIN_PREFIX + "/users/{user_id}/edit/emails/delete",
        "edit_user_ips": ADMIN_PREFIX + "/users/{user_id}/edit/ips",
        "edit_user_ips_add": ADMIN_PREFIX + "/users/{user_id}/edit/ips/new",
        "edit_user_ips_delete": ADMIN_PREFIX + "/users/{user_id}/edit/ips/delete",
        "edit_user_perms_summary": ADMIN_PREFIX
        + "/users/{user_id}/edit/permissions_summary",
        "edit_user_perms_summary_json": ADMIN_PREFIX
        + "/users/{user_id}/edit/permissions_summary/json",
        "edit_user_audit_logs": ADMIN_PREFIX + "/users/{user_id}/edit/audit",
        "edit_user_audit_logs_download": ADMIN_PREFIX
        + "/users/{user_id}/edit/audit/download",
        # admin settings
        "admin_settings": ADMIN_PREFIX + "/settings",
        "admin_settings_update": ADMIN_PREFIX + "/settings/update",
        "admin_settings_global": ADMIN_PREFIX + "/settings/global",
        "admin_settings_global_update": ADMIN_PREFIX + "/settings/global/update",
        "admin_settings_vcs": ADMIN_PREFIX + "/settings/vcs",
        "admin_settings_vcs_update": ADMIN_PREFIX + "/settings/vcs/update",
        "admin_settings_vcs_svn_pattern_delete": ADMIN_PREFIX
        + "/settings/vcs/svn_pattern_delete",
        "admin_settings_mapping": ADMIN_PREFIX + "/settings/mapping",
        "admin_settings_mapping_create": ADMIN_PREFIX + "/settings/mapping/create",
        "admin_settings_mapping_cleanup": ADMIN_PREFIX + "/settings/mapping/cleanup",
        "admin_settings_visual": ADMIN_PREFIX + "/settings/visual",
        "admin_settings_visual_update": ADMIN_PREFIX + "/settings/visual/update",
        "admin_settings_issuetracker": ADMIN_PREFIX + "/settings/issue-tracker",
        "admin_settings_issuetracker_update": ADMIN_PREFIX
        + "/settings/issue-tracker/update",
        "admin_settings_issuetracker_test": ADMIN_PREFIX
        + "/settings/issue-tracker/test",
        "admin_settings_issuetracker_delete": ADMIN_PREFIX
        + "/settings/issue-tracker/delete",
        "admin_settings_email": ADMIN_PREFIX + "/settings/email",
        "admin_settings_email_update": ADMIN_PREFIX + "/settings/email/update",
        "admin_settings_hooks": ADMIN_PREFIX + "/settings/hooks",
        "admin_settings_hooks_update": ADMIN_PREFIX + "/settings/hooks/update",
        "admin_settings_hooks_delete": ADMIN_PREFIX + "/settings/hooks/delete",
        "admin_settings_search": ADMIN_PREFIX + "/settings/search",
        "admin_settings_labs": ADMIN_PREFIX + "/settings/labs",
        "admin_settings_labs_update": ADMIN_PREFIX + "/settings/labs/update",
        "admin_settings_sessions": ADMIN_PREFIX + "/settings/sessions",
        "admin_settings_sessions_cleanup": ADMIN_PREFIX + "/settings/sessions/cleanup",
        "admin_settings_system": ADMIN_PREFIX + "/settings/system",
        "admin_settings_system_update": ADMIN_PREFIX + "/settings/system/updates",
        "admin_settings_open_source": ADMIN_PREFIX + "/settings/open_source",
        "repo_group_new": ADMIN_PREFIX + "/repo_group/new",
        "repo_group_create": ADMIN_PREFIX + "/repo_group/create",
        "repo_new": ADMIN_PREFIX + "/repos/new",
        "repo_create": ADMIN_PREFIX + "/repos/create",
        "admin_permissions_global": ADMIN_PREFIX + "/permissions/global",
        "admin_permissions_global_update": ADMIN_PREFIX + "/permissions/global/update",
        "admin_permissions_object": ADMIN_PREFIX + "/permissions/object",
        "admin_permissions_object_update": ADMIN_PREFIX + "/permissions/object/update",
        "admin_permissions_ips": ADMIN_PREFIX + "/permissions/ips",
        "admin_permissions_overview": ADMIN_PREFIX + "/permissions/overview",
        "admin_permissions_ssh_keys": ADMIN_PREFIX + "/permissions/ssh_keys",
        "admin_permissions_ssh_keys_data": ADMIN_PREFIX + "/permissions/ssh_keys/data",
        "admin_permissions_ssh_keys_update": ADMIN_PREFIX
        + "/permissions/ssh_keys/update",
        # pull requests / notifications
        "pullrequest_show": "/{repo_name}/pull-request/{pull_request_id}",
        "pull_requests_global": ADMIN_PREFIX + "/pull-request/{pull_request_id}",
        "pull_requests_global_0": ADMIN_PREFIX + "/pull_requests/{pull_request_id}",
        "pull_requests_global_1": ADMIN_PREFIX + "/pull-requests/{pull_request_id}",
        "notifications_show_all": ADMIN_PREFIX + "/notifications",
        "notifications_mark_all_read": ADMIN_PREFIX + "/notifications_mark_all_read",
        "notifications_show": ADMIN_PREFIX + "/notifications/{notification_id}",
        "notifications_update": ADMIN_PREFIX
        + "/notifications/{notification_id}/update",
        "notifications_delete": ADMIN_PREFIX
        + "/notifications/{notification_id}/delete",
        # my account
        "my_account": ADMIN_PREFIX + "/my_account/profile",
        "my_account_edit": ADMIN_PREFIX + "/my_account/edit",
        "my_account_update": ADMIN_PREFIX + "/my_account/update",
        "my_account_pullrequests": ADMIN_PREFIX + "/my_account/pull_requests",
        "my_account_pullrequests_data": ADMIN_PREFIX + "/my_account/pull_requests/data",
        "my_account_emails": ADMIN_PREFIX + "/my_account/emails",
        "my_account_emails_add": ADMIN_PREFIX + "/my_account/emails/new",
        "my_account_emails_delete": ADMIN_PREFIX + "/my_account/emails/delete",
        "my_account_password": ADMIN_PREFIX + "/my_account/password",
        "my_account_password_update": ADMIN_PREFIX + "/my_account/password/update",
        "my_account_repos": ADMIN_PREFIX + "/my_account/repos",
        "my_account_watched": ADMIN_PREFIX + "/my_account/watched",
        "my_account_perms": ADMIN_PREFIX + "/my_account/perms",
        "my_account_notifications": ADMIN_PREFIX + "/my_account/notifications",
        "my_account_ssh_keys": ADMIN_PREFIX + "/my_account/ssh_keys",
        "my_account_ssh_keys_generate": ADMIN_PREFIX + "/my_account/ssh_keys/generate",
        "my_account_ssh_keys_add": ADMIN_PREFIX + "/my_account/ssh_keys/new",
        "my_account_ssh_keys_delete": ADMIN_PREFIX + "/my_account/ssh_keys/delete",
        # repository browsing / commits / compare
        "pullrequest_show_all": "/{repo_name}/pull-request",
        "pullrequest_show_all_data": "/{repo_name}/pull-request-data",
        "bookmarks_home": "/{repo_name}/bookmarks",
        "branches_home": "/{repo_name}/branches",
        "branch_remove": "/{repo_name}/branches/{branch_name}/remove",
        "tags_home": "/{repo_name}/tags",
        "repo_changelog": "/{repo_name}/changelog",
        "repo_commits": "/{repo_name}/commits",
        "repo_commits_file": "/{repo_name}/commits/{commit_id}/{f_path}",
        "repo_commits_elements": "/{repo_name}/commits_elements",
        "repo_commit": "/{repo_name}/changeset/{commit_id}",
        "repo_commit_comment_create": "/{repo_name}/changeset/{commit_id}/comment/create",
        "repo_commit_comment_preview": "/{repo_name}/changeset/{commit_id}/comment/preview",
        "repo_commit_comment_delete": "/{repo_name}/changeset/{commit_id}/comment/{comment_id}/delete",
        "repo_commit_comment_edit": "/{repo_name}/changeset/{commit_id}/comment/{comment_id}/edit",
        "repo_commit_children": "/{repo_name}/changeset_children/{commit_id}",
        "repo_commit_parents": "/{repo_name}/changeset_parents/{commit_id}",
        "repo_commit_patch": "/{repo_name}/changeset-patch/{commit_id}",
        "repo_commit_download": "/{repo_name}/changeset-download/{commit_id}",
        "repo_commit_data": "/{repo_name}/changeset-data/{commit_id}",
        "repo_compare": "/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}",
        "repo_compare_select": "/{repo_name}/compare",
        "rss_feed_home": "/{repo_name}/feed-rss",
        "atom_feed_home": "/{repo_name}/feed-atom",
        "rss_feed_home_old": "/{repo_name}/feed/rss",
        "atom_feed_home_old": "/{repo_name}/feed/atom",
        "repo_fork_new": "/{repo_name}/fork",
        "repo_fork_create": "/{repo_name}/fork/create",
        "repo_forks_show_all": "/{repo_name}/forks",
        "repo_forks_data": "/{repo_name}/forks/data",
        "edit_repo_issuetracker": "/{repo_name}/settings/issue_trackers",
        "edit_repo_issuetracker_test": "/{repo_name}/settings/issue_trackers/test",
        "edit_repo_issuetracker_delete": "/{repo_name}/settings/issue_trackers/delete",
        "edit_repo_issuetracker_update": "/{repo_name}/settings/issue_trackers/update",
        "edit_repo_maintenance": "/{repo_name}/settings/maintenance",
        "edit_repo_maintenance_execute": "/{repo_name}/settings/maintenance/execute",
        "repo_changelog_file": "/{repo_name}/changelog/{commit_id}/{f_path}",
        "pullrequest_repo_refs": "/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}",
        "pullrequest_repo_targets": "/{repo_name}/pull-request/repo-destinations",
        "pullrequest_new": "/{repo_name}/pull-request/new",
        "pullrequest_create": "/{repo_name}/pull-request/create",
        "pullrequest_update": "/{repo_name}/pull-request/{pull_request_id}/update",
        "pullrequest_merge": "/{repo_name}/pull-request/{pull_request_id}/merge",
        "pullrequest_delete": "/{repo_name}/pull-request/{pull_request_id}/delete",
        "pullrequest_comment_create": "/{repo_name}/pull-request/{pull_request_id}/comment",
        "pullrequest_comment_delete": "/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete",
        "pullrequest_comment_edit": "/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/edit",
        # per-repo settings pages
        "edit_repo_caches": "/{repo_name}/settings/caches",
        "edit_repo_perms": "/{repo_name}/settings/permissions",
        "edit_repo_fields": "/{repo_name}/settings/fields",
        "edit_repo_remote": "/{repo_name}/settings/remote",
        "edit_repo_statistics": "/{repo_name}/settings/statistics",
        "edit_repo_advanced": "/{repo_name}/settings/advanced",
        "edit_repo_advanced_delete": "/{repo_name}/settings/advanced/delete",
        "edit_repo_advanced_archive": "/{repo_name}/settings/advanced/archive",
        "edit_repo_advanced_fork": "/{repo_name}/settings/advanced/fork",
        "edit_repo_advanced_locking": "/{repo_name}/settings/advanced/locking",
        "edit_repo_advanced_journal": "/{repo_name}/settings/advanced/journal",
        "repo_stats": "/{repo_name}/repo_stats/{commit_id}",
        "repo_refs_data": "/{repo_name}/refs-data",
        "repo_refs_changelog_data": "/{repo_name}/refs-data-changelog",
        "repo_artifacts_stream_store": "/_file_store/stream-upload",
    }
314 315
315 316
def route_path(name, params=None, **kwargs):
    """Build a concrete URL for the named route.

    `kwargs` fill the `{placeholder}` segments of the template; `params`,
    when truthy, is urlencoded into a query string.
    """
    import urllib.parse

    url = get_url_defs()[name].format(**kwargs)
    if not params:
        return url
    return f"{url}?{urllib.parse.urlencode(params)}"
General Comments 0
You need to be logged in to leave comments. Login now