caches: make sure the global cache namespace prefixes are used....
super-admin
r5106:4e5efedc default

The requested changes are too big and content was truncated.
@@ -1,2534 +1,2534 b''
1 1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20 import time
21 21
22 22 import rhodecode
23 23 from rhodecode.api import (
24 24 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
25 25 from rhodecode.api.utils import (
26 26 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
27 27 get_user_group_or_error, get_user_or_error, validate_repo_permissions,
28 28 get_perm_or_error, parse_args, get_origin, build_commit_data,
29 29 validate_set_owner_permissions)
30 30 from rhodecode.lib import audit_logger, rc_cache, channelstream
31 31 from rhodecode.lib import repo_maintenance
32 32 from rhodecode.lib.auth import (
33 33 HasPermissionAnyApi, HasUserGroupPermissionAnyApi,
34 34 HasRepoPermissionAnyApi)
35 35 from rhodecode.lib.celerylib.utils import get_task_id
36 36 from rhodecode.lib.utils2 import (
37 37 str2bool, time_to_datetime, safe_str, safe_int)
38 38 from rhodecode.lib.ext_json import json
39 39 from rhodecode.lib.exceptions import (
40 40 StatusChangeOnClosedPullRequestError, CommentVersionMismatch)
41 41 from rhodecode.lib.vcs import RepositoryError
42 42 from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
43 43 from rhodecode.model.changeset_status import ChangesetStatusModel
44 44 from rhodecode.model.comment import CommentsModel
45 45 from rhodecode.model.db import (
46 46 Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
47 47 ChangesetComment)
48 48 from rhodecode.model.permission import PermissionModel
49 49 from rhodecode.model.pull_request import PullRequestModel
50 50 from rhodecode.model.repo import RepoModel
51 51 from rhodecode.model.scm import ScmModel, RepoList
52 52 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
53 53 from rhodecode.model import validation_schema
54 54 from rhodecode.model.validation_schema.schemas import repo_schema
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 @jsonrpc_method()
60 60 def get_repo(request, apiuser, repoid, cache=Optional(True)):
61 61 """
62 62 Gets an existing repository by its name or repository_id.
63 63
64 64 The members section of the output returns the user groups and users
65 65 associated with that repository.
66 66
67 67 This command can only be run using an |authtoken| with admin rights,
68 68 or users with at least read rights to the |repo|.
69 69
70 70 :param apiuser: This is filled automatically from the |authtoken|.
71 71 :type apiuser: AuthUser
72 72 :param repoid: The repository name or repository id.
73 73 :type repoid: str or int
74 74 :param cache: use the cached value for last changeset
75 75 :type cache: Optional(bool)
76 76
77 77 Example output:
78 78
79 79 .. code-block:: bash
80 80
81 81 {
82 82 "error": null,
83 83 "id": <repo_id>,
84 84 "result": {
85 85 "clone_uri": null,
86 86 "created_on": "timestamp",
87 87 "description": "repo description",
88 88 "enable_downloads": false,
89 89 "enable_locking": false,
90 90 "enable_statistics": false,
91 91 "followers": [
92 92 {
93 93 "active": true,
94 94 "admin": false,
95 95 "api_key": "****************************************",
96 96 "api_keys": [
97 97 "****************************************"
98 98 ],
99 99 "email": "user@example.com",
100 100 "emails": [
101 101 "user@example.com"
102 102 ],
103 103 "extern_name": "rhodecode",
104 104 "extern_type": "rhodecode",
105 105 "firstname": "username",
106 106 "ip_addresses": [],
107 107 "language": null,
108 108 "last_login": "2015-09-16T17:16:35.854",
109 109 "lastname": "surname",
110 110 "user_id": <user_id>,
111 111 "username": "name"
112 112 }
113 113 ],
114 114 "fork_of": "parent-repo",
115 115 "landing_rev": [
116 116 "rev",
117 117 "tip"
118 118 ],
119 119 "last_changeset": {
120 120 "author": "User <user@example.com>",
121 121 "branch": "default",
122 122 "date": "timestamp",
123 123 "message": "last commit message",
124 124 "parents": [
125 125 {
126 126 "raw_id": "commit-id"
127 127 }
128 128 ],
129 129 "raw_id": "commit-id",
130 130 "revision": <revision number>,
131 131 "short_id": "short id"
132 132 },
133 133 "lock_reason": null,
134 134 "locked_by": null,
135 135 "locked_date": null,
136 136 "owner": "owner-name",
137 137 "permissions": [
138 138 {
139 139 "name": "super-admin-name",
140 140 "origin": "super-admin",
141 141 "permission": "repository.admin",
142 142 "type": "user"
143 143 },
144 144 {
145 145 "name": "owner-name",
146 146 "origin": "owner",
147 147 "permission": "repository.admin",
148 148 "type": "user"
149 149 },
150 150 {
151 151 "name": "user-group-name",
152 152 "origin": "permission",
153 153 "permission": "repository.write",
154 154 "type": "user_group"
155 155 }
156 156 ],
157 157 "private": true,
158 158 "repo_id": 676,
159 159 "repo_name": "user-group/repo-name",
160 160 "repo_type": "hg"
161 161 }
162 162 }
163 163 """
164 164
165 165 repo = get_repo_or_error(repoid)
166 166 cache = Optional.extract(cache)
167 167
168 168 include_secrets = False
169 169 if has_superadmin_permission(apiuser):
170 170 include_secrets = True
171 171 else:
172 172 # check if we have at least read permission for this repo !
173 173 _perms = (
174 174 'repository.admin', 'repository.write', 'repository.read',)
175 175 validate_repo_permissions(apiuser, repoid, repo, _perms)
176 176
177 177 permissions = []
178 178 for _user in repo.permissions():
179 179 user_data = {
180 180 'name': _user.username,
181 181 'permission': _user.permission,
182 182 'origin': get_origin(_user),
183 183 'type': "user",
184 184 }
185 185 permissions.append(user_data)
186 186
187 187 for _user_group in repo.permission_user_groups():
188 188 user_group_data = {
189 189 'name': _user_group.users_group_name,
190 190 'permission': _user_group.permission,
191 191 'origin': get_origin(_user_group),
192 192 'type': "user_group",
193 193 }
194 194 permissions.append(user_group_data)
195 195
196 196 following_users = [
197 197 user.user.get_api_data(include_secrets=include_secrets)
198 198 for user in repo.followers]
199 199
200 200 if not cache:
201 201 repo.update_commit_cache()
202 202 data = repo.get_api_data(include_secrets=include_secrets)
203 203 data['permissions'] = permissions
204 204 data['followers'] = following_users
205 205
206 206 return data
207 207
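# Illustrative sketch (not part of this module): calling `get_repo` over the
# JSON-RPC API with the `requests` library. The endpoint URL and auth token
# below are placeholders; the envelope follows the standard RhodeCode API
# request format of id/auth_token/method/args.
def _example_get_repo_call():
    import requests

    api_url = 'https://rhodecode.example.com/_admin/api'  # placeholder endpoint
    payload = {
        'id': 1,
        'auth_token': '<auth_token>',  # placeholder token
        'method': 'get_repo',
        'args': {'repoid': 'user-group/repo-name', 'cache': True},
    }
    response = requests.post(api_url, json=payload).json()
    # on success 'error' is null and 'result' holds the repository data
    return response['result']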
208 208
209 209 @jsonrpc_method()
210 210 def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
211 211 """
212 212 Lists all existing repositories.
213 213
214 214 This command can only be run using an |authtoken| with admin rights,
215 215 or users with at least read rights to |repos|.
216 216
217 217 :param apiuser: This is filled automatically from the |authtoken|.
218 218 :type apiuser: AuthUser
219 219 :param root: Specify the root repository group from which to fetch repositories.
220 220 Filters the returned repositories to members of the given root group.
221 221 :type root: Optional(None)
222 222 :param traverse: Traverse the given root into sub-repositories. With this flag
223 223 set to False, only top-level repositories from `root` are returned;
224 224 if root is empty, only top-level repositories are returned.
225 225 :type traverse: Optional(True)
226 226
227 227
228 228 Example output:
229 229
230 230 .. code-block:: bash
231 231
232 232 id : <id_given_in_input>
233 233 result: [
234 234 {
235 235 "repo_id" : "<repo_id>",
236 236 "repo_name" : "<reponame>"
237 237 "repo_type" : "<repo_type>",
238 238 "clone_uri" : "<clone_uri>",
239 239 "private": : "<bool>",
240 240 "created_on" : "<datetimecreated>",
241 241 "description" : "<description>",
242 242 "landing_rev": "<landing_rev>",
243 243 "owner": "<repo_owner>",
244 244 "fork_of": "<name_of_fork_parent>",
245 245 "enable_downloads": "<bool>",
246 246 "enable_locking": "<bool>",
247 247 "enable_statistics": "<bool>",
248 248 },
249 249 ...
250 250 ]
251 251 error: null
252 252 """
253 253
254 254 include_secrets = has_superadmin_permission(apiuser)
255 255 _perms = ('repository.read', 'repository.write', 'repository.admin',)
256 256 extras = {'user': apiuser}
257 257
258 258 root = Optional.extract(root)
259 259 traverse = Optional.extract(traverse, binary=True)
260 260
261 261 if root:
262 262 # verify the parent group exists, if not return an error
263 263 parent = RepoGroup.get_by_group_name(root)
264 264 if not parent:
265 265 raise JSONRPCError(
266 266 f'Root repository group `{root}` does not exist')
267 267
268 268 if traverse:
269 269 repos = RepoModel().get_repos_for_root(root=root, traverse=traverse)
270 270 else:
271 271 repos = RepoModel().get_repos_for_root(root=parent)
272 272 else:
273 273 if traverse:
274 274 repos = RepoModel().get_all()
275 275 else:
276 276 # return just top-level
277 277 repos = RepoModel().get_repos_for_root(root=None)
278 278
279 279 repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras)
280 280 return [repo.get_api_data(include_secrets=include_secrets)
281 281 for repo in repo_list]
282 282
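# Illustrative sketch (not part of this module): scoping `get_repos` to one
# repository group. With `traverse=False` only direct children of `root` are
# returned; the envelope is the same placeholder JSON-RPC format shown in the
# get_repo sketch above.
def _example_get_repos_call():
    payload = {
        'id': 2,
        'auth_token': '<auth_token>',  # placeholder token
        'method': 'get_repos',
        'args': {'root': 'foo/bar', 'traverse': False},
    }
    return payload  # POST this to the API endpoint as in the get_repo sketch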
283 283
284 284 @jsonrpc_method()
285 285 def get_repo_changeset(request, apiuser, repoid, revision,
286 286 details=Optional('basic')):
287 287 """
288 288 Returns information about a changeset.
289 289
290 290 Additional parameters define the amount of detail returned by
291 291 this function.
292 292
293 293 This command can only be run using an |authtoken| with admin rights,
294 294 or users with at least read rights to the |repo|.
295 295
296 296 :param apiuser: This is filled automatically from the |authtoken|.
297 297 :type apiuser: AuthUser
298 298 :param repoid: The repository name or repository id
299 299 :type repoid: str or int
300 300 :param revision: revision for which listing should be done
301 301 :type revision: str
302 302 :param details: Level of detail, one of 'basic', 'extended' or 'full'. The 'full'
303 303 option also returns diff information, such as the diff itself and the number of changed files.
304 304 :type details: Optional(str)
305 305
306 306 """
307 307 repo = get_repo_or_error(repoid)
308 308 if not has_superadmin_permission(apiuser):
309 309 _perms = ('repository.admin', 'repository.write', 'repository.read',)
310 310 validate_repo_permissions(apiuser, repoid, repo, _perms)
311 311
312 312 changes_details = Optional.extract(details)
313 313 _changes_details_types = ['basic', 'extended', 'full']
314 314 if changes_details not in _changes_details_types:
315 315 raise JSONRPCError(
316 316 'ret_type must be one of %s' % (
317 317 ','.join(_changes_details_types)))
318 318
319 319 vcs_repo = repo.scm_instance()
320 320 pre_load = ['author', 'branch', 'date', 'message', 'parents',
321 321 'status', '_commit', '_file_paths']
322 322
323 323 try:
324 324 commit = repo.get_commit(commit_id=revision, pre_load=pre_load)
325 325 except TypeError as e:
326 326 raise JSONRPCError(safe_str(e))
327 327 _cs_json = commit.__json__()
328 328 _cs_json['diff'] = build_commit_data(vcs_repo, commit, changes_details)
329 329 if changes_details == 'full':
330 330 _cs_json['refs'] = commit._get_refs()
331 331 return _cs_json
332 332
333 333
334 334 @jsonrpc_method()
335 335 def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
336 336 details=Optional('basic')):
337 337 """
338 338 Returns a set of commits limited by the number starting
339 339 from the `start_rev` option.
340 340
341 341 Additional parameters define the amount of details returned by this
342 342 function.
343 343
344 344 This command can only be run using an |authtoken| with admin rights,
345 345 or users with at least read rights to |repos|.
346 346
347 347 :param apiuser: This is filled automatically from the |authtoken|.
348 348 :type apiuser: AuthUser
349 349 :param repoid: The repository name or repository ID.
350 350 :type repoid: str or int
351 351 :param start_rev: The starting revision from where to get changesets.
352 352 :type start_rev: str
353 353 :param limit: Limit the number of commits to this amount
354 354 :type limit: str or int
355 355 :param details: Set the level of detail returned. Valid options are:
356 356 ``basic``, ``extended`` and ``full``.
357 357 :type details: Optional(str)
358 358
359 359 .. note::
360 360
361 361 Setting the parameter `details` to the value ``full`` is extensive
362 362 and returns details like the diff itself, and the number
363 363 of changed files.
364 364
365 365 """
366 366 repo = get_repo_or_error(repoid)
367 367 if not has_superadmin_permission(apiuser):
368 368 _perms = ('repository.admin', 'repository.write', 'repository.read',)
369 369 validate_repo_permissions(apiuser, repoid, repo, _perms)
370 370
371 371 changes_details = Optional.extract(details)
372 372 _changes_details_types = ['basic', 'extended', 'full']
373 373 if changes_details not in _changes_details_types:
374 374 raise JSONRPCError(
375 375 'ret_type must be one of %s' % (
376 376 ','.join(_changes_details_types)))
377 377
378 378 limit = int(limit)
379 379 pre_load = ['author', 'branch', 'date', 'message', 'parents',
380 380 'status', '_commit', '_file_paths']
381 381
382 382 vcs_repo = repo.scm_instance()
383 383 # SVN needs a special case to distinguish its index and commit id
384 384 if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
385 385 start_rev = vcs_repo.commit_ids[0]
386 386
387 387 try:
388 388 commits = vcs_repo.get_commits(
389 389 start_id=start_rev, pre_load=pre_load, translate_tags=False)
390 390 except TypeError as e:
391 391 raise JSONRPCError(safe_str(e))
392 392 except Exception:
393 393 log.exception('Fetching of commits failed')
394 394 raise JSONRPCError('Error occurred during commit fetching')
395 395
396 396 ret = []
397 397 for cnt, commit in enumerate(commits):
398 398 if cnt >= limit != -1:
399 399 break
400 400 _cs_json = commit.__json__()
401 401 _cs_json['diff'] = build_commit_data(vcs_repo, commit, changes_details)
402 402 if changes_details == 'full':
403 403 _cs_json['refs'] = {
404 404 'branches': [commit.branch],
405 405 'bookmarks': getattr(commit, 'bookmarks', []),
406 406 'tags': commit.tags
407 407 }
408 408 ret.append(_cs_json)
409 409 return ret
410 410
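# Illustrative sketch (not part of this module): requesting a bounded page of
# commits. `details='full'` also returns diffs, which can make responses large,
# so 'extended' is used here; all values are placeholders.
def _example_get_repo_changesets_call():
    payload = {
        'id': 3,
        'auth_token': '<auth_token>',  # placeholder token
        'method': 'get_repo_changesets',
        'args': {
            'repoid': 'user-group/repo-name',
            'start_rev': '<commit-id>',  # for SVN, '0' is translated to the first commit
            'limit': 20,
            'details': 'extended',
        },
    }
    return payload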
411 411
412 412 @jsonrpc_method()
413 413 def get_repo_nodes(request, apiuser, repoid, revision, root_path,
414 414 ret_type=Optional('all'), details=Optional('basic'),
415 415 max_file_bytes=Optional(None)):
416 416 """
417 417 Returns a list of nodes and children in a flat list for a given
418 418 path at given revision.
419 419
420 420 It's possible to specify ret_type to show only `files` or `dirs`.
421 421
422 422 This command can only be run using an |authtoken| with admin rights,
423 423 or users with at least read rights to |repos|.
424 424
425 425 :param apiuser: This is filled automatically from the |authtoken|.
426 426 :type apiuser: AuthUser
427 427 :param repoid: The repository name or repository ID.
428 428 :type repoid: str or int
429 429 :param revision: The revision for which listing should be done.
430 430 :type revision: str
431 431 :param root_path: The path from which to start displaying.
432 432 :type root_path: str
433 433 :param ret_type: Set the return type. Valid options are
434 434 ``all`` (default), ``files`` and ``dirs``.
435 435 :type ret_type: Optional(str)
436 436 :param details: Returns extended information about nodes, such as
437 437 md5, binary, and/or content.
438 438 The valid options are ``basic`` and ``full``.
439 439 :type details: Optional(str)
440 440 :param max_file_bytes: Only return file content for files smaller than this size, in bytes.
441 441 :type max_file_bytes: Optional(int)
442 442
443 443 Example output:
444 444
445 445 .. code-block:: bash
446 446
447 447 id : <id_given_in_input>
448 448 result: [
449 449 {
450 450 "binary": false,
451 451 "content": "File line",
452 452 "extension": "md",
453 453 "lines": 2,
454 454 "md5": "059fa5d29b19c0657e384749480f6422",
455 455 "mimetype": "text/x-minidsrc",
456 456 "name": "file.md",
457 457 "size": 580,
458 458 "type": "file"
459 459 },
460 460 ...
461 461 ]
462 462 error: null
463 463 """
464 464
465 465 repo = get_repo_or_error(repoid)
466 466 if not has_superadmin_permission(apiuser):
467 467 _perms = ('repository.admin', 'repository.write', 'repository.read',)
468 468 validate_repo_permissions(apiuser, repoid, repo, _perms)
469 469
470 470 ret_type = Optional.extract(ret_type)
471 471 details = Optional.extract(details)
472 472 max_file_bytes = Optional.extract(max_file_bytes)
473 473
474 474 _extended_types = ['basic', 'full']
475 475 if details not in _extended_types:
476 476 ret_types = ','.join(_extended_types)
477 477 raise JSONRPCError(f'ret_type must be one of {ret_types}')
478 478
479 479 extended_info = False
480 480 content = False
481 481 if details == 'basic':
482 482 extended_info = True
483 483
484 484 if details == 'full':
485 485 extended_info = content = True
486 486
487 487 _map = {}
488 488 try:
489 489 # check if repo is not empty by any chance, skip quicker if it is.
490 490 _scm = repo.scm_instance()
491 491 if _scm.is_empty():
492 492 return []
493 493
494 494 _d, _f = ScmModel().get_nodes(
495 495 repo, revision, root_path, flat=False,
496 496 extended_info=extended_info, content=content,
497 497 max_file_bytes=max_file_bytes)
498 498
499 499 _map = {
500 500 'all': _d + _f,
501 501 'files': _f,
502 502 'dirs': _d,
503 503 }
504 504
505 505 return _map[ret_type]
506 506 except KeyError:
507 507 keys = ','.join(sorted(_map.keys()))
508 508 raise JSONRPCError(f'ret_type must be one of {keys}')
509 509 except Exception:
510 510 log.exception("Exception occurred while trying to get repo nodes")
511 511 raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` nodes')
512 512
513 513
514 514 @jsonrpc_method()
515 515 def get_repo_file(request, apiuser, repoid, commit_id, file_path,
516 516 max_file_bytes=Optional(0), details=Optional('basic'),
517 517 cache=Optional(True)):
518 518 """
519 519 Returns a single file from repository at given revision.
520 520
521 521 This command can only be run using an |authtoken| with admin rights,
522 522 or users with at least read rights to |repos|.
523 523
524 524 :param apiuser: This is filled automatically from the |authtoken|.
525 525 :type apiuser: AuthUser
526 526 :param repoid: The repository name or repository ID.
527 527 :type repoid: str or int
528 528 :param commit_id: The revision for which listing should be done.
529 529 :type commit_id: str
530 530 :param file_path: The path from which to start displaying.
531 531 :type file_path: str
532 532 :param details: Returns a different set of information about nodes.
533 533 The valid options are ``minimal``, ``minimal+search``, ``basic`` and ``full``.
534 534 :type details: Optional(str)
535 535 :param max_file_bytes: Only return file content for files smaller than this size, in bytes.
536 536 :type max_file_bytes: Optional(int)
537 537 :param cache: Use internal caches for fetching files. If disabled, fetching
538 538 files is slower but more memory efficient.
539 539 :type cache: Optional(bool)
540 540
541 541 Example output:
542 542
543 543 .. code-block:: bash
544 544
545 545 id : <id_given_in_input>
546 546 result: {
547 547 "binary": false,
548 548 "extension": "py",
549 549 "lines": 35,
550 550 "content": "....",
551 551 "md5": "76318336366b0f17ee249e11b0c99c41",
552 552 "mimetype": "text/x-python",
553 553 "name": "python.py",
554 554 "size": 817,
555 555 "type": "file",
556 556 }
557 557 error: null
558 558 """
559 559
560 560 repo = get_repo_or_error(repoid)
561 561 if not has_superadmin_permission(apiuser):
562 562 _perms = ('repository.admin', 'repository.write', 'repository.read',)
563 563 validate_repo_permissions(apiuser, repoid, repo, _perms)
564 564
565 565 cache = Optional.extract(cache, binary=True)
566 566 details = Optional.extract(details)
567 567 max_file_bytes = Optional.extract(max_file_bytes)
568 568
569 569 _extended_types = ['minimal', 'minimal+search', 'basic', 'full']
570 570 if details not in _extended_types:
571 571 ret_types = ','.join(_extended_types)
572 572 raise JSONRPCError(f'ret_type must be one of {ret_types}, got {details}')
573 573 extended_info = False
574 574 content = False
575 575
576 576 if details == 'minimal':
577 577 extended_info = False
578 578
579 579 elif details == 'basic':
580 580 extended_info = True
581 581
582 582 elif details == 'full':
583 583 extended_info = content = True
584 584
585 585 file_path = safe_str(file_path)
586 586 try:
587 587 # check if repo is not empty by any chance, skip quicker if it is.
588 588 _scm = repo.scm_instance()
589 589 if _scm.is_empty():
590 590 return None
591 591
592 592 node = ScmModel().get_node(
593 593 repo, commit_id, file_path, extended_info=extended_info,
594 594 content=content, max_file_bytes=max_file_bytes, cache=cache)
595 595
596 596 except NodeDoesNotExistError:
597 597 raise JSONRPCError(
598 598 f'There is no file in repo: `{repo.repo_name}` at path `{file_path}` for commit: `{commit_id}`')
599 599 except Exception:
600 600 log.exception("Exception occurred while trying to get repo %s file",
601 601 repo.repo_name)
602 602 raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` file at path {file_path}')
603 603
604 604 return node
605 605
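# Illustrative sketch (not part of this module): fetching a single file with
# full details and a size cap. File path and commit id are placeholders; the
# binary check mirrors the `binary` flag shown in the example output above.
def _example_get_repo_file_call():
    import requests

    api_url = 'https://rhodecode.example.com/_admin/api'  # placeholder endpoint
    payload = {
        'id': 4,
        'auth_token': '<auth_token>',  # placeholder token
        'method': 'get_repo_file',
        'args': {
            'repoid': 'user-group/repo-name',
            'commit_id': '<commit-id>',
            'file_path': 'README.rst',
            'details': 'full',
            'max_file_bytes': 1024 * 1024,  # skip content for files above ~1 MB
        },
    }
    node = requests.post(api_url, json=payload).json()['result']
    if node and not node['binary']:
        return node['content']
    return None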
606 606
607 607 @jsonrpc_method()
608 608 def get_repo_fts_tree(request, apiuser, repoid, commit_id, root_path):
609 609 """
610 610 Returns a list of tree nodes for a path at a given revision. This api is built
611 611 strictly for usage in full text search building, and shouldn't be consumed by other clients.
612 612
613 613 This command can only be run using an |authtoken| with admin rights,
614 614 or users with at least read rights to |repos|.
615 615
616 616 """
617 617
618 618 repo = get_repo_or_error(repoid)
619 619 if not has_superadmin_permission(apiuser):
620 620 _perms = ('repository.admin', 'repository.write', 'repository.read',)
621 621 validate_repo_permissions(apiuser, repoid, repo, _perms)
622 622
623 623 repo_id = repo.repo_id
624 624 cache_seconds = rhodecode.ConfigGet().get_int('rc_cache.cache_repo.expiration_time')
625 625 cache_on = cache_seconds > 0
626 626
627 cache_namespace_uid = f'repo.{repo_id}'
627 cache_namespace_uid = f'repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}'
628 628 rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
629 629
630 def compute_fts_tree(cache_ver, repo_id, commit_id, root_path):
630 def compute_fts_tree(repo_id, commit_id, root_path):
631 631 return ScmModel().get_fts_data(repo_id, commit_id, root_path)
632 632
633 633 try:
634 634 # check if repo is not empty by any chance, skip quicker if it is.
635 635 _scm = repo.scm_instance()
636 636 if not _scm or _scm.is_empty():
637 637 return []
638 638 except RepositoryError:
639 639 log.exception("Exception occurred while trying to get repo nodes")
640 640 raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` nodes')
641 641
642 642 try:
643 643 # we need to resolve commit_id to a FULL sha for cache to work correctly.
644 644 # sending 'master' is a pointer that needs to be translated to current commit.
645 645 commit_id = _scm.get_commit(commit_id=commit_id).raw_id
646 646 log.debug(
647 647 'Computing FTS REPO TREE for repo_id %s commit_id `%s` '
648 648 'with caching: %s[TTL: %ss]' % (
649 649 repo_id, commit_id, cache_on, cache_seconds or 0))
650 650
651 tree_files = compute_fts_tree(rc_cache.FILE_TREE_CACHE_VER, repo_id, commit_id, root_path)
651 tree_files = compute_fts_tree(repo_id, commit_id, root_path)
652 652
653 653 return tree_files
654 654
655 655 except Exception:
656 656 log.exception("Exception occurred while trying to get repo nodes")
657 657 raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)
658 658
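# The change above moves FILE_TREE_CACHE_VER out of the function arguments and
# into the cache namespace uid, so bumping the version invalidates old entries
# simply because they live under a different key prefix. A minimal sketch of
# that idea using plain dogpile.cache (the library the rc_cache layer builds
# on); the region setup and names below are illustrative, not the actual
# rc_cache API.
from dogpile.cache import make_region

_example_region = make_region().configure('dogpile.cache.memory')
_EXAMPLE_FILE_TREE_CACHE_VER = 'v1'  # bump to invalidate every previously cached tree


def _example_versioned_fts_tree(repo_id, commit_id, root_path):
    # the version travels inside the namespace, mirroring the
    # f'repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}' uid used above
    namespace = f'repo.{_EXAMPLE_FILE_TREE_CACHE_VER}.{repo_id}'

    @_example_region.cache_on_arguments(namespace=namespace)
    def compute_fts_tree(repo_id, commit_id, root_path):
        # stand-in for the expensive tree walk done by ScmModel().get_fts_data()
        return {'repo_id': repo_id, 'commit_id': commit_id, 'root_path': root_path}

    return compute_fts_tree(repo_id, commit_id, root_path)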
659 659
660 660 @jsonrpc_method()
661 661 def get_repo_refs(request, apiuser, repoid):
662 662 """
663 663 Returns a dictionary of the current references. It returns
664 664 bookmarks, branches, closed_branches, and tags for the given repository.
665 665
666 666
667 667
668 668 This command can only be run using an |authtoken| with admin rights,
669 669 or users with at least read rights to |repos|.
670 670
671 671 :param apiuser: This is filled automatically from the |authtoken|.
672 672 :type apiuser: AuthUser
673 673 :param repoid: The repository name or repository ID.
674 674 :type repoid: str or int
675 675
676 676 Example output:
677 677
678 678 .. code-block:: bash
679 679
680 680 id : <id_given_in_input>
681 681 "result": {
682 682 "bookmarks": {
683 683 "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
684 684 "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
685 685 },
686 686 "branches": {
687 687 "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
688 688 "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
689 689 },
690 690 "branches_closed": {},
691 691 "tags": {
692 692 "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
693 693 "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
694 694 "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
695 695 "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17",
696 696 }
697 697 }
698 698 error: null
699 699 """
700 700
701 701 repo = get_repo_or_error(repoid)
702 702 if not has_superadmin_permission(apiuser):
703 703 _perms = ('repository.admin', 'repository.write', 'repository.read',)
704 704 validate_repo_permissions(apiuser, repoid, repo, _perms)
705 705
706 706 try:
707 707 # check if repo is not empty by any chance, skip quicker if it is.
708 708 vcs_instance = repo.scm_instance()
709 709 refs = vcs_instance.refs()
710 710 return refs
711 711 except Exception:
712 712 log.exception("Exception occurred while trying to get repo refs")
713 713 raise JSONRPCError(
714 714 'failed to get repo: `%s` references' % repo.repo_name
715 715 )
716 716
717 717
718 718 @jsonrpc_method()
719 719 def create_repo(
720 720 request, apiuser, repo_name, repo_type,
721 721 owner=Optional(OAttr('apiuser')),
722 722 description=Optional(''),
723 723 private=Optional(False),
724 724 clone_uri=Optional(None),
725 725 push_uri=Optional(None),
726 726 landing_rev=Optional(None),
727 727 enable_statistics=Optional(False),
728 728 enable_locking=Optional(False),
729 729 enable_downloads=Optional(False),
730 730 copy_permissions=Optional(False)):
731 731 """
732 732 Creates a repository.
733 733
734 734 * If the repository name contains "/", the repository will be created inside
735 735 a repository group or nested repository groups
736 736
737 737 For example "foo/bar/repo1" will create |repo| called "repo1" inside
738 738 group "foo/bar". You have to have permissions to access and write to
739 739 the last repository group ("bar" in this example)
740 740
741 741 This command can only be run using an |authtoken| with at least
742 742 permissions to create repositories, or write permissions to
743 743 parent repository groups.
744 744
745 745 :param apiuser: This is filled automatically from the |authtoken|.
746 746 :type apiuser: AuthUser
747 747 :param repo_name: Set the repository name.
748 748 :type repo_name: str
749 749 :param repo_type: Set the repository type; 'hg','git', or 'svn'.
750 750 :type repo_type: str
751 751 :param owner: user_id or username
752 752 :type owner: Optional(str)
753 753 :param description: Set the repository description.
754 754 :type description: Optional(str)
755 755 :param private: set repository as private
756 756 :type private: bool
757 757 :param clone_uri: set clone_uri
758 758 :type clone_uri: str
759 759 :param push_uri: set push_uri
760 760 :type push_uri: str
761 761 :param landing_rev: <rev_type>:<rev>, e.g. branch:default, book:dev, rev:abcd
762 762 :type landing_rev: str
763 763 :param enable_locking:
764 764 :type enable_locking: bool
765 765 :param enable_downloads:
766 766 :type enable_downloads: bool
767 767 :param enable_statistics:
768 768 :type enable_statistics: bool
769 769 :param copy_permissions: Copy permission from group in which the
770 770 repository is being created.
771 771 :type copy_permissions: bool
772 772
773 773
774 774 Example output:
775 775
776 776 .. code-block:: bash
777 777
778 778 id : <id_given_in_input>
779 779 result: {
780 780 "msg": "Created new repository `<reponame>`",
781 781 "success": true,
782 782 "task": "<celery task id or None if done sync>"
783 783 }
784 784 error: null
785 785
786 786
787 787 Example error output:
788 788
789 789 .. code-block:: bash
790 790
791 791 id : <id_given_in_input>
792 792 result : null
793 793 error : {
794 794 'failed to create repository `<repo_name>`'
795 795 }
796 796
797 797 """
798 798
799 799 owner = validate_set_owner_permissions(apiuser, owner)
800 800
801 801 description = Optional.extract(description)
802 802 copy_permissions = Optional.extract(copy_permissions)
803 803 clone_uri = Optional.extract(clone_uri)
804 804 push_uri = Optional.extract(push_uri)
805 805
806 806 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
807 807 if isinstance(private, Optional):
808 808 private = defs.get('repo_private') or Optional.extract(private)
809 809 if isinstance(repo_type, Optional):
810 810 repo_type = defs.get('repo_type')
811 811 if isinstance(enable_statistics, Optional):
812 812 enable_statistics = defs.get('repo_enable_statistics')
813 813 if isinstance(enable_locking, Optional):
814 814 enable_locking = defs.get('repo_enable_locking')
815 815 if isinstance(enable_downloads, Optional):
816 816 enable_downloads = defs.get('repo_enable_downloads')
817 817
818 818 landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
819 819 ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
820 820 ref_choices = list(set(ref_choices + [landing_ref]))
821 821
822 822 landing_commit_ref = Optional.extract(landing_rev) or landing_ref
823 823
824 824 schema = repo_schema.RepoSchema().bind(
825 825 repo_type_options=rhodecode.BACKENDS.keys(),
826 826 repo_ref_options=ref_choices,
827 827 repo_type=repo_type,
828 828 # user caller
829 829 user=apiuser)
830 830
831 831 try:
832 832 schema_data = schema.deserialize(dict(
833 833 repo_name=repo_name,
834 834 repo_type=repo_type,
835 835 repo_owner=owner.username,
836 836 repo_description=description,
837 837 repo_landing_commit_ref=landing_commit_ref,
838 838 repo_clone_uri=clone_uri,
839 839 repo_push_uri=push_uri,
840 840 repo_private=private,
841 841 repo_copy_permissions=copy_permissions,
842 842 repo_enable_statistics=enable_statistics,
843 843 repo_enable_downloads=enable_downloads,
844 844 repo_enable_locking=enable_locking))
845 845 except validation_schema.Invalid as err:
846 846 raise JSONRPCValidationError(colander_exc=err)
847 847
848 848 try:
849 849 data = {
850 850 'owner': owner,
851 851 'repo_name': schema_data['repo_group']['repo_name_without_group'],
852 852 'repo_name_full': schema_data['repo_name'],
853 853 'repo_group': schema_data['repo_group']['repo_group_id'],
854 854 'repo_type': schema_data['repo_type'],
855 855 'repo_description': schema_data['repo_description'],
856 856 'repo_private': schema_data['repo_private'],
857 857 'clone_uri': schema_data['repo_clone_uri'],
858 858 'push_uri': schema_data['repo_push_uri'],
859 859 'repo_landing_rev': schema_data['repo_landing_commit_ref'],
860 860 'enable_statistics': schema_data['repo_enable_statistics'],
861 861 'enable_locking': schema_data['repo_enable_locking'],
862 862 'enable_downloads': schema_data['repo_enable_downloads'],
863 863 'repo_copy_permissions': schema_data['repo_copy_permissions'],
864 864 }
865 865
866 866 task = RepoModel().create(form_data=data, cur_user=owner.user_id)
867 867 task_id = get_task_id(task)
868 868 # no commit, it's done in RepoModel, or async via celery
869 869 return {
870 870 'msg': "Created new repository `{}`".format(schema_data['repo_name']),
871 871 'success': True, # cannot return the repo data here since fork
872 872 # can be done async
873 873 'task': task_id
874 874 }
875 875 except Exception:
876 876 log.exception(
877 877 "Exception while trying to create the repository %s",
878 878 schema_data['repo_name'])
879 879 raise JSONRPCError(
880 880 'failed to create repository `{}`'.format(schema_data['repo_name']))
881 881
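# Illustrative sketch (not part of this module): creating a repository inside a
# nested repository group, as described in the docstring above. The caller
# needs write access to the last group ('bar'); all values are placeholders.
def _example_create_repo_call():
    payload = {
        'id': 5,
        'auth_token': '<auth_token>',  # placeholder token
        'method': 'create_repo',
        'args': {
            'repo_name': 'foo/bar/repo1',  # creates `repo1` inside group `foo/bar`
            'repo_type': 'git',
            'description': 'example repository',
            'copy_permissions': True,  # inherit permissions from the parent group
        },
    }
    return payload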
882 882
883 883 @jsonrpc_method()
884 884 def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
885 885 description=Optional('')):
886 886 """
887 887 Adds an extra field to a repository.
888 888
889 889 This command can only be run using an |authtoken| with at least
890 890 write permissions to the |repo|.
891 891
892 892 :param apiuser: This is filled automatically from the |authtoken|.
893 893 :type apiuser: AuthUser
894 894 :param repoid: Set the repository name or repository id.
895 895 :type repoid: str or int
896 896 :param key: Create a unique field key for this repository.
897 897 :type key: str
898 898 :param label:
899 899 :type label: Optional(str)
900 900 :param description:
901 901 :type description: Optional(str)
902 902 """
903 903 repo = get_repo_or_error(repoid)
904 904 if not has_superadmin_permission(apiuser):
905 905 _perms = ('repository.admin',)
906 906 validate_repo_permissions(apiuser, repoid, repo, _perms)
907 907
908 908 label = Optional.extract(label) or key
909 909 description = Optional.extract(description)
910 910
911 911 field = RepositoryField.get_by_key_name(key, repo)
912 912 if field:
913 913 raise JSONRPCError('Field with key '
914 914 '`%s` exists for repo `%s`' % (key, repoid))
915 915
916 916 try:
917 917 RepoModel().add_repo_field(repo, key, field_label=label,
918 918 field_desc=description)
919 919 Session().commit()
920 920 return {
921 921 'msg': f"Added new repository field `{key}`",
922 922 'success': True,
923 923 }
924 924 except Exception:
925 925 log.exception("Exception occurred while trying to add field to repo")
926 926 raise JSONRPCError(
927 927 f'failed to create new field for repository `{repoid}`')
928 928
929 929
930 930 @jsonrpc_method()
931 931 def remove_field_from_repo(request, apiuser, repoid, key):
932 932 """
933 933 Removes an extra field from a repository.
934 934
935 935 This command can only be run using an |authtoken| with at least
936 936 write permissions to the |repo|.
937 937
938 938 :param apiuser: This is filled automatically from the |authtoken|.
939 939 :type apiuser: AuthUser
940 940 :param repoid: Set the repository name or repository ID.
941 941 :type repoid: str or int
942 942 :param key: Set the unique field key for this repository.
943 943 :type key: str
944 944 """
945 945
946 946 repo = get_repo_or_error(repoid)
947 947 if not has_superadmin_permission(apiuser):
948 948 _perms = ('repository.admin',)
949 949 validate_repo_permissions(apiuser, repoid, repo, _perms)
950 950
951 951 field = RepositoryField.get_by_key_name(key, repo)
952 952 if not field:
953 953 raise JSONRPCError('Field with key `%s` does not '
954 954 'exists for repo `%s`' % (key, repoid))
955 955
956 956 try:
957 957 RepoModel().delete_repo_field(repo, field_key=key)
958 958 Session().commit()
959 959 return {
960 960 'msg': f"Deleted repository field `{key}`",
961 961 'success': True,
962 962 }
963 963 except Exception:
964 964 log.exception(
965 965 "Exception occurred while trying to delete field from repo")
966 966 raise JSONRPCError(
967 967 f'failed to delete field for repository `{repoid}`')
968 968
969 969
970 970 @jsonrpc_method()
971 971 def update_repo(
972 972 request, apiuser, repoid, repo_name=Optional(None),
973 973 owner=Optional(OAttr('apiuser')), description=Optional(''),
974 974 private=Optional(False),
975 975 clone_uri=Optional(None), push_uri=Optional(None),
976 976 landing_rev=Optional(None), fork_of=Optional(None),
977 977 enable_statistics=Optional(False),
978 978 enable_locking=Optional(False),
979 979 enable_downloads=Optional(False), fields=Optional('')):
980 980 r"""
981 981 Updates a repository with the given information.
982 982
983 983 This command can only be run using an |authtoken| with at least
984 984 admin permissions to the |repo|.
985 985
986 986 * If the repository name contains "/", the repository will be updated
987 987 accordingly, placing it inside the matching repository group or nested repository groups
988 988
989 989 For example repoid=repo-test repo_name="foo/bar/repo-test" will update |repo|
990 990 called "repo-test" and place it inside group "foo/bar".
991 991 You have to have permissions to access and write to the last repository
992 992 group ("bar" in this example)
993 993
994 994 :param apiuser: This is filled automatically from the |authtoken|.
995 995 :type apiuser: AuthUser
996 996 :param repoid: repository name or repository ID.
997 997 :type repoid: str or int
998 998 :param repo_name: Update the |repo| name, including the
999 999 repository group it's in.
1000 1000 :type repo_name: str
1001 1001 :param owner: Set the |repo| owner.
1002 1002 :type owner: str
1003 1003 :param fork_of: Set the |repo| as fork of another |repo|.
1004 1004 :type fork_of: str
1005 1005 :param description: Update the |repo| description.
1006 1006 :type description: str
1007 1007 :param private: Set the |repo| as private. (True | False)
1008 1008 :type private: bool
1009 1009 :param clone_uri: Update the |repo| clone URI.
1010 1010 :type clone_uri: str
1011 1011 :param landing_rev: Set the |repo| landing revision, e.g. branch:default, book:dev, rev:abcd
1012 1012 :type landing_rev: str
1013 1013 :param enable_statistics: Enable statistics on the |repo|, (True | False).
1014 1014 :type enable_statistics: bool
1015 1015 :param enable_locking: Enable |repo| locking.
1016 1016 :type enable_locking: bool
1017 1017 :param enable_downloads: Enable downloads from the |repo|, (True | False).
1018 1018 :type enable_downloads: bool
1019 1019 :param fields: Add extra fields to the |repo|. Use the following
1020 1020 example format: ``field_key=field_val,field_key2=fieldval2``.
1021 1021 Escape ', ' with \,
1022 1022 :type fields: str
1023 1023 """
1024 1024
1025 1025 repo = get_repo_or_error(repoid)
1026 1026
1027 1027 include_secrets = False
1028 1028 if not has_superadmin_permission(apiuser):
1029 1029 _perms = ('repository.admin',)
1030 1030 validate_repo_permissions(apiuser, repoid, repo, _perms)
1031 1031 else:
1032 1032 include_secrets = True
1033 1033
1034 1034 updates = dict(
1035 1035 repo_name=repo_name
1036 1036 if not isinstance(repo_name, Optional) else repo.repo_name,
1037 1037
1038 1038 fork_id=fork_of
1039 1039 if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None,
1040 1040
1041 1041 user=owner
1042 1042 if not isinstance(owner, Optional) else repo.user.username,
1043 1043
1044 1044 repo_description=description
1045 1045 if not isinstance(description, Optional) else repo.description,
1046 1046
1047 1047 repo_private=private
1048 1048 if not isinstance(private, Optional) else repo.private,
1049 1049
1050 1050 clone_uri=clone_uri
1051 1051 if not isinstance(clone_uri, Optional) else repo.clone_uri,
1052 1052
1053 1053 push_uri=push_uri
1054 1054 if not isinstance(push_uri, Optional) else repo.push_uri,
1055 1055
1056 1056 repo_landing_rev=landing_rev
1057 1057 if not isinstance(landing_rev, Optional) else repo._landing_revision,
1058 1058
1059 1059 repo_enable_statistics=enable_statistics
1060 1060 if not isinstance(enable_statistics, Optional) else repo.enable_statistics,
1061 1061
1062 1062 repo_enable_locking=enable_locking
1063 1063 if not isinstance(enable_locking, Optional) else repo.enable_locking,
1064 1064
1065 1065 repo_enable_downloads=enable_downloads
1066 1066 if not isinstance(enable_downloads, Optional) else repo.enable_downloads)
1067 1067
1068 1068 landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
1069 1069 ref_choices, _labels = ScmModel().get_repo_landing_revs(
1070 1070 request.translate, repo=repo)
1071 1071 ref_choices = list(set(ref_choices + [landing_ref]))
1072 1072
1073 1073 old_values = repo.get_api_data()
1074 1074 repo_type = repo.repo_type
1075 1075 schema = repo_schema.RepoSchema().bind(
1076 1076 repo_type_options=rhodecode.BACKENDS.keys(),
1077 1077 repo_ref_options=ref_choices,
1078 1078 repo_type=repo_type,
1079 1079 # user caller
1080 1080 user=apiuser,
1081 1081 old_values=old_values)
1082 1082 try:
1083 1083 schema_data = schema.deserialize(dict(
1084 1084 # we save old value, users cannot change type
1085 1085 repo_type=repo_type,
1086 1086
1087 1087 repo_name=updates['repo_name'],
1088 1088 repo_owner=updates['user'],
1089 1089 repo_description=updates['repo_description'],
1090 1090 repo_clone_uri=updates['clone_uri'],
1091 1091 repo_push_uri=updates['push_uri'],
1092 1092 repo_fork_of=updates['fork_id'],
1093 1093 repo_private=updates['repo_private'],
1094 1094 repo_landing_commit_ref=updates['repo_landing_rev'],
1095 1095 repo_enable_statistics=updates['repo_enable_statistics'],
1096 1096 repo_enable_downloads=updates['repo_enable_downloads'],
1097 1097 repo_enable_locking=updates['repo_enable_locking']))
1098 1098 except validation_schema.Invalid as err:
1099 1099 raise JSONRPCValidationError(colander_exc=err)
1100 1100
1101 1101 # save validated data back into the updates dict
1102 1102 validated_updates = dict(
1103 1103 repo_name=schema_data['repo_group']['repo_name_without_group'],
1104 1104 repo_group=schema_data['repo_group']['repo_group_id'],
1105 1105
1106 1106 user=schema_data['repo_owner'],
1107 1107 repo_description=schema_data['repo_description'],
1108 1108 repo_private=schema_data['repo_private'],
1109 1109 clone_uri=schema_data['repo_clone_uri'],
1110 1110 push_uri=schema_data['repo_push_uri'],
1111 1111 repo_landing_rev=schema_data['repo_landing_commit_ref'],
1112 1112 repo_enable_statistics=schema_data['repo_enable_statistics'],
1113 1113 repo_enable_locking=schema_data['repo_enable_locking'],
1114 1114 repo_enable_downloads=schema_data['repo_enable_downloads'],
1115 1115 )
1116 1116
1117 1117 if schema_data['repo_fork_of']:
1118 1118 fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
1119 1119 validated_updates['fork_id'] = fork_repo.repo_id
1120 1120
1121 1121 # extra fields
1122 1122 fields = parse_args(Optional.extract(fields), key_prefix='ex_')
1123 1123 if fields:
1124 1124 validated_updates.update(fields)
1125 1125
1126 1126 try:
1127 1127 RepoModel().update(repo, **validated_updates)
1128 1128 audit_logger.store_api(
1129 1129 'repo.edit', action_data={'old_data': old_values},
1130 1130 user=apiuser, repo=repo)
1131 1131 Session().commit()
1132 1132 return {
1133 1133 'msg': f'updated repo ID:{repo.repo_id} {repo.repo_name}',
1134 1134 'repository': repo.get_api_data(include_secrets=include_secrets)
1135 1135 }
1136 1136 except Exception:
1137 1137 log.exception(
1138 1138 "Exception while trying to update the repository %s",
1139 1139 repoid)
1140 1140 raise JSONRPCError('failed to update repo `%s`' % repoid)
1141 1141
1142 1142
1143 1143 @jsonrpc_method()
1144 1144 def fork_repo(request, apiuser, repoid, fork_name,
1145 1145 owner=Optional(OAttr('apiuser')),
1146 1146 description=Optional(''),
1147 1147 private=Optional(False),
1148 1148 clone_uri=Optional(None),
1149 1149 landing_rev=Optional(None),
1150 1150 copy_permissions=Optional(False)):
1151 1151 """
1152 1152 Creates a fork of the specified |repo|.
1153 1153
1154 1154 * If the fork_name contains "/", fork will be created inside
1155 1155 a repository group or nested repository groups
1156 1156
1157 1157 For example "foo/bar/fork-repo" will create fork called "fork-repo"
1158 1158 inside group "foo/bar". You have to have permissions to access and
1159 1159 write to the last repository group ("bar" in this example)
1160 1160
1161 1161 This command can only be run using an |authtoken| with at least
1162 1162 read permissions on the forked repo, and fork creation permissions for the user.
1163 1163
1164 1164 :param apiuser: This is filled automatically from the |authtoken|.
1165 1165 :type apiuser: AuthUser
1166 1166 :param repoid: Set repository name or repository ID.
1167 1167 :type repoid: str or int
1168 1168 :param fork_name: Set the fork name, including its repository group membership.
1169 1169 :type fork_name: str
1170 1170 :param owner: Set the fork owner.
1171 1171 :type owner: str
1172 1172 :param description: Set the fork description.
1173 1173 :type description: str
1174 1174 :param copy_permissions: Copy permissions from parent |repo|. The
1175 1175 default is False.
1176 1176 :type copy_permissions: bool
1177 1177 :param private: Make the fork private. The default is False.
1178 1178 :type private: bool
1179 1179 :param landing_rev: Set the landing revision, e.g. branch:default, book:dev, rev:abcd
1180 1180
1181 1181 Example input:
1182 1182
1183 1183 .. code-block:: bash
1184 1184
1185 1185 id : <id_for_response>
1186 1186 api_key : "<api_key>"
1187 1187 args: {
1188 1188 "repoid" : "<reponame or repo_id>",
1189 1189 "fork_name": "<forkname>",
1190 1190 "owner": "<username or user_id = Optional(=apiuser)>",
1191 1191 "description": "<description>",
1192 1192 "copy_permissions": "<bool>",
1193 1193 "private": "<bool>",
1194 1194 "landing_rev": "<landing_rev>"
1195 1195 }
1196 1196
1197 1197 Example output:
1198 1198
1199 1199 .. code-block:: bash
1200 1200
1201 1201 id : <id_given_in_input>
1202 1202 result: {
1203 1203 "msg": "Created fork of `<reponame>` as `<forkname>`",
1204 1204 "success": true,
1205 1205 "task": "<celery task id or None if done sync>"
1206 1206 }
1207 1207 error: null
1208 1208
1209 1209 """
1210 1210
1211 1211 repo = get_repo_or_error(repoid)
1212 1212 repo_name = repo.repo_name
1213 1213
1214 1214 if not has_superadmin_permission(apiuser):
1215 1215 # check if we have at least read permission for
1216 1216 # this repo that we fork !
1217 1217 _perms = ('repository.admin', 'repository.write', 'repository.read')
1218 1218 validate_repo_permissions(apiuser, repoid, repo, _perms)
1219 1219
1220 1220 # check if the regular user has at least fork permissions as well
1221 1221 if not HasPermissionAnyApi(PermissionModel.FORKING_ENABLED)(user=apiuser):
1222 1222 raise JSONRPCForbidden()
1223 1223
1224 1224 # check if user can set owner parameter
1225 1225 owner = validate_set_owner_permissions(apiuser, owner)
1226 1226
1227 1227 description = Optional.extract(description)
1228 1228 copy_permissions = Optional.extract(copy_permissions)
1229 1229 clone_uri = Optional.extract(clone_uri)
1230 1230
1231 1231 landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
1232 1232 ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
1233 1233 ref_choices = list(set(ref_choices + [landing_ref]))
1234 1234 landing_commit_ref = Optional.extract(landing_rev) or landing_ref
1235 1235
1236 1236 private = Optional.extract(private)
1237 1237
1238 1238 schema = repo_schema.RepoSchema().bind(
1239 1239 repo_type_options=rhodecode.BACKENDS.keys(),
1240 1240 repo_ref_options=ref_choices,
1241 1241 repo_type=repo.repo_type,
1242 1242 # user caller
1243 1243 user=apiuser)
1244 1244
1245 1245 try:
1246 1246 schema_data = schema.deserialize(dict(
1247 1247 repo_name=fork_name,
1248 1248 repo_type=repo.repo_type,
1249 1249 repo_owner=owner.username,
1250 1250 repo_description=description,
1251 1251 repo_landing_commit_ref=landing_commit_ref,
1252 1252 repo_clone_uri=clone_uri,
1253 1253 repo_private=private,
1254 1254 repo_copy_permissions=copy_permissions))
1255 1255 except validation_schema.Invalid as err:
1256 1256 raise JSONRPCValidationError(colander_exc=err)
1257 1257
1258 1258 try:
1259 1259 data = {
1260 1260 'fork_parent_id': repo.repo_id,
1261 1261
1262 1262 'repo_name': schema_data['repo_group']['repo_name_without_group'],
1263 1263 'repo_name_full': schema_data['repo_name'],
1264 1264 'repo_group': schema_data['repo_group']['repo_group_id'],
1265 1265 'repo_type': schema_data['repo_type'],
1266 1266 'description': schema_data['repo_description'],
1267 1267 'private': schema_data['repo_private'],
1268 1268 'copy_permissions': schema_data['repo_copy_permissions'],
1269 1269 'landing_rev': schema_data['repo_landing_commit_ref'],
1270 1270 }
1271 1271
1272 1272 task = RepoModel().create_fork(data, cur_user=owner.user_id)
1273 1273 # no commit, it's done in RepoModel, or async via celery
1274 1274 task_id = get_task_id(task)
1275 1275
1276 1276 return {
1277 1277 'msg': 'Created fork of `{}` as `{}`'.format(
1278 1278 repo.repo_name, schema_data['repo_name']),
1279 1279 'success': True, # cannot return the repo data here since fork
1280 1280 # can be done async
1281 1281 'task': task_id
1282 1282 }
1283 1283 except Exception:
1284 1284 log.exception(
1285 1285 "Exception while trying to create fork %s",
1286 1286 schema_data['repo_name'])
1287 1287 raise JSONRPCError(
1288 1288 'failed to fork repository `{}` as `{}`'.format(
1289 1289 repo_name, schema_data['repo_name']))
1290 1290
1291 1291
1292 1292 @jsonrpc_method()
1293 1293 def delete_repo(request, apiuser, repoid, forks=Optional('')):
1294 1294 """
1295 1295 Deletes a repository.
1296 1296
1297 1297 * When the `forks` parameter is set it's possible to detach or delete
1298 1298 forks of deleted repository.
1299 1299
1300 1300 This command can only be run using an |authtoken| with admin
1301 1301 permissions on the |repo|.
1302 1302
1303 1303 :param apiuser: This is filled automatically from the |authtoken|.
1304 1304 :type apiuser: AuthUser
1305 1305 :param repoid: Set the repository name or repository ID.
1306 1306 :type repoid: str or int
1307 1307 :param forks: Set to `detach` or `delete` to detach or delete forks of the |repo|.
1308 1308 :type forks: Optional(str)
1309 1309
1310 1310 Example output:
1311 1311
1312 1312 .. code-block:: bash
1313 1313
1314 1314 id : <id_given_in_input>
1315 1315 result: {
1316 1316 "msg": "Deleted repository `<reponame>`",
1317 1317 "success": true
1318 1318 }
1319 1319 error: null
1320 1320 """
1321 1321
1322 1322 repo = get_repo_or_error(repoid)
1323 1323 repo_name = repo.repo_name
1324 1324 if not has_superadmin_permission(apiuser):
1325 1325 _perms = ('repository.admin',)
1326 1326 validate_repo_permissions(apiuser, repoid, repo, _perms)
1327 1327
1328 1328 try:
1329 1329 handle_forks = Optional.extract(forks)
1330 1330 _forks_msg = ''
1331 1331 _forks = [f for f in repo.forks]
1332 1332 if handle_forks == 'detach':
1333 1333 _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
1334 1334 elif handle_forks == 'delete':
1335 1335 _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
1336 1336 elif _forks:
1337 1337 raise JSONRPCError(
1338 1338 'Cannot delete `%s` it still contains attached forks' %
1339 1339 (repo.repo_name,)
1340 1340 )
1341 1341 old_data = repo.get_api_data()
1342 1342 RepoModel().delete(repo, forks=forks)
1343 1343
1344 1344 repo = audit_logger.RepoWrap(repo_id=None,
1345 1345 repo_name=repo.repo_name)
1346 1346
1347 1347 audit_logger.store_api(
1348 1348 'repo.delete', action_data={'old_data': old_data},
1349 1349 user=apiuser, repo=repo)
1350 1350
1351 1351 ScmModel().mark_for_invalidation(repo_name, delete=True)
1352 1352 Session().commit()
1353 1353 return {
1354 1354 'msg': f'Deleted repository `{repo_name}`{_forks_msg}',
1355 1355 'success': True
1356 1356 }
1357 1357 except Exception:
1358 1358 log.exception("Exception occurred while trying to delete repo")
1359 1359 raise JSONRPCError(
1360 1360 f'failed to delete repository `{repo_name}`'
1361 1361 )
1362 1362
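# Illustrative sketch (not part of this module): deleting a repository while
# detaching its forks so they survive as standalone repositories; the envelope
# and values are placeholders, per the docstring above.
def _example_delete_repo_call():
    payload = {
        'id': 6,
        'auth_token': '<auth_token>',  # placeholder token
        'method': 'delete_repo',
        'args': {'repoid': 'user-group/repo-name', 'forks': 'detach'},
    }
    return payload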
1363 1363
1364 1364 #TODO: marcink, change name ?
1365 1365 @jsonrpc_method()
1366 1366 def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
1367 1367 """
1368 1368 Invalidates the cache for the specified repository.
1369 1369
1370 1370 This command can only be run using an |authtoken| with admin rights to
1371 1371 the specified repository.
1372 1372
1373 1373 This command takes the following options:
1374 1374
1375 1375 :param apiuser: This is filled automatically from |authtoken|.
1376 1376 :type apiuser: AuthUser
1377 1377 :param repoid: Sets the repository name or repository ID.
1378 1378 :type repoid: str or int
1379 1379 :param delete_keys: This deletes the invalidated keys instead of
1380 1380 just flagging them.
1381 1381 :type delete_keys: Optional(``True`` | ``False``)
1382 1382
1383 1383 Example output:
1384 1384
1385 1385 .. code-block:: bash
1386 1386
1387 1387 id : <id_given_in_input>
1388 1388 result : {
1389 1389 'msg': 'Cache for repository `<repository name>` was invalidated',
1390 1390 'repository': <repository name>
1391 1391 }
1392 1392 error : null
1393 1393
1394 1394 Example error output:
1395 1395
1396 1396 .. code-block:: bash
1397 1397
1398 1398 id : <id_given_in_input>
1399 1399 result : null
1400 1400 error : {
1401 1401 'Error occurred during cache invalidation action'
1402 1402 }
1403 1403
1404 1404 """
1405 1405
1406 1406 repo = get_repo_or_error(repoid)
1407 1407 if not has_superadmin_permission(apiuser):
1408 1408 _perms = ('repository.admin', 'repository.write',)
1409 1409 validate_repo_permissions(apiuser, repoid, repo, _perms)
1410 1410
1411 1411 delete = Optional.extract(delete_keys)
1412 1412 try:
1413 1413 ScmModel().mark_for_invalidation(repo.repo_name, delete=delete)
1414 1414 return {
1415 1415 'msg': f'Cache for repository `{repoid}` was invalidated',
1416 1416 'repository': repo.repo_name
1417 1417 }
1418 1418 except Exception:
1419 1419 log.exception(
1420 1420 "Exception occurred while trying to invalidate repo cache")
1421 1421 raise JSONRPCError(
1422 1422 'Error occurred during cache invalidation action'
1423 1423 )
1424 1424
1425 1425
1426 1426 #TODO: marcink, change name ?
1427 1427 @jsonrpc_method()
1428 1428 def lock(request, apiuser, repoid, locked=Optional(None),
1429 1429 userid=Optional(OAttr('apiuser'))):
1430 1430 """
1431 1431 Sets the lock state of the specified |repo| by the given user.
1432 1432 For more information, see :ref:`repo-locking`.
1433 1433
1434 1434 * If the ``userid`` option is not set, the repository is locked to the
1435 1435 user who called the method.
1436 1436 * If the ``locked`` parameter is not set, the current lock state of the
1437 1437 repository is displayed.
1438 1438
1439 1439 This command can only be run using an |authtoken| with admin rights to
1440 1440 the specified repository.
1441 1441
1442 1442 This command takes the following options:
1443 1443
1444 1444 :param apiuser: This is filled automatically from the |authtoken|.
1445 1445 :type apiuser: AuthUser
1446 1446 :param repoid: Sets the repository name or repository ID.
1447 1447 :type repoid: str or int
1448 1448 :param locked: Sets the lock state.
1449 1449 :type locked: Optional(``True`` | ``False``)
1450 1450 :param userid: Set the repository lock to this user.
1451 1451 :type userid: Optional(str or int)
1452 1452
1453 1453 Example output:
1454 1454
1455 1455 .. code-block:: bash
1456 1456
1457 1457 id : <id_given_in_input>
1458 1458 result : {
1459 1459 'repo': '<reponame>',
1460 1460 'locked': <bool: lock state>,
1461 1461 'locked_since': <int: lock timestamp>,
1462 1462 'locked_by': <username of person who made the lock>,
1463 1463 'lock_reason': <str: reason for locking>,
1464 1464 'lock_state_changed': <bool: True if lock state has been changed in this request>,
1465 1465 'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
1466 1466 or
1467 1467 'msg': 'Repo `<repository name>` not locked.'
1468 1468 or
1469 1469 'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
1470 1470 }
1471 1471 error : null
1472 1472
1473 1473 Example error output:
1474 1474
1475 1475 .. code-block:: bash
1476 1476
1477 1477 id : <id_given_in_input>
1478 1478 result : null
1479 1479 error : {
1480 1480 'Error occurred locking repository `<reponame>`'
1481 1481 }
1482 1482 """
1483 1483
1484 1484 repo = get_repo_or_error(repoid)
1485 1485 if not has_superadmin_permission(apiuser):
1486 1486 # check if we have at least write permission for this repo !
1487 1487 _perms = ('repository.admin', 'repository.write',)
1488 1488 validate_repo_permissions(apiuser, repoid, repo, _perms)
1489 1489
1490 1490 # make sure normal user does not pass someone else userid,
1491 1491 # he is not allowed to do that
1492 1492 if not isinstance(userid, Optional) and userid != apiuser.user_id:
1493 1493 raise JSONRPCError('userid is not the same as your user')
1494 1494
1495 1495 if isinstance(userid, Optional):
1496 1496 userid = apiuser.user_id
1497 1497
1498 1498 user = get_user_or_error(userid)
1499 1499
1500 1500 if isinstance(locked, Optional):
1501 1501 lockobj = repo.locked
1502 1502
1503 1503 if lockobj[0] is None:
1504 1504 _d = {
1505 1505 'repo': repo.repo_name,
1506 1506 'locked': False,
1507 1507 'locked_since': None,
1508 1508 'locked_by': None,
1509 1509 'lock_reason': None,
1510 1510 'lock_state_changed': False,
1511 1511 'msg': 'Repo `%s` not locked.' % repo.repo_name
1512 1512 }
1513 1513 return _d
1514 1514 else:
1515 1515 _user_id, _time, _reason = lockobj
1516 1516 lock_user = get_user_or_error(_user_id)  # the user who made the lock
1517 1517 _d = {
1518 1518 'repo': repo.repo_name,
1519 1519 'locked': True,
1520 1520 'locked_since': _time,
1521 1521 'locked_by': lock_user.username,
1522 1522 'lock_reason': _reason,
1523 1523 'lock_state_changed': False,
1524 1524 'msg': ('Repo `%s` locked by `%s` on `%s`.'
1525 1525 % (repo.repo_name, lock_user.username,
1526 1526 json.dumps(time_to_datetime(_time))))
1527 1527 }
1528 1528 return _d
1529 1529
1530 1530 # force locked state through a flag
1531 1531 else:
1532 1532 locked = str2bool(locked)
1533 1533 lock_reason = Repository.LOCK_API
1534 1534 try:
1535 1535 if locked:
1536 1536 lock_time = time.time()
1537 1537 Repository.lock(repo, user.user_id, lock_time, lock_reason)
1538 1538 else:
1539 1539 lock_time = None
1540 1540 Repository.unlock(repo)
1541 1541 _d = {
1542 1542 'repo': repo.repo_name,
1543 1543 'locked': locked,
1544 1544 'locked_since': lock_time,
1545 1545 'locked_by': user.username,
1546 1546 'lock_reason': lock_reason,
1547 1547 'lock_state_changed': True,
1548 1548 'msg': ('User `%s` set lock state for repo `%s` to `%s`'
1549 1549 % (user.username, repo.repo_name, locked))
1550 1550 }
1551 1551 return _d
1552 1552 except Exception:
1553 1553 log.exception(
1554 1554 "Exception occurred while trying to lock repository")
1555 1555 raise JSONRPCError(
1556 1556 'Error occurred locking repository `%s`' % repo.repo_name
1557 1557 )
1558 1558
1559 1559
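The two branches above (report the current lock state vs. force a new one) can be illustrated with a small client sketch. This is not part of the module; the endpoint path, token, and repository name are placeholder assumptions, and the request envelope follows the usual RhodeCode JSON-RPC shape (id, auth_token, method, args).

.. code-block:: python

    # Hypothetical client-side sketch for the `lock` API method above.
    # API_URL and AUTH_TOKEN are assumptions, not values from this codebase.
    import requests

    API_URL = 'https://code.example.com/_admin/api'   # assumed endpoint
    AUTH_TOKEN = 'secret-api-token'                    # assumed token

    def api_call(method, **args):
        payload = {'id': 1, 'auth_token': AUTH_TOKEN, 'method': method, 'args': args}
        return requests.post(API_URL, json=payload).json()

    # `locked` omitted -> the current lock state is reported, nothing changes.
    print(api_call('lock', repoid='my-repo'))

    # `locked=True` -> the repo is locked to the calling user, with
    # Repository.LOCK_API recorded as the lock reason.
    print(api_call('lock', repoid='my-repo', locked=True))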
1560 1560 @jsonrpc_method()
1561 1561 def comment_commit(
1562 1562 request, apiuser, repoid, commit_id, message, status=Optional(None),
1563 1563 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
1564 1564 resolves_comment_id=Optional(None), extra_recipients=Optional([]),
1565 1565 userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
1566 1566 """
1567 1567 Set a commit comment, and optionally change the status of the commit.
1568 1568
1569 1569 :param apiuser: This is filled automatically from the |authtoken|.
1570 1570 :type apiuser: AuthUser
1571 1571 :param repoid: Set the repository name or repository ID.
1572 1572 :type repoid: str or int
1573 1573 :param commit_id: Specify the commit_id for which to set a comment.
1574 1574 :type commit_id: str
1575 1575 :param message: The comment text.
1576 1576 :type message: str
1577 1577 :param status: (**Optional**) status of commit, one of: 'not_reviewed',
1578 1578 'approved', 'rejected', 'under_review'
1579 1579 :type status: str
1580 1580 :param comment_type: Comment type, one of: 'note', 'todo'
1581 1581 :type comment_type: Optional(str), default: 'note'
1582 1582 :param resolves_comment_id: id of comment which this one will resolve
1583 1583 :type resolves_comment_id: Optional(int)
1584 1584 :param extra_recipients: list of user ids or usernames to add
1585 1585 notifications for this comment. Acts like a CC for notification
1586 1586 :type extra_recipients: Optional(list)
1587 1587 :param userid: Set the user name of the comment creator.
1588 1588 :type userid: Optional(str or int)
1589 1589 :param send_email: Define if this comment should also send email notification
1590 1590 :type send_email: Optional(bool)
1591 1591
1592 1592 Example output:
1593 1593
1594 1594 .. code-block:: bash
1595 1595
1596 1596 {
1597 1597 "id" : <id_given_in_input>,
1598 1598 "result" : {
1599 1599 "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
1600 1600 "status_change": null or <status>,
1601 1601 "success": true
1602 1602 },
1603 1603 "error" : null
1604 1604 }
1605 1605
1606 1606 """
1607 1607 _ = request.translate
1608 1608
1609 1609 repo = get_repo_or_error(repoid)
1610 1610 if not has_superadmin_permission(apiuser):
1611 1611 _perms = ('repository.read', 'repository.write', 'repository.admin')
1612 1612 validate_repo_permissions(apiuser, repoid, repo, _perms)
1613 1613 db_repo_name = repo.repo_name
1614 1614
1615 1615 try:
1616 1616 commit = repo.scm_instance().get_commit(commit_id=commit_id)
1617 1617 commit_id = commit.raw_id
1618 1618 except Exception as e:
1619 1619 log.exception('Failed to fetch commit')
1620 1620 raise JSONRPCError(safe_str(e))
1621 1621
1622 1622 if isinstance(userid, Optional):
1623 1623 userid = apiuser.user_id
1624 1624
1625 1625 user = get_user_or_error(userid)
1626 1626 status = Optional.extract(status)
1627 1627 comment_type = Optional.extract(comment_type)
1628 1628 resolves_comment_id = Optional.extract(resolves_comment_id)
1629 1629 extra_recipients = Optional.extract(extra_recipients)
1630 1630 send_email = Optional.extract(send_email, binary=True)
1631 1631
1632 1632 allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
1633 1633 if status and status not in allowed_statuses:
1634 1634 raise JSONRPCError('Bad status, must be one '
1635 1635 'of %s got %s' % (allowed_statuses, status,))
1636 1636
1637 1637 if resolves_comment_id:
1638 1638 comment = ChangesetComment.get(resolves_comment_id)
1639 1639 if not comment:
1640 1640 raise JSONRPCError(
1641 1641 'Invalid resolves_comment_id `%s` for this commit.'
1642 1642 % resolves_comment_id)
1643 1643 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
1644 1644 raise JSONRPCError(
1645 1645 'Comment `%s` is wrong type for setting status to resolved.'
1646 1646 % resolves_comment_id)
1647 1647
1648 1648 try:
1649 1649 rc_config = SettingsModel().get_all_settings()
1650 1650 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
1651 1651 status_change_label = ChangesetStatus.get_status_lbl(status)
1652 1652 comment = CommentsModel().create(
1653 1653 message, repo, user, commit_id=commit_id,
1654 1654 status_change=status_change_label,
1655 1655 status_change_type=status,
1656 1656 renderer=renderer,
1657 1657 comment_type=comment_type,
1658 1658 resolves_comment_id=resolves_comment_id,
1659 1659 auth_user=apiuser,
1660 1660 extra_recipients=extra_recipients,
1661 1661 send_email=send_email
1662 1662 )
1663 1663 is_inline = comment.is_inline
1664 1664
1665 1665 if status:
1666 1666 # also do a status change
1667 1667 try:
1668 1668 ChangesetStatusModel().set_status(
1669 1669 repo, status, user, comment, revision=commit_id,
1670 1670 dont_allow_on_closed_pull_request=True
1671 1671 )
1672 1672 except StatusChangeOnClosedPullRequestError:
1673 1673 log.exception(
1674 1674 "Exception occurred while trying to change repo commit status")
1675 1675 msg = ('Changing status on a commit associated with '
1676 1676 'a closed pull request is not allowed')
1677 1677 raise JSONRPCError(msg)
1678 1678
1679 1679 CommentsModel().trigger_commit_comment_hook(
1680 1680 repo, apiuser, 'create',
1681 1681 data={'comment': comment, 'commit': commit})
1682 1682
1683 1683 Session().commit()
1684 1684
1685 1685 comment_broadcast_channel = channelstream.comment_channel(
1686 1686 db_repo_name, commit_obj=commit)
1687 1687
1688 1688 comment_data = {'comment': comment, 'comment_id': comment.comment_id}
1689 1689 comment_type = 'inline' if is_inline else 'general'
1690 1690 channelstream.comment_channelstream_push(
1691 1691 request, comment_broadcast_channel, apiuser,
1692 1692 _('posted a new {} comment').format(comment_type),
1693 1693 comment_data=comment_data)
1694 1694
1695 1695 return {
1696 1696 'msg': (
1697 1697 'Commented on commit `{}` for repository `{}`'.format(
1698 1698 comment.revision, repo.repo_name)),
1699 1699 'status_change': status,
1700 1700 'success': True,
1701 1701 }
1702 1702 except JSONRPCError:
1703 1703 # catch any inside errors, and re-raise them to prevent from
1704 1704 # below global catch to silence them
1705 1705 raise
1706 1706 except Exception:
1707 1707 log.exception("Exception occurred while trying to comment on commit")
1708 1708 raise JSONRPCError(
1709 1709 f'failed to set comment on repository `{repo.repo_name}`'
1710 1710 )
1711 1711
1712 1712
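As a rough illustration of the arguments this method consumes, here is a hedged sketch of one payload; every value below is a placeholder, not taken from this codebase.

.. code-block:: python

    # Sketch of an args payload for comment_commit; all values are placeholders.
    # Server-side, `status` is checked against ChangesetStatus.STATUSES and only
    # 'todo' comments may be referenced via resolves_comment_id.
    comment_commit_args = {
        'repoid': 'my-repo',
        'commit_id': 'deadbeefcafe',          # placeholder commit hash
        'message': 'Please add a license header',
        'comment_type': 'todo',               # creates a resolvable TODO comment
        'status': 'under_review',             # optional status change stored with the comment
    }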
1713 1713 @jsonrpc_method()
1714 1714 def get_repo_comments(request, apiuser, repoid,
1715 1715 commit_id=Optional(None), comment_type=Optional(None),
1716 1716 userid=Optional(None)):
1717 1717 """
1718 1718 Get all comments for a repository
1719 1719
1720 1720 :param apiuser: This is filled automatically from the |authtoken|.
1721 1721 :type apiuser: AuthUser
1722 1722 :param repoid: Set the repository name or repository ID.
1723 1723 :type repoid: str or int
1724 1724 :param commit_id: Optionally filter the comments by the commit_id
1725 1725 :type commit_id: Optional(str), default: None
1726 1726 :param comment_type: Optionally filter the comments by the comment_type
1727 1727 one of: 'note', 'todo'
1728 1728 :type comment_type: Optional(str), default: None
1729 1729 :param userid: Optionally filter the comments by the author of comment
1730 1730 :type userid: Optional(str or int), Default: None
1731 1731
1732 1732 Example output:
1733 1733
1734 1734 .. code-block:: bash
1735 1735
1736 1736 {
1737 1737 "id" : <id_given_in_input>,
1738 1738 "result" : [
1739 1739 {
1740 1740 "comment_author": <USER_DETAILS>,
1741 1741 "comment_created_on": "2017-02-01T14:38:16.309",
1742 1742 "comment_f_path": "file.txt",
1743 1743 "comment_id": 282,
1744 1744 "comment_lineno": "n1",
1745 1745 "comment_resolved_by": null,
1746 1746 "comment_status": [],
1747 1747 "comment_text": "This file needs a header",
1748 1748 "comment_type": "todo",
1749 1749 "comment_last_version: 0
1750 1750 }
1751 1751 ],
1752 1752 "error" : null
1753 1753 }
1754 1754
1755 1755 """
1756 1756 repo = get_repo_or_error(repoid)
1757 1757 if not has_superadmin_permission(apiuser):
1758 1758 _perms = ('repository.read', 'repository.write', 'repository.admin')
1759 1759 validate_repo_permissions(apiuser, repoid, repo, _perms)
1760 1760
1761 1761 commit_id = Optional.extract(commit_id)
1762 1762
1763 1763 userid = Optional.extract(userid)
1764 1764 if userid:
1765 1765 user = get_user_or_error(userid)
1766 1766 else:
1767 1767 user = None
1768 1768
1769 1769 comment_type = Optional.extract(comment_type)
1770 1770 if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES:
1771 1771 raise JSONRPCError(
1772 1772 'comment_type must be one of `{}` got {}'.format(
1773 1773 ChangesetComment.COMMENT_TYPES, comment_type)
1774 1774 )
1775 1775
1776 1776 comments = CommentsModel().get_repository_comments(
1777 1777 repo=repo, comment_type=comment_type, user=user, commit_id=commit_id)
1778 1778 return comments
1779 1779
1780 1780
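A hedged usage sketch for the method above, reusing the hypothetical api_call helper from the lock example; the repository name is a placeholder and the result keys mirror the example output in the docstring.

.. code-block:: python

    # List TODO comments that nobody has resolved yet.
    todos = api_call('get_repo_comments', repoid='my-repo', comment_type='todo')['result']
    unresolved = [c for c in todos if not c.get('comment_resolved_by')]
    print(f'{len(unresolved)} unresolved TODO comments')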
1781 1781 @jsonrpc_method()
1782 1782 def get_comment(request, apiuser, comment_id):
1783 1783 """
1784 1784 Get single comment from repository or pull_request
1785 1785
1786 1786 :param apiuser: This is filled automatically from the |authtoken|.
1787 1787 :type apiuser: AuthUser
1788 1788 :param comment_id: comment id found in the URL of comment
1789 1789 :type comment_id: str or int
1790 1790
1791 1791 Example output:
1792 1792
1793 1793 .. code-block:: bash
1794 1794
1795 1795 {
1796 1796 "id" : <id_given_in_input>,
1797 1797 "result" : {
1798 1798 "comment_author": <USER_DETAILS>,
1799 1799 "comment_created_on": "2017-02-01T14:38:16.309",
1800 1800 "comment_f_path": "file.txt",
1801 1801 "comment_id": 282,
1802 1802 "comment_lineno": "n1",
1803 1803 "comment_resolved_by": null,
1804 1804 "comment_status": [],
1805 1805 "comment_text": "This file needs a header",
1806 1806 "comment_type": "todo",
1807 1807 "comment_last_version: 0
1808 1808 },
1809 1809 "error" : null
1810 1810 }
1811 1811
1812 1812 """
1813 1813
1814 1814 comment = ChangesetComment.get(comment_id)
1815 1815 if not comment:
1816 1816 raise JSONRPCError(f'comment `{comment_id}` does not exist')
1817 1817
1818 1818 perms = ('repository.read', 'repository.write', 'repository.admin')
1819 1819 has_comment_perm = HasRepoPermissionAnyApi(*perms)\
1820 1820 (user=apiuser, repo_name=comment.repo.repo_name)
1821 1821
1822 1822 if not has_comment_perm:
1823 1823 raise JSONRPCError(f'comment `{comment_id}` does not exist')
1824 1824
1825 1825 return comment
1826 1826
1827 1827
1828 1828 @jsonrpc_method()
1829 1829 def edit_comment(request, apiuser, message, comment_id, version,
1830 1830 userid=Optional(OAttr('apiuser'))):
1831 1831 """
1832 1832 Edit comment on the pull request or commit,
1833 1833 specified by the `comment_id` and version. Initially version should be 0
1834 1834
1835 1835 :param apiuser: This is filled automatically from the |authtoken|.
1836 1836 :type apiuser: AuthUser
1837 1837 :param comment_id: Specify the comment_id for editing
1838 1838 :type comment_id: int
1839 1839 :param version: version of the comment that will be created, starts from 0
1840 1840 :type version: int
1841 1841 :param message: The text content of the comment.
1842 1842 :type message: str
1843 1843 :param userid: Comment on the pull request as this user
1844 1844 :type userid: Optional(str or int)
1845 1845
1846 1846 Example output:
1847 1847
1848 1848 .. code-block:: bash
1849 1849
1850 1850 id : <id_given_in_input>
1851 1851 result : {
1852 1852 "comment": "<comment data>",
1853 1853 "version": "<Integer>",
1854 1854 },
1855 1855 error : null
1856 1856 """
1857 1857
1858 1858 auth_user = apiuser
1859 1859 comment = ChangesetComment.get(comment_id)
1860 1860 if not comment:
1861 1861 raise JSONRPCError(f'comment `{comment_id}` does not exist')
1862 1862
1863 1863 is_super_admin = has_superadmin_permission(apiuser)
1864 1864 is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1865 1865 (user=apiuser, repo_name=comment.repo.repo_name)
1866 1866
1867 1867 if not isinstance(userid, Optional):
1868 1868 if is_super_admin or is_repo_admin:
1869 1869 apiuser = get_user_or_error(userid)
1870 1870 auth_user = apiuser.AuthUser()
1871 1871 else:
1872 1872 raise JSONRPCError('userid is not the same as your user')
1873 1873
1874 1874 comment_author = comment.author.user_id == auth_user.user_id
1875 1875
1876 1876 if comment.immutable:
1877 1877 raise JSONRPCError("Immutable comment cannot be edited")
1878 1878
1879 1879 if not (is_super_admin or is_repo_admin or comment_author):
1880 1880 raise JSONRPCError("you don't have access to edit this comment")
1881 1881
1882 1882 try:
1883 1883 comment_history = CommentsModel().edit(
1884 1884 comment_id=comment_id,
1885 1885 text=message,
1886 1886 auth_user=auth_user,
1887 1887 version=version,
1888 1888 )
1889 1889 Session().commit()
1890 1890 except CommentVersionMismatch:
1891 1891 raise JSONRPCError(
1892 1892 f'comment ({comment_id}) version ({version}) mismatch'
1893 1893 )
1894 1894 if not comment_history and not message:
1895 1895 raise JSONRPCError(
1896 1896 f"comment ({comment_id}) can't be changed with empty string"
1897 1897 )
1898 1898
1899 1899 if comment.pull_request:
1900 1900 pull_request = comment.pull_request
1901 1901 PullRequestModel().trigger_pull_request_hook(
1902 1902 pull_request, apiuser, 'comment_edit',
1903 1903 data={'comment': comment})
1904 1904 else:
1905 1905 db_repo = comment.repo
1906 1906 commit_id = comment.revision
1907 1907 commit = db_repo.get_commit(commit_id)
1908 1908 CommentsModel().trigger_commit_comment_hook(
1909 1909 db_repo, apiuser, 'edit',
1910 1910 data={'comment': comment, 'commit': commit})
1911 1911
1912 1912 data = {
1913 1913 'comment': comment,
1914 1914 'version': comment_history.version if comment_history else None,
1915 1915 }
1916 1916 return data
1917 1917
1918 1918
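To make the version handling concrete, here is a hedged sketch of two consecutive edits, reusing the hypothetical api_call helper from the lock example; the comment id is a placeholder.

.. code-block:: python

    # The first edit of a comment is made against version=0; each successful edit
    # returns the new version, which must be echoed back on the next edit or the
    # call fails with a 'version mismatch' JSON-RPC error (CommentVersionMismatch
    # server-side).
    version = 0
    resp = api_call('edit_comment', comment_id=282, version=version,
                    message='first rewording')
    version = resp['result']['version']

    resp = api_call('edit_comment', comment_id=282, version=version,
                    message='second rewording')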
1919 1919 # TODO(marcink): write this with all required logic for deleting a comments in PR or commits
1920 1920 # @jsonrpc_method()
1921 1921 # def delete_comment(request, apiuser, comment_id):
1922 1922 # auth_user = apiuser
1923 1923 #
1924 1924 # comment = ChangesetComment.get(comment_id)
1925 1925 # if not comment:
1926 1926 # raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1927 1927 #
1928 1928 # is_super_admin = has_superadmin_permission(apiuser)
1929 1929 # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1930 1930 # (user=apiuser, repo_name=comment.repo.repo_name)
1931 1931 #
1932 1932 # comment_author = comment.author.user_id == auth_user.user_id
1933 1933 # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
1934 1934 # raise JSONRPCError("you don't have access to edit this comment")
1935 1935
1936 1936 @jsonrpc_method()
1937 1937 def grant_user_permission(request, apiuser, repoid, userid, perm):
1938 1938 """
1939 1939 Grant permissions for the specified user on the given repository,
1940 1940 or update existing permissions if found.
1941 1941
1942 1942 This command can only be run using an |authtoken| with admin
1943 1943 permissions on the |repo|.
1944 1944
1945 1945 :param apiuser: This is filled automatically from the |authtoken|.
1946 1946 :type apiuser: AuthUser
1947 1947 :param repoid: Set the repository name or repository ID.
1948 1948 :type repoid: str or int
1949 1949 :param userid: Set the user name.
1950 1950 :type userid: str
1951 1951 :param perm: Set the user permissions, using the following format
1952 1952 ``(repository.(none|read|write|admin))``
1953 1953 :type perm: str
1954 1954
1955 1955 Example output:
1956 1956
1957 1957 .. code-block:: bash
1958 1958
1959 1959 id : <id_given_in_input>
1960 1960 result: {
1961 1961 "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
1962 1962 "success": true
1963 1963 }
1964 1964 error: null
1965 1965 """
1966 1966
1967 1967 repo = get_repo_or_error(repoid)
1968 1968 user = get_user_or_error(userid)
1969 1969 perm = get_perm_or_error(perm)
1970 1970 if not has_superadmin_permission(apiuser):
1971 1971 _perms = ('repository.admin',)
1972 1972 validate_repo_permissions(apiuser, repoid, repo, _perms)
1973 1973
1974 1974 perm_additions = [[user.user_id, perm.permission_name, "user"]]
1975 1975 try:
1976 1976 changes = RepoModel().update_permissions(
1977 1977 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
1978 1978
1979 1979 action_data = {
1980 1980 'added': changes['added'],
1981 1981 'updated': changes['updated'],
1982 1982 'deleted': changes['deleted'],
1983 1983 }
1984 1984 audit_logger.store_api(
1985 1985 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1986 1986 Session().commit()
1987 1987 PermissionModel().flush_user_permission_caches(changes)
1988 1988
1989 1989 return {
1990 1990 'msg': 'Granted perm: `{}` for user: `{}` in repo: `{}`'.format(
1991 1991 perm.permission_name, user.username, repo.repo_name
1992 1992 ),
1993 1993 'success': True
1994 1994 }
1995 1995 except Exception:
1996 1996 log.exception("Exception occurred while trying edit permissions for repo")
1997 1997 raise JSONRPCError(
1998 1998 'failed to edit permission for user: `{}` in repo: `{}`'.format(
1999 1999 userid, repoid
2000 2000 )
2001 2001 )
2002 2002
2003 2003
2004 2004 @jsonrpc_method()
2005 2005 def revoke_user_permission(request, apiuser, repoid, userid):
2006 2006 """
2007 2007 Revoke permission for a user on the specified repository.
2008 2008
2009 2009 This command can only be run using an |authtoken| with admin
2010 2010 permissions on the |repo|.
2011 2011
2012 2012 :param apiuser: This is filled automatically from the |authtoken|.
2013 2013 :type apiuser: AuthUser
2014 2014 :param repoid: Set the repository name or repository ID.
2015 2015 :type repoid: str or int
2016 2016 :param userid: Set the user name of revoked user.
2017 2017 :type userid: str or int
2018 2018
2019 2019 Example output:
2020 2020
2021 2021 .. code-block:: bash
2022 2022
2023 2023 id : <id_given_in_input>
2024 2024 result: {
2025 2025 "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
2026 2026 "success": true
2027 2027 }
2028 2028 error: null
2029 2029 """
2030 2030
2031 2031 repo = get_repo_or_error(repoid)
2032 2032 user = get_user_or_error(userid)
2033 2033 if not has_superadmin_permission(apiuser):
2034 2034 _perms = ('repository.admin',)
2035 2035 validate_repo_permissions(apiuser, repoid, repo, _perms)
2036 2036
2037 2037 perm_deletions = [[user.user_id, None, "user"]]
2038 2038 try:
2039 2039 changes = RepoModel().update_permissions(
2040 2040 repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
2041 2041
2042 2042 action_data = {
2043 2043 'added': changes['added'],
2044 2044 'updated': changes['updated'],
2045 2045 'deleted': changes['deleted'],
2046 2046 }
2047 2047 audit_logger.store_api(
2048 2048 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2049 2049 Session().commit()
2050 2050 PermissionModel().flush_user_permission_caches(changes)
2051 2051
2052 2052 return {
2053 2053 'msg': 'Revoked perm for user: `{}` in repo: `{}`'.format(
2054 2054 user.username, repo.repo_name
2055 2055 ),
2056 2056 'success': True
2057 2057 }
2058 2058 except Exception:
2059 2059 log.exception("Exception occurred while trying revoke permissions to repo")
2060 2060 raise JSONRPCError(
2061 2061 'failed to edit permission for user: `{}` in repo: `{}`'.format(
2062 2062 userid, repoid
2063 2063 )
2064 2064 )
2065 2065
2066 2066
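A short hedged sketch of the grant/revoke pair above (hypothetical api_call helper and placeholder names; `perm` must match repository.(none|read|write|admin)).

.. code-block:: python

    # Grant write access to a user, then take it away again.
    api_call('grant_user_permission', repoid='my-repo',
             userid='dev1', perm='repository.write')
    api_call('revoke_user_permission', repoid='my-repo', userid='dev1')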
2067 2067 @jsonrpc_method()
2068 2068 def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
2069 2069 """
2070 2070 Grant permission for a user group on the specified repository,
2071 2071 or update existing permissions.
2072 2072
2073 2073 This command can only be run using an |authtoken| with admin
2074 2074 permissions on the |repo|.
2075 2075
2076 2076 :param apiuser: This is filled automatically from the |authtoken|.
2077 2077 :type apiuser: AuthUser
2078 2078 :param repoid: Set the repository name or repository ID.
2079 2079 :type repoid: str or int
2080 2080 :param usergroupid: Specify the ID of the user group.
2081 2081 :type usergroupid: str or int
2082 2082 :param perm: Set the user group permissions using the following
2083 2083 format: (repository.(none|read|write|admin))
2084 2084 :type perm: str
2085 2085
2086 2086 Example output:
2087 2087
2088 2088 .. code-block:: bash
2089 2089
2090 2090 id : <id_given_in_input>
2091 2091 result : {
2092 2092 "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
2093 2093 "success": true
2094 2094
2095 2095 }
2096 2096 error : null
2097 2097
2098 2098 Example error output:
2099 2099
2100 2100 .. code-block:: bash
2101 2101
2102 2102 id : <id_given_in_input>
2103 2103 result : null
2104 2104 error : {
2105 2105 "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
2106 2106 }
2107 2107
2108 2108 """
2109 2109
2110 2110 repo = get_repo_or_error(repoid)
2111 2111 perm = get_perm_or_error(perm)
2112 2112 if not has_superadmin_permission(apiuser):
2113 2113 _perms = ('repository.admin',)
2114 2114 validate_repo_permissions(apiuser, repoid, repo, _perms)
2115 2115
2116 2116 user_group = get_user_group_or_error(usergroupid)
2117 2117 if not has_superadmin_permission(apiuser):
2118 2118 # check if we have at least read permission for this user group !
2119 2119 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
2120 2120 if not HasUserGroupPermissionAnyApi(*_perms)(
2121 2121 user=apiuser, user_group_name=user_group.users_group_name):
2122 2122 raise JSONRPCError(
2123 2123 f'user group `{usergroupid}` does not exist')
2124 2124
2125 2125 perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]]
2126 2126 try:
2127 2127 changes = RepoModel().update_permissions(
2128 2128 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
2129 2129 action_data = {
2130 2130 'added': changes['added'],
2131 2131 'updated': changes['updated'],
2132 2132 'deleted': changes['deleted'],
2133 2133 }
2134 2134 audit_logger.store_api(
2135 2135 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2136 2136 Session().commit()
2137 2137 PermissionModel().flush_user_permission_caches(changes)
2138 2138
2139 2139 return {
2140 2140 'msg': 'Granted perm: `%s` for user group: `%s` in '
2141 2141 'repo: `%s`' % (
2142 2142 perm.permission_name, user_group.users_group_name,
2143 2143 repo.repo_name
2144 2144 ),
2145 2145 'success': True
2146 2146 }
2147 2147 except Exception:
2148 2148 log.exception(
2149 2149 "Exception occurred while trying change permission on repo")
2150 2150 raise JSONRPCError(
2151 2151 'failed to edit permission for user group: `%s` in '
2152 2152 'repo: `%s`' % (
2153 2153 usergroupid, repo.repo_name
2154 2154 )
2155 2155 )
2156 2156
2157 2157
2158 2158 @jsonrpc_method()
2159 2159 def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
2160 2160 """
2161 2161 Revoke the permissions of a user group on a given repository.
2162 2162
2163 2163 This command can only be run using an |authtoken| with admin
2164 2164 permissions on the |repo|.
2165 2165
2166 2166 :param apiuser: This is filled automatically from the |authtoken|.
2167 2167 :type apiuser: AuthUser
2168 2168 :param repoid: Set the repository name or repository ID.
2169 2169 :type repoid: str or int
2170 2170 :param usergroupid: Specify the user group ID.
2171 2171 :type usergroupid: str or int
2172 2172
2173 2173 Example output:
2174 2174
2175 2175 .. code-block:: bash
2176 2176
2177 2177 id : <id_given_in_input>
2178 2178 result: {
2179 2179 "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
2180 2180 "success": true
2181 2181 }
2182 2182 error: null
2183 2183 """
2184 2184
2185 2185 repo = get_repo_or_error(repoid)
2186 2186 if not has_superadmin_permission(apiuser):
2187 2187 _perms = ('repository.admin',)
2188 2188 validate_repo_permissions(apiuser, repoid, repo, _perms)
2189 2189
2190 2190 user_group = get_user_group_or_error(usergroupid)
2191 2191 if not has_superadmin_permission(apiuser):
2192 2192 # check if we have at least read permission for this user group !
2193 2193 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
2194 2194 if not HasUserGroupPermissionAnyApi(*_perms)(
2195 2195 user=apiuser, user_group_name=user_group.users_group_name):
2196 2196 raise JSONRPCError(
2197 2197 f'user group `{usergroupid}` does not exist')
2198 2198
2199 2199 perm_deletions = [[user_group.users_group_id, None, "user_group"]]
2200 2200 try:
2201 2201 changes = RepoModel().update_permissions(
2202 2202 repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
2203 2203 action_data = {
2204 2204 'added': changes['added'],
2205 2205 'updated': changes['updated'],
2206 2206 'deleted': changes['deleted'],
2207 2207 }
2208 2208 audit_logger.store_api(
2209 2209 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2210 2210 Session().commit()
2211 2211 PermissionModel().flush_user_permission_caches(changes)
2212 2212
2213 2213 return {
2214 2214 'msg': 'Revoked perm for user group: `{}` in repo: `{}`'.format(
2215 2215 user_group.users_group_name, repo.repo_name
2216 2216 ),
2217 2217 'success': True
2218 2218 }
2219 2219 except Exception:
2220 2220 log.exception("Exception occurred while trying revoke "
2221 2221 "user group permission on repo")
2222 2222 raise JSONRPCError(
2223 2223 'failed to edit permission for user group: `%s` in '
2224 2224 'repo: `%s`' % (
2225 2225 user_group.users_group_name, repo.repo_name
2226 2226 )
2227 2227 )
2228 2228
2229 2229
2230 2230 @jsonrpc_method()
2231 2231 def pull(request, apiuser, repoid, remote_uri=Optional(None)):
2232 2232 """
2233 2233 Triggers a pull on the given repository from a remote location. You
2234 2234 can use this to keep remote repositories up-to-date.
2235 2235
2236 2236 This command can only be run using an |authtoken| with admin
2237 2237 rights to the specified repository. For more information,
2238 2238 see :ref:`config-token-ref`.
2239 2239
2240 2240 This command takes the following options:
2241 2241
2242 2242 :param apiuser: This is filled automatically from the |authtoken|.
2243 2243 :type apiuser: AuthUser
2244 2244 :param repoid: The repository name or repository ID.
2245 2245 :type repoid: str or int
2246 2246 :param remote_uri: Optional remote URI to pass in for pull
2247 2247 :type remote_uri: str
2248 2248
2249 2249 Example output:
2250 2250
2251 2251 .. code-block:: bash
2252 2252
2253 2253 id : <id_given_in_input>
2254 2254 result : {
2255 2255 "msg": "Pulled from url `<remote_url>` on repo `<repository name>`"
2256 2256 "repository": "<repository name>"
2257 2257 }
2258 2258 error : null
2259 2259
2260 2260 Example error output:
2261 2261
2262 2262 .. code-block:: bash
2263 2263
2264 2264 id : <id_given_in_input>
2265 2265 result : null
2266 2266 error : {
2267 2267 "Unable to push changes from `<remote_url>`"
2268 2268 }
2269 2269
2270 2270 """
2271 2271
2272 2272 repo = get_repo_or_error(repoid)
2273 2273 remote_uri = Optional.extract(remote_uri)
2274 2274 remote_uri_display = remote_uri or repo.clone_uri_hidden
2275 2275 if not has_superadmin_permission(apiuser):
2276 2276 _perms = ('repository.admin',)
2277 2277 validate_repo_permissions(apiuser, repoid, repo, _perms)
2278 2278
2279 2279 try:
2280 2280 ScmModel().pull_changes(
2281 2281 repo.repo_name, apiuser.username, remote_uri=remote_uri)
2282 2282 return {
2283 2283 'msg': 'Pulled from url `{}` on repo `{}`'.format(
2284 2284 remote_uri_display, repo.repo_name),
2285 2285 'repository': repo.repo_name
2286 2286 }
2287 2287 except Exception:
2288 2288 log.exception("Exception occurred while trying to "
2289 2289 "pull changes from remote location")
2290 2290 raise JSONRPCError(
2291 2291 'Unable to pull changes from `%s`' % remote_uri_display
2292 2292 )
2293 2293
2294 2294
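Illustrative only (placeholder names, hypothetical api_call helper as in the lock example): keeping a mirror in sync with and without an explicit remote.

.. code-block:: python

    # With remote_uri omitted the repository's stored clone URI is used.
    api_call('pull', repoid='mirrors/upstream-project')

    # An explicit remote_uri overrides the stored one for this pull only.
    api_call('pull', repoid='mirrors/upstream-project',
             remote_uri='https://git.example.com/upstream/project.git')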
2295 2295 @jsonrpc_method()
2296 2296 def strip(request, apiuser, repoid, revision, branch):
2297 2297 """
2298 2298 Strips the given revision from the specified repository.
2299 2299
2300 2300 * This will remove the revision and all of its descendants.
2301 2301
2302 2302 This command can only be run using an |authtoken| with admin rights to
2303 2303 the specified repository.
2304 2304
2305 2305 This command takes the following options:
2306 2306
2307 2307 :param apiuser: This is filled automatically from the |authtoken|.
2308 2308 :type apiuser: AuthUser
2309 2309 :param repoid: The repository name or repository ID.
2310 2310 :type repoid: str or int
2311 2311 :param revision: The revision you wish to strip.
2312 2312 :type revision: str
2313 2313 :param branch: The branch from which to strip the revision.
2314 2314 :type branch: str
2315 2315
2316 2316 Example output:
2317 2317
2318 2318 .. code-block:: bash
2319 2319
2320 2320 id : <id_given_in_input>
2321 2321 result : {
2322 2322 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
2323 2323 "repository": "<repository name>"
2324 2324 }
2325 2325 error : null
2326 2326
2327 2327 Example error output:
2328 2328
2329 2329 .. code-block:: bash
2330 2330
2331 2331 id : <id_given_in_input>
2332 2332 result : null
2333 2333 error : {
2334 2334 "Unable to strip commit <commit_hash> from repo `<repository name>`"
2335 2335 }
2336 2336
2337 2337 """
2338 2338
2339 2339 repo = get_repo_or_error(repoid)
2340 2340 if not has_superadmin_permission(apiuser):
2341 2341 _perms = ('repository.admin',)
2342 2342 validate_repo_permissions(apiuser, repoid, repo, _perms)
2343 2343
2344 2344 try:
2345 2345 ScmModel().strip(repo, revision, branch)
2346 2346 audit_logger.store_api(
2347 2347 'repo.commit.strip', action_data={'commit_id': revision},
2348 2348 repo=repo,
2349 2349 user=apiuser, commit=True)
2350 2350
2351 2351 return {
2352 2352 'msg': 'Stripped commit {} from repo `{}`'.format(
2353 2353 revision, repo.repo_name),
2354 2354 'repository': repo.repo_name
2355 2355 }
2356 2356 except Exception:
2357 2357 log.exception("Exception while trying to strip")
2358 2358 raise JSONRPCError(
2359 2359 'Unable to strip commit {} from repo `{}`'.format(
2360 2360 revision, repo.repo_name)
2361 2361 )
2362 2362
2363 2363
2364 2364 @jsonrpc_method()
2365 2365 def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
2366 2366 """
2367 2367 Returns all settings for a repository. If key is given it only returns the
2368 2368 setting identified by the key or null.
2369 2369
2370 2370 :param apiuser: This is filled automatically from the |authtoken|.
2371 2371 :type apiuser: AuthUser
2372 2372 :param repoid: The repository name or repository id.
2373 2373 :type repoid: str or int
2374 2374 :param key: Key of the setting to return.
2375 2375 :type key: Optional(str)
2376 2376
2377 2377 Example output:
2378 2378
2379 2379 .. code-block:: bash
2380 2380
2381 2381 {
2382 2382 "error": null,
2383 2383 "id": 237,
2384 2384 "result": {
2385 2385 "extensions_largefiles": true,
2386 2386 "extensions_evolve": true,
2387 2387 "hooks_changegroup_push_logger": true,
2388 2388 "hooks_changegroup_repo_size": false,
2389 2389 "hooks_outgoing_pull_logger": true,
2390 2390 "phases_publish": "True",
2391 2391 "rhodecode_hg_use_rebase_for_merging": true,
2392 2392 "rhodecode_pr_merge_enabled": true,
2393 2393 "rhodecode_use_outdated_comments": true
2394 2394 }
2395 2395 }
2396 2396 """
2397 2397
2398 2398 # Restrict access to this api method to super-admins, and repo admins only.
2399 2399 repo = get_repo_or_error(repoid)
2400 2400 if not has_superadmin_permission(apiuser):
2401 2401 _perms = ('repository.admin',)
2402 2402 validate_repo_permissions(apiuser, repoid, repo, _perms)
2403 2403
2404 2404 try:
2405 2405 settings_model = VcsSettingsModel(repo=repo)
2406 2406 settings = settings_model.get_global_settings()
2407 2407 settings.update(settings_model.get_repo_settings())
2408 2408
2409 2409 # If only a single setting is requested fetch it from all settings.
2410 2410 key = Optional.extract(key)
2411 2411 if key is not None:
2412 2412 settings = settings.get(key, None)
2413 2413 except Exception:
2414 2414 msg = f'Failed to fetch settings for repository `{repoid}`'
2415 2415 log.exception(msg)
2416 2416 raise JSONRPCError(msg)
2417 2417
2418 2418 return settings
2419 2419
2420 2420
2421 2421 @jsonrpc_method()
2422 2422 def set_repo_settings(request, apiuser, repoid, settings):
2423 2423 """
2424 2424 Update repository settings. Returns true on success.
2425 2425
2426 2426 :param apiuser: This is filled automatically from the |authtoken|.
2427 2427 :type apiuser: AuthUser
2428 2428 :param repoid: The repository name or repository id.
2429 2429 :type repoid: str or int
2430 2430 :param settings: The new settings for the repository.
2431 2431 :type settings: dict
2432 2432
2433 2433 Example output:
2434 2434
2435 2435 .. code-block:: bash
2436 2436
2437 2437 {
2438 2438 "error": null,
2439 2439 "id": 237,
2440 2440 "result": true
2441 2441 }
2442 2442 """
2443 2443 # Restrict access to this api method to super-admins, and repo admins only.
2444 2444 repo = get_repo_or_error(repoid)
2445 2445 if not has_superadmin_permission(apiuser):
2446 2446 _perms = ('repository.admin',)
2447 2447 validate_repo_permissions(apiuser, repoid, repo, _perms)
2448 2448
2449 2449 if type(settings) is not dict:
2450 2450 raise JSONRPCError('Settings have to be a JSON Object.')
2451 2451
2452 2452 try:
2453 2453 settings_model = VcsSettingsModel(repo=repoid)
2454 2454
2455 2455 # Merge global, repo and incoming settings.
2456 2456 new_settings = settings_model.get_global_settings()
2457 2457 new_settings.update(settings_model.get_repo_settings())
2458 2458 new_settings.update(settings)
2459 2459
2460 2460 # Update the settings.
2461 2461 inherit_global_settings = new_settings.get(
2462 2462 'inherit_global_settings', False)
2463 2463 settings_model.create_or_update_repo_settings(
2464 2464 new_settings, inherit_global_settings=inherit_global_settings)
2465 2465 Session().commit()
2466 2466 except Exception:
2467 2467 msg = f'Failed to update settings for repository `{repoid}`'
2468 2468 log.exception(msg)
2469 2469 raise JSONRPCError(msg)
2470 2470
2471 2471 # Indicate success.
2472 2472 return True
2473 2473
2474 2474
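A hedged read-modify-write sketch for the two settings methods above (hypothetical api_call helper; the setting key is taken from the example output of get_repo_settings).

.. code-block:: python

    # set_repo_settings merges global + repo + supplied settings server-side,
    # so only the keys being changed need to be sent.
    effective = api_call('get_repo_settings', repoid='my-repo')['result']
    print(effective.get('rhodecode_pr_merge_enabled'))

    api_call('set_repo_settings', repoid='my-repo',
             settings={'rhodecode_pr_merge_enabled': False})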
2475 2475 @jsonrpc_method()
2476 2476 def maintenance(request, apiuser, repoid):
2477 2477 """
2478 2478 Triggers a maintenance on the given repository.
2479 2479
2480 2480 This command can only be run using an |authtoken| with admin
2481 2481 rights to the specified repository. For more information,
2482 2482 see :ref:`config-token-ref`.
2483 2483
2484 2484 This command takes the following options:
2485 2485
2486 2486 :param apiuser: This is filled automatically from the |authtoken|.
2487 2487 :type apiuser: AuthUser
2488 2488 :param repoid: The repository name or repository ID.
2489 2489 :type repoid: str or int
2490 2490
2491 2491 Example output:
2492 2492
2493 2493 .. code-block:: bash
2494 2494
2495 2495 id : <id_given_in_input>
2496 2496 result : {
2497 2497 "msg": "executed maintenance command",
2498 2498 "executed_actions": [
2499 2499 <action_message>, <action_message2>...
2500 2500 ],
2501 2501 "repository": "<repository name>"
2502 2502 }
2503 2503 error : null
2504 2504
2505 2505 Example error output:
2506 2506
2507 2507 .. code-block:: bash
2508 2508
2509 2509 id : <id_given_in_input>
2510 2510 result : null
2511 2511 error : {
2512 2512 "Unable to execute maintenance on `<reponame>`"
2513 2513 }
2514 2514
2515 2515 """
2516 2516
2517 2517 repo = get_repo_or_error(repoid)
2518 2518 if not has_superadmin_permission(apiuser):
2519 2519 _perms = ('repository.admin',)
2520 2520 validate_repo_permissions(apiuser, repoid, repo, _perms)
2521 2521
2522 2522 try:
2523 2523 maintenance = repo_maintenance.RepoMaintenance()
2524 2524 executed_actions = maintenance.execute(repo)
2525 2525
2526 2526 return {
2527 2527 'msg': 'executed maintenance command',
2528 2528 'executed_actions': executed_actions,
2529 2529 'repository': repo.repo_name
2530 2530 }
2531 2531 except Exception:
2532 2532 log.exception("Exception occurred while trying to run maintenance")
2533 2533 raise JSONRPCError(
2534 2534 'Unable to execute maintenance on `%s`' % repo.repo_name)
@@ -1,53 +1,53 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20
21 21 from rhodecode import events
22 22 from rhodecode.lib import rc_cache
23 23
24 24 log = logging.getLogger(__name__)
25 25
26 26 # names of namespaces used for different permission related caches
27 27 # during flush operation we need to take care of all those
28 28 cache_namespaces = [
29 'cache_user_auth.{}',
30 'cache_user_repo_acl_ids.{}',
31 'cache_user_user_group_acl_ids.{}',
32 'cache_user_repo_group_acl_ids.{}'
29 f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{{}}',
30 f'cache_user_repo_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{{}}',
31 f'cache_user_user_group_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{{}}',
32 f'cache_user_repo_group_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{{}}'
33 33 ]
34 34
35 35
36 36 def trigger_user_permission_flush(event):
37 37 """
38 38 Subscriber to the `UserPermissionsChange`. This triggers the
39 39 automatic flush of permission caches, so the users affected receive new permissions
40 40 Right Away
41 41 """
42 42
43 43 affected_user_ids = set(event.user_ids)
44 44 for user_id in affected_user_ids:
45 45 for cache_namespace_uid_tmpl in cache_namespaces:
46 46 cache_namespace_uid = cache_namespace_uid_tmpl.format(user_id)
47 47 del_keys = rc_cache.clear_cache_namespace('cache_perms', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
48 48 log.debug('Invalidated %s cache keys for user_id: %s and namespace %s',
49 49 del_keys, user_id, cache_namespace_uid)
50 50
51 51
52 52 def includeme(config):
53 53 config.add_subscriber(trigger_user_permission_flush, events.UserPermissionsChange)
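To show what the change above buys, here is a minimal sketch of how the versioned templates expand per user; PERMISSIONS_CACHE_VER is stood in by a plain string, which is an assumption for illustration only.

.. code-block:: python

    # Stand-in for rc_cache.PERMISSIONS_CACHE_VER; the real value lives in rc_cache.
    PERMISSIONS_CACHE_VER = 'v1'

    cache_namespaces = [
        f'cache_user_auth.{PERMISSIONS_CACHE_VER}.{{}}',
        f'cache_user_repo_acl_ids.{PERMISSIONS_CACHE_VER}.{{}}',
    ]

    user_id = 42
    print([tmpl.format(user_id) for tmpl in cache_namespaces])
    # -> ['cache_user_auth.v1.42', 'cache_user_repo_acl_ids.v1.42']
    # The version segment makes the global prefix explicit, so clearing or bumping
    # the permissions cache can target exactly these namespaces.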
@@ -1,1321 +1,1321 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20 import datetime
21 21 import formencode
22 22 import formencode.htmlfill
23 23
24 24 from pyramid.httpexceptions import HTTPFound
25 25 from pyramid.renderers import render
26 26 from pyramid.response import Response
27 27
28 28 from rhodecode import events
29 29 from rhodecode.apps._base import BaseAppView, DataGridAppView, UserAppView
30 30 from rhodecode.apps.ssh_support import SshKeyFileChangeEvent
31 31 from rhodecode.authentication.base import get_authn_registry, RhodeCodeExternalAuthPlugin
32 32 from rhodecode.authentication.plugins import auth_rhodecode
33 33 from rhodecode.events import trigger
34 34 from rhodecode.model.db import true, UserNotice
35 35
36 36 from rhodecode.lib import audit_logger, rc_cache, auth
37 37 from rhodecode.lib.exceptions import (
38 38 UserCreationError, UserOwnsReposException, UserOwnsRepoGroupsException,
39 39 UserOwnsUserGroupsException, UserOwnsPullRequestsException,
40 40 UserOwnsArtifactsException, DefaultUserException)
41 41 from rhodecode.lib import ext_json
42 42 from rhodecode.lib.auth import (
43 43 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
44 44 from rhodecode.lib import helpers as h
45 45 from rhodecode.lib.helpers import SqlPage
46 46 from rhodecode.lib.utils2 import safe_int, safe_str, AttributeDict
47 47 from rhodecode.model.auth_token import AuthTokenModel
48 48 from rhodecode.model.forms import (
49 49 UserForm, UserIndividualPermissionsForm, UserPermissionsForm,
50 50 UserExtraEmailForm, UserExtraIpForm)
51 51 from rhodecode.model.permission import PermissionModel
52 52 from rhodecode.model.repo_group import RepoGroupModel
53 53 from rhodecode.model.ssh_key import SshKeyModel
54 54 from rhodecode.model.user import UserModel
55 55 from rhodecode.model.user_group import UserGroupModel
56 56 from rhodecode.model.db import (
57 57 or_, coalesce,IntegrityError, User, UserGroup, UserIpMap, UserEmailMap,
58 58 UserApiKeys, UserSshKeys, RepoGroup)
59 59 from rhodecode.model.meta import Session
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63
64 64 class AdminUsersView(BaseAppView, DataGridAppView):
65 65
66 66 def load_default_context(self):
67 67 c = self._get_local_tmpl_context()
68 68 return c
69 69
70 70 @LoginRequired()
71 71 @HasPermissionAllDecorator('hg.admin')
72 72 def users_list(self):
73 73 c = self.load_default_context()
74 74 return self._get_template_context(c)
75 75
76 76 @LoginRequired()
77 77 @HasPermissionAllDecorator('hg.admin')
78 78 def users_list_data(self):
79 79 self.load_default_context()
80 80 column_map = {
81 81 'first_name': 'name',
82 82 'last_name': 'lastname',
83 83 }
84 84 draw, start, limit = self._extract_chunk(self.request)
85 85 search_q, order_by, order_dir = self._extract_ordering(
86 86 self.request, column_map=column_map)
87 87 _render = self.request.get_partial_renderer(
88 88 'rhodecode:templates/data_table/_dt_elements.mako')
89 89
90 90 def user_actions(user_id, username):
91 91 return _render("user_actions", user_id, username)
92 92
93 93 users_data_total_count = User.query()\
94 94 .filter(User.username != User.DEFAULT_USER) \
95 95 .count()
96 96
97 97 users_data_total_inactive_count = User.query()\
98 98 .filter(User.username != User.DEFAULT_USER) \
99 99 .filter(User.active != true())\
100 100 .count()
101 101
102 102 # json generate
103 103 base_q = User.query().filter(User.username != User.DEFAULT_USER)
104 104 base_inactive_q = base_q.filter(User.active != true())
105 105
106 106 if search_q:
107 107 like_expression = '%{}%'.format(safe_str(search_q))
108 108 base_q = base_q.filter(or_(
109 109 User.username.ilike(like_expression),
110 110 User._email.ilike(like_expression),
111 111 User.name.ilike(like_expression),
112 112 User.lastname.ilike(like_expression),
113 113 ))
114 114 base_inactive_q = base_q.filter(User.active != true())
115 115
116 116 users_data_total_filtered_count = base_q.count()
117 117 users_data_total_filtered_inactive_count = base_inactive_q.count()
118 118
119 119 sort_col = getattr(User, order_by, None)
120 120 if sort_col:
121 121 if order_dir == 'asc':
122 122 # handle null values properly to order by NULL last
123 123 if order_by in ['last_activity']:
124 124 sort_col = coalesce(sort_col, datetime.date.max)
125 125 sort_col = sort_col.asc()
126 126 else:
127 127 # handle null values properly to order by NULL last
128 128 if order_by in ['last_activity']:
129 129 sort_col = coalesce(sort_col, datetime.date.min)
130 130 sort_col = sort_col.desc()
131 131
132 132 base_q = base_q.order_by(sort_col)
133 133 base_q = base_q.offset(start).limit(limit)
134 134
135 135 users_list = base_q.all()
136 136
137 137 users_data = []
138 138 for user in users_list:
139 139 users_data.append({
140 140 "username": h.gravatar_with_user(self.request, user.username),
141 141 "email": user.email,
142 142 "first_name": user.first_name,
143 143 "last_name": user.last_name,
144 144 "last_login": h.format_date(user.last_login),
145 145 "last_activity": h.format_date(user.last_activity),
146 146 "active": h.bool2icon(user.active),
147 147 "active_raw": user.active,
148 148 "admin": h.bool2icon(user.admin),
149 149 "extern_type": user.extern_type,
150 150 "extern_name": user.extern_name,
151 151 "action": user_actions(user.user_id, user.username),
152 152 })
153 153 data = ({
154 154 'draw': draw,
155 155 'data': users_data,
156 156 'recordsTotal': users_data_total_count,
157 157 'recordsFiltered': users_data_total_filtered_count,
158 158 'recordsTotalInactive': users_data_total_inactive_count,
159 159 'recordsFilteredInactive': users_data_total_filtered_inactive_count
160 160 })
161 161
162 162 return data
163 163
164 164 def _set_personal_repo_group_template_vars(self, c_obj):
165 165 DummyUser = AttributeDict({
166 166 'username': '${username}',
167 167 'user_id': '${user_id}',
168 168 })
169 169 c_obj.default_create_repo_group = RepoGroupModel() \
170 170 .get_default_create_personal_repo_group()
171 171 c_obj.personal_repo_group_name = RepoGroupModel() \
172 172 .get_personal_group_name(DummyUser)
173 173
174 174 @LoginRequired()
175 175 @HasPermissionAllDecorator('hg.admin')
176 176 def users_new(self):
177 177 _ = self.request.translate
178 178 c = self.load_default_context()
179 179 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.uid
180 180 self._set_personal_repo_group_template_vars(c)
181 181 return self._get_template_context(c)
182 182
183 183 @LoginRequired()
184 184 @HasPermissionAllDecorator('hg.admin')
185 185 @CSRFRequired()
186 186 def users_create(self):
187 187 _ = self.request.translate
188 188 c = self.load_default_context()
189 189 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.uid
190 190 user_model = UserModel()
191 191 user_form = UserForm(self.request.translate)()
192 192 try:
193 193 form_result = user_form.to_python(dict(self.request.POST))
194 194 user = user_model.create(form_result)
195 195 Session().flush()
196 196 creation_data = user.get_api_data()
197 197 username = form_result['username']
198 198
199 199 audit_logger.store_web(
200 200 'user.create', action_data={'data': creation_data},
201 201 user=c.rhodecode_user)
202 202
203 203 user_link = h.link_to(
204 204 h.escape(username),
205 205 h.route_path('user_edit', user_id=user.user_id))
206 206 h.flash(h.literal(_('Created user %(user_link)s')
207 207 % {'user_link': user_link}), category='success')
208 208 Session().commit()
209 209 except formencode.Invalid as errors:
210 210 self._set_personal_repo_group_template_vars(c)
211 211 data = render(
212 212 'rhodecode:templates/admin/users/user_add.mako',
213 213 self._get_template_context(c), self.request)
214 214 html = formencode.htmlfill.render(
215 215 data,
216 216 defaults=errors.value,
217 217 errors=errors.unpack_errors() or {},
218 218 prefix_error=False,
219 219 encoding="UTF-8",
220 220 force_defaults=False
221 221 )
222 222 return Response(html)
223 223 except UserCreationError as e:
224 224 h.flash(safe_str(e), 'error')
225 225 except Exception:
226 226 log.exception("Exception creation of user")
227 227 h.flash(_('Error occurred during creation of user %s')
228 228 % self.request.POST.get('username'), category='error')
229 229 raise HTTPFound(h.route_path('users'))
230 230
231 231
232 232 class UsersView(UserAppView):
233 233 ALLOW_SCOPED_TOKENS = False
234 234 """
235 235 This view has alternative version inside EE, if modified please take a look
236 236 in there as well.
237 237 """
238 238
239 239 def get_auth_plugins(self):
240 240 valid_plugins = []
241 241 authn_registry = get_authn_registry(self.request.registry)
242 242 for plugin in authn_registry.get_plugins_for_authentication():
243 243 if isinstance(plugin, RhodeCodeExternalAuthPlugin):
244 244 valid_plugins.append(plugin)
245 245 elif plugin.name == 'rhodecode':
246 246 valid_plugins.append(plugin)
247 247
248 248 # extend our choices if user has set a bound plugin which isn't enabled at the
249 249 # moment
250 250 extern_type = self.db_user.extern_type
251 251 if extern_type not in [x.uid for x in valid_plugins]:
252 252 try:
253 253 plugin = authn_registry.get_plugin_by_uid(extern_type)
254 254 if plugin:
255 255 valid_plugins.append(plugin)
256 256
257 257 except Exception:
258 258 log.exception(
259 259 f'Could not extend user plugins with `{extern_type}`')
260 260 return valid_plugins
261 261
262 262 def load_default_context(self):
263 263 req = self.request
264 264
265 265 c = self._get_local_tmpl_context()
266 266 c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS
267 267 c.allowed_languages = [
268 268 ('en', 'English (en)'),
269 269 ('de', 'German (de)'),
270 270 ('fr', 'French (fr)'),
271 271 ('it', 'Italian (it)'),
272 272 ('ja', 'Japanese (ja)'),
273 273 ('pl', 'Polish (pl)'),
274 274 ('pt', 'Portuguese (pt)'),
275 275 ('ru', 'Russian (ru)'),
276 276 ('zh', 'Chinese (zh)'),
277 277 ]
278 278
279 279 c.allowed_extern_types = [
280 280 (x.uid, x.get_display_name()) for x in self.get_auth_plugins()
281 281 ]
282 282 perms = req.registry.settings.get('available_permissions')
283 283 if not perms:
284 284 # inject info about available permissions
285 285 auth.set_available_permissions(req.registry.settings)
286 286
287 287 c.available_permissions = req.registry.settings['available_permissions']
288 288 PermissionModel().set_global_permission_choices(
289 289 c, gettext_translator=req.translate)
290 290
291 291 return c
292 292
293 293 @LoginRequired()
294 294 @HasPermissionAllDecorator('hg.admin')
295 295 @CSRFRequired()
296 296 def user_update(self):
297 297 _ = self.request.translate
298 298 c = self.load_default_context()
299 299
300 300 user_id = self.db_user_id
301 301 c.user = self.db_user
302 302
303 303 c.active = 'profile'
304 304 c.extern_type = c.user.extern_type
305 305 c.extern_name = c.user.extern_name
306 306 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
307 307 available_languages = [x[0] for x in c.allowed_languages]
308 308 _form = UserForm(self.request.translate, edit=True,
309 309 available_languages=available_languages,
310 310 old_data={'user_id': user_id,
311 311 'email': c.user.email})()
312 312
313 313 c.edit_mode = self.request.POST.get('edit') == '1'
314 314 form_result = {}
315 315 old_values = c.user.get_api_data()
316 316 try:
317 317 form_result = _form.to_python(dict(self.request.POST))
318 318 skip_attrs = ['extern_name']
319 319 # TODO: plugin should define if username can be updated
320 320
321 321 if c.extern_type != "rhodecode" and not c.edit_mode:
322 322 # forbid updating username for external accounts
323 323 skip_attrs.append('username')
324 324
325 325 UserModel().update_user(
326 326 user_id, skip_attrs=skip_attrs, **form_result)
327 327
328 328 audit_logger.store_web(
329 329 'user.edit', action_data={'old_data': old_values},
330 330 user=c.rhodecode_user)
331 331
332 332 Session().commit()
333 333 h.flash(_('User updated successfully'), category='success')
334 334 except formencode.Invalid as errors:
335 335 data = render(
336 336 'rhodecode:templates/admin/users/user_edit.mako',
337 337 self._get_template_context(c), self.request)
338 338 html = formencode.htmlfill.render(
339 339 data,
340 340 defaults=errors.value,
341 341 errors=errors.unpack_errors() or {},
342 342 prefix_error=False,
343 343 encoding="UTF-8",
344 344 force_defaults=False
345 345 )
346 346 return Response(html)
347 347 except UserCreationError as e:
348 348 h.flash(safe_str(e), 'error')
349 349 except Exception:
350 350 log.exception("Exception updating user")
351 351 h.flash(_('Error occurred during update of user %s')
352 352 % form_result.get('username'), category='error')
353 353 raise HTTPFound(h.route_path('user_edit', user_id=user_id))
354 354
355 355 @LoginRequired()
356 356 @HasPermissionAllDecorator('hg.admin')
357 357 @CSRFRequired()
358 358 def user_delete(self):
359 359 _ = self.request.translate
360 360 c = self.load_default_context()
361 361 c.user = self.db_user
362 362
363 363 _repos = len(c.user.repositories)
364 364 _repo_groups = len(c.user.repository_groups)
365 365 _user_groups = len(c.user.user_groups)
366 366 _pull_requests = len(c.user.user_pull_requests)
367 367 _artifacts = len(c.user.artifacts)
368 368
369 369 handle_repos = None
370 370 handle_repo_groups = None
371 371 handle_user_groups = None
372 372 handle_pull_requests = None
373 373 handle_artifacts = None
374 374
375 375 # calls for flash of handle based on handle case detach or delete
376 376 def set_handle_flash_repos():
377 377 handle = handle_repos
378 378 if handle == 'detach':
379 379 h.flash(_('Detached %s repositories') % _repos,
380 380 category='success')
381 381 elif handle == 'delete':
382 382 h.flash(_('Deleted %s repositories') % _repos,
383 383 category='success')
384 384
385 385 def set_handle_flash_repo_groups():
386 386 handle = handle_repo_groups
387 387 if handle == 'detach':
388 388 h.flash(_('Detached %s repository groups') % _repo_groups,
389 389 category='success')
390 390 elif handle == 'delete':
391 391 h.flash(_('Deleted %s repository groups') % _repo_groups,
392 392 category='success')
393 393
394 394 def set_handle_flash_user_groups():
395 395 handle = handle_user_groups
396 396 if handle == 'detach':
397 397 h.flash(_('Detached %s user groups') % _user_groups,
398 398 category='success')
399 399 elif handle == 'delete':
400 400 h.flash(_('Deleted %s user groups') % _user_groups,
401 401 category='success')
402 402
403 403 def set_handle_flash_pull_requests():
404 404 handle = handle_pull_requests
405 405 if handle == 'detach':
406 406 h.flash(_('Detached %s pull requests') % _pull_requests,
407 407 category='success')
408 408 elif handle == 'delete':
409 409 h.flash(_('Deleted %s pull requests') % _pull_requests,
410 410 category='success')
411 411
412 412 def set_handle_flash_artifacts():
413 413 handle = handle_artifacts
414 414 if handle == 'detach':
415 415 h.flash(_('Detached %s artifacts') % _artifacts,
416 416 category='success')
417 417 elif handle == 'delete':
418 418 h.flash(_('Deleted %s artifacts') % _artifacts,
419 419 category='success')
420 420
421 421 handle_user = User.get_first_super_admin()
422 422 handle_user_id = safe_int(self.request.POST.get('detach_user_id'))
423 423 if handle_user_id:
424 424 # NOTE(marcink): we get a new owner for the detached objects...
425 425 handle_user = User.get_or_404(handle_user_id)
426 426
427 427 if _repos and self.request.POST.get('user_repos'):
428 428 handle_repos = self.request.POST['user_repos']
429 429
430 430 if _repo_groups and self.request.POST.get('user_repo_groups'):
431 431 handle_repo_groups = self.request.POST['user_repo_groups']
432 432
433 433 if _user_groups and self.request.POST.get('user_user_groups'):
434 434 handle_user_groups = self.request.POST['user_user_groups']
435 435
436 436 if _pull_requests and self.request.POST.get('user_pull_requests'):
437 437 handle_pull_requests = self.request.POST['user_pull_requests']
438 438
439 439 if _artifacts and self.request.POST.get('user_artifacts'):
440 440 handle_artifacts = self.request.POST['user_artifacts']
441 441
442 442 old_values = c.user.get_api_data()
443 443
444 444 try:
445 445
446 446 UserModel().delete(
447 447 c.user,
448 448 handle_repos=handle_repos,
449 449 handle_repo_groups=handle_repo_groups,
450 450 handle_user_groups=handle_user_groups,
451 451 handle_pull_requests=handle_pull_requests,
452 452 handle_artifacts=handle_artifacts,
453 453 handle_new_owner=handle_user
454 454 )
455 455
456 456 audit_logger.store_web(
457 457 'user.delete', action_data={'old_data': old_values},
458 458 user=c.rhodecode_user)
459 459
460 460 Session().commit()
461 461 set_handle_flash_repos()
462 462 set_handle_flash_repo_groups()
463 463 set_handle_flash_user_groups()
464 464 set_handle_flash_pull_requests()
465 465 set_handle_flash_artifacts()
466 466 username = h.escape(old_values['username'])
467 467 h.flash(_('Successfully deleted user `{}`').format(username), category='success')
468 468 except (UserOwnsReposException, UserOwnsRepoGroupsException,
469 469 UserOwnsUserGroupsException, UserOwnsPullRequestsException,
470 470 UserOwnsArtifactsException, DefaultUserException) as e:
471 471
472 472 h.flash(safe_str(e), category='warning')
473 473 except Exception:
474 474 log.exception("Exception during deletion of user")
475 475 h.flash(_('An error occurred during deletion of user'),
476 476 category='error')
477 477 raise HTTPFound(h.route_path('users'))
478 478
479 479 @LoginRequired()
480 480 @HasPermissionAllDecorator('hg.admin')
481 481 def user_edit(self):
482 482 _ = self.request.translate
483 483 c = self.load_default_context()
484 484 c.user = self.db_user
485 485
486 486 c.active = 'profile'
487 487 c.extern_type = c.user.extern_type
488 488 c.extern_name = c.user.extern_name
489 489 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
490 490 c.edit_mode = self.request.GET.get('edit') == '1'
491 491
492 492 defaults = c.user.get_dict()
493 493 defaults.update({'language': c.user.user_data.get('language')})
494 494
495 495 data = render(
496 496 'rhodecode:templates/admin/users/user_edit.mako',
497 497 self._get_template_context(c), self.request)
498 498 html = formencode.htmlfill.render(
499 499 data,
500 500 defaults=defaults,
501 501 encoding="UTF-8",
502 502 force_defaults=False
503 503 )
504 504 return Response(html)
505 505
506 506 @LoginRequired()
507 507 @HasPermissionAllDecorator('hg.admin')
508 508 def user_edit_advanced(self):
509 509 _ = self.request.translate
510 510 c = self.load_default_context()
511 511
512 512 user_id = self.db_user_id
513 513 c.user = self.db_user
514 514
515 515 c.detach_user = User.get_first_super_admin()
516 516 detach_user_id = safe_int(self.request.GET.get('detach_user_id'))
517 517 if detach_user_id:
518 518 c.detach_user = User.get_or_404(detach_user_id)
519 519
520 520 c.active = 'advanced'
521 521 c.personal_repo_group = RepoGroup.get_user_personal_repo_group(user_id)
522 522 c.personal_repo_group_name = RepoGroupModel()\
523 523 .get_personal_group_name(c.user)
524 524
525 525 c.user_to_review_rules = sorted(
526 526 (x.user for x in c.user.user_review_rules),
527 527 key=lambda u: u.username.lower())
528 528
529 529 defaults = c.user.get_dict()
530 530
531 531 # Interim workaround if the user participated in any pull requests as a
532 532 # reviewer.
533 533 has_review = len(c.user.reviewer_pull_requests)
534 534 c.can_delete_user = not has_review
535 535 c.can_delete_user_message = ''
536 536 inactive_link = h.link_to(
537 537 'inactive', h.route_path('user_edit', user_id=user_id, _anchor='active'))
538 538 if has_review == 1:
539 539 c.can_delete_user_message = h.literal(_(
540 540 'The user participates as reviewer in {} pull request and '
541 541 'cannot be deleted. \nYou can set the user to '
542 542 '"{}" instead of deleting it.').format(
543 543 has_review, inactive_link))
544 544 elif has_review:
545 545 c.can_delete_user_message = h.literal(_(
546 546 'The user participates as reviewer in {} pull requests and '
547 547 'cannot be deleted. \nYou can set the user to '
548 548 '"{}" instead of deleting it.').format(
549 549 has_review, inactive_link))
550 550
551 551 data = render(
552 552 'rhodecode:templates/admin/users/user_edit.mako',
553 553 self._get_template_context(c), self.request)
554 554 html = formencode.htmlfill.render(
555 555 data,
556 556 defaults=defaults,
557 557 encoding="UTF-8",
558 558 force_defaults=False
559 559 )
560 560 return Response(html)
561 561
562 562 @LoginRequired()
563 563 @HasPermissionAllDecorator('hg.admin')
564 564 def user_edit_global_perms(self):
565 565 _ = self.request.translate
566 566 c = self.load_default_context()
567 567 c.user = self.db_user
568 568
569 569 c.active = 'global_perms'
570 570
571 571 c.default_user = User.get_default_user()
572 572 defaults = c.user.get_dict()
573 573 defaults.update(c.default_user.get_default_perms(suffix='_inherited'))
574 574 defaults.update(c.default_user.get_default_perms())
575 575 defaults.update(c.user.get_default_perms())
576 576
577 577 data = render(
578 578 'rhodecode:templates/admin/users/user_edit.mako',
579 579 self._get_template_context(c), self.request)
580 580 html = formencode.htmlfill.render(
581 581 data,
582 582 defaults=defaults,
583 583 encoding="UTF-8",
584 584 force_defaults=False
585 585 )
586 586 return Response(html)
587 587
588 588 @LoginRequired()
589 589 @HasPermissionAllDecorator('hg.admin')
590 590 @CSRFRequired()
591 591 def user_edit_global_perms_update(self):
592 592 _ = self.request.translate
593 593 c = self.load_default_context()
594 594
595 595 user_id = self.db_user_id
596 596 c.user = self.db_user
597 597
598 598 c.active = 'global_perms'
599 599 try:
600 600 # first stage that verifies the checkbox
601 601 _form = UserIndividualPermissionsForm(self.request.translate)
602 602 form_result = _form.to_python(dict(self.request.POST))
603 603 inherit_perms = form_result['inherit_default_permissions']
604 604 c.user.inherit_default_permissions = inherit_perms
605 605 Session().add(c.user)
606 606
607 607 if not inherit_perms:
608 608 # only update the individual ones if we uncheck the flag
609 609 _form = UserPermissionsForm(
610 610 self.request.translate,
611 611 [x[0] for x in c.repo_create_choices],
612 612 [x[0] for x in c.repo_create_on_write_choices],
613 613 [x[0] for x in c.repo_group_create_choices],
614 614 [x[0] for x in c.user_group_create_choices],
615 615 [x[0] for x in c.fork_choices],
616 616 [x[0] for x in c.inherit_default_permission_choices])()
617 617
618 618 form_result = _form.to_python(dict(self.request.POST))
619 619 form_result.update({'perm_user_id': c.user.user_id})
620 620
621 621 PermissionModel().update_user_permissions(form_result)
622 622
623 623 # TODO(marcink): implement global permissions
624 624 # audit_log.store_web('user.edit.permissions')
625 625
626 626 Session().commit()
627 627
628 628 h.flash(_('User global permissions updated successfully'),
629 629 category='success')
630 630
631 631 except formencode.Invalid as errors:
632 632 data = render(
633 633 'rhodecode:templates/admin/users/user_edit.mako',
634 634 self._get_template_context(c), self.request)
635 635 html = formencode.htmlfill.render(
636 636 data,
637 637 defaults=errors.value,
638 638 errors=errors.unpack_errors() or {},
639 639 prefix_error=False,
640 640 encoding="UTF-8",
641 641 force_defaults=False
642 642 )
643 643 return Response(html)
644 644 except Exception:
645 645 log.exception("Exception during permissions saving")
646 646 h.flash(_('An error occurred during permissions saving'),
647 647 category='error')
648 648
649 649 affected_user_ids = [user_id]
650 650 PermissionModel().trigger_permission_flush(affected_user_ids)
651 651 raise HTTPFound(h.route_path('user_edit_global_perms', user_id=user_id))
652 652
653 653 @LoginRequired()
654 654 @HasPermissionAllDecorator('hg.admin')
655 655 @CSRFRequired()
656 656 def user_enable_force_password_reset(self):
657 657 _ = self.request.translate
658 658 c = self.load_default_context()
659 659
660 660 user_id = self.db_user_id
661 661 c.user = self.db_user
662 662
663 663 try:
664 664 c.user.update_userdata(force_password_change=True)
665 665
666 666 msg = _('Force password change enabled for user')
667 667 audit_logger.store_web('user.edit.password_reset.enabled',
668 668 user=c.rhodecode_user)
669 669
670 670 Session().commit()
671 671 h.flash(msg, category='success')
672 672 except Exception:
673 673 log.exception("Exception during password reset for user")
674 674 h.flash(_('An error occurred during password reset for user'),
675 675 category='error')
676 676
677 677 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
678 678
679 679 @LoginRequired()
680 680 @HasPermissionAllDecorator('hg.admin')
681 681 @CSRFRequired()
682 682 def user_disable_force_password_reset(self):
683 683 _ = self.request.translate
684 684 c = self.load_default_context()
685 685
686 686 user_id = self.db_user_id
687 687 c.user = self.db_user
688 688
689 689 try:
690 690 c.user.update_userdata(force_password_change=False)
691 691
692 692 msg = _('Force password change disabled for user')
693 693 audit_logger.store_web(
694 694 'user.edit.password_reset.disabled',
695 695 user=c.rhodecode_user)
696 696
697 697 Session().commit()
698 698 h.flash(msg, category='success')
699 699 except Exception:
700 700 log.exception("Exception during password reset for user")
701 701 h.flash(_('An error occurred during password reset for user'),
702 702 category='error')
703 703
704 704 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
705 705
706 706 @LoginRequired()
707 707 @HasPermissionAllDecorator('hg.admin')
708 708 @CSRFRequired()
709 709 def user_notice_dismiss(self):
710 710 _ = self.request.translate
711 711 c = self.load_default_context()
712 712
713 713 user_id = self.db_user_id
714 714 c.user = self.db_user
715 715 user_notice_id = safe_int(self.request.POST.get('notice_id'))
716 716 notice = UserNotice().query()\
717 717 .filter(UserNotice.user_id == user_id)\
718 718 .filter(UserNotice.user_notice_id == user_notice_id)\
719 719 .scalar()
720 720 read = False
721 721 if notice:
722 722 notice.notice_read = True
723 723 Session().add(notice)
724 724 Session().commit()
725 725 read = True
726 726
727 727 return {'notice': user_notice_id, 'read': read}
728 728
729 729 @LoginRequired()
730 730 @HasPermissionAllDecorator('hg.admin')
731 731 @CSRFRequired()
732 732 def user_create_personal_repo_group(self):
733 733 """
734 734 Create personal repository group for this user
735 735 """
736 736 from rhodecode.model.repo_group import RepoGroupModel
737 737
738 738 _ = self.request.translate
739 739 c = self.load_default_context()
740 740
741 741 user_id = self.db_user_id
742 742 c.user = self.db_user
743 743
744 744 personal_repo_group = RepoGroup.get_user_personal_repo_group(
745 745 c.user.user_id)
746 746 if personal_repo_group:
747 747 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
748 748
749 749 personal_repo_group_name = RepoGroupModel().get_personal_group_name(c.user)
750 750 named_personal_group = RepoGroup.get_by_group_name(
751 751 personal_repo_group_name)
752 752 try:
753 753
754 754 if named_personal_group and named_personal_group.user_id == c.user.user_id:
755 755 # migrate the same named group, and mark it as personal
756 756 named_personal_group.personal = True
757 757 Session().add(named_personal_group)
758 758 Session().commit()
759 759 msg = _('Linked repository group `{}` as personal'.format(
760 760 personal_repo_group_name))
761 761 h.flash(msg, category='success')
762 762 elif not named_personal_group:
763 763 RepoGroupModel().create_personal_repo_group(c.user)
764 764
765 765 msg = _('Created repository group `{}`'.format(
766 766 personal_repo_group_name))
767 767 h.flash(msg, category='success')
768 768 else:
769 769 msg = _('Repository group `{}` is already taken'.format(
770 770 personal_repo_group_name))
771 771 h.flash(msg, category='warning')
772 772 except Exception:
773 773 log.exception("Exception during repository group creation")
774 774 msg = _(
775 775 'An error occurred during repository group creation for user')
776 776 h.flash(msg, category='error')
777 777 Session().rollback()
778 778
779 779 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
780 780
781 781 @LoginRequired()
782 782 @HasPermissionAllDecorator('hg.admin')
783 783 def auth_tokens(self):
784 784 _ = self.request.translate
785 785 c = self.load_default_context()
786 786 c.user = self.db_user
787 787
788 788 c.active = 'auth_tokens'
789 789
790 790 c.lifetime_values = AuthTokenModel.get_lifetime_values(translator=_)
791 791 c.role_values = [
792 792 (x, AuthTokenModel.cls._get_role_name(x))
793 793 for x in AuthTokenModel.cls.ROLES]
794 794 c.role_options = [(c.role_values, _("Role"))]
795 795 c.user_auth_tokens = AuthTokenModel().get_auth_tokens(
796 796 c.user.user_id, show_expired=True)
797 797 c.role_vcs = AuthTokenModel.cls.ROLE_VCS
798 798 return self._get_template_context(c)
799 799
800 800 @LoginRequired()
801 801 @HasPermissionAllDecorator('hg.admin')
802 802 def auth_tokens_view(self):
803 803 _ = self.request.translate
804 804 c = self.load_default_context()
805 805 c.user = self.db_user
806 806
807 807 auth_token_id = self.request.POST.get('auth_token_id')
808 808
809 809 if auth_token_id:
810 810 token = UserApiKeys.get_or_404(auth_token_id)
811 811
812 812 return {
813 813 'auth_token': token.api_key
814 814 }
815 815
816 816 def maybe_attach_token_scope(self, token):
817 817 # implemented in EE edition
818 818 pass
819 819
820 820 @LoginRequired()
821 821 @HasPermissionAllDecorator('hg.admin')
822 822 @CSRFRequired()
823 823 def auth_tokens_add(self):
824 824 _ = self.request.translate
825 825 c = self.load_default_context()
826 826
827 827 user_id = self.db_user_id
828 828 c.user = self.db_user
829 829
830 830 user_data = c.user.get_api_data()
831 831 lifetime = safe_int(self.request.POST.get('lifetime'), -1)
832 832 description = self.request.POST.get('description')
833 833 role = self.request.POST.get('role')
834 834
835 835 token = UserModel().add_auth_token(
836 836 user=c.user.user_id,
837 837 lifetime_minutes=lifetime, role=role, description=description,
838 838 scope_callback=self.maybe_attach_token_scope)
839 839 token_data = token.get_api_data()
840 840
841 841 audit_logger.store_web(
842 842 'user.edit.token.add', action_data={
843 843 'data': {'token': token_data, 'user': user_data}},
844 844 user=self._rhodecode_user, )
845 845 Session().commit()
846 846
847 847 h.flash(_("Auth token successfully created"), category='success')
848 848 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
849 849
850 850 @LoginRequired()
851 851 @HasPermissionAllDecorator('hg.admin')
852 852 @CSRFRequired()
853 853 def auth_tokens_delete(self):
854 854 _ = self.request.translate
855 855 c = self.load_default_context()
856 856
857 857 user_id = self.db_user_id
858 858 c.user = self.db_user
859 859
860 860 user_data = c.user.get_api_data()
861 861
862 862 del_auth_token = self.request.POST.get('del_auth_token')
863 863
864 864 if del_auth_token:
865 865 token = UserApiKeys.get_or_404(del_auth_token)
866 866 token_data = token.get_api_data()
867 867
868 868 AuthTokenModel().delete(del_auth_token, c.user.user_id)
869 869 audit_logger.store_web(
870 870 'user.edit.token.delete', action_data={
871 871 'data': {'token': token_data, 'user': user_data}},
872 872 user=self._rhodecode_user,)
873 873 Session().commit()
874 874 h.flash(_("Auth token successfully deleted"), category='success')
875 875
876 876 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
877 877
878 878 @LoginRequired()
879 879 @HasPermissionAllDecorator('hg.admin')
880 880 def ssh_keys(self):
881 881 _ = self.request.translate
882 882 c = self.load_default_context()
883 883 c.user = self.db_user
884 884
885 885 c.active = 'ssh_keys'
886 886 c.default_key = self.request.GET.get('default_key')
887 887 c.user_ssh_keys = SshKeyModel().get_ssh_keys(c.user.user_id)
888 888 return self._get_template_context(c)
889 889
890 890 @LoginRequired()
891 891 @HasPermissionAllDecorator('hg.admin')
892 892 def ssh_keys_generate_keypair(self):
893 893 _ = self.request.translate
894 894 c = self.load_default_context()
895 895
896 896 c.user = self.db_user
897 897
898 898 c.active = 'ssh_keys_generate'
899 899 comment = 'RhodeCode-SSH {}'.format(c.user.email or '')
900 900 private_format = self.request.GET.get('private_format') \
901 901 or SshKeyModel.DEFAULT_PRIVATE_KEY_FORMAT
902 902 c.private, c.public = SshKeyModel().generate_keypair(
903 903 comment=comment, private_format=private_format)
904 904
905 905 return self._get_template_context(c)
906 906
907 907 @LoginRequired()
908 908 @HasPermissionAllDecorator('hg.admin')
909 909 @CSRFRequired()
910 910 def ssh_keys_add(self):
911 911 _ = self.request.translate
912 912 c = self.load_default_context()
913 913
914 914 user_id = self.db_user_id
915 915 c.user = self.db_user
916 916
917 917 user_data = c.user.get_api_data()
918 918 key_data = self.request.POST.get('key_data')
919 919 description = self.request.POST.get('description')
920 920
921 921 fingerprint = 'unknown'
922 922 try:
923 923 if not key_data:
924 924 raise ValueError('Please add a valid public key')
925 925
926 926 key = SshKeyModel().parse_key(key_data.strip())
927 927 fingerprint = key.hash_md5()
928 928
929 929 ssh_key = SshKeyModel().create(
930 930 c.user.user_id, fingerprint, key.keydata, description)
931 931 ssh_key_data = ssh_key.get_api_data()
932 932
933 933 audit_logger.store_web(
934 934 'user.edit.ssh_key.add', action_data={
935 935 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
936 936 user=self._rhodecode_user, )
937 937 Session().commit()
938 938
939 939 # Trigger an event on change of keys.
940 940 trigger(SshKeyFileChangeEvent(), self.request.registry)
941 941
942 942 h.flash(_("Ssh Key successfully created"), category='success')
943 943
944 944 except IntegrityError:
945 945 log.exception("Exception during ssh key saving")
946 946 err = 'Such key with fingerprint `{}` already exists, ' \
947 947 'please use a different one'.format(fingerprint)
948 948 h.flash(_('An error occurred during ssh key saving: {}').format(err),
949 949 category='error')
950 950 except Exception as e:
951 951 log.exception("Exception during ssh key saving")
952 952 h.flash(_('An error occurred during ssh key saving: {}').format(e),
953 953 category='error')
954 954
955 955 return HTTPFound(
956 956 h.route_path('edit_user_ssh_keys', user_id=user_id))
957 957
958 958 @LoginRequired()
959 959 @HasPermissionAllDecorator('hg.admin')
960 960 @CSRFRequired()
961 961 def ssh_keys_delete(self):
962 962 _ = self.request.translate
963 963 c = self.load_default_context()
964 964
965 965 user_id = self.db_user_id
966 966 c.user = self.db_user
967 967
968 968 user_data = c.user.get_api_data()
969 969
970 970 del_ssh_key = self.request.POST.get('del_ssh_key')
971 971
972 972 if del_ssh_key:
973 973 ssh_key = UserSshKeys.get_or_404(del_ssh_key)
974 974 ssh_key_data = ssh_key.get_api_data()
975 975
976 976 SshKeyModel().delete(del_ssh_key, c.user.user_id)
977 977 audit_logger.store_web(
978 978 'user.edit.ssh_key.delete', action_data={
979 979 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
980 980 user=self._rhodecode_user,)
981 981 Session().commit()
982 982 # Trigger an event on change of keys.
983 983 trigger(SshKeyFileChangeEvent(), self.request.registry)
984 984 h.flash(_("Ssh key successfully deleted"), category='success')
985 985
986 986 return HTTPFound(h.route_path('edit_user_ssh_keys', user_id=user_id))
987 987
988 988 @LoginRequired()
989 989 @HasPermissionAllDecorator('hg.admin')
990 990 def emails(self):
991 991 _ = self.request.translate
992 992 c = self.load_default_context()
993 993 c.user = self.db_user
994 994
995 995 c.active = 'emails'
996 996 c.user_email_map = UserEmailMap.query() \
997 997 .filter(UserEmailMap.user == c.user).all()
998 998
999 999 return self._get_template_context(c)
1000 1000
1001 1001 @LoginRequired()
1002 1002 @HasPermissionAllDecorator('hg.admin')
1003 1003 @CSRFRequired()
1004 1004 def emails_add(self):
1005 1005 _ = self.request.translate
1006 1006 c = self.load_default_context()
1007 1007
1008 1008 user_id = self.db_user_id
1009 1009 c.user = self.db_user
1010 1010
1011 1011 email = self.request.POST.get('new_email')
1012 1012 user_data = c.user.get_api_data()
1013 1013 try:
1014 1014
1015 1015 form = UserExtraEmailForm(self.request.translate)()
1016 1016 data = form.to_python({'email': email})
1017 1017 email = data['email']
1018 1018
1019 1019 UserModel().add_extra_email(c.user.user_id, email)
1020 1020 audit_logger.store_web(
1021 1021 'user.edit.email.add',
1022 1022 action_data={'email': email, 'user': user_data},
1023 1023 user=self._rhodecode_user)
1024 1024 Session().commit()
1025 1025 h.flash(_("Added new email address `%s` for user account") % email,
1026 1026 category='success')
1027 1027 except formencode.Invalid as error:
1028 1028 msg = error.unpack_errors()['email']
1029 1029 h.flash(h.escape(msg), category='error')
1030 1030 except IntegrityError:
1031 1031 log.warning("Email %s already exists", email)
1032 1032 h.flash(_('Email `{}` is already registered for another user.').format(email),
1033 1033 category='error')
1034 1034 except Exception:
1035 1035 log.exception("Exception during email saving")
1036 1036 h.flash(_('An error occurred during email saving'),
1037 1037 category='error')
1038 1038 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
1039 1039
1040 1040 @LoginRequired()
1041 1041 @HasPermissionAllDecorator('hg.admin')
1042 1042 @CSRFRequired()
1043 1043 def emails_delete(self):
1044 1044 _ = self.request.translate
1045 1045 c = self.load_default_context()
1046 1046
1047 1047 user_id = self.db_user_id
1048 1048 c.user = self.db_user
1049 1049
1050 1050 email_id = self.request.POST.get('del_email_id')
1051 1051 user_model = UserModel()
1052 1052
1053 1053 email = UserEmailMap.query().get(email_id).email
1054 1054 user_data = c.user.get_api_data()
1055 1055 user_model.delete_extra_email(c.user.user_id, email_id)
1056 1056 audit_logger.store_web(
1057 1057 'user.edit.email.delete',
1058 1058 action_data={'email': email, 'user': user_data},
1059 1059 user=self._rhodecode_user)
1060 1060 Session().commit()
1061 1061 h.flash(_("Removed email address from user account"),
1062 1062 category='success')
1063 1063 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
1064 1064
1065 1065 @LoginRequired()
1066 1066 @HasPermissionAllDecorator('hg.admin')
1067 1067 def ips(self):
1068 1068 _ = self.request.translate
1069 1069 c = self.load_default_context()
1070 1070 c.user = self.db_user
1071 1071
1072 1072 c.active = 'ips'
1073 1073 c.user_ip_map = UserIpMap.query() \
1074 1074 .filter(UserIpMap.user == c.user).all()
1075 1075
1076 1076 c.inherit_default_ips = c.user.inherit_default_permissions
1077 1077 c.default_user_ip_map = UserIpMap.query() \
1078 1078 .filter(UserIpMap.user == User.get_default_user()).all()
1079 1079
1080 1080 return self._get_template_context(c)
1081 1081
1082 1082 @LoginRequired()
1083 1083 @HasPermissionAllDecorator('hg.admin')
1084 1084 @CSRFRequired()
1085 1085 # NOTE(marcink): this view is allowed for default users, as we can
1086 1086 # edit their IP white list
1087 1087 def ips_add(self):
1088 1088 _ = self.request.translate
1089 1089 c = self.load_default_context()
1090 1090
1091 1091 user_id = self.db_user_id
1092 1092 c.user = self.db_user
1093 1093
1094 1094 user_model = UserModel()
1095 1095 desc = self.request.POST.get('description')
1096 1096 try:
1097 1097 ip_list = user_model.parse_ip_range(
1098 1098 self.request.POST.get('new_ip'))
1099 1099 except Exception as e:
1100 1100 ip_list = []
1101 1101 log.exception("Exception during ip saving")
1102 1102 h.flash(_('An error occurred during ip saving: %s' % (e,)),
1103 1103 category='error')
1104 1104 added = []
1105 1105 user_data = c.user.get_api_data()
1106 1106 for ip in ip_list:
1107 1107 try:
1108 1108 form = UserExtraIpForm(self.request.translate)()
1109 1109 data = form.to_python({'ip': ip})
1110 1110 ip = data['ip']
1111 1111
1112 1112 user_model.add_extra_ip(c.user.user_id, ip, desc)
1113 1113 audit_logger.store_web(
1114 1114 'user.edit.ip.add',
1115 1115 action_data={'ip': ip, 'user': user_data},
1116 1116 user=self._rhodecode_user)
1117 1117 Session().commit()
1118 1118 added.append(ip)
1119 1119 except formencode.Invalid as error:
1120 1120 msg = error.unpack_errors()['ip']
1121 1121 h.flash(msg, category='error')
1122 1122 except Exception:
1123 1123 log.exception("Exception during ip saving")
1124 1124 h.flash(_('An error occurred during ip saving'),
1125 1125 category='error')
1126 1126 if added:
1127 1127 h.flash(
1128 1128 _("Added ips %s to user whitelist") % (', '.join(ip_list), ),
1129 1129 category='success')
1130 1130 if 'default_user' in self.request.POST:
1131 1131 # case of editing the global IP list; we do it for the 'DEFAULT' user
1132 1132 raise HTTPFound(h.route_path('admin_permissions_ips'))
1133 1133 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1134 1134
1135 1135 @LoginRequired()
1136 1136 @HasPermissionAllDecorator('hg.admin')
1137 1137 @CSRFRequired()
1138 1138 # NOTE(marcink): this view is allowed for default users, as we can
1139 1139 # edit their IP white list
1140 1140 def ips_delete(self):
1141 1141 _ = self.request.translate
1142 1142 c = self.load_default_context()
1143 1143
1144 1144 user_id = self.db_user_id
1145 1145 c.user = self.db_user
1146 1146
1147 1147 ip_id = self.request.POST.get('del_ip_id')
1148 1148 user_model = UserModel()
1149 1149 user_data = c.user.get_api_data()
1150 1150 ip = UserIpMap.query().get(ip_id).ip_addr
1151 1151 user_model.delete_extra_ip(c.user.user_id, ip_id)
1152 1152 audit_logger.store_web(
1153 1153 'user.edit.ip.delete', action_data={'ip': ip, 'user': user_data},
1154 1154 user=self._rhodecode_user)
1155 1155 Session().commit()
1156 1156 h.flash(_("Removed ip address from user whitelist"), category='success')
1157 1157
1158 1158 if 'default_user' in self.request.POST:
1159 1159 # case of editing the global IP list; we do it for the 'DEFAULT' user
1160 1160 raise HTTPFound(h.route_path('admin_permissions_ips'))
1161 1161 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1162 1162
1163 1163 @LoginRequired()
1164 1164 @HasPermissionAllDecorator('hg.admin')
1165 1165 def groups_management(self):
1166 1166 c = self.load_default_context()
1167 1167 c.user = self.db_user
1168 1168 c.data = c.user.group_member
1169 1169
1170 1170 groups = [UserGroupModel.get_user_groups_as_dict(group.users_group)
1171 1171 for group in c.user.group_member]
1172 1172 c.groups = ext_json.str_json(groups)
1173 1173 c.active = 'groups'
1174 1174
1175 1175 return self._get_template_context(c)
1176 1176
1177 1177 @LoginRequired()
1178 1178 @HasPermissionAllDecorator('hg.admin')
1179 1179 @CSRFRequired()
1180 1180 def groups_management_updates(self):
1181 1181 _ = self.request.translate
1182 1182 c = self.load_default_context()
1183 1183
1184 1184 user_id = self.db_user_id
1185 1185 c.user = self.db_user
1186 1186
1187 1187 user_groups = set(self.request.POST.getall('users_group_id'))
1188 1188 user_groups_objects = []
1189 1189
1190 1190 for ugid in user_groups:
1191 1191 user_groups_objects.append(
1192 1192 UserGroupModel().get_group(safe_int(ugid)))
1193 1193 user_group_model = UserGroupModel()
1194 1194 added_to_groups, removed_from_groups = \
1195 1195 user_group_model.change_groups(c.user, user_groups_objects)
1196 1196
1197 1197 user_data = c.user.get_api_data()
1198 1198 for user_group_id in added_to_groups:
1199 1199 user_group = UserGroup.get(user_group_id)
1200 1200 old_values = user_group.get_api_data()
1201 1201 audit_logger.store_web(
1202 1202 'user_group.edit.member.add',
1203 1203 action_data={'user': user_data, 'old_data': old_values},
1204 1204 user=self._rhodecode_user)
1205 1205
1206 1206 for user_group_id in removed_from_groups:
1207 1207 user_group = UserGroup.get(user_group_id)
1208 1208 old_values = user_group.get_api_data()
1209 1209 audit_logger.store_web(
1210 1210 'user_group.edit.member.delete',
1211 1211 action_data={'user': user_data, 'old_data': old_values},
1212 1212 user=self._rhodecode_user)
1213 1213
1214 1214 Session().commit()
1215 1215 c.active = 'user_groups_management'
1216 1216 h.flash(_("Groups successfully changed"), category='success')
1217 1217
1218 1218 return HTTPFound(h.route_path(
1219 1219 'edit_user_groups_management', user_id=user_id))
1220 1220
1221 1221 @LoginRequired()
1222 1222 @HasPermissionAllDecorator('hg.admin')
1223 1223 def user_audit_logs(self):
1224 1224 _ = self.request.translate
1225 1225 c = self.load_default_context()
1226 1226 c.user = self.db_user
1227 1227
1228 1228 c.active = 'audit'
1229 1229
1230 1230 p = safe_int(self.request.GET.get('page', 1), 1)
1231 1231
1232 1232 filter_term = self.request.GET.get('filter')
1233 1233 user_log = UserModel().get_user_log(c.user, filter_term)
1234 1234
1235 1235 def url_generator(page_num):
1236 1236 query_params = {
1237 1237 'page': page_num
1238 1238 }
1239 1239 if filter_term:
1240 1240 query_params['filter'] = filter_term
1241 1241 return self.request.current_route_path(_query=query_params)
1242 1242
1243 1243 c.audit_logs = SqlPage(
1244 1244 user_log, page=p, items_per_page=10, url_maker=url_generator)
1245 1245 c.filter_term = filter_term
1246 1246 return self._get_template_context(c)
1247 1247
1248 1248 @LoginRequired()
1249 1249 @HasPermissionAllDecorator('hg.admin')
1250 1250 def user_audit_logs_download(self):
1251 1251 _ = self.request.translate
1252 1252 c = self.load_default_context()
1253 1253 c.user = self.db_user
1254 1254
1255 1255 user_log = UserModel().get_user_log(c.user, filter_term=None)
1256 1256
1257 1257 audit_log_data = {}
1258 1258 for entry in user_log:
1259 1259 audit_log_data[entry.user_log_id] = entry.get_dict()
1260 1260
1261 1261 response = Response(ext_json.formatted_str_json(audit_log_data))
1262 1262 response.content_disposition = f'attachment; filename=user_{c.user.user_id}_audit_logs.json'
1263 1263 response.content_type = 'application/json'
1264 1264
1265 1265 return response
1266 1266
1267 1267 @LoginRequired()
1268 1268 @HasPermissionAllDecorator('hg.admin')
1269 1269 def user_perms_summary(self):
1270 1270 _ = self.request.translate
1271 1271 c = self.load_default_context()
1272 1272 c.user = self.db_user
1273 1273
1274 1274 c.active = 'perms_summary'
1275 1275 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1276 1276
1277 1277 return self._get_template_context(c)
1278 1278
1279 1279 @LoginRequired()
1280 1280 @HasPermissionAllDecorator('hg.admin')
1281 1281 def user_perms_summary_json(self):
1282 1282 self.load_default_context()
1283 1283 perm_user = self.db_user.AuthUser(ip_addr=self.request.remote_addr)
1284 1284
1285 1285 return perm_user.permissions
1286 1286
1287 1287 @LoginRequired()
1288 1288 @HasPermissionAllDecorator('hg.admin')
1289 1289 def user_caches(self):
1290 1290 _ = self.request.translate
1291 1291 c = self.load_default_context()
1292 1292 c.user = self.db_user
1293 1293
1294 1294 c.active = 'caches'
1295 1295 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1296 1296
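# the namespace carries the global rc_cache.PERMISSIONS_CACHE_VER prefix, so bumping
# that constant switches every user to a fresh permissions cache namespace at once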
1297 cache_namespace_uid = f'cache_user_auth.{self.db_user.user_id}'
1297 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{self.db_user.user_id}'
1298 1298 c.region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1299 1299 c.backend = c.region.backend
1300 1300 c.user_keys = sorted(c.region.backend.list_keys(prefix=cache_namespace_uid))
1301 1301
1302 1302 return self._get_template_context(c)
1303 1303
1304 1304 @LoginRequired()
1305 1305 @HasPermissionAllDecorator('hg.admin')
1306 1306 @CSRFRequired()
1307 1307 def user_caches_update(self):
1308 1308 _ = self.request.translate
1309 1309 c = self.load_default_context()
1310 1310 c.user = self.db_user
1311 1311
1312 1312 c.active = 'caches'
1313 1313 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1314 1314
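# build the same versioned namespace as in user_caches() so the clear below removes
# exactly the keys that are currently served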
1315 cache_namespace_uid = f'cache_user_auth.{self.db_user.user_id}'
1315 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{self.db_user.user_id}'
1316 1316 del_keys = rc_cache.clear_cache_namespace('cache_perms', cache_namespace_uid)
1317 1317
1318 1318 h.flash(_("Deleted {} cache keys").format(del_keys), category='success')
1319 1319
1320 1320 return HTTPFound(h.route_path(
1321 1321 'edit_user_caches', user_id=c.user.user_id))
@@ -1,1585 +1,1584 b''
1 1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import itertools
20 20 import logging
21 21 import os
22 22 import collections
23 23 import urllib.request
24 24 import urllib.parse
25 25 import urllib.error
26 26 import pathlib
27 27
28 28 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
29 29
30 30 from pyramid.renderers import render
31 31 from pyramid.response import Response
32 32
33 33 import rhodecode
34 34 from rhodecode.apps._base import RepoAppView
35 35
36 36
37 37 from rhodecode.lib import diffs, helpers as h, rc_cache
38 38 from rhodecode.lib import audit_logger
39 39 from rhodecode.lib.hash_utils import sha1_safe
40 40 from rhodecode.lib.rc_cache.archive_cache import get_archival_cache_store, get_archival_config, ReentrantLock
41 41 from rhodecode.lib.str_utils import safe_bytes
42 42 from rhodecode.lib.view_utils import parse_path_ref
43 43 from rhodecode.lib.exceptions import NonRelativePathError
44 44 from rhodecode.lib.codeblocks import (
45 45 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
46 46 from rhodecode.lib.utils2 import convert_line_endings, detect_mode
47 47 from rhodecode.lib.type_utils import str2bool
48 48 from rhodecode.lib.str_utils import safe_str, safe_int
49 49 from rhodecode.lib.auth import (
50 50 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
51 51 from rhodecode.lib.vcs import path as vcspath
52 52 from rhodecode.lib.vcs.backends.base import EmptyCommit
53 53 from rhodecode.lib.vcs.conf import settings
54 54 from rhodecode.lib.vcs.nodes import FileNode
55 55 from rhodecode.lib.vcs.exceptions import (
56 56 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
57 57 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
58 58 NodeDoesNotExistError, CommitError, NodeError)
59 59
60 60 from rhodecode.model.scm import ScmModel
61 61 from rhodecode.model.db import Repository
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 def get_archive_name(db_repo_name, commit_sha, ext, subrepos=False, path_sha='', with_hash=True):
67 67 # original backward compat name of archive
68 68 clean_name = safe_str(db_repo_name.replace('/', '_'))
69 69
70 70 # e.g. vcsserver-sub-1-abcfdef-archive-all.zip
71 71 # vcsserver-sub-0-abcfdef-COMMIT_SHA-PATH_SHA.zip
72 72
73 73 sub_repo = 'sub-1' if subrepos else 'sub-0'
74 74 commit = commit_sha if with_hash else 'archive'
75 75 path_marker = (path_sha if with_hash else '') or 'all'
76 76 archive_name = f'{clean_name}-{sub_repo}-{commit}-{path_marker}{ext}'
77 77
78 78 return archive_name
79 79
80 80
81 81 def get_path_sha(at_path):
82 82 return safe_str(sha1_safe(at_path)[:8])
83 83
84 84
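# hypothetical usage sketch: assuming '.tar.gz' is among the extensions declared in
# settings.ARCHIVE_SPECS, a call like
#
#   commit_id, ext, fileformat, content_type = _get_archive_spec('deadbeef.tar.gz')
#
# would yield commit_id='deadbeef', ext='.tar.gz' and the matching fileformat/content_type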
85 85 def _get_archive_spec(fname):
86 86 log.debug('Detecting archive spec for: `%s`', fname)
87 87
88 88 fileformat = None
89 89 ext = None
90 90 content_type = None
91 91 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
92 92
93 93 if fname.endswith(extension):
94 94 fileformat = a_type
95 95 log.debug('archive is of type: %s', fileformat)
96 96 ext = extension
97 97 break
98 98
99 99 if not fileformat:
100 100 raise ValueError()
101 101
102 102 # the leftover part of the whole fname is the commit id
103 103 commit_id = fname[:-len(ext)]
104 104
105 105 return commit_id, ext, fileformat, content_type
106 106
107 107
108 108 class RepoFilesView(RepoAppView):
109 109
110 110 @staticmethod
111 111 def adjust_file_path_for_svn(f_path, repo):
112 112 """
113 113 Computes the relative path of `f_path`.
114 114
115 115 This is mainly based on prefix matching of the recognized tags and
116 116 branches in the underlying repository.
117 117 """
118 118 tags_and_branches = itertools.chain(
119 119 repo.branches.keys(),
120 120 repo.tags.keys())
121 121 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
122 122
123 123 for name in tags_and_branches:
124 124 if f_path.startswith(f'{name}/'):
125 125 f_path = vcspath.relpath(f_path, name)
126 126 break
127 127 return f_path
128 128
129 129 def load_default_context(self):
130 130 c = self._get_local_tmpl_context(include_app_defaults=True)
131 131 c.rhodecode_repo = self.rhodecode_vcs_repo
132 132 c.enable_downloads = self.db_repo.enable_downloads
133 133 return c
134 134
135 135 def _ensure_not_locked(self, commit_id='tip'):
136 136 _ = self.request.translate
137 137
138 138 repo = self.db_repo
139 139 if repo.enable_locking and repo.locked[0]:
140 140 h.flash(_('This repository has been locked by %s on %s')
141 141 % (h.person_by_id(repo.locked[0]),
142 142 h.format_date(h.time_to_datetime(repo.locked[1]))),
143 143 'warning')
144 144 files_url = h.route_path(
145 145 'repo_files:default_path',
146 146 repo_name=self.db_repo_name, commit_id=commit_id)
147 147 raise HTTPFound(files_url)
148 148
149 149 def forbid_non_head(self, is_head, f_path, commit_id='tip', json_mode=False):
150 150 _ = self.request.translate
151 151
152 152 if not is_head:
153 153 message = _('Cannot modify file. '
154 154 'Given commit `{}` is not head of a branch.').format(commit_id)
155 155 h.flash(message, category='warning')
156 156
157 157 if json_mode:
158 158 return message
159 159
160 160 files_url = h.route_path(
161 161 'repo_files', repo_name=self.db_repo_name, commit_id=commit_id,
162 162 f_path=f_path)
163 163 raise HTTPFound(files_url)
164 164
165 165 def check_branch_permission(self, branch_name, commit_id='tip', json_mode=False):
166 166 _ = self.request.translate
167 167
168 168 rule, branch_perm = self._rhodecode_user.get_rule_and_branch_permission(
169 169 self.db_repo_name, branch_name)
170 170 if branch_perm and branch_perm not in ['branch.push', 'branch.push_force']:
171 171 message = _('Branch `{}` changes forbidden by rule {}.').format(
172 172 h.escape(branch_name), h.escape(rule))
173 173 h.flash(message, 'warning')
174 174
175 175 if json_mode:
176 176 return message
177 177
178 178 files_url = h.route_path(
179 179 'repo_files:default_path', repo_name=self.db_repo_name, commit_id=commit_id)
180 180
181 181 raise HTTPFound(files_url)
182 182
183 183 def _get_commit_and_path(self):
184 184 default_commit_id = self.db_repo.landing_ref_name
185 185 default_f_path = '/'
186 186
187 187 commit_id = self.request.matchdict.get(
188 188 'commit_id', default_commit_id)
189 189 f_path = self._get_f_path(self.request.matchdict, default_f_path)
190 190 return commit_id, f_path
191 191
192 192 def _get_default_encoding(self, c):
193 193 enc_list = getattr(c, 'default_encodings', [])
194 194 return enc_list[0] if enc_list else 'UTF-8'
195 195
196 196 def _get_commit_or_redirect(self, commit_id, redirect_after=True):
197 197 """
198 198 This is a safe way to get a commit. If an error occurs, it redirects to
199 199 tip with a proper message
200 200
201 201 :param commit_id: id of commit to fetch
202 202 :param redirect_after: toggle redirection
203 203 """
204 204 _ = self.request.translate
205 205
206 206 try:
207 207 return self.rhodecode_vcs_repo.get_commit(commit_id)
208 208 except EmptyRepositoryError:
209 209 if not redirect_after:
210 210 return None
211 211
212 212 add_new = upload_new = ""
213 213 if h.HasRepoPermissionAny(
214 214 'repository.write', 'repository.admin')(self.db_repo_name):
215 215 _url = h.route_path(
216 216 'repo_files_add_file',
217 217 repo_name=self.db_repo_name, commit_id=0, f_path='')
218 218 add_new = h.link_to(
219 219 _('add a new file'), _url, class_="alert-link")
220 220
221 221 _url_upld = h.route_path(
222 222 'repo_files_upload_file',
223 223 repo_name=self.db_repo_name, commit_id=0, f_path='')
224 224 upload_new = h.link_to(
225 225 _('upload a new file'), _url_upld, class_="alert-link")
226 226
227 227 h.flash(h.literal(
228 228 _('There are no files yet. Click here to %s or %s.') % (add_new, upload_new)), category='warning')
229 229 raise HTTPFound(
230 230 h.route_path('repo_summary', repo_name=self.db_repo_name))
231 231
232 232 except (CommitDoesNotExistError, LookupError) as e:
233 233 msg = _('No such commit exists for this repository. Commit: {}').format(commit_id)
234 234 h.flash(msg, category='error')
235 235 raise HTTPNotFound()
236 236 except RepositoryError as e:
237 237 h.flash(h.escape(safe_str(e)), category='error')
238 238 raise HTTPNotFound()
239 239
240 240 def _get_filenode_or_redirect(self, commit_obj, path, pre_load=None):
241 241 """
242 242 Returns file_node; if an error occurs or the given path is a directory,
243 243 it'll redirect to the top level path
244 244 """
245 245 _ = self.request.translate
246 246
247 247 try:
248 248 file_node = commit_obj.get_node(path, pre_load=pre_load)
249 249 if file_node.is_dir():
250 250 raise RepositoryError('The given path is a directory')
251 251 except CommitDoesNotExistError:
252 252 log.exception('No such commit exists for this repository')
253 253 h.flash(_('No such commit exists for this repository'), category='error')
254 254 raise HTTPNotFound()
255 255 except RepositoryError as e:
256 256 log.warning('Repository error while fetching filenode `%s`. Err:%s', path, e)
257 257 h.flash(h.escape(safe_str(e)), category='error')
258 258 raise HTTPNotFound()
259 259
260 260 return file_node
261 261
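# resolves `commit_id` against the repository heads and returns a
# (branch_name, sha_commit_id, is_head) tuple; is_head is True when the id points at a
# branch head (or, for Subversion, at the single head commit)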
262 262 def _is_valid_head(self, commit_id, repo, landing_ref):
263 263 branch_name = sha_commit_id = ''
264 264 is_head = False
265 265 log.debug('Checking if commit_id `%s` is a head for %s.', commit_id, repo)
266 266
267 267 for _branch_name, branch_commit_id in repo.branches.items():
268 268 # simple case: we pass in a branch name, so it's a HEAD
269 269 if commit_id == _branch_name:
270 270 is_head = True
271 271 branch_name = _branch_name
272 272 sha_commit_id = branch_commit_id
273 273 break
274 274 # case when we pass in full sha commit_id, which is a head
275 275 elif commit_id == branch_commit_id:
276 276 is_head = True
277 277 branch_name = _branch_name
278 278 sha_commit_id = branch_commit_id
279 279 break
280 280
281 281 if h.is_svn(repo) and not repo.is_empty():
282 282 # Note: Subversion only has one head.
283 283 if commit_id == repo.get_commit(commit_idx=-1).raw_id:
284 284 is_head = True
285 285 return branch_name, sha_commit_id, is_head
286 286
287 287 # branches have been checked; now we only need to try to get the branch/commit_sha
288 288 if repo.is_empty():
289 289 is_head = True
290 290 branch_name = landing_ref
291 291 sha_commit_id = EmptyCommit().raw_id
292 292 else:
293 293 commit = repo.get_commit(commit_id=commit_id)
294 294 if commit:
295 295 branch_name = commit.branch
296 296 sha_commit_id = commit.raw_id
297 297
298 298 return branch_name, sha_commit_id, is_head
299 299
300 300 def _get_tree_at_commit(self, c, commit_id, f_path, full_load=False, at_rev=None):
301 301
302 302 repo_id = self.db_repo.repo_id
303 303 force_recache = self.get_recache_flag()
304 304
305 305 cache_seconds = safe_int(
306 306 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
307 307 cache_on = not force_recache and cache_seconds > 0
308 308 log.debug(
309 309 'Computing FILE TREE for repo_id %s commit_id `%s` and path `%s`'
310 310 'with caching: %s[TTL: %ss]' % (
311 311 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
312 312
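# the namespace embeds rc_cache.FILE_TREE_CACHE_VER directly, replacing the explicit
# `ver` argument that compute_file_tree() used to receive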
313 cache_namespace_uid = f'repo.{repo_id}'
313 cache_namespace_uid = f'repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}'
314 314 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
315 315
316 316 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
317 def compute_file_tree(ver, _name_hash, _repo_id, _commit_id, _f_path, _full_load, _at_rev):
317 def compute_file_tree(_name_hash, _repo_id, _commit_id, _f_path, _full_load, _at_rev):
318 318 log.debug('Generating cached file tree at ver:%s for repo_id: %s, %s, %s',
319 ver, _repo_id, _commit_id, _f_path)
319 _repo_id, _commit_id, _f_path)
320 320
321 321 c.full_load = _full_load
322 322 return render(
323 323 'rhodecode:templates/files/files_browser_tree.mako',
324 324 self._get_template_context(c), self.request, _at_rev)
325 325
326 326 return compute_file_tree(
327 rc_cache.FILE_TREE_CACHE_VER, self.db_repo.repo_name_hash,
328 self.db_repo.repo_id, commit_id, f_path, full_load, at_rev)
327 self.db_repo.repo_name_hash, self.db_repo.repo_id, commit_id, f_path, full_load, at_rev)
329 328
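# a minimal sketch, not part of this changeset, of how such a versioned namespace could
# be cleared; it assumes rc_cache.clear_cache_namespace() accepts the 'cache_repo' region
# the same way it accepts 'cache_perms' in the user caches view above:
#
#   cache_namespace_uid = f'repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}'
#   del_keys = rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
#   log.debug('Removed %s file tree cache keys', del_keys)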
330 329 def create_pure_path(self, *parts):
331 330 # Split paths and sanitize them, removing any ../ etc
332 331 sanitized_path = [
333 332 x for x in pathlib.PurePath(*parts).parts
334 333 if x not in ['.', '..']]
335 334
336 335 pure_path = pathlib.PurePath(*sanitized_path)
337 336 return pure_path
338 337
339 338 def _is_lf_enabled(self, target_repo):
340 339 lf_enabled = False
341 340
342 341 lf_key_for_vcs_map = {
343 342 'hg': 'extensions_largefiles',
344 343 'git': 'vcs_git_lfs_enabled'
345 344 }
346 345
347 346 lf_key_for_vcs = lf_key_for_vcs_map.get(target_repo.repo_type)
348 347
349 348 if lf_key_for_vcs:
350 349 lf_enabled = self._get_repo_setting(target_repo, lf_key_for_vcs)
351 350
352 351 return lf_enabled
353 352
354 353 @LoginRequired()
355 354 @HasRepoPermissionAnyDecorator(
356 355 'repository.read', 'repository.write', 'repository.admin')
357 356 def repo_archivefile(self):
358 357 # archive cache config
359 358 from rhodecode import CONFIG
360 359 _ = self.request.translate
361 360 self.load_default_context()
362 361 default_at_path = '/'
363 362 fname = self.request.matchdict['fname']
364 363 subrepos = self.request.GET.get('subrepos') == 'true'
365 364 with_hash = str2bool(self.request.GET.get('with_hash', '1'))
366 365 at_path = self.request.GET.get('at_path') or default_at_path
367 366
368 367 if not self.db_repo.enable_downloads:
369 368 return Response(_('Downloads disabled'))
370 369
371 370 try:
372 371 commit_id, ext, fileformat, content_type = \
373 372 _get_archive_spec(fname)
374 373 except ValueError:
375 374 return Response(_('Unknown archive type for: `{}`').format(
376 375 h.escape(fname)))
377 376
378 377 try:
379 378 commit = self.rhodecode_vcs_repo.get_commit(commit_id)
380 379 except CommitDoesNotExistError:
381 380 return Response(_('Unknown commit_id {}').format(
382 381 h.escape(commit_id)))
383 382 except EmptyRepositoryError:
384 383 return Response(_('Empty repository'))
385 384
386 385 # we used a ref, or a shorter version, let's redirect the client to use the explicit hash
387 386 if commit_id != commit.raw_id:
388 387 fname=f'{commit.raw_id}{ext}'
389 388 raise HTTPFound(self.request.current_route_path(fname=fname))
390 389
391 390 try:
392 391 at_path = commit.get_node(at_path).path or default_at_path
393 392 except Exception:
394 393 return Response(_('No node at path {} for this repository').format(h.escape(at_path)))
395 394
396 395 path_sha = get_path_sha(at_path)
397 396
398 397 # used for cache etc, consistent unique archive name
399 398 archive_name_key = get_archive_name(
400 399 self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
401 400 path_sha=path_sha, with_hash=True)
402 401
403 402 if not with_hash:
404 403 path_sha = ''
405 404
406 405 # what end client gets served
407 406 response_archive_name = get_archive_name(
408 407 self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
409 408 path_sha=path_sha, with_hash=with_hash)
410 409
411 410 # remove extension from our archive directory name
412 411 archive_dir_name = response_archive_name[:-len(ext)]
413 412
414 413 archive_cache_disable = self.request.GET.get('no_cache')
415 414
416 415 d_cache = get_archival_cache_store(config=CONFIG)
417 416 # NOTE: we get the config to pass to a call to lazy-init the SAME type of cache on vcsserver
418 417 d_cache_conf = get_archival_config(config=CONFIG)
419 418
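# under the per-archive reentrant lock we either stream an already cached archive, or
# build a fresh one via commit.archive_repo() and then read it back from the same
# distributed archive cache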
420 419 reentrant_lock_key = archive_name_key + '.lock'
421 420 with ReentrantLock(d_cache, reentrant_lock_key):
422 421 # This is also a cache key
423 422 use_cached_archive = False
424 423 if archive_name_key in d_cache and not archive_cache_disable:
425 424 reader, tag = d_cache.get(archive_name_key, read=True, tag=True, retry=True)
426 425 use_cached_archive = True
427 426 log.debug('Found cached archive as key=%s tag=%s, serving archive from cache reader=%s',
428 427 archive_name_key, tag, reader.name)
429 428 else:
430 429 reader = None
431 430 log.debug('Archive with key=%s is not yet cached, creating one now...', archive_name_key)
432 431
433 432 # generate new archive, as previous was not found in the cache
434 433 if not reader:
435 434 # first remove expired items, before generating a new one :)
436 435 # we do this manually because automatic eviction is disabled
437 436 d_cache.cull(retry=True)
438 437
439 438 try:
440 439 commit.archive_repo(archive_name_key, archive_dir_name=archive_dir_name,
441 440 kind=fileformat, subrepos=subrepos,
442 441 archive_at_path=at_path, cache_config=d_cache_conf)
443 442 except ImproperArchiveTypeError:
444 443 return _('Unknown archive type')
445 444
446 445 reader, tag = d_cache.get(archive_name_key, read=True, tag=True, retry=True)
447 446
448 447 if not reader:
449 448 raise ValueError('archive cache reader is empty, failed to fetch file from distributed archive cache')
450 449
451 450 def archive_iterator(_reader):
452 451 while 1:
453 452 data = _reader.read(1024)
454 453 if not data:
455 454 break
456 455 yield data
457 456
458 457 response = Response(app_iter=archive_iterator(reader))
459 458 response.content_disposition = f'attachment; filename={response_archive_name}'
460 459 response.content_type = str(content_type)
461 460
462 461 try:
463 462 return response
464 463 finally:
465 464 # store download action
466 465 audit_logger.store_web(
467 466 'repo.archive.download', action_data={
468 467 'user_agent': self.request.user_agent,
469 468 'archive_name': archive_name_key,
470 469 'archive_spec': fname,
471 470 'archive_cached': use_cached_archive},
472 471 user=self._rhodecode_user,
473 472 repo=self.db_repo,
474 473 commit=True
475 474 )
476 475
477 476 def _get_file_node(self, commit_id, f_path):
478 477 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
479 478 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
480 479 try:
481 480 node = commit.get_node(f_path)
482 481 if node.is_dir():
483 482 raise NodeError(f'{node} path is a {type(node)} not a file')
484 483 except NodeDoesNotExistError:
485 484 commit = EmptyCommit(
486 485 commit_id=commit_id,
487 486 idx=commit.idx,
488 487 repo=commit.repository,
489 488 alias=commit.repository.alias,
490 489 message=commit.message,
491 490 author=commit.author,
492 491 date=commit.date)
493 492 node = FileNode(safe_bytes(f_path), b'', commit=commit)
494 493 else:
495 494 commit = EmptyCommit(
496 495 repo=self.rhodecode_vcs_repo,
497 496 alias=self.rhodecode_vcs_repo.alias)
498 497 node = FileNode(safe_bytes(f_path), b'', commit=commit)
499 498 return node
500 499
501 500 @LoginRequired()
502 501 @HasRepoPermissionAnyDecorator(
503 502 'repository.read', 'repository.write', 'repository.admin')
504 503 def repo_files_diff(self):
505 504 c = self.load_default_context()
506 505 f_path = self._get_f_path(self.request.matchdict)
507 506 diff1 = self.request.GET.get('diff1', '')
508 507 diff2 = self.request.GET.get('diff2', '')
509 508
510 509 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
511 510
512 511 ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
513 512 line_context = self.request.GET.get('context', 3)
514 513
515 514 if not any((diff1, diff2)):
516 515 h.flash(
517 516 'Need query parameter "diff1" or "diff2" to generate a diff.',
518 517 category='error')
519 518 raise HTTPBadRequest()
520 519
521 520 c.action = self.request.GET.get('diff')
522 521 if c.action not in ['download', 'raw']:
523 522 compare_url = h.route_path(
524 523 'repo_compare',
525 524 repo_name=self.db_repo_name,
526 525 source_ref_type='rev',
527 526 source_ref=diff1,
528 527 target_repo=self.db_repo_name,
529 528 target_ref_type='rev',
530 529 target_ref=diff2,
531 530 _query=dict(f_path=f_path))
532 531 # redirect to new view if we render diff
533 532 raise HTTPFound(compare_url)
534 533
535 534 try:
536 535 node1 = self._get_file_node(diff1, path1)
537 536 node2 = self._get_file_node(diff2, f_path)
538 537 except (RepositoryError, NodeError):
539 538 log.exception("Exception while trying to get node from repository")
540 539 raise HTTPFound(
541 540 h.route_path('repo_files', repo_name=self.db_repo_name,
542 541 commit_id='tip', f_path=f_path))
543 542
544 543 if all(isinstance(node.commit, EmptyCommit)
545 544 for node in (node1, node2)):
546 545 raise HTTPNotFound()
547 546
548 547 c.commit_1 = node1.commit
549 548 c.commit_2 = node2.commit
550 549
551 550 if c.action == 'download':
552 551 _diff = diffs.get_gitdiff(node1, node2,
553 552 ignore_whitespace=ignore_whitespace,
554 553 context=line_context)
555 554 # NOTE: this was using diff_format='gitdiff'
556 555 diff = diffs.DiffProcessor(_diff, diff_format='newdiff')
557 556
558 557 response = Response(self.path_filter.get_raw_patch(diff))
559 558 response.content_type = 'text/plain'
560 559 response.content_disposition = (
561 560 f'attachment; filename={f_path}_{diff1}_vs_{diff2}.diff'
562 561 )
563 562 charset = self._get_default_encoding(c)
564 563 if charset:
565 564 response.charset = charset
566 565 return response
567 566
568 567 elif c.action == 'raw':
569 568 _diff = diffs.get_gitdiff(node1, node2,
570 569 ignore_whitespace=ignore_whitespace,
571 570 context=line_context)
572 571 # NOTE: this was using diff_format='gitdiff'
573 572 diff = diffs.DiffProcessor(_diff, diff_format='newdiff')
574 573
575 574 response = Response(self.path_filter.get_raw_patch(diff))
576 575 response.content_type = 'text/plain'
577 576 charset = self._get_default_encoding(c)
578 577 if charset:
579 578 response.charset = charset
580 579 return response
581 580
582 581 # in case we ever end up here
583 582 raise HTTPNotFound()
584 583
585 584 @LoginRequired()
586 585 @HasRepoPermissionAnyDecorator(
587 586 'repository.read', 'repository.write', 'repository.admin')
588 587 def repo_files_diff_2way_redirect(self):
589 588 """
590 589 Kept only to make OLD links work
591 590 """
592 591 f_path = self._get_f_path_unchecked(self.request.matchdict)
593 592 diff1 = self.request.GET.get('diff1', '')
594 593 diff2 = self.request.GET.get('diff2', '')
595 594
596 595 if not any((diff1, diff2)):
597 596 h.flash(
598 597 'Need query parameter "diff1" or "diff2" to generate a diff.',
599 598 category='error')
600 599 raise HTTPBadRequest()
601 600
602 601 compare_url = h.route_path(
603 602 'repo_compare',
604 603 repo_name=self.db_repo_name,
605 604 source_ref_type='rev',
606 605 source_ref=diff1,
607 606 target_ref_type='rev',
608 607 target_ref=diff2,
609 608 _query=dict(f_path=f_path, diffmode='sideside',
610 609 target_repo=self.db_repo_name,))
611 610 raise HTTPFound(compare_url)
612 611
613 612 @LoginRequired()
614 613 def repo_files_default_commit_redirect(self):
615 614 """
616 615 Special page that redirects to the landing page of files based on the default
617 616 commit for repository
618 617 """
619 618 c = self.load_default_context()
620 619 ref_name = c.rhodecode_db_repo.landing_ref_name
621 620 landing_url = h.repo_files_by_ref_url(
622 621 c.rhodecode_db_repo.repo_name,
623 622 c.rhodecode_db_repo.repo_type,
624 623 f_path='',
625 624 ref_name=ref_name,
626 625 commit_id='tip',
627 626 query=dict(at=ref_name)
628 627 )
629 628
630 629 raise HTTPFound(landing_url)
631 630
632 631 @LoginRequired()
633 632 @HasRepoPermissionAnyDecorator(
634 633 'repository.read', 'repository.write', 'repository.admin')
635 634 def repo_files(self):
636 635 c = self.load_default_context()
637 636
638 637 view_name = getattr(self.request.matched_route, 'name', None)
639 638
640 639 c.annotate = view_name == 'repo_files:annotated'
641 640 # default is false, but .rst/.md files later are auto rendered, we can
642 641 # overwrite auto rendering by setting this GET flag
643 642 c.renderer = view_name == 'repo_files:rendered' or not self.request.GET.get('no-render', False)
644 643
645 644 commit_id, f_path = self._get_commit_and_path()
646 645
647 646 c.commit = self._get_commit_or_redirect(commit_id)
648 647 c.branch = self.request.GET.get('branch', None)
649 648 c.f_path = f_path
650 649 at_rev = self.request.GET.get('at')
651 650
652 651 # prev link
653 652 try:
654 653 prev_commit = c.commit.prev(c.branch)
655 654 c.prev_commit = prev_commit
656 655 c.url_prev = h.route_path(
657 656 'repo_files', repo_name=self.db_repo_name,
658 657 commit_id=prev_commit.raw_id, f_path=f_path)
659 658 if c.branch:
660 659 c.url_prev += '?branch=%s' % c.branch
661 660 except (CommitDoesNotExistError, VCSError):
662 661 c.url_prev = '#'
663 662 c.prev_commit = EmptyCommit()
664 663
665 664 # next link
666 665 try:
667 666 next_commit = c.commit.next(c.branch)
668 667 c.next_commit = next_commit
669 668 c.url_next = h.route_path(
670 669 'repo_files', repo_name=self.db_repo_name,
671 670 commit_id=next_commit.raw_id, f_path=f_path)
672 671 if c.branch:
673 672 c.url_next += '?branch=%s' % c.branch
674 673 except (CommitDoesNotExistError, VCSError):
675 674 c.url_next = '#'
676 675 c.next_commit = EmptyCommit()
677 676
678 677 # files or dirs
679 678 try:
680 679 c.file = c.commit.get_node(f_path, pre_load=['is_binary', 'size', 'data'])
681 680
682 681 c.file_author = True
683 682 c.file_tree = ''
684 683
685 684 # load file content
686 685 if c.file.is_file():
687 686 c.lf_node = {}
688 687
689 688 has_lf_enabled = self._is_lf_enabled(self.db_repo)
690 689 if has_lf_enabled:
691 690 c.lf_node = c.file.get_largefile_node()
692 691
693 692 c.file_source_page = 'true'
694 693 c.file_last_commit = c.file.last_commit
695 694
696 695 c.file_size_too_big = c.file.size > c.visual.cut_off_limit_file
697 696
698 697 if not (c.file_size_too_big or c.file.is_binary):
699 698 if c.annotate: # annotation has precedence over renderer
700 699 c.annotated_lines = filenode_as_annotated_lines_tokens(
701 700 c.file
702 701 )
703 702 else:
704 703 c.renderer = (
705 704 c.renderer and h.renderer_from_filename(c.file.path)
706 705 )
707 706 if not c.renderer:
708 707 c.lines = filenode_as_lines_tokens(c.file)
709 708
710 709 _branch_name, _sha_commit_id, is_head = \
711 710 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
712 711 landing_ref=self.db_repo.landing_ref_name)
713 712 c.on_branch_head = is_head
714 713
715 714 branch = c.commit.branch if (
716 715 c.commit.branch and '/' not in c.commit.branch) else None
717 716 c.branch_or_raw_id = branch or c.commit.raw_id
718 717 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
719 718
720 719 author = c.file_last_commit.author
721 720 c.authors = [[
722 721 h.email(author),
723 722 h.person(author, 'username_or_name_or_email'),
724 723 1
725 724 ]]
726 725
727 726 else: # load tree content at path
728 727 c.file_source_page = 'false'
729 728 c.authors = []
730 729 # this loads a simple tree without metadata to speed things up
731 730 # later via ajax we call repo_nodetree_full and fetch whole
732 731 c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path, at_rev=at_rev)
733 732
734 733 c.readme_data, c.readme_file = \
735 734 self._get_readme_data(self.db_repo, c.visual.default_renderer,
736 735 c.commit.raw_id, f_path)
737 736
738 737 except RepositoryError as e:
739 738 h.flash(h.escape(safe_str(e)), category='error')
740 739 raise HTTPNotFound()
741 740
742 741 if self.request.environ.get('HTTP_X_PJAX'):
743 742 html = render('rhodecode:templates/files/files_pjax.mako',
744 743 self._get_template_context(c), self.request)
745 744 else:
746 745 html = render('rhodecode:templates/files/files.mako',
747 746 self._get_template_context(c), self.request)
748 747 return Response(html)
749 748
750 749 @HasRepoPermissionAnyDecorator(
751 750 'repository.read', 'repository.write', 'repository.admin')
752 751 def repo_files_annotated_previous(self):
753 752 self.load_default_context()
754 753
755 754 commit_id, f_path = self._get_commit_and_path()
756 755 commit = self._get_commit_or_redirect(commit_id)
757 756 prev_commit_id = commit.raw_id
758 757 line_anchor = self.request.GET.get('line_anchor')
759 758 is_file = False
760 759 try:
761 760 _file = commit.get_node(f_path)
762 761 is_file = _file.is_file()
763 762 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
764 763 pass
765 764
766 765 if is_file:
767 766 history = commit.get_path_history(f_path)
768 767 prev_commit_id = history[1].raw_id \
769 768 if len(history) > 1 else prev_commit_id
770 769 prev_url = h.route_path(
771 770 'repo_files:annotated', repo_name=self.db_repo_name,
772 771 commit_id=prev_commit_id, f_path=f_path,
773 772 _anchor=f'L{line_anchor}')
774 773
775 774 raise HTTPFound(prev_url)
776 775
777 776 @LoginRequired()
778 777 @HasRepoPermissionAnyDecorator(
779 778 'repository.read', 'repository.write', 'repository.admin')
780 779 def repo_nodetree_full(self):
781 780 """
782 781 Returns rendered html of file tree that contains commit date,
783 782 author, commit_id for the specified combination of
784 783 repo, commit_id and file path
785 784 """
786 785 c = self.load_default_context()
787 786
788 787 commit_id, f_path = self._get_commit_and_path()
789 788 commit = self._get_commit_or_redirect(commit_id)
790 789 try:
791 790 dir_node = commit.get_node(f_path)
792 791 except RepositoryError as e:
793 792 return Response(f'error: {h.escape(safe_str(e))}')
794 793
795 794 if dir_node.is_file():
796 795 return Response('')
797 796
798 797 c.file = dir_node
799 798 c.commit = commit
800 799 at_rev = self.request.GET.get('at')
801 800
802 801 html = self._get_tree_at_commit(
803 802 c, commit.raw_id, dir_node.path, full_load=True, at_rev=at_rev)
804 803
805 804 return Response(html)
806 805
807 806 def _get_attachement_headers(self, f_path):
808 807 f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1])
809 808 safe_path = f_name.replace('"', '\\"')
810 809 encoded_path = urllib.parse.quote(f_name)
811 810
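        # NOTE (added, illustrative only): for a hypothetical file 'résumé.pdf' the
        # header below would read:
        #   attachment; filename="résumé.pdf"; filename*=UTF-8''r%C3%A9sum%C3%A9.pdf
        # the plain `filename` keeps legacy clients working, while the RFC 5987
        # `filename*` form carries the percent-encoded UTF-8 name.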
812 811 return "attachment; " \
813 812 "filename=\"{}\"; " \
814 813 "filename*=UTF-8\'\'{}".format(safe_path, encoded_path)
815 814
816 815 @LoginRequired()
817 816 @HasRepoPermissionAnyDecorator(
818 817 'repository.read', 'repository.write', 'repository.admin')
819 818 def repo_file_raw(self):
820 819 """
821 820 Action for show as raw, some mimetypes are "rendered",
822 821 those include images, icons.
823 822 """
824 823 c = self.load_default_context()
825 824
826 825 commit_id, f_path = self._get_commit_and_path()
827 826 commit = self._get_commit_or_redirect(commit_id)
828 827 file_node = self._get_filenode_or_redirect(commit, f_path)
829 828
830 829 raw_mimetype_mapping = {
831 830 # map original mimetype to a mimetype used for "show as raw"
832 831 # you can also provide a content-disposition to override the
833 832 # default "attachment" disposition.
834 833 # orig_type: (new_type, new_dispo)
835 834
836 835 # show images inline:
837 836 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
838 837 # for example render an SVG with javascript inside or even render
839 838 # HTML.
840 839 'image/x-icon': ('image/x-icon', 'inline'),
841 840 'image/png': ('image/png', 'inline'),
842 841 'image/gif': ('image/gif', 'inline'),
843 842 'image/jpeg': ('image/jpeg', 'inline'),
844 843 'application/pdf': ('application/pdf', 'inline'),
845 844 }
846 845
847 846 mimetype = file_node.mimetype
848 847 try:
849 848 mimetype, disposition = raw_mimetype_mapping[mimetype]
850 849 except KeyError:
851 850 # we don't know anything special about this, handle it safely
852 851 if file_node.is_binary:
853 852 # do same as download raw for binary files
854 853 mimetype, disposition = 'application/octet-stream', 'attachment'
855 854 else:
856 855 # do not just use the original mimetype, but force text/plain,
857 856 # otherwise it would serve text/html and that might be unsafe.
858 857 # Note: underlying vcs library fakes text/plain mimetype if the
859 858 # mimetype can not be determined and it thinks it is not
860 859                 # mimetype cannot be determined and it thinks it is not
861 860                 # binary. This might lead to erroneous text display in some
862 861 # without extension.
863 862 mimetype, disposition = 'text/plain', 'inline'
864 863
865 864 if disposition == 'attachment':
866 865 disposition = self._get_attachement_headers(f_path)
867 866
868 867 stream_content = file_node.stream_bytes()
869 868
870 869 response = Response(app_iter=stream_content)
871 870 response.content_disposition = disposition
872 871 response.content_type = mimetype
873 872
874 873 charset = self._get_default_encoding(c)
875 874 if charset:
876 875 response.charset = charset
877 876
878 877 return response
879 878
880 879 @LoginRequired()
881 880 @HasRepoPermissionAnyDecorator(
882 881 'repository.read', 'repository.write', 'repository.admin')
883 882 def repo_file_download(self):
884 883 c = self.load_default_context()
885 884
886 885 commit_id, f_path = self._get_commit_and_path()
887 886 commit = self._get_commit_or_redirect(commit_id)
888 887 file_node = self._get_filenode_or_redirect(commit, f_path)
889 888
890 889 if self.request.GET.get('lf'):
891 890 # only if lf get flag is passed, we download this file
892 891 # as LFS/Largefile
893 892 lf_node = file_node.get_largefile_node()
894 893 if lf_node:
895 894 # overwrite our pointer with the REAL large-file
896 895 file_node = lf_node
897 896
898 897 disposition = self._get_attachement_headers(f_path)
899 898
900 899 stream_content = file_node.stream_bytes()
901 900
902 901 response = Response(app_iter=stream_content)
903 902 response.content_disposition = disposition
904 903 response.content_type = file_node.mimetype
905 904
906 905 charset = self._get_default_encoding(c)
907 906 if charset:
908 907 response.charset = charset
909 908
910 909 return response
911 910
912 911 def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):
913 912
914 913 cache_seconds = safe_int(
915 914 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
916 915 cache_on = cache_seconds > 0
917 916 log.debug(
918 917         'Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s` '
919 918         'with caching: %s [TTL: %ss]' % (
920 919 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
921 920
922 921 cache_namespace_uid = f'repo.{repo_id}'
923 922 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
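        # NOTE (added commentary): conditional_cache_on_arguments keys cached
        # entries by the wrapped function's arguments, so passing
        # repo_name_hash/repo_id/commit_id/f_path below scopes each entry to a
        # single lookup inside the per-repo 'repo.<id>' namespace created above.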
924 923
925 924 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
926 925 def compute_file_search(_name_hash, _repo_id, _commit_id, _f_path):
927 926 log.debug('Generating cached nodelist for repo_id:%s, %s, %s',
928 927 _repo_id, commit_id, f_path)
929 928 try:
930 929 _d, _f = ScmModel().get_quick_filter_nodes(repo_name, _commit_id, _f_path)
931 930 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
932 931 log.exception(safe_str(e))
933 932 h.flash(h.escape(safe_str(e)), category='error')
934 933 raise HTTPFound(h.route_path(
935 934 'repo_files', repo_name=self.db_repo_name,
936 935 commit_id='tip', f_path='/'))
937 936
938 937 return _d + _f
939 938
940 939 result = compute_file_search(self.db_repo.repo_name_hash, self.db_repo.repo_id,
941 940 commit_id, f_path)
942 941 return filter(lambda n: self.path_filter.path_access_allowed(n['name']), result)
943 942
944 943 @LoginRequired()
945 944 @HasRepoPermissionAnyDecorator(
946 945 'repository.read', 'repository.write', 'repository.admin')
947 946 def repo_nodelist(self):
948 947 self.load_default_context()
949 948
950 949 commit_id, f_path = self._get_commit_and_path()
951 950 commit = self._get_commit_or_redirect(commit_id)
952 951
953 952 metadata = self._get_nodelist_at_commit(
954 953 self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
955 954 return {'nodes': [x for x in metadata]}
956 955
957 956 def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type):
958 957 items = []
959 958 for name, commit_id in branches_or_tags.items():
960 959 sym_ref = symbolic_reference(commit_id, name, f_path, ref_type)
961 960 items.append((sym_ref, name, ref_type))
962 961 return items
963 962
964 963 def _symbolic_reference(self, commit_id, name, f_path, ref_type):
965 964 return commit_id
966 965
967 966 def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type):
968 967 return commit_id
969 968
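        # NOTE (added): everything below this early return is intentionally
        # unreachable; it is kept only as a reference for the old "diff" mode
        # behaviour mentioned in the note below.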
970 969 # NOTE(dan): old code we used in "diff" mode compare
971 970 new_f_path = vcspath.join(name, f_path)
972 971 return f'{new_f_path}@{commit_id}'
973 972
974 973 def _get_node_history(self, commit_obj, f_path, commits=None):
975 974 """
976 975 get commit history for given node
977 976
978 977 :param commit_obj: commit to calculate history
979 978 :param f_path: path for node to calculate history for
980 979 :param commits: if passed don't calculate history and take
981 980 commits defined in this list
982 981 """
983 982 _ = self.request.translate
984 983
985 984 # calculate history based on tip
986 985 tip = self.rhodecode_vcs_repo.get_commit()
987 986 if commits is None:
988 987 pre_load = ["author", "branch"]
989 988 try:
990 989 commits = tip.get_path_history(f_path, pre_load=pre_load)
991 990 except (NodeDoesNotExistError, CommitError):
992 991 # this node is not present at tip!
993 992 commits = commit_obj.get_path_history(f_path, pre_load=pre_load)
994 993
995 994 history = []
996 995 commits_group = ([], _("Changesets"))
997 996 for commit in commits:
998 997 branch = ' (%s)' % commit.branch if commit.branch else ''
999 998 n_desc = f'r{commit.idx}:{commit.short_id}{branch}'
1000 999 commits_group[0].append((commit.raw_id, n_desc, 'sha'))
1001 1000 history.append(commits_group)
1002 1001
1003 1002 symbolic_reference = self._symbolic_reference
1004 1003
1005 1004 if self.rhodecode_vcs_repo.alias == 'svn':
1006 1005 adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
1007 1006 f_path, self.rhodecode_vcs_repo)
1008 1007 if adjusted_f_path != f_path:
1009 1008 log.debug(
1010 1009 'Recognized svn tag or branch in file "%s", using svn '
1011 1010 'specific symbolic references', f_path)
1012 1011 f_path = adjusted_f_path
1013 1012 symbolic_reference = self._symbolic_reference_svn
1014 1013
1015 1014 branches = self._create_references(
1016 1015 self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, 'branch')
1017 1016 branches_group = (branches, _("Branches"))
1018 1017
1019 1018 tags = self._create_references(
1020 1019 self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, 'tag')
1021 1020 tags_group = (tags, _("Tags"))
1022 1021
1023 1022 history.append(branches_group)
1024 1023 history.append(tags_group)
1025 1024
1026 1025 return history, commits
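        # Shape of the returned value (added note, derived from the code above):
        #   history = [([(raw_id, 'r<idx>:<short_id> (<branch>)', 'sha'), ...], 'Changesets'),
        #              ([(sym_ref, name, 'branch'), ...], 'Branches'),
        #              ([(sym_ref, name, 'tag'), ...], 'Tags')]
        #   commits = the commit iterable used to build the first group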
1027 1026
1028 1027 @LoginRequired()
1029 1028 @HasRepoPermissionAnyDecorator(
1030 1029 'repository.read', 'repository.write', 'repository.admin')
1031 1030 def repo_file_history(self):
1032 1031 self.load_default_context()
1033 1032
1034 1033 commit_id, f_path = self._get_commit_and_path()
1035 1034 commit = self._get_commit_or_redirect(commit_id)
1036 1035 file_node = self._get_filenode_or_redirect(commit, f_path)
1037 1036
1038 1037 if file_node.is_file():
1039 1038 file_history, _hist = self._get_node_history(commit, f_path)
1040 1039
1041 1040 res = []
1042 1041 for section_items, section in file_history:
1043 1042 items = []
1044 1043 for obj_id, obj_text, obj_type in section_items:
1045 1044 at_rev = ''
1046 1045 if obj_type in ['branch', 'bookmark', 'tag']:
1047 1046 at_rev = obj_text
1048 1047 entry = {
1049 1048 'id': obj_id,
1050 1049 'text': obj_text,
1051 1050 'type': obj_type,
1052 1051 'at_rev': at_rev
1053 1052 }
1054 1053
1055 1054 items.append(entry)
1056 1055
1057 1056 res.append({
1058 1057 'text': section,
1059 1058 'children': items
1060 1059 })
1061 1060
1062 1061 data = {
1063 1062 'more': False,
1064 1063 'results': res
1065 1064 }
1066 1065 return data
1067 1066
1068 1067 log.warning('Cannot fetch history for directory')
1069 1068 raise HTTPBadRequest()
1070 1069
1071 1070 @LoginRequired()
1072 1071 @HasRepoPermissionAnyDecorator(
1073 1072 'repository.read', 'repository.write', 'repository.admin')
1074 1073 def repo_file_authors(self):
1075 1074 c = self.load_default_context()
1076 1075
1077 1076 commit_id, f_path = self._get_commit_and_path()
1078 1077 commit = self._get_commit_or_redirect(commit_id)
1079 1078 file_node = self._get_filenode_or_redirect(commit, f_path)
1080 1079
1081 1080 if not file_node.is_file():
1082 1081 raise HTTPBadRequest()
1083 1082
1084 1083 c.file_last_commit = file_node.last_commit
1085 1084 if self.request.GET.get('annotate') == '1':
1086 1085 # use _hist from annotation if annotation mode is on
1087 1086 commit_ids = {x[1] for x in file_node.annotate}
1088 1087 _hist = (
1089 1088 self.rhodecode_vcs_repo.get_commit(commit_id)
1090 1089 for commit_id in commit_ids)
1091 1090 else:
1092 1091 _f_history, _hist = self._get_node_history(commit, f_path)
1093 1092 c.file_author = False
1094 1093
1095 1094 unique = collections.OrderedDict()
1096 1095 for commit in _hist:
1097 1096 author = commit.author
1098 1097 if author not in unique:
1099 1098 unique[commit.author] = [
1100 1099 h.email(author),
1101 1100 h.person(author, 'username_or_name_or_email'),
1102 1101 1 # counter
1103 1102 ]
1104 1103
1105 1104 else:
1106 1105 # increase counter
1107 1106 unique[commit.author][2] += 1
1108 1107
1109 1108 c.authors = [val for val in unique.values()]
1110 1109
1111 1110 return self._get_template_context(c)
1112 1111
1113 1112 @LoginRequired()
1114 1113 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1115 1114 def repo_files_check_head(self):
1116 1115 self.load_default_context()
1117 1116
1118 1117 commit_id, f_path = self._get_commit_and_path()
1119 1118 _branch_name, _sha_commit_id, is_head = \
1120 1119 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1121 1120 landing_ref=self.db_repo.landing_ref_name)
1122 1121
1123 1122 new_path = self.request.POST.get('path')
1124 1123 operation = self.request.POST.get('operation')
1125 1124 path_exist = ''
1126 1125
1127 1126 if new_path and operation in ['create', 'upload']:
1128 1127 new_f_path = os.path.join(f_path.lstrip('/'), new_path)
1129 1128 try:
1130 1129 commit_obj = self.rhodecode_vcs_repo.get_commit(commit_id)
1131 1130 # NOTE(dan): construct whole path without leading /
1132 1131 file_node = commit_obj.get_node(new_f_path)
1133 1132 if file_node is not None:
1134 1133 path_exist = new_f_path
1135 1134 except EmptyRepositoryError:
1136 1135 pass
1137 1136 except Exception:
1138 1137 pass
1139 1138
1140 1139 return {
1141 1140 'branch': _branch_name,
1142 1141 'sha': _sha_commit_id,
1143 1142 'is_head': is_head,
1144 1143 'path_exists': path_exist
1145 1144 }
1146 1145
1147 1146 @LoginRequired()
1148 1147 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1149 1148 def repo_files_remove_file(self):
1150 1149 _ = self.request.translate
1151 1150 c = self.load_default_context()
1152 1151 commit_id, f_path = self._get_commit_and_path()
1153 1152
1154 1153 self._ensure_not_locked()
1155 1154 _branch_name, _sha_commit_id, is_head = \
1156 1155 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1157 1156 landing_ref=self.db_repo.landing_ref_name)
1158 1157
1159 1158 self.forbid_non_head(is_head, f_path)
1160 1159 self.check_branch_permission(_branch_name)
1161 1160
1162 1161 c.commit = self._get_commit_or_redirect(commit_id)
1163 1162 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1164 1163
1165 1164 c.default_message = _(
1166 1165 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1167 1166 c.f_path = f_path
1168 1167
1169 1168 return self._get_template_context(c)
1170 1169
1171 1170 @LoginRequired()
1172 1171 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1173 1172 @CSRFRequired()
1174 1173 def repo_files_delete_file(self):
1175 1174 _ = self.request.translate
1176 1175
1177 1176 c = self.load_default_context()
1178 1177 commit_id, f_path = self._get_commit_and_path()
1179 1178
1180 1179 self._ensure_not_locked()
1181 1180 _branch_name, _sha_commit_id, is_head = \
1182 1181 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1183 1182 landing_ref=self.db_repo.landing_ref_name)
1184 1183
1185 1184 self.forbid_non_head(is_head, f_path)
1186 1185 self.check_branch_permission(_branch_name)
1187 1186
1188 1187 c.commit = self._get_commit_or_redirect(commit_id)
1189 1188 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1190 1189
1191 1190 c.default_message = _(
1192 1191 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1193 1192 c.f_path = f_path
1194 1193 node_path = f_path
1195 1194 author = self._rhodecode_db_user.full_contact
1196 1195 message = self.request.POST.get('message') or c.default_message
1197 1196 try:
1198 1197 nodes = {
1199 1198 safe_bytes(node_path): {
1200 1199 'content': b''
1201 1200 }
1202 1201 }
1203 1202 ScmModel().delete_nodes(
1204 1203 user=self._rhodecode_db_user.user_id, repo=self.db_repo,
1205 1204 message=message,
1206 1205 nodes=nodes,
1207 1206 parent_commit=c.commit,
1208 1207 author=author,
1209 1208 )
1210 1209
1211 1210 h.flash(
1212 1211 _('Successfully deleted file `{}`').format(
1213 1212 h.escape(f_path)), category='success')
1214 1213 except Exception:
1215 1214 log.exception('Error during commit operation')
1216 1215 h.flash(_('Error occurred during commit'), category='error')
1217 1216 raise HTTPFound(
1218 1217 h.route_path('repo_commit', repo_name=self.db_repo_name,
1219 1218 commit_id='tip'))
1220 1219
1221 1220 @LoginRequired()
1222 1221 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1223 1222 def repo_files_edit_file(self):
1224 1223 _ = self.request.translate
1225 1224 c = self.load_default_context()
1226 1225 commit_id, f_path = self._get_commit_and_path()
1227 1226
1228 1227 self._ensure_not_locked()
1229 1228 _branch_name, _sha_commit_id, is_head = \
1230 1229 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1231 1230 landing_ref=self.db_repo.landing_ref_name)
1232 1231
1233 1232 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1234 1233 self.check_branch_permission(_branch_name, commit_id=commit_id)
1235 1234
1236 1235 c.commit = self._get_commit_or_redirect(commit_id)
1237 1236 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1238 1237
1239 1238 if c.file.is_binary:
1240 1239 files_url = h.route_path(
1241 1240 'repo_files',
1242 1241 repo_name=self.db_repo_name,
1243 1242 commit_id=c.commit.raw_id, f_path=f_path)
1244 1243 raise HTTPFound(files_url)
1245 1244
1246 1245 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1247 1246 c.f_path = f_path
1248 1247
1249 1248 return self._get_template_context(c)
1250 1249
1251 1250 @LoginRequired()
1252 1251 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1253 1252 @CSRFRequired()
1254 1253 def repo_files_update_file(self):
1255 1254 _ = self.request.translate
1256 1255 c = self.load_default_context()
1257 1256 commit_id, f_path = self._get_commit_and_path()
1258 1257
1259 1258 self._ensure_not_locked()
1260 1259
1261 1260 c.commit = self._get_commit_or_redirect(commit_id)
1262 1261 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1263 1262
1264 1263 if c.file.is_binary:
1265 1264 raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name,
1266 1265 commit_id=c.commit.raw_id, f_path=f_path))
1267 1266
1268 1267 _branch_name, _sha_commit_id, is_head = \
1269 1268 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1270 1269 landing_ref=self.db_repo.landing_ref_name)
1271 1270
1272 1271 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1273 1272 self.check_branch_permission(_branch_name, commit_id=commit_id)
1274 1273
1275 1274 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1276 1275 c.f_path = f_path
1277 1276
1278 1277 old_content = c.file.str_content
1279 1278 sl = old_content.splitlines(1)
1280 1279 first_line = sl[0] if sl else ''
1281 1280
1282 1281 r_post = self.request.POST
1283 1282 # line endings: 0 - Unix, 1 - Mac, 2 - DOS
1284 1283 line_ending_mode = detect_mode(first_line, 0)
1285 1284 content = convert_line_endings(r_post.get('content', ''), line_ending_mode)
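        # NOTE (added): detecting the mode from the first line preserves the file's
        # existing endings, e.g. a file whose first line ends with '\r\n' (mode 2,
        # DOS) is re-saved with '\r\n' instead of being silently rewritten to '\n'.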
1286 1285
1287 1286 message = r_post.get('message') or c.default_message
1288 1287
1289 1288 org_node_path = c.file.str_path
1290 1289 filename = r_post['filename']
1291 1290
1292 1291 root_path = c.file.dir_path
1293 1292 pure_path = self.create_pure_path(root_path, filename)
1294 1293 node_path = pure_path.as_posix()
1295 1294
1296 1295 default_redirect_url = h.route_path('repo_commit', repo_name=self.db_repo_name,
1297 1296 commit_id=commit_id)
1298 1297 if content == old_content and node_path == org_node_path:
1299 1298 h.flash(_('No changes detected on {}').format(h.escape(org_node_path)),
1300 1299 category='warning')
1301 1300 raise HTTPFound(default_redirect_url)
1302 1301
1303 1302 try:
1304 1303 mapping = {
1305 1304 c.file.bytes_path: {
1306 1305 'org_filename': org_node_path,
1307 1306 'filename': safe_bytes(node_path),
1308 1307 'content': safe_bytes(content),
1309 1308 'lexer': '',
1310 1309 'op': 'mod',
1311 1310 'mode': c.file.mode
1312 1311 }
1313 1312 }
1314 1313
1315 1314 commit = ScmModel().update_nodes(
1316 1315 user=self._rhodecode_db_user.user_id,
1317 1316 repo=self.db_repo,
1318 1317 message=message,
1319 1318 nodes=mapping,
1320 1319 parent_commit=c.commit,
1321 1320 )
1322 1321
1323 1322 h.flash(_('Successfully committed changes to file `{}`').format(
1324 1323 h.escape(f_path)), category='success')
1325 1324 default_redirect_url = h.route_path(
1326 1325 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1327 1326
1328 1327 except Exception:
1329 1328 log.exception('Error occurred during commit')
1330 1329 h.flash(_('Error occurred during commit'), category='error')
1331 1330
1332 1331 raise HTTPFound(default_redirect_url)
1333 1332
1334 1333 @LoginRequired()
1335 1334 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1336 1335 def repo_files_add_file(self):
1337 1336 _ = self.request.translate
1338 1337 c = self.load_default_context()
1339 1338 commit_id, f_path = self._get_commit_and_path()
1340 1339
1341 1340 self._ensure_not_locked()
1342 1341
1343 1342 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1344 1343 if c.commit is None:
1345 1344 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1346 1345
1347 1346 if self.rhodecode_vcs_repo.is_empty():
1348 1347 # for empty repository we cannot check for current branch, we rely on
1349 1348 # c.commit.branch instead
1350 1349 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1351 1350 else:
1352 1351 _branch_name, _sha_commit_id, is_head = \
1353 1352 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1354 1353 landing_ref=self.db_repo.landing_ref_name)
1355 1354
1356 1355 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1357 1356 self.check_branch_permission(_branch_name, commit_id=commit_id)
1358 1357
1359 1358 c.default_message = (_('Added file via RhodeCode Enterprise'))
1360 1359 c.f_path = f_path.lstrip('/') # ensure not relative path
1361 1360
1362 1361 return self._get_template_context(c)
1363 1362
1364 1363 @LoginRequired()
1365 1364 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1366 1365 @CSRFRequired()
1367 1366 def repo_files_create_file(self):
1368 1367 _ = self.request.translate
1369 1368 c = self.load_default_context()
1370 1369 commit_id, f_path = self._get_commit_and_path()
1371 1370
1372 1371 self._ensure_not_locked()
1373 1372
1374 1373 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1375 1374 if c.commit is None:
1376 1375 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1377 1376
1378 1377 # calculate redirect URL
1379 1378 if self.rhodecode_vcs_repo.is_empty():
1380 1379 default_redirect_url = h.route_path(
1381 1380 'repo_summary', repo_name=self.db_repo_name)
1382 1381 else:
1383 1382 default_redirect_url = h.route_path(
1384 1383 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1385 1384
1386 1385 if self.rhodecode_vcs_repo.is_empty():
1387 1386 # for empty repository we cannot check for current branch, we rely on
1388 1387 # c.commit.branch instead
1389 1388 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1390 1389 else:
1391 1390 _branch_name, _sha_commit_id, is_head = \
1392 1391 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1393 1392 landing_ref=self.db_repo.landing_ref_name)
1394 1393
1395 1394 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1396 1395 self.check_branch_permission(_branch_name, commit_id=commit_id)
1397 1396
1398 1397 c.default_message = (_('Added file via RhodeCode Enterprise'))
1399 1398 c.f_path = f_path
1400 1399
1401 1400 r_post = self.request.POST
1402 1401 message = r_post.get('message') or c.default_message
1403 1402 filename = r_post.get('filename')
1404 1403 unix_mode = 0
1405 1404
1406 1405 if not filename:
1407 1406 # If there's no commit, redirect to repo summary
1408 1407 if type(c.commit) is EmptyCommit:
1409 1408 redirect_url = h.route_path(
1410 1409 'repo_summary', repo_name=self.db_repo_name)
1411 1410 else:
1412 1411 redirect_url = default_redirect_url
1413 1412 h.flash(_('No filename specified'), category='warning')
1414 1413 raise HTTPFound(redirect_url)
1415 1414
1416 1415 root_path = f_path
1417 1416 pure_path = self.create_pure_path(root_path, filename)
1418 1417 node_path = pure_path.as_posix().lstrip('/')
1419 1418
1420 1419 author = self._rhodecode_db_user.full_contact
1421 1420 content = convert_line_endings(r_post.get('content', ''), unix_mode)
1422 1421 nodes = {
1423 1422 safe_bytes(node_path): {
1424 1423 'content': safe_bytes(content)
1425 1424 }
1426 1425 }
1427 1426
1428 1427 try:
1429 1428
1430 1429 commit = ScmModel().create_nodes(
1431 1430 user=self._rhodecode_db_user.user_id,
1432 1431 repo=self.db_repo,
1433 1432 message=message,
1434 1433 nodes=nodes,
1435 1434 parent_commit=c.commit,
1436 1435 author=author,
1437 1436 )
1438 1437
1439 1438 h.flash(_('Successfully committed new file `{}`').format(
1440 1439 h.escape(node_path)), category='success')
1441 1440
1442 1441 default_redirect_url = h.route_path(
1443 1442 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1444 1443
1445 1444 except NonRelativePathError:
1446 1445 log.exception('Non Relative path found')
1447 1446 h.flash(_('The location specified must be a relative path and must not '
1448 1447 'contain .. in the path'), category='warning')
1449 1448 raise HTTPFound(default_redirect_url)
1450 1449 except (NodeError, NodeAlreadyExistsError) as e:
1451 1450 h.flash(h.escape(safe_str(e)), category='error')
1452 1451 except Exception:
1453 1452 log.exception('Error occurred during commit')
1454 1453 h.flash(_('Error occurred during commit'), category='error')
1455 1454
1456 1455 raise HTTPFound(default_redirect_url)
1457 1456
1458 1457 @LoginRequired()
1459 1458 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1460 1459 @CSRFRequired()
1461 1460 def repo_files_upload_file(self):
1462 1461 _ = self.request.translate
1463 1462 c = self.load_default_context()
1464 1463 commit_id, f_path = self._get_commit_and_path()
1465 1464
1466 1465 self._ensure_not_locked()
1467 1466
1468 1467 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1469 1468 if c.commit is None:
1470 1469 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1471 1470
1472 1471 # calculate redirect URL
1473 1472 if self.rhodecode_vcs_repo.is_empty():
1474 1473 default_redirect_url = h.route_path(
1475 1474 'repo_summary', repo_name=self.db_repo_name)
1476 1475 else:
1477 1476 default_redirect_url = h.route_path(
1478 1477 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1479 1478
1480 1479 if self.rhodecode_vcs_repo.is_empty():
1481 1480 # for empty repository we cannot check for current branch, we rely on
1482 1481 # c.commit.branch instead
1483 1482 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1484 1483 else:
1485 1484 _branch_name, _sha_commit_id, is_head = \
1486 1485 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1487 1486 landing_ref=self.db_repo.landing_ref_name)
1488 1487
1489 1488 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1490 1489 if error:
1491 1490 return {
1492 1491 'error': error,
1493 1492 'redirect_url': default_redirect_url
1494 1493 }
1495 1494 error = self.check_branch_permission(_branch_name, json_mode=True)
1496 1495 if error:
1497 1496 return {
1498 1497 'error': error,
1499 1498 'redirect_url': default_redirect_url
1500 1499 }
1501 1500
1502 1501 c.default_message = (_('Uploaded file via RhodeCode Enterprise'))
1503 1502 c.f_path = f_path
1504 1503
1505 1504 r_post = self.request.POST
1506 1505
1507 1506 message = c.default_message
1508 1507 user_message = r_post.getall('message')
1509 1508 if isinstance(user_message, list) and user_message:
1510 1509 # we take the first from duplicated results if it's not empty
1511 1510 message = user_message[0] if user_message[0] else message
1512 1511
1513 1512 nodes = {}
1514 1513
1515 1514 for file_obj in r_post.getall('files_upload') or []:
1516 1515 content = file_obj.file
1517 1516 filename = file_obj.filename
1518 1517
1519 1518 root_path = f_path
1520 1519 pure_path = self.create_pure_path(root_path, filename)
1521 1520 node_path = pure_path.as_posix().lstrip('/')
1522 1521
1523 1522 nodes[safe_bytes(node_path)] = {
1524 1523 'content': content
1525 1524 }
1526 1525
1527 1526 if not nodes:
1528 1527 error = 'missing files'
1529 1528 return {
1530 1529 'error': error,
1531 1530 'redirect_url': default_redirect_url
1532 1531 }
1533 1532
1534 1533 author = self._rhodecode_db_user.full_contact
1535 1534
1536 1535 try:
1537 1536 commit = ScmModel().create_nodes(
1538 1537 user=self._rhodecode_db_user.user_id,
1539 1538 repo=self.db_repo,
1540 1539 message=message,
1541 1540 nodes=nodes,
1542 1541 parent_commit=c.commit,
1543 1542 author=author,
1544 1543 )
1545 1544 if len(nodes) == 1:
1546 1545 flash_message = _('Successfully committed {} new files').format(len(nodes))
1547 1546 else:
1548 1547 flash_message = _('Successfully committed 1 new file')
1549 1548
1550 1549 h.flash(flash_message, category='success')
1551 1550
1552 1551 default_redirect_url = h.route_path(
1553 1552 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1554 1553
1555 1554 except NonRelativePathError:
1556 1555 log.exception('Non Relative path found')
1557 1556 error = _('The location specified must be a relative path and must not '
1558 1557 'contain .. in the path')
1559 1558 h.flash(error, category='warning')
1560 1559
1561 1560 return {
1562 1561 'error': error,
1563 1562 'redirect_url': default_redirect_url
1564 1563 }
1565 1564 except (NodeError, NodeAlreadyExistsError) as e:
1566 1565 error = h.escape(e)
1567 1566 h.flash(error, category='error')
1568 1567
1569 1568 return {
1570 1569 'error': error,
1571 1570 'redirect_url': default_redirect_url
1572 1571 }
1573 1572 except Exception:
1574 1573 log.exception('Error occurred during commit')
1575 1574 error = _('Error occurred during commit')
1576 1575 h.flash(error, category='error')
1577 1576 return {
1578 1577 'error': error,
1579 1578 'redirect_url': default_redirect_url
1580 1579 }
1581 1580
1582 1581 return {
1583 1582 'error': None,
1584 1583 'redirect_url': default_redirect_url
1585 1584 }
@@ -1,825 +1,825 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Authentication modules
21 21 """
22 22 import socket
23 23 import string
24 24 import colander
25 25 import copy
26 26 import logging
27 27 import time
28 28 import traceback
29 29 import warnings
30 30 import functools
31 31
32 32 from pyramid.threadlocal import get_current_registry
33 33
34 34 from rhodecode.authentication import AuthenticationPluginRegistry
35 35 from rhodecode.authentication.interface import IAuthnPluginRegistry
36 36 from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase
37 37 from rhodecode.lib import rc_cache
38 38 from rhodecode.lib.statsd_client import StatsdClient
39 39 from rhodecode.lib.auth import PasswordGenerator, _RhodeCodeCryptoBCrypt
40 40 from rhodecode.lib.str_utils import safe_bytes
41 41 from rhodecode.lib.utils2 import safe_int, safe_str
42 42 from rhodecode.lib.exceptions import (LdapConnectionError, LdapUsernameError, LdapPasswordError)
43 43 from rhodecode.model.db import User
44 44 from rhodecode.model.meta import Session
45 45 from rhodecode.model.settings import SettingsModel
46 46 from rhodecode.model.user import UserModel
47 47 from rhodecode.model.user_group import UserGroupModel
48 48
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52 # auth types that authenticate() function can receive
53 53 VCS_TYPE = 'vcs'
54 54 HTTP_TYPE = 'http'
55 55
56 56 external_auth_session_key = 'rhodecode.external_auth'
57 57
58 58
59 59 class hybrid_property(object):
60 60 """
61 61 a property decorator that works both for instance and class
62 62 """
63 63 def __init__(self, fget, fset=None, fdel=None, expr=None):
64 64 self.fget = fget
65 65 self.fset = fset
66 66 self.fdel = fdel
67 67 self.expr = expr or fget
68 68 functools.update_wrapper(self, fget)
69 69
70 70 def __get__(self, instance, owner):
71 71 if instance is None:
72 72 return self.expr(owner)
73 73 else:
74 74 return self.fget(instance)
75 75
76 76 def __set__(self, instance, value):
77 77 self.fset(instance, value)
78 78
79 79 def __delete__(self, instance):
80 80 self.fdel(instance)
81 81
82 82
83 83 class LazyFormencode(object):
84 84 def __init__(self, formencode_obj, *args, **kwargs):
85 85 self.formencode_obj = formencode_obj
86 86 self.args = args
87 87 self.kwargs = kwargs
88 88
89 89 def __call__(self, *args, **kwargs):
90 90 from inspect import isfunction
91 91 formencode_obj = self.formencode_obj
92 92 if isfunction(formencode_obj):
93 93 # case we wrap validators into functions
94 94 formencode_obj = self.formencode_obj(*args, **kwargs)
95 95 return formencode_obj(*self.args, **self.kwargs)
96 96
97 97
98 98 class RhodeCodeAuthPluginBase(object):
99 99 # UID is used to register plugin to the registry
100 100 uid = None
101 101
102 102     # cache the authentication request for N seconds. Some kinds of
103 103     # authentication methods are very heavy and it's very efficient to cache
104 104     # the result of a call. If it's set to None (default) the cache is off
105 105 AUTH_CACHE_TTL = None
106 106 AUTH_CACHE = {}
107 107
108 108 auth_func_attrs = {
109 109 "username": "unique username",
110 110 "firstname": "first name",
111 111 "lastname": "last name",
112 112 "email": "email address",
113 113 "groups": '["list", "of", "groups"]',
114 114 "user_group_sync":
115 115 'True|False defines if returned user groups should be synced',
116 116 "extern_name": "name in external source of record",
117 117 "extern_type": "type of external source of record",
118 118 "admin": 'True|False defines if user should be RhodeCode super admin',
119 119 "active":
120 120 'True|False defines active state of user internally for RhodeCode',
121 121 "active_from_extern":
122 122 "True|False|None, active state from the external auth, "
123 123 "None means use definition from RhodeCode extern_type active value"
124 124
125 125 }
126 126 # set on authenticate() method and via set_auth_type func.
127 127 auth_type = None
128 128
129 129 # set on authenticate() method and via set_calling_scope_repo, this is a
130 130 # calling scope repository when doing authentication most likely on VCS
131 131 # operations
132 132 acl_repo_name = None
133 133
134 134 # List of setting names to store encrypted. Plugins may override this list
135 135 # to store settings encrypted.
136 136 _settings_encrypted = []
137 137
138 138 # Mapping of python to DB settings model types. Plugins may override or
139 139 # extend this mapping.
140 140 _settings_type_map = {
141 141 colander.String: 'unicode',
142 142 colander.Integer: 'int',
143 143 colander.Boolean: 'bool',
144 144 colander.List: 'list',
145 145 }
146 146
147 147 # list of keys in settings that are unsafe to be logged, should be passwords
148 148 # or other crucial credentials
149 149 _settings_unsafe_keys = []
150 150
151 151 def __init__(self, plugin_id):
152 152 self._plugin_id = plugin_id
153 153
154 154 def __str__(self):
155 155 return self.get_id()
156 156
157 157 def _get_setting_full_name(self, name):
158 158 """
159 159 Return the full setting name used for storing values in the database.
160 160 """
161 161 # TODO: johbo: Using the name here is problematic. It would be good to
162 162 # introduce either new models in the database to hold Plugin and
163 163 # PluginSetting or to use the plugin id here.
164 164 return f'auth_{self.name}_{name}'
165 165
166 166 def _get_setting_type(self, name):
167 167 """
168 168 Return the type of a setting. This type is defined by the SettingsModel
169 169 and determines how the setting is stored in DB. Optionally the suffix
170 170 `.encrypted` is appended to instruct SettingsModel to store it
171 171 encrypted.
172 172 """
173 173 schema_node = self.get_settings_schema().get(name)
174 174 db_type = self._settings_type_map.get(
175 175 type(schema_node.typ), 'unicode')
176 176 if name in self._settings_encrypted:
177 177 db_type = f'{db_type}.encrypted'
178 178 return db_type
179 179
180 180 @classmethod
181 181 def docs(cls):
182 182 """
183 183 Defines documentation url which helps with plugin setup
184 184 """
185 185 return ''
186 186
187 187 @classmethod
188 188 def icon(cls):
189 189 """
190 190 Defines ICON in SVG format for authentication method
191 191 """
192 192 return ''
193 193
194 194 def is_enabled(self):
195 195 """
196 196 Returns true if this plugin is enabled. An enabled plugin can be
197 197 configured in the admin interface but it is not consulted during
198 198 authentication.
199 199 """
200 200 auth_plugins = SettingsModel().get_auth_plugins()
201 201 return self.get_id() in auth_plugins
202 202
203 203 def is_active(self, plugin_cached_settings=None):
204 204 """
205 205 Returns true if the plugin is activated. An activated plugin is
206 206         consulted during authentication, provided it is also enabled.
207 207 """
208 208 return self.get_setting_by_name(
209 209 'enabled', plugin_cached_settings=plugin_cached_settings)
210 210
211 211 def get_id(self):
212 212 """
213 213 Returns the plugin id.
214 214 """
215 215 return self._plugin_id
216 216
217 217 def get_display_name(self, load_from_settings=False):
218 218 """
219 219 Returns a translation string for displaying purposes.
220 220 if load_from_settings is set, plugin settings can override the display name
221 221 """
222 222 raise NotImplementedError('Not implemented in base class')
223 223
224 224 def get_settings_schema(self):
225 225 """
226 226 Returns a colander schema, representing the plugin settings.
227 227 """
228 228 return AuthnPluginSettingsSchemaBase()
229 229
230 230 def _propagate_settings(self, raw_settings):
231 231 settings = {}
232 232 for node in self.get_settings_schema():
233 233 settings[node.name] = self.get_setting_by_name(
234 234 node.name, plugin_cached_settings=raw_settings)
235 235 return settings
236 236
237 237 def get_settings(self, use_cache=True):
238 238 """
239 239 Returns the plugin settings as dictionary.
240 240 """
241 241
242 242 raw_settings = SettingsModel().get_all_settings(cache=use_cache)
243 243 settings = self._propagate_settings(raw_settings)
244 244
245 245 return settings
246 246
247 247 def get_setting_by_name(self, name, default=None, plugin_cached_settings=None):
248 248 """
249 249 Returns a plugin setting by name.
250 250 """
251 251 full_name = f'rhodecode_{self._get_setting_full_name(name)}'
252 252 if plugin_cached_settings:
253 253 plugin_settings = plugin_cached_settings
254 254 else:
255 255 plugin_settings = SettingsModel().get_all_settings()
256 256
257 257 if full_name in plugin_settings:
258 258 return plugin_settings[full_name]
259 259 else:
260 260 return default
261 261
262 262 def create_or_update_setting(self, name, value):
263 263 """
264 264 Create or update a setting for this plugin in the persistent storage.
265 265 """
266 266 full_name = self._get_setting_full_name(name)
267 267 type_ = self._get_setting_type(name)
268 268 db_setting = SettingsModel().create_or_update_setting(
269 269 full_name, value, type_)
270 270 return db_setting.app_settings_value
271 271
272 272 def log_safe_settings(self, settings):
273 273 """
274 274 returns a log safe representation of settings, without any secrets
275 275 """
276 276 settings_copy = copy.deepcopy(settings)
277 277 for k in self._settings_unsafe_keys:
278 278 if k in settings_copy:
279 279 del settings_copy[k]
280 280 return settings_copy
281 281
282 282 @hybrid_property
283 283 def name(self):
284 284 """
285 285 Returns the name of this authentication plugin.
286 286
287 287 :returns: string
288 288 """
289 289 raise NotImplementedError("Not implemented in base class")
290 290
291 291 def get_url_slug(self):
292 292 """
293 293 Returns a slug which should be used when constructing URLs which refer
294 294 to this plugin. By default it returns the plugin name. If the name is
295 295 not suitable for using it in an URL the plugin should override this
296 296 method.
297 297 """
298 298 return self.name
299 299
300 300 @property
301 301 def is_headers_auth(self):
302 302 """
303 303 Returns True if this authentication plugin uses HTTP headers as
304 304 authentication method.
305 305 """
306 306 return False
307 307
308 308 @hybrid_property
309 309 def is_container_auth(self):
310 310 """
311 311 Deprecated method that indicates if this authentication plugin uses
312 312 HTTP headers as authentication method.
313 313 """
314 314 warnings.warn(
315 315 'Use is_headers_auth instead.', category=DeprecationWarning)
316 316 return self.is_headers_auth
317 317
318 318 @hybrid_property
319 319 def allows_creating_users(self):
320 320 """
321 321         Defines if the plugin allows users to be created on-the-fly when
322 322         authentication is called. Controls whether external plugins are
323 323         allowed to create new users or not. Base plugins should not be
324 324         allowed to, but external ones should be!
325 325
326 326 :return: bool
327 327 """
328 328 return False
329 329
330 330 def set_auth_type(self, auth_type):
331 331 self.auth_type = auth_type
332 332
333 333 def set_calling_scope_repo(self, acl_repo_name):
334 334 self.acl_repo_name = acl_repo_name
335 335
336 336 def allows_authentication_from(
337 337 self, user, allows_non_existing_user=True,
338 338 allowed_auth_plugins=None, allowed_auth_sources=None):
339 339 """
340 340 Checks if this authentication module should accept a request for
341 341 the current user.
342 342
343 343 :param user: user object fetched using plugin's get_user() method.
344 344 :param allows_non_existing_user: if True, don't allow the
345 345 user to be empty, meaning not existing in our database
346 346 :param allowed_auth_plugins: if provided, users extern_type will be
347 347 checked against a list of provided extern types, which are plugin
348 348 auth_names in the end
349 349 :param allowed_auth_sources: authentication type allowed,
350 350 `http` or `vcs` default is both.
351 351 defines if plugin will accept only http authentication vcs
352 352 authentication(git/hg) or both
353 353 :returns: boolean
354 354 """
355 355 if not user and not allows_non_existing_user:
356 356             log.debug('User is empty but plugin does not allow empty users, '
357 357                       'not allowed to authenticate')
358 358 return False
359 359
360 360 expected_auth_plugins = allowed_auth_plugins or [self.name]
361 361 if user and (user.extern_type and
362 362 user.extern_type not in expected_auth_plugins):
363 363 log.debug(
364 364 'User `%s` is bound to `%s` auth type. Plugin allows only '
365 365 '%s, skipping', user, user.extern_type, expected_auth_plugins)
366 366
367 367 return False
368 368
369 369 # by default accept both
370 370 expected_auth_from = allowed_auth_sources or [HTTP_TYPE, VCS_TYPE]
371 371 if self.auth_type not in expected_auth_from:
372 372 log.debug('Current auth source is %s but plugin only allows %s',
373 373 self.auth_type, expected_auth_from)
374 374 return False
375 375
376 376 return True
377 377
378 378 def get_user(self, username=None, **kwargs):
379 379 """
380 380 Helper method for user fetching in plugins, by default it's using
381 381 simple fetch by username, but this method can be customized in plugins
382 382 eg. headers auth plugin to fetch user by environ params
383 383
384 384 :param username: username if given to fetch from database
385 385 :param kwargs: extra arguments needed for user fetching.
386 386 """
387 387
388 388 user = None
389 389 log.debug(
390 390 'Trying to fetch user `%s` from RhodeCode database', username)
391 391 if username:
392 392 user = User.get_by_username(username)
393 393 if not user:
394 394 log.debug('User not found, fallback to fetch user in '
395 395 'case insensitive mode')
396 396 user = User.get_by_username(username, case_insensitive=True)
397 397 else:
398 398             log.debug('provided username: `%s` is empty, skipping...', username)
399 399 if not user:
400 400 log.debug('User `%s` not found in database', username)
401 401 else:
402 402 log.debug('Got DB user:%s', user)
403 403 return user
404 404
405 405 def user_activation_state(self):
406 406 """
407 407 Defines user activation state when creating new users
408 408
409 409 :returns: boolean
410 410 """
411 411 raise NotImplementedError("Not implemented in base class")
412 412
413 413 def auth(self, userobj, username, passwd, settings, **kwargs):
414 414 """
415 415 Given a user object (which may be null), username, a plaintext
416 416 password, and a settings object (containing all the keys needed as
417 417 listed in settings()), authenticate this user's login attempt.
418 418
419 419 Return None on failure. On success, return a dictionary of the form:
420 420
421 421 see: RhodeCodeAuthPluginBase.auth_func_attrs
422 422 This is later validated for correctness
423 423 """
424 424 raise NotImplementedError("not implemented in base class")
425 425
426 426 def _authenticate(self, userobj, username, passwd, settings, **kwargs):
427 427 """
428 428 Wrapper to call self.auth() that validates call on it
429 429
430 430 :param userobj: userobj
431 431 :param username: username
432 432 :param passwd: plaintext password
433 433 :param settings: plugin settings
434 434 """
435 435 auth = self.auth(userobj, username, passwd, settings, **kwargs)
436 436 if auth:
437 437 auth['_plugin'] = self.name
438 438 auth['_ttl_cache'] = self.get_ttl_cache(settings)
439 439 # check if hash should be migrated ?
440 440 new_hash = auth.get('_hash_migrate')
441 441 if new_hash:
442 442 # new_hash is a newly encrypted destination hash
443 443 self._migrate_hash_to_bcrypt(username, passwd, new_hash)
444 444 if 'user_group_sync' not in auth:
445 445 auth['user_group_sync'] = False
446 446 return self._validate_auth_return(auth)
447 447 return auth
448 448
449 449 def _migrate_hash_to_bcrypt(self, username, password, new_hash):
450 450 new_hash_cypher = _RhodeCodeCryptoBCrypt()
451 451 # extra checks, so make sure new hash is correct.
452 452 password_as_bytes = safe_bytes(password)
453 453
454 454 if new_hash and new_hash_cypher.hash_check(password_as_bytes, new_hash):
455 455 cur_user = User.get_by_username(username)
456 456 cur_user.password = new_hash
457 457 Session().add(cur_user)
458 458 Session().flush()
459 459 log.info('Migrated user %s hash to bcrypt', cur_user)
460 460
461 461 def _validate_auth_return(self, ret):
462 462 if not isinstance(ret, dict):
463 463 raise Exception('returned value from auth must be a dict')
464 464 for k in self.auth_func_attrs:
465 465 if k not in ret:
466 466 raise Exception('Missing %s attribute from returned data' % k)
467 467 return ret
468 468
469 469 def get_ttl_cache(self, settings=None):
470 470 plugin_settings = settings or self.get_settings()
471 471         # we default to plugin_default_auth_ttl (30s) as a compromise here:
472 472         # performance > security, mostly due to LDAP/SVN, since the majority
473 473         # of users pick cache_ttl to be enabled
474 474 from rhodecode.authentication import plugin_default_auth_ttl
475 475 cache_ttl = plugin_default_auth_ttl
476 476
477 477 if isinstance(self.AUTH_CACHE_TTL, int):
478 478 # plugin cache set inside is more important than the settings value
479 479 cache_ttl = self.AUTH_CACHE_TTL
480 480 elif plugin_settings.get('cache_ttl'):
481 481 cache_ttl = safe_int(plugin_settings.get('cache_ttl'), 0)
482 482
483 483 plugin_cache_active = bool(cache_ttl and cache_ttl > 0)
484 484 return plugin_cache_active, cache_ttl
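        # NOTE (added): precedence is the plugin-defined AUTH_CACHE_TTL first, then
        # the admin 'cache_ttl' setting, then plugin_default_auth_ttl; a TTL of 0
        # or None leaves auth-result caching disabled.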
485 485
486 486
487 487 class RhodeCodeExternalAuthPlugin(RhodeCodeAuthPluginBase):
488 488
489 489 @hybrid_property
490 490 def allows_creating_users(self):
491 491 return True
492 492
493 493 def use_fake_password(self):
494 494 """
495 495 Return a boolean that indicates whether or not we should set the user's
496 496 password to a random value when it is authenticated by this plugin.
497 497 If your plugin provides authentication, then you will generally
498 498 want this.
499 499
500 500 :returns: boolean
501 501 """
502 502 raise NotImplementedError("Not implemented in base class")
503 503
504 504 def _authenticate(self, userobj, username, passwd, settings, **kwargs):
505 505 # at this point _authenticate calls plugin's `auth()` function
506 506 auth = super()._authenticate(
507 507 userobj, username, passwd, settings, **kwargs)
508 508
509 509 if auth:
510 510 # maybe plugin will clean the username ?
511 511 # we should use the return value
512 512 username = auth['username']
513 513
514 514 # if external source tells us that user is not active, we should
515 515 # skip the rest of the process. This can prevent us from creating users in
516 516 # RhodeCode when using external authentication, but if the user is
517 517 # inactive we shouldn't create that user anyway
518 518 if auth['active_from_extern'] is False:
519 519 log.warning(
520 520 "User %s authenticated against %s, but is inactive",
521 521 username, self.__module__)
522 522 return None
523 523
524 524 cur_user = User.get_by_username(username, case_insensitive=True)
525 525 is_user_existing = cur_user is not None
526 526
527 527 if is_user_existing:
528 528 log.debug('Syncing user `%s` from '
529 529 '`%s` plugin', username, self.name)
530 530 else:
531 531 log.debug('Creating non existing user `%s` from '
532 532 '`%s` plugin', username, self.name)
533 533
534 534 if self.allows_creating_users:
535 535 log.debug('Plugin `%s` allows to '
536 536 'create new users', self.name)
537 537 else:
538 538 log.debug('Plugin `%s` does not allow to '
539 539 'create new users', self.name)
540 540
541 541 user_parameters = {
542 542 'username': username,
543 543 'email': auth["email"],
544 544 'firstname': auth["firstname"],
545 545 'lastname': auth["lastname"],
546 546 'active': auth["active"],
547 547 'admin': auth["admin"],
548 548 'extern_name': auth["extern_name"],
549 549 'extern_type': self.name,
550 550 'plugin': self,
551 551 'allow_to_create_user': self.allows_creating_users,
552 552 }
553 553
554 554 if not is_user_existing:
555 555 if self.use_fake_password():
556 556 # Randomize the PW because we don't need it, but we don't want
557 557 # it blank either
558 558 passwd = PasswordGenerator().gen_password(length=16)
559 559 user_parameters['password'] = passwd
560 560 else:
561 561 # Since the password is required by create_or_update method of
562 562 # UserModel, we need to set it explicitly.
563 563 # The create_or_update method is smart and recognises the
564 564 # password hashes as well.
565 565 user_parameters['password'] = cur_user.password
566 566
567 567 # we either create or update users, we also pass the flag
568 568 # that controls if this method can actually do that.
569 569 # raises NotAllowedToCreateUserError if it cannot, and we try to.
570 570 user = UserModel().create_or_update(**user_parameters)
571 571 Session().flush()
572 572 # enforce that the user is only in the given groups; all of them have to
573 573 # be ones created from plugins. We store this info in the _group_data
574 574 # JSON field
575 575
576 576 if auth['user_group_sync']:
577 577 try:
578 578 groups = auth['groups'] or []
579 579 log.debug(
580 580 'Performing user_group sync based on set `%s` '
581 581 'returned by `%s` plugin', groups, self.name)
582 582 UserGroupModel().enforce_groups(user, groups, self.name)
583 583 except Exception:
584 584 # if group syncing fails for any reason, we should
585 585 # proceed with login
586 586 log.error(traceback.format_exc())
587 587
588 588 Session().commit()
589 589 return auth
590 590
591 591
592 592 class AuthLdapBase(object):
593 593
594 594 @classmethod
595 595 def _build_servers(cls, ldap_server_type, ldap_server, port, use_resolver=True):
596 596
597 597 def host_resolver(host, port, full_resolve=True):
598 598 """
599 599 The main work of this function is to prevent LDAP connection issues,
600 600 and to detect them early using "greenified" sockets
601 601 """
602 602 host = host.strip()
603 603 if not full_resolve:
604 604 return f'{host}:{port}'
605 605
606 606 log.debug('LDAP: Resolving IP for LDAP host `%s`', host)
607 607 try:
608 608 ip = socket.gethostbyname(host)
609 609 log.debug('LDAP: Got LDAP host `%s` ip %s', host, ip)
610 610 except Exception:
611 611 raise LdapConnectionError(f'Failed to resolve host: `{host}`')
612 612
613 613 log.debug('LDAP: Checking if IP %s is accessible', ip)
614 614 s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
615 615 try:
616 616 s.connect((ip, int(port)))
617 617 s.shutdown(socket.SHUT_RD)
618 618 log.debug('LDAP: connection to %s successful', ip)
619 619 except Exception:
620 620 raise LdapConnectionError(
621 621 f'Failed to connect to host: `{host}:{port}`')
622 622
623 623 return f'{host}:{port}'
624 624
625 625 if len(ldap_server) == 1:
626 626 # in case of single server use resolver to detect potential
627 627 # connection issues
628 628 full_resolve = True
629 629 else:
630 630 full_resolve = False
631 631
632 632 return ', '.join(
633 633 ["{}://{}".format(
634 634 ldap_server_type,
635 635 host_resolver(host, port, full_resolve=use_resolver and full_resolve))
636 636 for host in ldap_server])
637 637
638 638 @classmethod
639 639 def _get_server_list(cls, servers):
640 640 return map(str.strip, servers.split(','))  # str.strip: string.strip does not exist on Python 3
641 641
642 642 @classmethod
643 643 def get_uid(cls, username, server_addresses):
644 644 uid = username
645 645 for server_addr in server_addresses:
646 646 uid = chop_at(username, "@%s" % server_addr)
647 647 return uid
648 648
649 649 @classmethod
650 650 def validate_username(cls, username):
651 651 if "," in username:
652 652 raise LdapUsernameError(
653 653 f"invalid character `,` in username: `{username}`")
654 654
655 655 @classmethod
656 656 def validate_password(cls, username, password):
657 657 if not password:
658 658 msg = "Authenticating user %s with blank password not allowed"
659 659 log.warning(msg, username)
660 660 raise LdapPasswordError(msg)
661 661
662 662
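An illustrative call of _build_servers (hostnames are placeholders): with use_resolver=False no DNS lookup or socket probe is made, and the hosts are simply formatted into the comma-separated URI list expected by the LDAP client.

    AuthLdapBase._build_servers(
        'ldaps', ['ldap1.example.com', 'ldap2.example.com'], 636, use_resolver=False)
    # -> 'ldaps://ldap1.example.com:636, ldaps://ldap2.example.com:636'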
663 663 def loadplugin(plugin_id):
664 664 """
665 665 Loads and returns an instantiated authentication plugin.
666 666 Returns an instance of a RhodeCodeAuthPluginBase subclass on success,
667 667 or None on failure.
668 668 """
669 669 # TODO: Stop using pyramid's thread locals to retrieve the registry.
670 670 authn_registry = get_authn_registry()
671 671 plugin = authn_registry.get_plugin(plugin_id)
672 672 if plugin is None:
673 673 log.error('Authentication plugin not found: "%s"', plugin_id)
674 674 return plugin
675 675
676 676
677 677 def get_authn_registry(registry=None) -> AuthenticationPluginRegistry:
678 678 registry = registry or get_current_registry()
679 679 authn_registry = registry.queryUtility(IAuthnPluginRegistry)
680 680 return authn_registry
681 681
682 682
683 683 def authenticate(username, password, environ=None, auth_type=None,
684 684 skip_missing=False, registry=None, acl_repo_name=None):
685 685 """
686 686 Authentication function used for access control.
687 687 It tries to authenticate based on enabled authentication modules.
688 688
689 689 :param username: username can be empty for headers auth
690 690 :param password: password can be empty for headers auth
691 691 :param environ: environ headers passed for headers auth
692 692 :param auth_type: type of authentication, either `HTTP_TYPE` or `VCS_TYPE`
693 693 :param skip_missing: ignores plugins that are in db but not in environment
694 694 :param registry: pyramid registry
695 695 :param acl_repo_name: name of repo for ACL checks
696 696 :returns: None if auth failed, plugin_user dict if auth is correct
697 697 """
698 698 if not auth_type or auth_type not in [HTTP_TYPE, VCS_TYPE]:
699 699 raise ValueError(f'auth type must be one of http, vcs, got "{auth_type}" instead')
700 700
701 701 auth_credentials = (username and password)
702 702 headers_only = environ and not auth_credentials
703 703
704 704 authn_registry = get_authn_registry(registry)
705 705
706 706 plugins_to_check = authn_registry.get_plugins_for_authentication()
707 707 log.debug('authentication: headers=%s, username_and_passwd=%s', headers_only, bool(auth_credentials))
708 708 log.debug('Starting ordered authentication chain using %s plugins',
709 709 [x.name for x in plugins_to_check])
710 710
711 711 for plugin in plugins_to_check:
712 712 plugin.set_auth_type(auth_type)
713 713 plugin.set_calling_scope_repo(acl_repo_name)
714 714
715 715 if headers_only and not plugin.is_headers_auth:
716 716 log.debug('Auth type is for headers only and plugin `%s` is not '
717 717 'headers plugin, skipping...', plugin.get_id())
718 718 continue
719 719
720 720 log.debug('Trying authentication using ** %s **', plugin.get_id())
721 721
722 722 # load plugin settings from RhodeCode database
723 723 plugin_settings = plugin.get_settings()
724 724 plugin_sanitized_settings = plugin.log_safe_settings(plugin_settings)
725 725 log.debug('Plugin `%s` settings:%s', plugin.get_id(), plugin_sanitized_settings)
726 726
727 727 # use plugin's method of user extraction.
728 728 user = plugin.get_user(username, environ=environ,
729 729 settings=plugin_settings)
730 730 display_user = user.username if user else username
731 731 log.debug(
732 732 'Plugin %s extracted user is `%s`', plugin.get_id(), display_user)
733 733
734 734 if not plugin.allows_authentication_from(user):
735 735 log.debug('Plugin %s does not accept user `%s` for authentication',
736 736 plugin.get_id(), display_user)
737 737 continue
738 738 else:
739 739 log.debug('Plugin %s accepted user `%s` for authentication',
740 740 plugin.get_id(), display_user)
741 741
742 742 log.info('Authenticating user `%s` using %s plugin',
743 743 display_user, plugin.get_id())
744 744
745 745 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(plugin_settings)
746 746
747 747 log.debug('AUTH_CACHE_TTL for plugin `%s` active: %s (TTL: %s)',
748 748 plugin.get_id(), plugin_cache_active, cache_ttl)
749 749
750 750 user_id = user.user_id if user else 'no-user'
751 751 # don't cache for empty users
752 752 plugin_cache_active = plugin_cache_active and user_id
753 cache_namespace_uid = f'cache_user_auth.{user_id}'
753 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}'
754 754 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
755 755
756 756 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
757 757 expiration_time=cache_ttl,
758 758 condition=plugin_cache_active)
759 759 def compute_auth(
760 760 cache_name, plugin_name, username, password):
761 761
762 762 # _authenticate is a wrapper for .auth() method of plugin.
763 763 # it checks if .auth() sends proper data.
764 764 # For RhodeCodeExternalAuthPlugin it also maps users to
765 765 # Database and maps the attributes returned from .auth()
766 766 # to RhodeCode database. If this function returns data
767 767 # then auth is correct.
768 768 log.debug('Running plugin `%s` _authenticate method '
769 769 'using username and password', plugin.get_id())
770 770 return plugin._authenticate(
771 771 user, username, password, plugin_settings,
772 772 environ=environ or {})
773 773
774 774 start = time.time()
775 775 # for environ based auth, password can be empty, but then the validation is
776 776 # on the server that fills in the env data needed for authentication
777 777 plugin_user = compute_auth('auth', plugin.name, username, (password or ''))
778 778
779 779 auth_time = time.time() - start
780 780 log.debug('Authentication for plugin `%s` completed in %.4fs, '
781 781 'expiration time of fetched cache %.1fs.',
782 782 plugin.get_id(), auth_time, cache_ttl,
783 783 extra={"plugin": plugin.get_id(), "time": auth_time})
784 784
785 785 log.debug('PLUGIN USER DATA: %s', plugin_user)
786 786
787 787 statsd = StatsdClient.statsd
788 788
789 789 if plugin_user:
790 790 log.debug('Plugin returned proper authentication data')
791 791 if statsd:
792 792 elapsed_time_ms = round(1000.0 * auth_time) # use ms only
793 793 statsd.incr('rhodecode_login_success_total')
794 794 statsd.timing("rhodecode_login_timing.histogram", elapsed_time_ms,
795 795 tags=[f"plugin:{plugin.get_id()}"],
796 796 use_decimals=False
797 797 )
798 798 return plugin_user
799 799
800 800 # we failed to authenticate because the .auth() method didn't return a proper user
801 801 log.debug("User `%s` failed to authenticate against %s",
802 802 display_user, plugin.get_id())
803 803 if statsd:
804 804 statsd.incr('rhodecode_login_fail_total')
805 805
806 806 # case when we failed to authenticate against all defined plugins
807 807 return None
808 808
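The namespace built above is what this changeset is about: the global rc_cache.PERMISSIONS_CACHE_VER prefix becomes part of every per-user auth cache namespace, so bumping that one constant invalidates all cached auth results at once. A small sketch follows; the 'v1'/'v2' values are placeholders, not the real constant.

    def auth_cache_namespace(user_id, perms_cache_ver='v1'):
        # mirrors: f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}'
        return f'cache_user_auth.{perms_cache_ver}.{user_id}'

    auth_cache_namespace(42)         # -> 'cache_user_auth.v1.42'
    auth_cache_namespace(42, 'v2')   # -> 'cache_user_auth.v2.42', old entries are never read again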
809 809
810 810 def chop_at(s, sub, inclusive=False):
811 811 """Truncate string ``s`` at the first occurrence of ``sub``.
812 812
813 813 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
814 814
815 815 >>> chop_at("plutocratic brats", "rat")
816 816 'plutoc'
817 817 >>> chop_at("plutocratic brats", "rat", True)
818 818 'plutocrat'
819 819 """
820 820 pos = s.find(sub)
821 821 if pos == -1:
822 822 return s
823 823 if inclusive:
824 824 return s[:pos+len(sub)]
825 825 return s[:pos]
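chop_at() and get_uid() together reduce an LDAP-style login of the form user@server to the bare uid before the bind; the hostnames below are illustrative.

    AuthLdapBase.get_uid('jane@ldap.example.com', ['ldap.example.com'])  # -> 'jane'
    chop_at('jane@ldap.example.com', '@ldap.example.com')                # -> 'jane'
    chop_at('jane', '@ldap.example.com')                                 # -> 'jane', no match so unchanged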
@@ -1,139 +1,139 @@
1 1 # Copyright (C) 2012-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import time
20 20 import logging
21 21
22 22 from pyramid.exceptions import ConfigurationError
23 23 from zope.interface import implementer
24 24
25 25 from rhodecode.authentication.interface import IAuthnPluginRegistry
26 26 from rhodecode.model.settings import SettingsModel
27 27 from rhodecode.lib.utils2 import safe_str
28 28 from rhodecode.lib.statsd_client import StatsdClient
29 29 from rhodecode.lib import rc_cache
30 30
31 31 log = logging.getLogger(__name__)
32 32
33 33
34 34 @implementer(IAuthnPluginRegistry)
35 35 class AuthenticationPluginRegistry(object):
36 36
37 37 # INI settings key to set a fallback authentication plugin.
38 38 fallback_plugin_key = 'rhodecode.auth_plugin_fallback'
39 39
40 40 def __init__(self, settings):
41 41 self._plugins = {}
42 42 self._fallback_plugin = settings.get(self.fallback_plugin_key, None)
43 43
44 44 def add_authn_plugin(self, config, plugin):
45 45 plugin_id = plugin.get_id()
46 46 if plugin_id in self._plugins.keys():
47 47 raise ConfigurationError(
48 48 'Cannot register authentication plugin twice: "%s"' % plugin_id)
49 49 else:
50 50 log.debug('Register authentication plugin: "%s"', plugin_id)
51 51 self._plugins[plugin_id] = plugin
52 52
53 53 def get_plugins(self):
54 54 def sort_key(plugin):
55 55 return str.lower(safe_str(plugin.get_display_name()))
56 56
57 57 return sorted(self._plugins.values(), key=sort_key)
58 58
59 59 def get_plugin(self, plugin_id):
60 60 return self._plugins.get(plugin_id, None)
61 61
62 62 def get_plugin_by_uid(self, plugin_uid):
63 63 for plugin in self._plugins.values():
64 64 if plugin.uid == plugin_uid:
65 65 return plugin
66 66
67 67 def get_cache_call_method(self, cache=True):
68 68 region, _ns = self.get_cache_region()
69 69
70 70 @region.conditional_cache_on_arguments(condition=cache)
71 71 def _get_auth_plugins(name: str, key: str, fallback_plugin):
72 72 log.debug('auth-plugins: calculating plugins available for authentication')
73 73
74 74 _plugins = []
75 75 # Add all enabled and active plugins to the list. We iterate over the
76 76 # auth_plugins setting from DB because it also represents the ordering.
77 77 enabled_plugins = SettingsModel().get_auth_plugins()
78 78 raw_settings = SettingsModel().get_all_settings(cache=False)
79 79
80 80 for plugin_id in enabled_plugins:
81 81 plugin = self.get_plugin(plugin_id)
82 82 if plugin is not None and plugin.is_active(
83 83 plugin_cached_settings=raw_settings):
84 84
85 85 # inject settings into plugin, we can re-use the DB fetched settings here
86 86 plugin._settings = plugin._propagate_settings(raw_settings)
87 87 _plugins.append(plugin)
88 88
89 89 # Add the fallback plugin from ini file.
90 90 if fallback_plugin:
91 91 log.warning(
92 92 'Using fallback authentication plugin from INI file: "%s"',
93 93 fallback_plugin)
94 94 plugin = self.get_plugin(fallback_plugin)
95 95 if plugin is not None and plugin not in _plugins:
96 96 plugin._settings = plugin._propagate_settings(raw_settings)
97 97 _plugins.append(plugin)
98 98 return _plugins
99 99
100 100 return _get_auth_plugins
101 101
102 102 def get_plugins_for_authentication(self, cache=True):
103 103 """
104 104 Returns a list of plugins which should be consulted when authenticating
105 105 a user. It only returns plugins which are enabled and active.
106 106 Additionally, it includes the fallback plugin from the INI file, if
107 107 `rhodecode.auth_plugin_fallback` is set to a plugin ID.
108 108 """
109 109
110 110 _get_auth_plugins = self.get_cache_call_method(cache=cache)
111 111
112 112 start = time.time()
113 113 plugins = _get_auth_plugins('rhodecode_auth_plugins', 'v1', self._fallback_plugin)
114 114
115 115 compute_time = time.time() - start
116 116 log.debug('cached method:%s took %.4fs', _get_auth_plugins.__name__, compute_time)
117 117
118 118 statsd = StatsdClient.statsd
119 119 if statsd:
120 120 elapsed_time_ms = round(1000.0 * compute_time) # use ms only
121 121 statsd.timing("rhodecode_auth_plugins_timing.histogram", elapsed_time_ms,
122 122 use_decimals=False)
123 123
124 124 return plugins
125 125
126 126 @classmethod
127 127 def get_cache_region(cls):
128 cache_namespace_uid = 'auth_plugins'
128 cache_namespace_uid = 'auth_plugins.v1'
129 129 region = rc_cache.get_or_create_region('cache_general', cache_namespace_uid)
130 130 return region, cache_namespace_uid
131 131
132 132 @classmethod
133 133 def invalidate_auth_plugins_cache(cls, hard=True):
134 134 region, namespace_key = cls.get_cache_region()
135 135 log.debug('Invalidation cache [%s] region %s for cache_key: %s',
136 136 'invalidate_auth_plugins_cache', region, namespace_key)
137 137
138 138 # we use hard cleanup if invalidation is sent
139 139 rc_cache.clear_cache_namespace(region, namespace_key, method=rc_cache.CLEAR_DELETE)
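Beyond the explicit clear above, the cached call in get_cache_call_method() also becomes stale-proof through its arguments: the literal 'v1' passed to _get_auth_plugins() is part of the cache key, so bumping it (like the 'auth_plugins.v1' namespace) simply leaves old entries behind. A toy sketch of that mechanism, not the dogpile.cache API; the plugin id is illustrative.

    _store = {}

    def cache_on_arguments(fn):
        def wrapper(*args):
            key = (fn.__name__,) + args      # the argument tuple is the cache key
            if key not in _store:
                _store[key] = fn(*args)
            return _store[key]
        return wrapper

    @cache_on_arguments
    def _get_auth_plugins(name, key, fallback_plugin):
        print('computing plugin list')       # runs only on a cache miss
        return ['egg:rhodecode-enterprise-ce#rhodecode']

    _get_auth_plugins('rhodecode_auth_plugins', 'v1', None)  # miss, computed
    _get_auth_plugins('rhodecode_auth_plugins', 'v1', None)  # hit, served from the store
    _get_auth_plugins('rhodecode_auth_plugins', 'v2', None)  # new key, recomputed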
@@ -1,2538 +1,2538 @@
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 authentication and permission libraries
21 21 """
22 22
23 23 import os
24 24
25 25 import time
26 26 import collections
27 27 import fnmatch
28 28 import itertools
29 29 import logging
30 30 import random
31 31 import traceback
32 32 from functools import wraps
33 33 import bcrypt
34 34 import ipaddress
35 35
36 36 from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
37 37 from sqlalchemy.orm.exc import ObjectDeletedError
38 38 from sqlalchemy.orm import joinedload
39 39 from zope.cachedescriptors.property import Lazy as LazyProperty
40 40
41 41 import rhodecode
42 42 from rhodecode.model import meta
43 43 from rhodecode.model.meta import Session
44 44 from rhodecode.model.user import UserModel
45 45 from rhodecode.model.db import (
46 46 false, User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
47 47 UserIpMap, UserApiKeys, RepoGroup, UserGroup, UserNotice)
48 48 from rhodecode.lib import rc_cache
49 49 from rhodecode.lib.utils import (
50 50 get_repo_slug, get_repo_group_slug, get_user_group_slug)
51 51 from rhodecode.lib.type_utils import aslist
52 52 from rhodecode.lib.hash_utils import sha1, sha256, md5
53 53 from rhodecode.lib.str_utils import ascii_bytes, safe_str, safe_int, safe_bytes
54 54 from rhodecode.lib.caching_query import FromCache
55 55
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59 csrf_token_key = "csrf_token"
60 60
61 61
62 62 class PasswordGenerator(object):
63 63 """
64 64 This is a simple class for generating passwords from different sets of
65 65 characters
66 66 usage::
67 67 passwd_gen = PasswordGenerator()
68 68 # print an 8-letter password containing only big and small
69 69 # letters of the alphabet
70 70 passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
71 71 """
72 72 ALPHABETS_NUM = r'''1234567890'''
73 73 ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
74 74 ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
75 75 ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
76 76 ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
77 77 + ALPHABETS_NUM + ALPHABETS_SPECIAL
78 78 ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
79 79 ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
80 80 ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
81 81 ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
82 82
83 83 def __init__(self, passwd=''):
84 84 self.passwd = passwd
85 85
86 86 def gen_password(self, length, type_=None):
87 87 if type_ is None:
88 88 type_ = self.ALPHABETS_FULL
89 89 self.passwd = ''.join([random.choice(type_) for _ in range(length)])
90 90 return self.passwd
91 91
92 92
93 93 class _RhodeCodeCryptoBase(object):
94 94 ENC_PREF = None
95 95
96 96 def hash_create(self, str_):
97 97 """
98 98 hash the string using
99 99
100 100 :param str_: password to hash
101 101 """
102 102 raise NotImplementedError
103 103
104 104 def hash_check_with_upgrade(self, password: bytes, hashed: bytes):
105 105 """
106 106 Returns a tuple whose first element is a boolean stating whether the
107 107 given password matches its hashed version, and whose second element is
108 108 a new hash of the password, in case this password should be migrated
109 109 to a new cipher.
110 110 """
111 111 self._assert_bytes(password)
112 112 checked_hash = self.hash_check(password, hashed)
113 113 return checked_hash, None
114 114
115 115 def hash_check(self, password, hashed):
116 116 """
117 117 Checks a password against its hashed value.
118 118
119 119 :param password: password
120 120 :param hashed: password in hashed form
121 121 """
122 122 raise NotImplementedError
123 123
124 124 @classmethod
125 125 def _assert_bytes(cls, value):
126 126 """
127 127 Passing in a `unicode` object can lead to hard-to-detect issues
128 128 if passwords contain non-ascii characters. We do a type check
129 129 at runtime so that such mistakes are detected early on.
130 130 """
131 131 if not isinstance(value, bytes):
132 132 raise TypeError(f"Bytestring required as input, got {type(value)}.")
133 133
134 134
135 135 class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
136 136 ENC_PREF = ('$2a$10', '$2b$10')
137 137
138 138 def hash_create(self, str_):
139 139 self._assert_bytes(str_)
140 140 return bcrypt.hashpw(str_, bcrypt.gensalt(10))
141 141
142 142 def hash_check_with_upgrade(self, password: bytes, hashed: bytes):
143 143 """
144 144 Returns a tuple whose first element is a boolean stating whether the
145 145 given password matches its hashed version, and whose second element is
146 146 a new hash of the password, in case this password should be migrated
147 147 to a new cipher.
148 148
149 149 This implements special upgrade logic which works as follows:
150 150 - check if the given password matches the bcrypt hash; if yes, the
151 151 correct password was used and it is already stored as bcrypt. Proceed
152 152 without any changes
153 153 - if the bcrypt hash check fails, try sha256. If that hash compare
154 154 is ok, it means a correct but old-style hashed password is in use.
155 155 Indicate a hash change and proceed
156 156 """
157 157 self._assert_bytes(password)
158 158 new_hash = None
159 159
160 160 # regular pw check
161 161 password_match_bcrypt = self.hash_check(password, hashed)
162 162
163 163 # now we want to know if the password was maybe from sha256
164 164 # basically calling _RhodeCodeCryptoSha256().hash_check()
165 165 if not password_match_bcrypt:
166 166 if _RhodeCodeCryptoSha256().hash_check(password, hashed): # match by OLD algo
167 167 new_hash = self.hash_create(password) # make new bcrypt hash, aka "migrate" hash
168 168 password_match_bcrypt = True
169 169
170 170 return password_match_bcrypt, new_hash
171 171
172 172 def hash_check(self, password: bytes, hashed: bytes) -> bool:
173 173 """
174 174 Checks a password against its hashed value.
175 175
176 176 :param password: password
177 177 :param hashed: password in hashed form
178 178 """
179 179 self._assert_bytes(password)
180 180 try:
181 181 return bcrypt.hashpw(password, hashed) == hashed
182 182 except ValueError as e:
183 183 # we probably have an invalid salt here, we should not crash;
184 184 # just return False as it would be a wrong password.
185 185 log.debug('Failed to check password hash using bcrypt %s',
186 186 safe_str(e))
187 187
188 188 return False
189 189
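A standalone sketch of the upgrade path above, using hashlib and bcrypt directly; the legacy storage format here is only an assumption standing in for the old sha256 backend. A legacy hash still authenticates once, and a fresh bcrypt hash is produced so the stored password can be migrated.

    import hashlib
    import bcrypt

    password = b'secret'
    legacy_hash = hashlib.sha256(password).hexdigest().encode()  # assumed old-style stored hash

    def check_with_upgrade(password, stored):
        try:
            if bcrypt.hashpw(password, stored) == stored:
                return True, None                  # already bcrypt, nothing to migrate
        except ValueError:
            pass                                   # `stored` is not a valid bcrypt hash
        if hashlib.sha256(password).hexdigest().encode() == stored:
            return True, bcrypt.hashpw(password, bcrypt.gensalt(10))  # matched old scheme, migrate
        return False, None

    ok, new_hash = check_with_upgrade(password, legacy_hash)  # ok is True, new_hash is a bcrypt hash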
190 190
191 191 class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
192 192 """
193 193 Legacy crypto backend used in OLD versions on Windows. Now it's just here
194 194 to migrate passwords to the new bcrypt backend
195 195 """
196 196 ENC_PREF = '_'
197 197
198 198 def hash_create(self, str_):
199 199 self._assert_bytes(str_)
200 200 return sha256(str_)
201 201
202 202 def hash_check(self, password: bytes, hashed: bytes) -> bool:
203 203 """
204 204 Checks a password against its hashed value.
205 205
206 206 :param password: password
207 207 :param hashed: password in hashed form
208 208 """
209 209 self._assert_bytes(password)
210 210 return sha256(password) == hashed
211 211
212 212
213 213 class _RhodeCodeCryptoTest(_RhodeCodeCryptoBase):
214 214 ENC_PREF = '_'
215 215
216 216 def hash_create(self, str_):
217 217 self._assert_bytes(str_)
218 218 return sha1(str_)
219 219
220 220 def hash_check(self, password: bytes, hashed: bytes) -> bool:
221 221 """
222 222 Checks a password against its hashed value.
223 223
224 224 :param password: password
225 225 :param hashed: password in hashed form
226 226 """
227 227 self._assert_bytes(password)
228 228 return sha1(password) == hashed
229 229
230 230
231 231 def crypto_backend():
232 232 """
233 233 Return the matching crypto backend.
234 234
235 235 Selection is based on if we run tests or not, we pick sha1-test backend to run
236 236 tests faster since BCRYPT is expensive to calculate
237 237 """
238 238 if rhodecode.is_test:
239 239 RhodeCodeCrypto = _RhodeCodeCryptoTest()
240 240 else:
241 241 RhodeCodeCrypto = _RhodeCodeCryptoBCrypt()
242 242
243 243 return RhodeCodeCrypto
244 244
245 245
246 246 def get_crypt_password(password):
247 247 """
248 248 Create the hash of `password` with the active crypto backend.
249 249
250 250 :param password: The cleartext password.
251 251 """
252 252 return crypto_backend().hash_create(safe_bytes(password))
253 253
254 254
255 255 def check_password(password, hashed):
256 256 """
257 257 Check if the value in `password` matches the hash in `hashed`.
258 258
259 259 :param password: The cleartext password.
260 260 :type password: unicode
261 261
262 262 :param hashed: The expected hashed version of the password.
263 263 :type hashed: The hash has to be passed in in text representation.
264 264 """
265 265 password = safe_bytes(password)
266 266 return crypto_backend().hash_check(password, hashed)
267 267
268 268
269 269 def generate_auth_token(data, salt=None):
270 270 """
271 271 Generates API KEY from given string
272 272 """
273 273
274 274 if salt is None:
275 275 salt = os.urandom(16)
276 276 token = safe_bytes(data) + safe_bytes(salt)
277 277 return sha1(token)
278 278
279 279
280 280 def get_came_from(request):
281 281 """
282 282 get path+query_string from the request, sanitized by removing auth_token
283 283 """
284 284 _req = request
285 285
286 286 path = _req.path
287 287 if 'auth_token' in _req.GET:
288 288 # sanitize the request and remove auth_token for redirection
289 289 _req.GET.pop('auth_token')
290 290 qs = _req.query_string
291 291 if qs:
292 292 path += '?' + qs
293 293
294 294 return path
295 295
296 296
297 297 class CookieStoreWrapper(object):
298 298
299 299 def __init__(self, cookie_store):
300 300 self.cookie_store = cookie_store
301 301
302 302 def __repr__(self):
303 303 return f'CookieStore<{self.cookie_store}>'
304 304
305 305 def get(self, key, other=None):
306 306 if isinstance(self.cookie_store, dict):
307 307 return self.cookie_store.get(key, other)
308 308 elif isinstance(self.cookie_store, AuthUser):
309 309 return self.cookie_store.__dict__.get(key, other)
310 310
311 311
312 312 def _cached_perms_data(user_id, scope, user_is_admin,
313 313 user_inherit_default_permissions, explicit, algo,
314 314 calculate_super_admin):
315 315
316 316 permissions = PermissionCalculator(
317 317 user_id, scope, user_is_admin, user_inherit_default_permissions,
318 318 explicit, algo, calculate_super_admin)
319 319 return permissions.calculate()
320 320
321 321
322 322 class PermOrigin(object):
323 323 SUPER_ADMIN = 'superadmin'
324 324 ARCHIVED = 'archived'
325 325
326 326 REPO_USER = 'user:%s'
327 327 REPO_USERGROUP = 'usergroup:%s'
328 328 REPO_OWNER = 'repo.owner'
329 329 REPO_DEFAULT = 'repo.default'
330 330 REPO_DEFAULT_NO_INHERIT = 'repo.default.no.inherit'
331 331 REPO_PRIVATE = 'repo.private'
332 332
333 333 REPOGROUP_USER = 'user:%s'
334 334 REPOGROUP_USERGROUP = 'usergroup:%s'
335 335 REPOGROUP_OWNER = 'group.owner'
336 336 REPOGROUP_DEFAULT = 'group.default'
337 337 REPOGROUP_DEFAULT_NO_INHERIT = 'group.default.no.inherit'
338 338
339 339 USERGROUP_USER = 'user:%s'
340 340 USERGROUP_USERGROUP = 'usergroup:%s'
341 341 USERGROUP_OWNER = 'usergroup.owner'
342 342 USERGROUP_DEFAULT = 'usergroup.default'
343 343 USERGROUP_DEFAULT_NO_INHERIT = 'usergroup.default.no.inherit'
344 344
345 345
346 346 class PermOriginDict(dict):
347 347 """
348 348 A special dict used for tracking permissions along with their origins.
349 349
350 350 `__setitem__` has been overridden to expect a tuple(perm, origin)
351 351 `__getitem__` will return only the perm
352 352 `.perm_origin_stack` will return the stack of (perm, origin) set per key
353 353
354 354 >>> perms = PermOriginDict()
355 355 >>> perms['resource'] = 'read', 'default', 1
356 356 >>> perms['resource']
357 357 'read'
358 358 >>> perms['resource'] = 'write', 'admin', 2
359 359 >>> perms['resource']
360 360 'write'
361 361 >>> perms.perm_origin_stack
362 362 {'resource': [('read', 'default', 1), ('write', 'admin', 2)]}
363 363 """
364 364
365 365 def __init__(self, *args, **kw):
366 366 dict.__init__(self, *args, **kw)
367 367 self.perm_origin_stack = collections.OrderedDict()
368 368
369 369 def __setitem__(self, key, perm_origin_obj_id):
370 370 # set (most likely via pickle) key:val pair without tuple
371 371 if not isinstance(perm_origin_obj_id, tuple):
372 372 perm = perm_origin_obj_id
373 373 dict.__setitem__(self, key, perm)
374 374 else:
375 375 # unpack if we create a key from tuple
376 376 (perm, origin, obj_id) = perm_origin_obj_id
377 377 self.perm_origin_stack.setdefault(key, []).append((perm, origin, obj_id))
378 378 dict.__setitem__(self, key, perm)
379 379
380 380
381 381 class BranchPermOriginDict(dict):
382 382 """
383 383 Dedicated branch permissions dict, with tracking of patterns and origins.
384 384
385 385 >>> perms = BranchPermOriginDict()
386 386 >>> perms['resource'] = '*pattern', 'read', 'default'
387 387 >>> perms['resource']
388 388 {'*pattern': 'read'}
389 389 >>> perms['resource'] = '*pattern', 'write', 'admin'
390 390 >>> perms['resource']
391 391 {'*pattern': 'write'}
392 392 >>> perms.perm_origin_stack
393 393 {'resource': {'*pattern': [('read', 'default'), ('write', 'admin')]}}
394 394 """
395 395 def __init__(self, *args, **kw):
396 396 dict.__init__(self, *args, **kw)
397 397 self.perm_origin_stack = collections.OrderedDict()
398 398
399 399 def __setitem__(self, key, pattern_perm_origin):
400 400 # set (most likely via pickle) key:val pair without tuple
401 401 if not isinstance(pattern_perm_origin, tuple):
402 402 pattern_perm = pattern_perm_origin
403 403 dict.__setitem__(self, key, pattern_perm)
404 404
405 405 else:
406 406 (pattern_perm, origin) = pattern_perm_origin
407 407 # we're passing in the dict, so we save the stack
408 408 for pattern, perm in list(pattern_perm.items()):
409 409 self.perm_origin_stack.setdefault(key, {})\
410 410 .setdefault(pattern, []).append((perm, origin))
411 411
412 412 dict.__setitem__(self, key, pattern_perm)
413 413
414 414
415 415 class PermissionCalculator(object):
416 416
417 417 def __init__(
418 418 self, user_id, scope, user_is_admin,
419 419 user_inherit_default_permissions, explicit, algo,
420 420 calculate_super_admin_as_user=False):
421 421
422 422 self.user_id = user_id
423 423 self.user_is_admin = user_is_admin
424 424 self.inherit_default_permissions = user_inherit_default_permissions
425 425 self.explicit = explicit
426 426 self.algo = algo
427 427 self.calculate_super_admin_as_user = calculate_super_admin_as_user
428 428
429 429 scope = scope or {}
430 430 self.scope_repo_id = scope.get('repo_id')
431 431 self.scope_repo_group_id = scope.get('repo_group_id')
432 432 self.scope_user_group_id = scope.get('user_group_id')
433 433
434 434 self.default_user_id = User.get_default_user(cache=True).user_id
435 435
436 436 self.permissions_repositories = PermOriginDict()
437 437 self.permissions_repository_groups = PermOriginDict()
438 438 self.permissions_user_groups = PermOriginDict()
439 439 self.permissions_repository_branches = BranchPermOriginDict()
440 440 self.permissions_global = set()
441 441
442 442 self.default_repo_perms = Permission.get_default_repo_perms(
443 443 self.default_user_id, self.scope_repo_id)
444 444 self.default_repo_groups_perms = Permission.get_default_group_perms(
445 445 self.default_user_id, self.scope_repo_group_id)
446 446 self.default_user_group_perms = \
447 447 Permission.get_default_user_group_perms(
448 448 self.default_user_id, self.scope_user_group_id)
449 449
450 450 # default branch perms
451 451 self.default_branch_repo_perms = \
452 452 Permission.get_default_repo_branch_perms(
453 453 self.default_user_id, self.scope_repo_id)
454 454
455 455 def calculate(self):
456 456 if self.user_is_admin and not self.calculate_super_admin_as_user:
457 457 return self._calculate_super_admin_permissions()
458 458
459 459 self._calculate_global_default_permissions()
460 460 self._calculate_global_permissions()
461 461 self._calculate_default_permissions()
462 462 self._calculate_repository_permissions()
463 463 self._calculate_repository_branch_permissions()
464 464 self._calculate_repository_group_permissions()
465 465 self._calculate_user_group_permissions()
466 466 return self._permission_structure()
467 467
468 468 def _calculate_super_admin_permissions(self):
469 469 """
470 470 a super-admin user has all default rights for repositories
471 471 and groups set to admin
472 472 """
473 473 self.permissions_global.add('hg.admin')
474 474 self.permissions_global.add('hg.create.write_on_repogroup.true')
475 475
476 476 # repositories
477 477 for perm in self.default_repo_perms:
478 478 r_k = perm.UserRepoToPerm.repository.repo_name
479 479 obj_id = perm.UserRepoToPerm.repository.repo_id
480 480 archived = perm.UserRepoToPerm.repository.archived
481 481 p = 'repository.admin'
482 482 self.permissions_repositories[r_k] = p, PermOrigin.SUPER_ADMIN, obj_id
483 483 # special case for archived repositories, which we block still even for
484 484 # super admins
485 485 if archived:
486 486 p = 'repository.read'
487 487 self.permissions_repositories[r_k] = p, PermOrigin.ARCHIVED, obj_id
488 488
489 489 # repository groups
490 490 for perm in self.default_repo_groups_perms:
491 491 rg_k = perm.UserRepoGroupToPerm.group.group_name
492 492 obj_id = perm.UserRepoGroupToPerm.group.group_id
493 493 p = 'group.admin'
494 494 self.permissions_repository_groups[rg_k] = p, PermOrigin.SUPER_ADMIN, obj_id
495 495
496 496 # user groups
497 497 for perm in self.default_user_group_perms:
498 498 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
499 499 obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
500 500 p = 'usergroup.admin'
501 501 self.permissions_user_groups[u_k] = p, PermOrigin.SUPER_ADMIN, obj_id
502 502
503 503 # branch permissions
504 504 # since super-admin also can have custom rule permissions
505 505 # we *always* need to calculate those inherited from default, and also explicit
506 506 self._calculate_default_permissions_repository_branches(
507 507 user_inherit_object_permissions=False)
508 508 self._calculate_repository_branch_permissions()
509 509
510 510 return self._permission_structure()
511 511
512 512 def _calculate_global_default_permissions(self):
513 513 """
514 514 global permissions taken from the default user
515 515 """
516 516 default_global_perms = UserToPerm.query()\
517 517 .filter(UserToPerm.user_id == self.default_user_id)\
518 518 .options(joinedload(UserToPerm.permission))
519 519
520 520 for perm in default_global_perms:
521 521 self.permissions_global.add(perm.permission.permission_name)
522 522
523 523 if self.user_is_admin:
524 524 self.permissions_global.add('hg.admin')
525 525 self.permissions_global.add('hg.create.write_on_repogroup.true')
526 526
527 527 def _calculate_global_permissions(self):
528 528 """
529 529 Set global system permissions with user permissions or permissions
530 530 taken from the user groups of the current user.
531 531
532 532 The permissions include repo creating, repo group creating, forking
533 533 etc.
534 534 """
535 535
536 536 # now we read the defined permissions and overwrite what we have set
537 537 # before those can be configured from groups or users explicitly.
538 538
539 539 # In case we want to extend this list we should make sure
540 540 # this is in sync with User.DEFAULT_USER_PERMISSIONS definitions
541 541 from rhodecode.model.permission import PermissionModel
542 542
543 543 _configurable = frozenset([
544 544 PermissionModel.FORKING_DISABLED, PermissionModel.FORKING_ENABLED,
545 545 'hg.create.none', 'hg.create.repository',
546 546 'hg.usergroup.create.false', 'hg.usergroup.create.true',
547 547 'hg.repogroup.create.false', 'hg.repogroup.create.true',
548 548 'hg.create.write_on_repogroup.false', 'hg.create.write_on_repogroup.true',
549 549 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
550 550 ])
551 551
552 552 # USER GROUPS comes first user group global permissions
553 553 user_perms_from_users_groups = Session().query(UserGroupToPerm)\
554 554 .options(joinedload(UserGroupToPerm.permission))\
555 555 .join((UserGroupMember, UserGroupToPerm.users_group_id ==
556 556 UserGroupMember.users_group_id))\
557 557 .filter(UserGroupMember.user_id == self.user_id)\
558 558 .order_by(UserGroupToPerm.users_group_id)\
559 559 .all()
560 560
561 561 # need to group here by groups since user can be in more than
562 562 # one group, so we get all groups
563 563 _explicit_grouped_perms = [
564 564 [x, list(y)] for x, y in
565 565 itertools.groupby(user_perms_from_users_groups,
566 566 lambda _x: _x.users_group)]
567 567
568 568 for gr, perms in _explicit_grouped_perms:
569 569 # since user can be in multiple groups iterate over them and
570 570 # select the lowest permissions first (more explicit)
571 571 # TODO(marcink): do this^^
572 572
573 573 # group doesn't inherit default permissions so we actually set them
574 574 if not gr.inherit_default_permissions:
575 575 # NEED TO IGNORE all previously set configurable permissions
576 576 # and replace them with explicitly set from this user
577 577 # group permissions
578 578 self.permissions_global = self.permissions_global.difference(
579 579 _configurable)
580 580 for perm in perms:
581 581 self.permissions_global.add(perm.permission.permission_name)
582 582
583 583 # user explicit global permissions
584 584 user_perms = Session().query(UserToPerm)\
585 585 .options(joinedload(UserToPerm.permission))\
586 586 .filter(UserToPerm.user_id == self.user_id).all()
587 587
588 588 if not self.inherit_default_permissions:
589 589 # NEED TO IGNORE all configurable permissions and
590 590 # replace them with explicitly set from this user permissions
591 591 self.permissions_global = self.permissions_global.difference(
592 592 _configurable)
593 593 for perm in user_perms:
594 594 self.permissions_global.add(perm.permission.permission_name)
595 595
596 596 def _calculate_default_permissions_repositories(self, user_inherit_object_permissions):
597 597 for perm in self.default_repo_perms:
598 598 r_k = perm.UserRepoToPerm.repository.repo_name
599 599 obj_id = perm.UserRepoToPerm.repository.repo_id
600 600 archived = perm.UserRepoToPerm.repository.archived
601 601 p = perm.Permission.permission_name
602 602 o = PermOrigin.REPO_DEFAULT
603 603 self.permissions_repositories[r_k] = p, o, obj_id
604 604
605 605 # if we decide this user isn't inheriting permissions from
606 606 # default user we set him to .none so only explicit
607 607 # permissions work
608 608 if not user_inherit_object_permissions:
609 609 p = 'repository.none'
610 610 o = PermOrigin.REPO_DEFAULT_NO_INHERIT
611 611 self.permissions_repositories[r_k] = p, o, obj_id
612 612
613 613 if perm.Repository.private and not (
614 614 perm.Repository.user_id == self.user_id):
615 615 # disable defaults for private repos,
616 616 p = 'repository.none'
617 617 o = PermOrigin.REPO_PRIVATE
618 618 self.permissions_repositories[r_k] = p, o, obj_id
619 619
620 620 elif perm.Repository.user_id == self.user_id:
621 621 # set admin if owner
622 622 p = 'repository.admin'
623 623 o = PermOrigin.REPO_OWNER
624 624 self.permissions_repositories[r_k] = p, o, obj_id
625 625
626 626 if self.user_is_admin:
627 627 p = 'repository.admin'
628 628 o = PermOrigin.SUPER_ADMIN
629 629 self.permissions_repositories[r_k] = p, o, obj_id
630 630
631 631 # finally in case of archived repositories, we downgrade higher
632 632 # permissions to read
633 633 if archived:
634 634 current_perm = self.permissions_repositories[r_k]
635 635 if current_perm in ['repository.write', 'repository.admin']:
636 636 p = 'repository.read'
637 637 o = PermOrigin.ARCHIVED
638 638 self.permissions_repositories[r_k] = p, o, obj_id
639 639
640 640 def _calculate_default_permissions_repository_branches(self, user_inherit_object_permissions):
641 641 for perm in self.default_branch_repo_perms:
642 642
643 643 r_k = perm.UserRepoToPerm.repository.repo_name
644 644 p = perm.Permission.permission_name
645 645 pattern = perm.UserToRepoBranchPermission.branch_pattern
646 646 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
647 647
648 648 if not self.explicit:
649 649 cur_perm = self.permissions_repository_branches.get(r_k)
650 650 if cur_perm:
651 651 cur_perm = cur_perm[pattern]
652 652 cur_perm = cur_perm or 'branch.none'
653 653
654 654 p = self._choose_permission(p, cur_perm)
655 655
656 656 # NOTE(marcink): register all pattern/perm instances in this
657 657 # special dict that aggregates entries
658 658 self.permissions_repository_branches[r_k] = {pattern: p}, o
659 659
660 660 def _calculate_default_permissions_repository_groups(self, user_inherit_object_permissions):
661 661 for perm in self.default_repo_groups_perms:
662 662 rg_k = perm.UserRepoGroupToPerm.group.group_name
663 663 obj_id = perm.UserRepoGroupToPerm.group.group_id
664 664 p = perm.Permission.permission_name
665 665 o = PermOrigin.REPOGROUP_DEFAULT
666 666 self.permissions_repository_groups[rg_k] = p, o, obj_id
667 667
668 668 # if we decide this user isn't inheriting permissions from default
669 669 # user we set him to .none so only explicit permissions work
670 670 if not user_inherit_object_permissions:
671 671 p = 'group.none'
672 672 o = PermOrigin.REPOGROUP_DEFAULT_NO_INHERIT
673 673 self.permissions_repository_groups[rg_k] = p, o, obj_id
674 674
675 675 if perm.RepoGroup.user_id == self.user_id:
676 676 # set admin if owner
677 677 p = 'group.admin'
678 678 o = PermOrigin.REPOGROUP_OWNER
679 679 self.permissions_repository_groups[rg_k] = p, o, obj_id
680 680
681 681 if self.user_is_admin:
682 682 p = 'group.admin'
683 683 o = PermOrigin.SUPER_ADMIN
684 684 self.permissions_repository_groups[rg_k] = p, o, obj_id
685 685
686 686 def _calculate_default_permissions_user_groups(self, user_inherit_object_permissions):
687 687 for perm in self.default_user_group_perms:
688 688 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
689 689 obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
690 690 p = perm.Permission.permission_name
691 691 o = PermOrigin.USERGROUP_DEFAULT
692 692 self.permissions_user_groups[u_k] = p, o, obj_id
693 693
694 694 # if we decide this user isn't inheriting permissions from default
695 695 # user we set him to .none so only explicit permissions work
696 696 if not user_inherit_object_permissions:
697 697 p = 'usergroup.none'
698 698 o = PermOrigin.USERGROUP_DEFAULT_NO_INHERIT
699 699 self.permissions_user_groups[u_k] = p, o, obj_id
700 700
701 701 if perm.UserGroup.user_id == self.user_id:
702 702 # set admin if owner
703 703 p = 'usergroup.admin'
704 704 o = PermOrigin.USERGROUP_OWNER
705 705 self.permissions_user_groups[u_k] = p, o, obj_id
706 706
707 707 if self.user_is_admin:
708 708 p = 'usergroup.admin'
709 709 o = PermOrigin.SUPER_ADMIN
710 710 self.permissions_user_groups[u_k] = p, o, obj_id
711 711
712 712 def _calculate_default_permissions(self):
713 713 """
714 714 Set default user permissions for repositories, repository branches,
715 715 repository groups, user groups taken from the default user.
716 716
717 717 Calculate inheritance of object permissions based on what we have now
718 718 in GLOBAL permissions. We check if .false is in GLOBAL since this is
719 719 explicitly set. Inherit is the opposite of .false being there.
720 720
721 721 .. note::
722 722
723 723 the syntax is a little bit odd, but what we need to check here is
724 724 the absence of the .false permission from the list, so even in an
725 725 inconsistent state where both .true and .false are present,
726 726 .false is more important
727 727
728 728 """
729 729 user_inherit_object_permissions = (
730 730 'hg.inherit_default_perms.false' not in self.permissions_global)
731 731
732 732 # default permissions inherited from `default` user permissions
733 733 self._calculate_default_permissions_repositories(
734 734 user_inherit_object_permissions)
735 735
736 736 self._calculate_default_permissions_repository_branches(
737 737 user_inherit_object_permissions)
738 738
739 739 self._calculate_default_permissions_repository_groups(
740 740 user_inherit_object_permissions)
741 741
742 742 self._calculate_default_permissions_user_groups(
743 743 user_inherit_object_permissions)
744 744
745 745 def _calculate_repository_permissions(self):
746 746 """
747 747 Repository access permissions for the current user.
748 748
749 749 Check if the user is part of user groups for this repository and
750 750 fill in the permission from it. `_choose_permission` decides which
751 751 permission should be selected based on the selected method.
752 752 """
753 753
754 754 # user group for repositories permissions
755 755 user_repo_perms_from_user_group = Permission\
756 756 .get_default_repo_perms_from_user_group(
757 757 self.user_id, self.scope_repo_id)
758 758
759 759 multiple_counter = collections.defaultdict(int)
760 760 for perm in user_repo_perms_from_user_group:
761 761 r_k = perm.UserGroupRepoToPerm.repository.repo_name
762 762 obj_id = perm.UserGroupRepoToPerm.repository.repo_id
763 763 multiple_counter[r_k] += 1
764 764 p = perm.Permission.permission_name
765 765 o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
766 766 .users_group.users_group_name
767 767
768 768 if multiple_counter[r_k] > 1:
769 769 cur_perm = self.permissions_repositories[r_k]
770 770 p = self._choose_permission(p, cur_perm)
771 771
772 772 self.permissions_repositories[r_k] = p, o, obj_id
773 773
774 774 if perm.Repository.user_id == self.user_id:
775 775 # set admin if owner
776 776 p = 'repository.admin'
777 777 o = PermOrigin.REPO_OWNER
778 778 self.permissions_repositories[r_k] = p, o, obj_id
779 779
780 780 if self.user_is_admin:
781 781 p = 'repository.admin'
782 782 o = PermOrigin.SUPER_ADMIN
783 783 self.permissions_repositories[r_k] = p, o, obj_id
784 784
785 785 # user explicit permissions for repositories, overrides any specified
786 786 # by the group permission
787 787 user_repo_perms = Permission.get_default_repo_perms(
788 788 self.user_id, self.scope_repo_id)
789 789 for perm in user_repo_perms:
790 790 r_k = perm.UserRepoToPerm.repository.repo_name
791 791 obj_id = perm.UserRepoToPerm.repository.repo_id
792 792 archived = perm.UserRepoToPerm.repository.archived
793 793 p = perm.Permission.permission_name
794 794 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
795 795
796 796 if not self.explicit:
797 797 cur_perm = self.permissions_repositories.get(
798 798 r_k, 'repository.none')
799 799 p = self._choose_permission(p, cur_perm)
800 800
801 801 self.permissions_repositories[r_k] = p, o, obj_id
802 802
803 803 if perm.Repository.user_id == self.user_id:
804 804 # set admin if owner
805 805 p = 'repository.admin'
806 806 o = PermOrigin.REPO_OWNER
807 807 self.permissions_repositories[r_k] = p, o, obj_id
808 808
809 809 if self.user_is_admin:
810 810 p = 'repository.admin'
811 811 o = PermOrigin.SUPER_ADMIN
812 812 self.permissions_repositories[r_k] = p, o, obj_id
813 813
814 814 # finally in case of archived repositories, we downgrade higher
815 815 # permissions to read
816 816 if archived:
817 817 current_perm = self.permissions_repositories[r_k]
818 818 if current_perm in ['repository.write', 'repository.admin']:
819 819 p = 'repository.read'
820 820 o = PermOrigin.ARCHIVED
821 821 self.permissions_repositories[r_k] = p, o, obj_id
822 822
823 823 def _calculate_repository_branch_permissions(self):
824 824 # user group for repositories permissions
825 825 user_repo_branch_perms_from_user_group = Permission\
826 826 .get_default_repo_branch_perms_from_user_group(
827 827 self.user_id, self.scope_repo_id)
828 828
829 829 multiple_counter = collections.defaultdict(int)
830 830 for perm in user_repo_branch_perms_from_user_group:
831 831 r_k = perm.UserGroupRepoToPerm.repository.repo_name
832 832 p = perm.Permission.permission_name
833 833 pattern = perm.UserGroupToRepoBranchPermission.branch_pattern
834 834 o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
835 835 .users_group.users_group_name
836 836
837 837 multiple_counter[r_k] += 1
838 838 if multiple_counter[r_k] > 1:
839 839 cur_perm = self.permissions_repository_branches[r_k][pattern]
840 840 p = self._choose_permission(p, cur_perm)
841 841
842 842 self.permissions_repository_branches[r_k] = {pattern: p}, o
843 843
844 844 # user explicit branch permissions for repositories, overrides
845 845 # any specified by the group permission
846 846 user_repo_branch_perms = Permission.get_default_repo_branch_perms(
847 847 self.user_id, self.scope_repo_id)
848 848
849 849 for perm in user_repo_branch_perms:
850 850
851 851 r_k = perm.UserRepoToPerm.repository.repo_name
852 852 p = perm.Permission.permission_name
853 853 pattern = perm.UserToRepoBranchPermission.branch_pattern
854 854 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
855 855
856 856 if not self.explicit:
857 857 cur_perm = self.permissions_repository_branches.get(r_k)
858 858 if cur_perm:
859 859 cur_perm = cur_perm[pattern]
860 860 cur_perm = cur_perm or 'branch.none'
861 861 p = self._choose_permission(p, cur_perm)
862 862
863 863 # NOTE(marcink): register all pattern/perm instances in this
864 864 # special dict that aggregates entries
865 865 self.permissions_repository_branches[r_k] = {pattern: p}, o
866 866
867 867 def _calculate_repository_group_permissions(self):
868 868 """
869 869 Repository group permissions for the current user.
870 870
871 871 Check if the user is part of user groups for repository groups and
872 872 fill in the permissions from it. `_choose_permission` decides which
873 873 permission should be selected based on the selected method.
874 874 """
875 875 # user group for repo groups permissions
876 876 user_repo_group_perms_from_user_group = Permission\
877 877 .get_default_group_perms_from_user_group(
878 878 self.user_id, self.scope_repo_group_id)
879 879
880 880 multiple_counter = collections.defaultdict(int)
881 881 for perm in user_repo_group_perms_from_user_group:
882 882 rg_k = perm.UserGroupRepoGroupToPerm.group.group_name
883 883 obj_id = perm.UserGroupRepoGroupToPerm.group.group_id
884 884 multiple_counter[rg_k] += 1
885 885 o = PermOrigin.REPOGROUP_USERGROUP % perm.UserGroupRepoGroupToPerm\
886 886 .users_group.users_group_name
887 887 p = perm.Permission.permission_name
888 888
889 889 if multiple_counter[rg_k] > 1:
890 890 cur_perm = self.permissions_repository_groups[rg_k]
891 891 p = self._choose_permission(p, cur_perm)
892 892 self.permissions_repository_groups[rg_k] = p, o, obj_id
893 893
894 894 if perm.RepoGroup.user_id == self.user_id:
895 895 # set admin if owner, even for member of other user group
896 896 p = 'group.admin'
897 897 o = PermOrigin.REPOGROUP_OWNER
898 898 self.permissions_repository_groups[rg_k] = p, o, obj_id
899 899
900 900 if self.user_is_admin:
901 901 p = 'group.admin'
902 902 o = PermOrigin.SUPER_ADMIN
903 903 self.permissions_repository_groups[rg_k] = p, o, obj_id
904 904
905 905 # user explicit permissions for repository groups
906 906 user_repo_groups_perms = Permission.get_default_group_perms(
907 907 self.user_id, self.scope_repo_group_id)
908 908 for perm in user_repo_groups_perms:
909 909 rg_k = perm.UserRepoGroupToPerm.group.group_name
910 910 obj_id = perm.UserRepoGroupToPerm.group.group_id
911 911 o = PermOrigin.REPOGROUP_USER % perm.UserRepoGroupToPerm\
912 912 .user.username
913 913 p = perm.Permission.permission_name
914 914
915 915 if not self.explicit:
916 916 cur_perm = self.permissions_repository_groups.get(rg_k, 'group.none')
917 917 p = self._choose_permission(p, cur_perm)
918 918
919 919 self.permissions_repository_groups[rg_k] = p, o, obj_id
920 920
921 921 if perm.RepoGroup.user_id == self.user_id:
922 922 # set admin if owner
923 923 p = 'group.admin'
924 924 o = PermOrigin.REPOGROUP_OWNER
925 925 self.permissions_repository_groups[rg_k] = p, o, obj_id
926 926
927 927 if self.user_is_admin:
928 928 p = 'group.admin'
929 929 o = PermOrigin.SUPER_ADMIN
930 930 self.permissions_repository_groups[rg_k] = p, o, obj_id
931 931
932 932 def _calculate_user_group_permissions(self):
933 933 """
934 934 User group permissions for the current user.
935 935 """
936 936 # user group for user group permissions
937 937 user_group_from_user_group = Permission\
938 938 .get_default_user_group_perms_from_user_group(
939 939 self.user_id, self.scope_user_group_id)
940 940
941 941 multiple_counter = collections.defaultdict(int)
942 942 for perm in user_group_from_user_group:
943 943 ug_k = perm.UserGroupUserGroupToPerm.target_user_group.users_group_name
944 944 obj_id = perm.UserGroupUserGroupToPerm.target_user_group.users_group_id
945 945 multiple_counter[ug_k] += 1
946 946 o = PermOrigin.USERGROUP_USERGROUP % perm.UserGroupUserGroupToPerm\
947 947 .user_group.users_group_name
948 948 p = perm.Permission.permission_name
949 949
950 950 if multiple_counter[ug_k] > 1:
951 951 cur_perm = self.permissions_user_groups[ug_k]
952 952 p = self._choose_permission(p, cur_perm)
953 953
954 954 self.permissions_user_groups[ug_k] = p, o, obj_id
955 955
956 956 if perm.UserGroup.user_id == self.user_id:
957 957 # set admin if owner, even for member of other user group
958 958 p = 'usergroup.admin'
959 959 o = PermOrigin.USERGROUP_OWNER
960 960 self.permissions_user_groups[ug_k] = p, o, obj_id
961 961
962 962 if self.user_is_admin:
963 963 p = 'usergroup.admin'
964 964 o = PermOrigin.SUPER_ADMIN
965 965 self.permissions_user_groups[ug_k] = p, o, obj_id
966 966
967 967 # user explicit permission for user groups
968 968 user_user_groups_perms = Permission.get_default_user_group_perms(
969 969 self.user_id, self.scope_user_group_id)
970 970 for perm in user_user_groups_perms:
971 971 ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
972 972 obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
973 973 o = PermOrigin.USERGROUP_USER % perm.UserUserGroupToPerm\
974 974 .user.username
975 975 p = perm.Permission.permission_name
976 976
977 977 if not self.explicit:
978 978 cur_perm = self.permissions_user_groups.get(ug_k, 'usergroup.none')
979 979 p = self._choose_permission(p, cur_perm)
980 980
981 981 self.permissions_user_groups[ug_k] = p, o, obj_id
982 982
983 983 if perm.UserGroup.user_id == self.user_id:
984 984 # set admin if owner
985 985 p = 'usergroup.admin'
986 986 o = PermOrigin.USERGROUP_OWNER
987 987 self.permissions_user_groups[ug_k] = p, o, obj_id
988 988
989 989 if self.user_is_admin:
990 990 p = 'usergroup.admin'
991 991 o = PermOrigin.SUPER_ADMIN
992 992 self.permissions_user_groups[ug_k] = p, o, obj_id
993 993
994 994 def _choose_permission(self, new_perm, cur_perm):
995 995 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
996 996 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
997 997 if self.algo == 'higherwin':
998 998 if new_perm_val > cur_perm_val:
999 999 return new_perm
1000 1000 return cur_perm
1001 1001 elif self.algo == 'lowerwin':
1002 1002 if new_perm_val < cur_perm_val:
1003 1003 return new_perm
1004 1004 return cur_perm
1005 1005
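# A short worked sketch of the resolution above; the weights are illustrative
# assumptions, the real values come from Permission.PERM_WEIGHTS.
#
#   assume PERM_WEIGHTS == {'repository.read': 1, 'repository.write': 3}
#
#   algo='higherwin': _choose_permission('repository.write', 'repository.read')
#       -> 'repository.write'   # the higher weight wins
#   algo='lowerwin':  _choose_permission('repository.write', 'repository.read')
#       -> 'repository.read'    # the lower weight wins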
1006 1006 def _permission_structure(self):
1007 1007 return {
1008 1008 'global': self.permissions_global,
1009 1009 'repositories': self.permissions_repositories,
1010 1010 'repository_branches': self.permissions_repository_branches,
1011 1011 'repositories_groups': self.permissions_repository_groups,
1012 1012 'user_groups': self.permissions_user_groups,
1013 1013 }
1014 1014
1015 1015
1016 1016 def allowed_auth_token_access(view_name, auth_token, whitelist=None):
1017 1017 """
1018 1018 Check if given view_name is in the whitelist of auth token access
1019 1019 """
1020 1020 if not whitelist:
1021 1021 from rhodecode import CONFIG
1022 1022 whitelist = aslist(
1023 1023 CONFIG.get('api_access_controllers_whitelist'), sep=',')
1024 1024 # backward compat translation
1025 1025 compat = {
1026 1026 # old controller, new VIEW
1027 1027 'ChangesetController:*': 'RepoCommitsView:*',
1028 1028 'ChangesetController:changeset_patch': 'RepoCommitsView:repo_commit_patch',
1029 1029 'ChangesetController:changeset_raw': 'RepoCommitsView:repo_commit_raw',
1030 1030 'FilesController:raw': 'RepoCommitsView:repo_commit_raw',
1031 1031 'FilesController:archivefile': 'RepoFilesView:repo_archivefile',
1032 1032 'GistsController:*': 'GistView:*',
1033 1033 }
1034 1034
1035 1035 log.debug(
1036 1036 'Allowed views for AUTH TOKEN access: %s', whitelist)
1037 1037 auth_token_access_valid = False
1038 1038
1039 1039 for entry in whitelist:
1040 1040 token_match = True
1041 1041 if entry in compat:
1042 1042 # translate from old Controllers to Pyramid Views
1043 1043 entry = compat[entry]
1044 1044
1045 1045 if '@' in entry:
1046 1046 # specific AuthToken
1047 1047 entry, allowed_token = entry.split('@', 1)
1048 1048 token_match = auth_token == allowed_token
1049 1049
1050 1050 if fnmatch.fnmatch(view_name, entry) and token_match:
1051 1051 auth_token_access_valid = True
1052 1052 break
1053 1053
1054 1054 if auth_token_access_valid:
1055 1055 log.debug('view: `%s` matches entry in whitelist: %s',
1056 1056 view_name, whitelist)
1057 1057
1058 1058 else:
1059 1059 msg = ('view: `%s` does *NOT* match any entry in whitelist: %s'
1060 1060 % (view_name, whitelist))
1061 1061 if auth_token:
1062 1062 # if we use auth token key and don't have access it's a warning
1063 1063 log.warning(msg)
1064 1064 else:
1065 1065 log.debug(msg)
1066 1066
1067 1067 return auth_token_access_valid
1068 1068
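# A minimal usage sketch of the matching above; the ini value and tokens are
# made-up examples, not real configuration.
#
#   api_access_controllers_whitelist =
#       RepoCommitsView:repo_commit_raw, RepoFilesView:repo_archivefile@secret-token
#
#   allowed_auth_token_access('RepoCommitsView:repo_commit_raw', auth_token='abc')
#       -> True    # view name matches via fnmatch, entry accepts any token
#   allowed_auth_token_access('RepoFilesView:repo_archivefile', auth_token='abc')
#       -> False   # entry is pinned to 'secret-token' through the '@' suffix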
1069 1069
1070 1070 class AuthUser(object):
1071 1071 """
1072 1072 A simple object that handles all attributes of user in RhodeCode
1073 1073
1074 1074 It does lookup based on API key, given user, or user present in session
1075 1075 Then it fills all required information for such user. It also checks if
1076 1076 anonymous access is enabled and if so, it returns default user as logged in
1077 1077 """
1078 1078 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
1079 1079 repo_read_perms = ['repository.read', 'repository.admin', 'repository.write']
1080 1080 repo_group_read_perms = ['group.read', 'group.write', 'group.admin']
1081 1081 user_group_read_perms = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
1082 1082
1083 1083 def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
1084 1084
1085 1085 self.user_id = user_id
1086 1086 self._api_key = api_key
1087 1087
1088 1088 self.api_key = None
1089 1089 self.username = username
1090 1090 self.ip_addr = ip_addr
1091 1091 self.name = ''
1092 1092 self.lastname = ''
1093 1093 self.first_name = ''
1094 1094 self.last_name = ''
1095 1095 self.email = ''
1096 1096 self.is_authenticated = False
1097 1097 self.admin = False
1098 1098 self.inherit_default_permissions = False
1099 1099 self.password = ''
1100 1100
1101 1101 self.anonymous_user = None # propagated on propagate_data
1102 1102 self.propagate_data()
1103 1103 self._instance = None
1104 1104 self._permissions_scoped_cache = {} # used to bind scoped calculation
1105 1105
1106 1106 @LazyProperty
1107 1107 def permissions(self):
1108 1108 return self.get_perms(user=self, cache=None)
1109 1109
1110 1110 @LazyProperty
1111 1111 def permissions_safe(self):
1112 1112 """
1113 1113 Filtered permissions excluding not allowed repositories
1114 1114 """
1115 1115 perms = self.get_perms(user=self, cache=None)
1116 1116
1117 1117 perms['repositories'] = {
1118 1118 k: v for k, v in list(perms['repositories'].items())
1119 1119 if v != 'repository.none'
1120 1120 }
1121 1121 perms['repositories_groups'] = {
1122 1122 k: v for k, v in list(perms['repositories_groups'].items())
1123 1123 if v != 'group.none'
1124 1124 }
1125 1125 perms['user_groups'] = {
1126 1126 k: v for k, v in list(perms['user_groups'].items())
1127 1127 if v != 'usergroup.none'
1128 1128 }
1129 1129 perms['repository_branches'] = {
1130 1130 k: v for k, v in list(perms['repository_branches'].items())
1131 1131 if v != 'branch.none'
1132 1132 }
1133 1133 return perms
1134 1134
1135 1135 @LazyProperty
1136 1136 def permissions_full_details(self):
1137 1137 return self.get_perms(
1138 1138 user=self, cache=None, calculate_super_admin=True)
1139 1139
1140 1140 def permissions_with_scope(self, scope):
1141 1141 """
1142 1142 Call the get_perms function with scoped data. The scope in that function
1143 1143 narrows the SQL calls to the given IDs of objects, resulting in fetching
1144 1144 just the particular permission we want to obtain. If scope is an empty dict
1145 1145 then it basically narrows the scope to GLOBAL permissions only.
1146 1146
1147 1147 :param scope: dict
1148 1148 """
1149 1149 if 'repo_name' in scope:
1150 1150 obj = Repository.get_by_repo_name(scope['repo_name'])
1151 1151 if obj:
1152 1152 scope['repo_id'] = obj.repo_id
1153 1153 _scope = collections.OrderedDict()
1154 1154 _scope['repo_id'] = -1
1155 1155 _scope['user_group_id'] = -1
1156 1156 _scope['repo_group_id'] = -1
1157 1157
1158 1158 for k in sorted(scope.keys()):
1159 1159 _scope[k] = scope[k]
1160 1160
1161 1161 # store in cache to mimic how the @LazyProperty works,
1162 1162 # the difference here is that we use the unique key calculated
1163 1163 # from params and values
1164 1164 return self.get_perms(user=self, cache=None, scope=_scope)
1165 1165
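# Illustrative calls, assuming a repository named 'some/repo' exists; the scope
# is normalized to an OrderedDict with -1 defaults before reaching get_perms:
#
#   auth_user.permissions_with_scope({'repo_name': 'some/repo'})
#   # -> permission tree narrowed to that repo_id, plus GLOBAL permissions
#   auth_user.permissions_with_scope({})
#   # -> GLOBAL permissions only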
1166 1166 def get_instance(self):
1167 1167 return User.get(self.user_id)
1168 1168
1169 1169 def propagate_data(self):
1170 1170 """
1171 1171 Fills in user data and propagates values to this instance. Maps fetched
1172 1172 user attributes to this class instance attributes
1173 1173 """
1174 1174 log.debug('AuthUser: starting data propagation for new potential user')
1175 1175 user_model = UserModel()
1176 1176 anon_user = self.anonymous_user = User.get_default_user(cache=True)
1177 1177 is_user_loaded = False
1178 1178
1179 1179 # lookup by userid
1180 1180 if self.user_id is not None and self.user_id != anon_user.user_id:
1181 1181 log.debug('Trying Auth User lookup by USER ID: `%s`', self.user_id)
1182 1182 is_user_loaded = user_model.fill_data(self, user_id=self.user_id)
1183 1183
1184 1184 # try to get user by api key
1185 1185 elif self._api_key and self._api_key != anon_user.api_key:
1186 1186 log.debug('Trying Auth User lookup by API KEY: `...%s`', self._api_key[-4:])
1187 1187 is_user_loaded = user_model.fill_data(self, api_key=self._api_key)
1188 1188
1189 1189 # lookup by username
1190 1190 elif self.username:
1191 1191 log.debug('Trying Auth User lookup by USER NAME: `%s`', self.username)
1192 1192 is_user_loaded = user_model.fill_data(self, username=self.username)
1193 1193 else:
1194 1194 log.debug('No data in %s that could be used to log in', self)
1195 1195
1196 1196 if not is_user_loaded:
1197 1197 log.debug(
1198 1198 'Failed to load user. Fallback to default user %s', anon_user)
1199 1199 # if we cannot authenticate user try anonymous
1200 1200 if anon_user.active:
1201 1201 log.debug('default user is active, using it as a session user')
1202 1202 user_model.fill_data(self, user_id=anon_user.user_id)
1203 1203 # then we set this user is logged in
1204 1204 self.is_authenticated = True
1205 1205 else:
1206 1206 log.debug('default user is NOT active')
1207 1207 # in case of disabled anonymous user we reset some of the
1208 1208 # parameters so such user is "corrupted", skipping the fill_data
1209 1209 for attr in ['user_id', 'username', 'admin', 'active']:
1210 1210 setattr(self, attr, None)
1211 1211 self.is_authenticated = False
1212 1212
1213 1213 if not self.username:
1214 1214 self.username = 'None'
1215 1215
1216 1216 log.debug('AuthUser: propagated user is now %s', self)
1217 1217
1218 1218 def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
1219 1219 calculate_super_admin=False, cache=None):
1220 1220 """
1221 1221 Fills user permission attribute with permissions taken from database
1222 1222 works for permissions given for repositories, and for permissions that
1223 1223 are granted to groups
1224 1224
1225 1225 :param user: instance of User object from database
1226 1226 :param scope:
1227 1227 :param explicit: In case there are permissions both for user and a group
1228 1228 that user is part of, the explicit flag will define if the user will
1229 1229 explicitly override permissions from the group; if it's False it will
1230 1230 make the decision based on the algo
1231 1231 :param algo: algorithm to decide which permission should be chosen if
1232 1232 multiple are defined, e.g. user in two different groups. It also
1233 1233 decides, if the explicit flag is turned off, how to resolve the permission
1234 1234 for the case when the user is in a group + has a separate permission defined
1235 1235 :param calculate_super_admin: calculate permissions for super-admin in the
1236 1236 same way as for regular user without speedups
1237 1237 :param cache: Use caching for calculation, None = let the cache backend decide
1238 1238 """
1239 1239 user_id = user.user_id
1240 1240 user_is_admin = user.is_admin
1241 1241
1242 1242 # inheritance of global permissions like create repo/fork repo etc
1243 1243 user_inherit_default_permissions = user.inherit_default_permissions
1244 1244
1245 1245 cache_seconds = safe_int(
1246 1246 rhodecode.CONFIG.get('rc_cache.cache_perms.expiration_time'))
1247 1247
1248 1248 if cache is None:
1249 1249 # let the backend cache decide
1250 1250 cache_on = cache_seconds > 0
1251 1251 else:
1252 1252 cache_on = cache
1253 1253
1254 1254 log.debug(
1255 1255 'Computing PERMISSION tree for user %s scope `%s` '
1256 1256 'with caching: %s[TTL: %ss]', user, scope, cache_on, cache_seconds or 0)
1257 1257
1258 cache_namespace_uid = f'cache_user_auth.{user_id}'
1258 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}'
1259 1259 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1260 1260
1261 1261 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
1262 1262 condition=cache_on)
1263 1263 def compute_perm_tree(cache_name, cache_ver,
1264 1264 user_id, scope, user_is_admin, user_inherit_default_permissions,
1265 1265 explicit, algo, calculate_super_admin):
1266 1266 return _cached_perms_data(
1267 1267 user_id, scope, user_is_admin, user_inherit_default_permissions,
1268 1268 explicit, algo, calculate_super_admin)
1269 1269
1270 1270 start = time.time()
1271 1271 result = compute_perm_tree(
1272 1272 'permissions', 'v1', user_id, scope, user_is_admin,
1273 1273 user_inherit_default_permissions, explicit, algo,
1274 1274 calculate_super_admin)
1275 1275
1276 1276 result_repr = []
1277 1277 for k in result:
1278 1278 result_repr.append((k, len(result[k])))
1279 1279 total = time.time() - start
1280 1280 log.debug('PERMISSION tree for user %s computed in %.4fs: %s',
1281 1281 user, total, result_repr)
1282 1282
1283 1283 return result
1284 1284
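# How the cache switch above resolves, assuming an example ini value of
# rc_cache.cache_perms.expiration_time = 300:
#
#   get_perms(..., cache=None)   -> cached, because cache_seconds > 0
#   get_perms(..., cache=False)  -> always recomputed
#   get_perms(..., cache=True)   -> conditional cache forced on, even if the
#                                   expiration_time setting were 0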
1285 1285 @property
1286 1286 def is_default(self):
1287 1287 return self.username == User.DEFAULT_USER
1288 1288
1289 1289 @property
1290 1290 def is_admin(self):
1291 1291 return self.admin
1292 1292
1293 1293 @property
1294 1294 def is_user_object(self):
1295 1295 return self.user_id is not None
1296 1296
1297 1297 @property
1298 1298 def repositories_admin(self):
1299 1299 """
1300 1300 Returns list of repositories you're an admin of
1301 1301 """
1302 1302 return [
1303 1303 x[0] for x in list(self.permissions['repositories'].items())
1304 1304 if x[1] == 'repository.admin']
1305 1305
1306 1306 @property
1307 1307 def repository_groups_admin(self):
1308 1308 """
1309 1309 Returns list of repository groups you're an admin of
1310 1310 """
1311 1311 return [
1312 1312 x[0] for x in list(self.permissions['repositories_groups'].items())
1313 1313 if x[1] == 'group.admin']
1314 1314
1315 1315 @property
1316 1316 def user_groups_admin(self):
1317 1317 """
1318 1318 Returns list of user groups you're an admin of
1319 1319 """
1320 1320 return [
1321 1321 x[0] for x in list(self.permissions['user_groups'].items())
1322 1322 if x[1] == 'usergroup.admin']
1323 1323
1324 1324 def repo_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False):
1325 1325 if not perms:
1326 1326 perms = AuthUser.repo_read_perms
1327 1327 allowed_ids = []
1328 1328 for k, stack_data in list(self.permissions['repositories'].perm_origin_stack.items()):
1329 1329 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1330 1330 if prefix_filter and not k.startswith(prefix_filter):
1331 1331 continue
1332 1332 if perm in perms:
1333 1333 allowed_ids.append(obj_id)
1334 1334 return allowed_ids
1335 1335
1336 1336 def repo_acl_ids(self, perms=None, name_filter=None, cache=False):
1337 1337 """
1338 1338 Returns list of repository ids that the user has access to based on given
1339 1339 perms. The cache flag should only be used for
1340 1340 display purposes, NOT IN ANY CASE for permission checks.
1341 1341 """
1342 1342 from rhodecode.model.scm import RepoList
1343 1343 if not perms:
1344 1344 perms = AuthUser.repo_read_perms
1345 1345
1346 1346 if not isinstance(perms, list):
1347 1347 raise ValueError('perms parameter must be a list, got {} instead'.format(perms))
1348 1348
1349 1349 def _cached_repo_acl(perm_def, _name_filter):
1350 1350 qry = Repository.query()
1351 1351 if _name_filter:
1352 1352 ilike_expression = '%{}%'.format(_name_filter)
1353 1353 qry = qry.filter(
1354 1354 Repository.repo_name.ilike(ilike_expression))
1355 1355
1356 1356 return [x.repo_id for x in
1357 1357 RepoList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1358 1358
1359 1359 log.debug('Computing REPO ACL IDS user %s', self)
1360 1360
1361 cache_namespace_uid = 'cache_user_repo_acl_ids.{}'.format(self.user_id)
1361 cache_namespace_uid = f'cache_user_repo_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{self.user_id}'
1362 1362 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1363 1363
1364 1364 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1365 1365 def compute_repo_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1366 1366 return _cached_repo_acl(perm_def, _name_filter)
1367 1367
1368 1368 start = time.time()
1369 1369 result = compute_repo_acl_ids('v1', self.user_id, perms, name_filter)
1370 1370 total = time.time() - start
1371 1371 log.debug('REPO ACL IDS for user %s computed in %.4fs', self, total)
1372 1372
1373 1373 return result
1374 1374
1375 1375 def repo_group_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False):
1376 1376 if not perms:
1377 1377 perms = AuthUser.repo_group_read_perms
1378 1378 allowed_ids = []
1379 1379 for k, stack_data in list(self.permissions['repositories_groups'].perm_origin_stack.items()):
1380 1380 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1381 1381 if prefix_filter and not k.startswith(prefix_filter):
1382 1382 continue
1383 1383 if perm in perms:
1384 1384 allowed_ids.append(obj_id)
1385 1385 return allowed_ids
1386 1386
1387 1387 def repo_group_acl_ids(self, perms=None, name_filter=None, cache=False):
1388 1388 """
1389 1389 Returns list of repository group ids that the user has access to based on given
1390 1390 perms. The cache flag should only be used for
1391 1391 display purposes, NOT IN ANY CASE for permission checks.
1392 1392 """
1393 1393 from rhodecode.model.scm import RepoGroupList
1394 1394 if not perms:
1395 1395 perms = AuthUser.repo_group_read_perms
1396 1396
1397 1397 if not isinstance(perms, list):
1398 1398 raise ValueError(f'perms parameter must be a list, got {perms} instead')
1399 1399
1400 1400 def _cached_repo_group_acl(perm_def, _name_filter):
1401 1401 qry = RepoGroup.query()
1402 1402 if _name_filter:
1403 1403 ilike_expression = '%{}%'.format(_name_filter)
1404 1404 qry = qry.filter(
1405 1405 RepoGroup.group_name.ilike(ilike_expression))
1406 1406
1407 1407 return [x.group_id for x in
1408 1408 RepoGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1409 1409
1410 1410 log.debug('Computing REPO GROUP ACL IDS user %s', self)
1411 1411
1412 cache_namespace_uid = 'cache_user_repo_group_acl_ids.{}'.format(self.user_id)
1412 cache_namespace_uid = f'cache_user_repo_group_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{self.user_id}'
1413 1413 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1414 1414
1415 1415 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1416 1416 def compute_repo_group_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1417 1417 return _cached_repo_group_acl(perm_def, _name_filter)
1418 1418
1419 1419 start = time.time()
1420 1420 result = compute_repo_group_acl_ids('v1', self.user_id, perms, name_filter)
1421 1421 total = time.time() - start
1422 1422 log.debug('REPO GROUP ACL IDS for user %s computed in %.4fs', self, total)
1423 1423
1424 1424 return result
1425 1425
1426 1426 def user_group_acl_ids_from_stack(self, perms=None, cache=False):
1427 1427 if not perms:
1428 1428 perms = AuthUser.user_group_read_perms
1429 1429 allowed_ids = []
1430 1430 for k, stack_data in list(self.permissions['user_groups'].perm_origin_stack.items()):
1431 1431 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1432 1432 if perm in perms:
1433 1433 allowed_ids.append(obj_id)
1434 1434 return allowed_ids
1435 1435
1436 1436 def user_group_acl_ids(self, perms=None, name_filter=None, cache=False):
1437 1437 """
1438 1438 Returns list of user group ids that the user has access to based on given
1439 1439 perms. The cache flag should only be used for
1440 1440 display purposes, NOT IN ANY CASE for permission checks.
1441 1441 """
1442 1442 from rhodecode.model.scm import UserGroupList
1443 1443 if not perms:
1444 1444 perms = AuthUser.user_group_read_perms
1445 1445
1446 1446 if not isinstance(perms, list):
1447 1447 raise ValueError('perms parameter must be a list, got {} instead'.format(perms))
1448 1448
1449 1449 def _cached_user_group_acl(perm_def, _name_filter):
1450 1450 qry = UserGroup.query()
1451 1451 if _name_filter:
1452 1452 ilike_expression = '%{}%'.format(_name_filter)
1453 1453 qry = qry.filter(
1454 1454 UserGroup.users_group_name.ilike(ilike_expression))
1455 1455
1456 1456 return [x.users_group_id for x in
1457 1457 UserGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1458 1458
1459 1459 log.debug('Computing USER GROUP ACL IDS user %s', self)
1460 1460
1461 cache_namespace_uid = 'cache_user_user_group_acl_ids.{}'.format(self.user_id)
1461 cache_namespace_uid = f'cache_user_user_group_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{self.user_id}'
1462 1462 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1463 1463
1464 1464 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1465 1465 def compute_user_group_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1466 1466 return _cached_user_group_acl(perm_def, _name_filter)
1467 1467
1468 1468 start = time.time()
1469 1469 result = compute_user_group_acl_ids('v1', self.user_id, perms, name_filter)
1470 1470 total = time.time() - start
1471 1471 log.debug('USER GROUP ACL IDS for user %s computed in %.4fs', self, total)
1472 1472
1473 1473 return result
1474 1474
1475 1475 @property
1476 1476 def ip_allowed(self):
1477 1477 """
1478 1478 Checks if ip_addr used in constructor is allowed from defined list of
1479 1479 allowed ip_addresses for user
1480 1480
1481 1481 :returns: boolean, True if ip is in allowed ip range
1482 1482 """
1483 1483 # check IP
1484 1484 inherit = self.inherit_default_permissions
1485 1485 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
1486 1486 inherit_from_default=inherit)
1487 1487
1488 1488 @property
1489 1489 def personal_repo_group(self):
1490 1490 return RepoGroup.get_user_personal_repo_group(self.user_id)
1491 1491
1492 1492 @LazyProperty
1493 1493 def feed_token(self):
1494 1494 return self.get_instance().feed_token
1495 1495
1496 1496 @LazyProperty
1497 1497 def artifact_token(self):
1498 1498 return self.get_instance().artifact_token
1499 1499
1500 1500 @classmethod
1501 1501 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1502 1502 allowed_ips = AuthUser.get_allowed_ips(
1503 1503 user_id, cache=True, inherit_from_default=inherit_from_default)
1504 1504 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1505 1505 log.debug('IP:%s for user %s is in range of %s',
1506 1506 ip_addr, user_id, allowed_ips)
1507 1507 return True
1508 1508 else:
1509 1509 log.info('Access for IP:%s forbidden for user %s, '
1510 1510 'not in %s', ip_addr, user_id, allowed_ips,
1511 1511 extra={"ip": ip_addr, "user_id": user_id})
1512 1512 return False
1513 1513
1514 1514 def get_branch_permissions(self, repo_name, perms=None):
1515 1515 perms = perms or self.permissions_with_scope({'repo_name': repo_name})
1516 1516 branch_perms = perms.get('repository_branches', {})
1517 1517 if not branch_perms:
1518 1518 return {}
1519 1519 repo_branch_perms = branch_perms.get(repo_name)
1520 1520 return repo_branch_perms or {}
1521 1521
1522 1522 def get_rule_and_branch_permission(self, repo_name, branch_name):
1523 1523 """
1524 1524 Check if this AuthUser has defined any permissions for branches. If any of
1525 1525 the rules match in order, we return the matching permissions
1526 1526 """
1527 1527
1528 1528 rule = default_perm = ''
1529 1529
1530 1530 repo_branch_perms = self.get_branch_permissions(repo_name=repo_name)
1531 1531 if not repo_branch_perms:
1532 1532 return rule, default_perm
1533 1533
1534 1534 # now calculate the permissions
1535 1535 for pattern, branch_perm in list(repo_branch_perms.items()):
1536 1536 if fnmatch.fnmatch(branch_name, pattern):
1537 1537 rule = '`{}`=>{}'.format(pattern, branch_perm)
1538 1538 return rule, branch_perm
1539 1539
1540 1540 return rule, default_perm
1541 1541
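# Example of the fnmatch-based rules above; the pattern -> permission mapping
# is hypothetical:
#
#   repo_branch_perms = {'release/*': 'branch.merge', '*': 'branch.push'}
#   get_rule_and_branch_permission('some/repo', 'release/1.0')
#       -> ('`release/*`=>branch.merge', 'branch.merge')  # first matching rule wins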
1542 1542 def get_notice_messages(self):
1543 1543
1544 1544 notice_level = 'notice-error'
1545 1545 notice_messages = []
1546 1546 if self.is_default:
1547 1547 return [], notice_level
1548 1548
1549 1549 notices = UserNotice.query()\
1550 1550 .filter(UserNotice.user_id == self.user_id)\
1551 1551 .filter(UserNotice.notice_read == false())\
1552 1552 .all()
1553 1553
1554 1554 try:
1555 1555 for entry in notices:
1556 1556
1557 1557 msg = {
1558 1558 'msg_id': entry.user_notice_id,
1559 1559 'level': entry.notification_level,
1560 1560 'subject': entry.notice_subject,
1561 1561 'body': entry.notice_body,
1562 1562 }
1563 1563 notice_messages.append(msg)
1564 1564
1565 1565 log.debug('Got user %s %s messages', self, len(notice_messages))
1566 1566
1567 1567 levels = [x['level'] for x in notice_messages]
1568 1568 notice_level = 'notice-error' if 'error' in levels else 'notice-warning'
1569 1569 except Exception:
1570 1570 pass
1571 1571
1572 1572 return notice_messages, notice_level
1573 1573
1574 1574 def __repr__(self):
1575 1575 return self.repr_user(self.user_id, self.username, self.ip_addr, self.is_authenticated)
1576 1576
1577 1577 def set_authenticated(self, authenticated=True):
1578 1578 if self.user_id != self.anonymous_user.user_id:
1579 1579 self.is_authenticated = authenticated
1580 1580
1581 1581 def get_cookie_store(self):
1582 1582 return {
1583 1583 'username': self.username,
1584 1584 'password': md5(safe_bytes(self.password or '')),
1585 1585 'user_id': self.user_id,
1586 1586 'is_authenticated': self.is_authenticated
1587 1587 }
1588 1588
1589 1589 @classmethod
1590 1590 def repr_user(cls, user_id=0, username='ANONYMOUS', ip='0.0.0.0', is_authenticated=False):
1591 1591 tmpl = "<AuthUser('id:{}[{}] ip:{} auth:{}')>"
1592 1592 return tmpl.format(user_id, username, ip, is_authenticated)
1593 1593
1594 1594 @classmethod
1595 1595 def from_cookie_store(cls, cookie_store):
1596 1596 """
1597 1597 Creates AuthUser from a cookie store
1598 1598
1599 1599 :param cls:
1600 1600 :param cookie_store:
1601 1601 """
1602 1602 user_id = cookie_store.get('user_id')
1603 1603 username = cookie_store.get('username')
1604 1604 api_key = cookie_store.get('api_key')
1605 1605 return AuthUser(user_id, api_key, username)
1606 1606
1607 1607 @classmethod
1608 1608 def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
1609 1609 _set = set()
1610 1610
1611 1611 if inherit_from_default:
1612 1612 def_user_id = User.get_default_user(cache=True).user_id
1613 1613 default_ips = UserIpMap.query().filter(UserIpMap.user_id == def_user_id)
1614 1614 if cache:
1615 1615 default_ips = default_ips.options(
1616 1616 FromCache("sql_cache_short", "get_user_ips_default"))
1617 1617
1618 1618 # populate from default user
1619 1619 for ip in default_ips:
1620 1620 try:
1621 1621 _set.add(ip.ip_addr)
1622 1622 except ObjectDeletedError:
1623 1623 # since we use heavy caching sometimes it happens that
1624 1624 # we get deleted objects here, we just skip them
1625 1625 pass
1626 1626
1627 1627 # NOTE:(marcink) we don't want to load any rules for empty
1628 1628 # user_id which is the case of access of non logged users when anonymous
1629 1629 # access is disabled
1630 1630 user_ips = []
1631 1631 if user_id:
1632 1632 user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
1633 1633 if cache:
1634 1634 user_ips = user_ips.options(
1635 1635 FromCache("sql_cache_short", f"get_user_ips_{user_id}"))
1636 1636
1637 1637 for ip in user_ips:
1638 1638 try:
1639 1639 _set.add(ip.ip_addr)
1640 1640 except ObjectDeletedError:
1641 1641 # since we use heavy caching sometimes it happens that we get
1642 1642 # deleted objects here, we just skip them
1643 1643 pass
1644 1644 return _set or {ip for ip in ['0.0.0.0/0', '::/0']}
1645 1645
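# Behaviour sketch for the IP allow-list above; the user id and addresses are
# examples:
#
#   AuthUser.get_allowed_ips(user_id=2)
#       -> {'0.0.0.0/0', '::/0'}   # no rules defined means everything is allowed
#   AuthUser.get_allowed_ips(user_id=2, inherit_from_default=True)
#       -> union of the default user's IP rules and the user's own rules, if any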
1646 1646
1647 1647 def set_available_permissions(settings):
1648 1648 """
1649 1649 This function will populate pyramid settings with all available permissions
1650 1650 defined in the db. We don't want to check the db each time for new
1651 1651 permissions since adding a new permission also requires an application restart,
1652 1652 i.e. to decorate new views with the newly created permission
1653 1653
1654 1654 :param settings: current pyramid registry.settings
1655 1655
1656 1656 """
1657 1657 log.debug('auth: getting information about all available permissions')
1658 1658 try:
1659 1659 sa = meta.Session
1660 1660 all_perms = sa.query(Permission).all()
1661 1661 settings.setdefault('available_permissions',
1662 1662 [x.permission_name for x in all_perms])
1663 1663 log.debug('auth: set available permissions')
1664 1664 except Exception:
1665 1665 log.exception('Failed to fetch permissions from the database.')
1666 1666 raise
1667 1667
1668 1668
1669 1669 def get_csrf_token(session, force_new=False, save_if_missing=True):
1670 1670 """
1671 1671 Return the current CSRF token, creating one if one doesn't
1672 1672 already exist and the save_if_missing flag is present.
1673 1673
1674 1674 :param session: pass in the pyramid session, else we use the global ones
1675 1675 :param force_new: force to re-generate the token and store it in session
1676 1676 :param save_if_missing: save the newly generated token if it's missing in
1677 1677 session
1678 1678 """
1679 1679 # NOTE(marcink): probably should be replaced with below one from pyramid 1.9
1680 1680 # from pyramid.csrf import get_csrf_token
1681 1681
1682 1682 if (csrf_token_key not in session and save_if_missing) or force_new:
1683 1683 token = sha1(ascii_bytes(str(random.getrandbits(128))))
1684 1684 session[csrf_token_key] = token
1685 1685 if hasattr(session, 'save'):
1686 1686 session.save()
1687 1687 return session.get(csrf_token_key)
1688 1688
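# Typical use from a view; `request.session` is the pyramid session assumed to be
# available on the request:
#
#   token = get_csrf_token(request.session)                  # create + store if missing
#   token = get_csrf_token(request.session, force_new=True)  # rotate the token
#   # a form then posts the token back under csrf_token_key, which is what the
#   # CSRFRequired decorator below compares against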
1689 1689
1690 1690 def get_request(perm_class_instance):
1691 1691 from pyramid.threadlocal import get_current_request
1692 1692 pyramid_request = get_current_request()
1693 1693 return pyramid_request
1694 1694
1695 1695
1696 1696 # CHECK DECORATORS
1697 1697 class CSRFRequired(object):
1698 1698 """
1699 1699 Decorator for authenticating a form
1700 1700
1701 1701 This decorator uses an authorization token stored in the client's
1702 1702 session for prevention of certain Cross-site request forgery (CSRF)
1703 1703 attacks (See
1704 1704 http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
1705 1705 information).
1706 1706
1707 1707 For use with the ``secure_form`` helper functions.
1708 1708
1709 1709 """
1710 1710 def __init__(self, token=csrf_token_key, header='X-CSRF-Token', except_methods=None):
1711 1711 self.token = token
1712 1712 self.header = header
1713 1713 self.except_methods = except_methods or []
1714 1714
1715 1715 def __call__(self, func):
1716 1716 return get_cython_compat_decorator(self.__wrapper, func)
1717 1717
1718 1718 def _get_csrf(self, _request):
1719 1719 return _request.POST.get(self.token, _request.headers.get(self.header))
1720 1720
1721 1721 def check_csrf(self, _request, cur_token):
1722 1722 supplied_token = self._get_csrf(_request)
1723 1723 return supplied_token and supplied_token == cur_token
1724 1724
1725 1725 def _get_request(self):
1726 1726 return get_request(self)
1727 1727
1728 1728 def __wrapper(self, func, *fargs, **fkwargs):
1729 1729 cls = fargs[0]
1730 1730 request = cls.request or self._get_request()
1731 1731
1732 1732 if request.method in self.except_methods:
1733 1733 return func(*fargs, **fkwargs)
1734 1734
1735 1735 cur_token = get_csrf_token(request.session, save_if_missing=False)
1736 1736 if self.check_csrf(request, cur_token):
1737 1737 if request.POST.get(self.token):
1738 1738 del request.POST[self.token]
1739 1739 return func(*fargs, **fkwargs)
1740 1740 else:
1741 1741 reason = 'token-missing'
1742 1742 supplied_token = self._get_csrf(request)
1743 1743 if supplied_token and cur_token != supplied_token:
1744 1744 reason = 'token-mismatch [%s:%s]' % (
1745 1745 (cur_token or '')[:6], (supplied_token or '')[:6])
1746 1746
1747 1747 csrf_message = \
1748 1748 ("Cross-site request forgery detected, request denied. See "
1749 1749 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
1750 1750 "more information.")
1751 1751 log.warning('Cross-site request forgery detected, request %r DENIED: %s '
1752 1752 'REMOTE_ADDR:%s, HEADERS:%s' % (
1753 1753 request, reason, request.remote_addr, request.headers))
1754 1754
1755 1755 raise HTTPForbidden(explanation=csrf_message)
1756 1756
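# Sketch of wiring the decorator onto a view method; the view class and method
# are hypothetical:
#
#   class MySettingsView(BaseAppView):
#
#       @LoginRequired()
#       @CSRFRequired()
#       def settings_update(self):
#           # reached only when the posted/header token matches the session token
#           ...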
1757 1757
1758 1758 class LoginRequired(object):
1759 1759 """
1760 1760 Must be logged in to execute this function else
1761 1761 redirect to login page
1762 1762
1763 1763 :param auth_token_access: if enabled this checks only for valid auth token
1764 1764 and grants access based on valid token
1765 1765 """
1766 1766 def __init__(self, auth_token_access=None):
1767 1767 self.auth_token_access = auth_token_access
1768 1768 if self.auth_token_access:
1769 1769 valid_type = set(auth_token_access).intersection(set(UserApiKeys.ROLES))
1770 1770 if not valid_type:
1771 1771 raise ValueError('auth_token_access must be one of {}, got {}'.format(
1772 1772 UserApiKeys.ROLES, auth_token_access))
1773 1773
1774 1774 def __call__(self, func):
1775 1775 return get_cython_compat_decorator(self.__wrapper, func)
1776 1776
1777 1777 def _get_request(self):
1778 1778 return get_request(self)
1779 1779
1780 1780 def __wrapper(self, func, *fargs, **fkwargs):
1781 1781 from rhodecode.lib import helpers as h
1782 1782 cls = fargs[0]
1783 1783 user = cls._rhodecode_user
1784 1784 request = cls.request or self._get_request()
1785 1785 _ = request.translate
1786 1786
1787 1787 loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
1788 1788 log.debug('Starting login restriction checks for user: %s', user)
1789 1789 # check if our IP is allowed
1790 1790 ip_access_valid = True
1791 1791 if not user.ip_allowed:
1792 1792 h.flash(h.literal(_('IP {} not allowed'.format(user.ip_addr))),
1793 1793 category='warning')
1794 1794 ip_access_valid = False
1795 1795
1796 1796 # we use the stored token that is extracted from GET or URL param (if any)
1797 1797 _auth_token = request.user_auth_token
1798 1798
1799 1799 # check if we used an AUTH_TOKEN and it's a valid one
1800 1800 # defined white-list of controllers for which API access will be enabled
1801 1801 whitelist = None
1802 1802 if self.auth_token_access:
1803 1803 # since this location is allowed by @LoginRequired decorator it's our
1804 1804 # only whitelist
1805 1805 whitelist = [loc]
1806 1806 auth_token_access_valid = allowed_auth_token_access(
1807 1807 loc, whitelist=whitelist, auth_token=_auth_token)
1808 1808
1809 1809 # explicit controller is enabled or API is in our whitelist
1810 1810 if auth_token_access_valid:
1811 1811 log.debug('Checking AUTH TOKEN access for %s', cls)
1812 1812 db_user = user.get_instance()
1813 1813
1814 1814 if db_user:
1815 1815 if self.auth_token_access:
1816 1816 roles = self.auth_token_access
1817 1817 else:
1818 1818 roles = [UserApiKeys.ROLE_HTTP]
1819 1819 log.debug('AUTH TOKEN: checking auth for user %s and roles %s',
1820 1820 db_user, roles)
1821 1821 token_match = db_user.authenticate_by_token(
1822 1822 _auth_token, roles=roles)
1823 1823 else:
1824 1824 log.debug('Unable to fetch db instance for auth user: %s', user)
1825 1825 token_match = False
1826 1826
1827 1827 if _auth_token and token_match:
1828 1828 auth_token_access_valid = True
1829 1829 log.debug('AUTH TOKEN ****%s is VALID', _auth_token[-4:])
1830 1830 else:
1831 1831 auth_token_access_valid = False
1832 1832 if not _auth_token:
1833 1833 log.debug("AUTH TOKEN *NOT* present in request")
1834 1834 else:
1835 1835 log.warning("AUTH TOKEN ****%s *NOT* valid", _auth_token[-4:])
1836 1836
1837 1837 log.debug('Checking if %s is authenticated @ %s', user.username, loc)
1838 1838 reason = 'RHODECODE_AUTH' if user.is_authenticated \
1839 1839 else 'AUTH_TOKEN_AUTH'
1840 1840
1841 1841 if ip_access_valid and (
1842 1842 user.is_authenticated or auth_token_access_valid):
1843 1843 log.info('user %s authenticating with:%s IS authenticated on func %s',
1844 1844 user, reason, loc)
1845 1845
1846 1846 return func(*fargs, **fkwargs)
1847 1847 else:
1848 1848 log.warning(
1849 1849 'user %s authenticating with:%s NOT authenticated on '
1850 1850 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s',
1851 1851 user, reason, loc, ip_access_valid, auth_token_access_valid)
1852 1852 # we preserve the GET param
1853 1853 came_from = get_came_from(request)
1854 1854
1855 1855 log.debug('redirecting to login page with %s', came_from)
1856 1856 raise HTTPFound(
1857 1857 h.route_path('login', _query={'came_from': came_from}))
1858 1858
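# Hedged example of allowing token based access to a single view, assuming the
# ROLE_FEED role defined on UserApiKeys; the view itself is made up:
#
#   class RepoFeedView(RepoAppView):
#
#       @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
#       def atom_feed(self):
#           # reachable with a normal session login, or with an auth token
#           # carrying the ROLE_FEED role passed in the request
#           ...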
1859 1859
1860 1860 class NotAnonymous(object):
1861 1861 """
1862 1862 Must be logged in to execute this function else
1863 1863 redirect to login page
1864 1864 """
1865 1865
1866 1866 def __call__(self, func):
1867 1867 return get_cython_compat_decorator(self.__wrapper, func)
1868 1868
1869 1869 def _get_request(self):
1870 1870 return get_request(self)
1871 1871
1872 1872 def __wrapper(self, func, *fargs, **fkwargs):
1873 1873 import rhodecode.lib.helpers as h
1874 1874 cls = fargs[0]
1875 1875 self.user = cls._rhodecode_user
1876 1876 request = cls.request or self._get_request()
1877 1877 _ = request.translate
1878 1878 log.debug('Checking if user is not anonymous @%s', cls)
1879 1879
1880 1880 anonymous = self.user.username == User.DEFAULT_USER
1881 1881
1882 1882 if anonymous:
1883 1883 came_from = get_came_from(request)
1884 1884 h.flash(_('You need to be a registered user to '
1885 1885 'perform this action'),
1886 1886 category='warning')
1887 1887 raise HTTPFound(
1888 1888 h.route_path('login', _query={'came_from': came_from}))
1889 1889 else:
1890 1890 return func(*fargs, **fkwargs)
1891 1891
1892 1892
1893 1893 class PermsDecorator(object):
1894 1894 """
1895 1895 Base class for controller decorators, we extract the current user from
1896 1896 the class itself, which has it stored in base controllers
1897 1897 """
1898 1898
1899 1899 def __init__(self, *required_perms):
1900 1900 self.required_perms = set(required_perms)
1901 1901
1902 1902 def __call__(self, func):
1903 1903 return get_cython_compat_decorator(self.__wrapper, func)
1904 1904
1905 1905 def _get_request(self):
1906 1906 return get_request(self)
1907 1907
1908 1908 def __wrapper(self, func, *fargs, **fkwargs):
1909 1909 cls = fargs[0]
1910 1910 _user = cls._rhodecode_user
1911 1911 request = cls.request or self._get_request()
1912 1912 self.request = request
1913 1913 _ = request.translate
1914 1914
1915 1915 log.debug('checking %s permissions %s for %s %s',
1916 1916 self.__class__.__name__, self.required_perms, cls, _user)
1917 1917
1918 1918 if self.check_permissions(_user):
1919 1919 log.debug('Permission granted for %s %s', cls, _user)
1920 1920 return func(*fargs, **fkwargs)
1921 1921
1922 1922 else:
1923 1923 log.debug('Permission denied for %s %s', cls, _user)
1924 1924 anonymous = _user.username == User.DEFAULT_USER
1925 1925
1926 1926 if anonymous:
1927 1927 import rhodecode.lib.helpers as h
1928 1928 came_from = get_came_from(self._get_request())
1929 1929 h.flash(_('You need to be signed in to view this page'),
1930 1930 category='warning')
1931 1931 raise HTTPFound(
1932 1932 h.route_path('login', _query={'came_from': came_from}))
1933 1933
1934 1934 else:
1935 1935 # redirect with 404 to prevent resource discovery
1936 1936 raise HTTPNotFound()
1937 1937
1938 1938 def check_permissions(self, user):
1939 1939 """Dummy function for overriding"""
1940 1940 raise NotImplementedError(
1941 1941 'You have to write this function in child class')
1942 1942
1943 1943
1944 1944 class HasPermissionAllDecorator(PermsDecorator):
1945 1945 """
1946 1946 Checks for access permission for all given predicates. All of them
1947 1947 have to be met in order to fulfill the request
1948 1948 """
1949 1949
1950 1950 def check_permissions(self, user):
1951 1951 perms = user.permissions_with_scope({})
1952 1952 if self.required_perms.issubset(perms['global']):
1953 1953 return True
1954 1954 return False
1955 1955
1956 1956
1957 1957 class HasPermissionAnyDecorator(PermsDecorator):
1958 1958 """
1959 1959 Checks for access permission for any of given predicates. In order to
1960 1960 fulfill the request any of the predicates must be met
1961 1961 """
1962 1962
1963 1963 def check_permissions(self, user):
1964 1964 perms = user.permissions_with_scope({})
1965 1965 if self.required_perms.intersection(perms['global']):
1966 1966 return True
1967 1967 return False
1968 1968
1969 1969
1970 1970 class HasRepoPermissionAllDecorator(PermsDecorator):
1971 1971 """
1972 1972 Checks for access permission for all given predicates for specific
1973 1973 repository. All of them have to be met in order to fulfill the request
1974 1974 """
1975 1975 def _get_repo_name(self):
1976 1976 _request = self.request or self._get_request()
1977 1977 return get_repo_slug(_request)
1978 1978
1979 1979 def check_permissions(self, user):
1980 1980 perms = user.permissions
1981 1981 repo_name = self._get_repo_name()
1982 1982
1983 1983 try:
1984 1984 user_perms = {perms['repositories'][repo_name]}
1985 1985 except KeyError:
1986 1986 log.debug('cannot locate repo with name: `%s` in permissions defs',
1987 1987 repo_name)
1988 1988 return False
1989 1989 log.debug('checking `%s` permissions for repo `%s`',
1990 1990 user_perms, repo_name)
1991 1991 if self.required_perms.issubset(user_perms):
1992 1992 return True
1993 1993 return False
1994 1994
1995 1995
1996 1996 class HasRepoPermissionAnyDecorator(PermsDecorator):
1997 1997 """
1998 1998 Checks for access permission for any of given predicates for specific
1999 1999 repository. In order to fulfill the request any of the predicates must be met
2000 2000 """
2001 2001 def _get_repo_name(self):
2002 2002 _request = self.request or self._get_request()
2003 2003 return get_repo_slug(_request)
2004 2004
2005 2005 def check_permissions(self, user):
2006 2006 perms = user.permissions
2007 2007 repo_name = self._get_repo_name()
2008 2008
2009 2009 try:
2010 2010 user_perms = {perms['repositories'][repo_name]}
2011 2011 except KeyError:
2012 2012 log.debug(
2013 2013 'cannot locate repo with name: `%s` in permissions defs',
2014 2014 repo_name)
2015 2015 return False
2016 2016
2017 2017 log.debug('checking `%s` permissions for repo `%s`',
2018 2018 user_perms, repo_name)
2019 2019 if self.required_perms.intersection(user_perms):
2020 2020 return True
2021 2021 return False
2022 2022
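# Typical decorator usage on a repository view; the permission names are the
# standard repository-level ones, the view is hypothetical:
#
#   class RepoSummaryView(RepoAppView):
#
#       @LoginRequired()
#       @HasRepoPermissionAnyDecorator(
#           'repository.read', 'repository.write', 'repository.admin')
#       def summary(self):
#           ...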
2023 2023
2024 2024 class HasRepoGroupPermissionAllDecorator(PermsDecorator):
2025 2025 """
2026 2026 Checks for access permission for all given predicates for specific
2027 2027 repository group. All of them have to be met in order to
2028 2028 fulfill the request
2029 2029 """
2030 2030 def _get_repo_group_name(self):
2031 2031 _request = self.request or self._get_request()
2032 2032 return get_repo_group_slug(_request)
2033 2033
2034 2034 def check_permissions(self, user):
2035 2035 perms = user.permissions
2036 2036 group_name = self._get_repo_group_name()
2037 2037 try:
2038 2038 user_perms = {perms['repositories_groups'][group_name]}
2039 2039 except KeyError:
2040 2040 log.debug(
2041 2041 'cannot locate repo group with name: `%s` in permissions defs',
2042 2042 group_name)
2043 2043 return False
2044 2044
2045 2045 log.debug('checking `%s` permissions for repo group `%s`',
2046 2046 user_perms, group_name)
2047 2047 if self.required_perms.issubset(user_perms):
2048 2048 return True
2049 2049 return False
2050 2050
2051 2051
2052 2052 class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
2053 2053 """
2054 2054 Checks for access permission for any of given predicates for specific
2055 2055 repository group. In order to fulfill the request any
2056 2056 of predicates must be met
2057 2057 """
2058 2058 def _get_repo_group_name(self):
2059 2059 _request = self.request or self._get_request()
2060 2060 return get_repo_group_slug(_request)
2061 2061
2062 2062 def check_permissions(self, user):
2063 2063 perms = user.permissions
2064 2064 group_name = self._get_repo_group_name()
2065 2065
2066 2066 try:
2067 2067 user_perms = {perms['repositories_groups'][group_name]}
2068 2068 except KeyError:
2069 2069 log.debug(
2070 2070 'cannot locate repo group with name: `%s` in permissions defs',
2071 2071 group_name)
2072 2072 return False
2073 2073
2074 2074 log.debug('checking `%s` permissions for repo group `%s`',
2075 2075 user_perms, group_name)
2076 2076 if self.required_perms.intersection(user_perms):
2077 2077 return True
2078 2078 return False
2079 2079
2080 2080
2081 2081 class HasUserGroupPermissionAllDecorator(PermsDecorator):
2082 2082 """
2083 2083 Checks for access permission for all given predicates for specific
2084 2084 user group. All of them have to be met in order to fulfill the request
2085 2085 """
2086 2086 def _get_user_group_name(self):
2087 2087 _request = self.request or self._get_request()
2088 2088 return get_user_group_slug(_request)
2089 2089
2090 2090 def check_permissions(self, user):
2091 2091 perms = user.permissions
2092 2092 group_name = self._get_user_group_name()
2093 2093 try:
2094 2094 user_perms = {perms['user_groups'][group_name]}
2095 2095 except KeyError:
2096 2096 return False
2097 2097
2098 2098 if self.required_perms.issubset(user_perms):
2099 2099 return True
2100 2100 return False
2101 2101
2102 2102
2103 2103 class HasUserGroupPermissionAnyDecorator(PermsDecorator):
2104 2104 """
2105 2105 Checks for access permission for any of given predicates for specific
2106 2106 user group. In order to fulfill the request any of the predicates must be met
2107 2107 """
2108 2108 def _get_user_group_name(self):
2109 2109 _request = self.request or self._get_request()
2110 2110 return get_user_group_slug(_request)
2111 2111
2112 2112 def check_permissions(self, user):
2113 2113 perms = user.permissions
2114 2114 group_name = self._get_user_group_name()
2115 2115 try:
2116 2116 user_perms = {perms['user_groups'][group_name]}
2117 2117 except KeyError:
2118 2118 return False
2119 2119
2120 2120 if self.required_perms.intersection(user_perms):
2121 2121 return True
2122 2122 return False
2123 2123
2124 2124
2125 2125 # CHECK FUNCTIONS
2126 2126 class PermsFunction(object):
2127 2127 """Base function for other check functions"""
2128 2128
2129 2129 def __init__(self, *perms):
2130 2130 self.required_perms = set(perms)
2131 2131 self.repo_name = None
2132 2132 self.repo_group_name = None
2133 2133 self.user_group_name = None
2134 2134
2135 2135 def __bool__(self):
2136 2136 import inspect
2137 2137 frame = inspect.currentframe()
2138 2138 stack_trace = traceback.format_stack(frame)
2139 2139 log.error('Checking bool value on a class instance of perm '
2140 2140 'function is not allowed: %s', ''.join(stack_trace))
2141 2141 # rather than throwing errors, here we always return False so if by
2142 2142 # accident someone checks truth for just an instance it will always end
2143 2143 # up in returning False
2144 2144 return False
2145 2145 __nonzero__ = __bool__
2146 2146
2147 2147 def __call__(self, check_location='', user=None):
2148 2148 if not user:
2149 2149 log.debug('Using user attribute from global request')
2150 2150 request = self._get_request()
2151 2151 user = request.user
2152 2152
2153 2153 # init auth user if not already given
2154 2154 if not isinstance(user, AuthUser):
2155 2155 log.debug('Wrapping user %s into AuthUser', user)
2156 2156 user = AuthUser(user.user_id)
2157 2157
2158 2158 cls_name = self.__class__.__name__
2159 2159 check_scope = self._get_check_scope(cls_name)
2160 2160 check_location = check_location or 'unspecified location'
2161 2161
2162 2162 log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
2163 2163 self.required_perms, user, check_scope, check_location)
2164 2164 if not user:
2165 2165 log.warning('Empty user given for permission check')
2166 2166 return False
2167 2167
2168 2168 if self.check_permissions(user):
2169 2169 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
2170 2170 check_scope, user, check_location)
2171 2171 return True
2172 2172
2173 2173 else:
2174 2174 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
2175 2175 check_scope, user, check_location)
2176 2176 return False
2177 2177
2178 2178 def _get_request(self):
2179 2179 return get_request(self)
2180 2180
2181 2181 def _get_check_scope(self, cls_name):
2182 2182 return {
2183 2183 'HasPermissionAll': 'GLOBAL',
2184 2184 'HasPermissionAny': 'GLOBAL',
2185 2185 'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
2186 2186 'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
2187 2187 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
2188 2188 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
2189 2189 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
2190 2190 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
2191 2191 }.get(cls_name, '?:%s' % cls_name)
2192 2192
2193 2193 def check_permissions(self, user):
2194 2194 """Dummy function for overriding"""
2195 2195 raise Exception('You have to write this function in child class')
2196 2196
2197 2197
2198 2198 class HasPermissionAll(PermsFunction):
2199 2199 def check_permissions(self, user):
2200 2200 perms = user.permissions_with_scope({})
2201 2201 if self.required_perms.issubset(perms.get('global')):
2202 2202 return True
2203 2203 return False
2204 2204
2205 2205
2206 2206 class HasPermissionAny(PermsFunction):
2207 2207 def check_permissions(self, user):
2208 2208 perms = user.permissions_with_scope({})
2209 2209 if self.required_perms.intersection(perms.get('global')):
2210 2210 return True
2211 2211 return False
2212 2212
2213 2213
2214 2214 class HasRepoPermissionAll(PermsFunction):
2215 2215 def __call__(self, repo_name=None, check_location='', user=None):
2216 2216 self.repo_name = repo_name
2217 2217 return super(HasRepoPermissionAll, self).__call__(check_location, user)
2218 2218
2219 2219 def _get_repo_name(self):
2220 2220 if not self.repo_name:
2221 2221 _request = self._get_request()
2222 2222 self.repo_name = get_repo_slug(_request)
2223 2223 return self.repo_name
2224 2224
2225 2225 def check_permissions(self, user):
2226 2226 self.repo_name = self._get_repo_name()
2227 2227 perms = user.permissions
2228 2228 try:
2229 2229 user_perms = {perms['repositories'][self.repo_name]}
2230 2230 except KeyError:
2231 2231 return False
2232 2232 if self.required_perms.issubset(user_perms):
2233 2233 return True
2234 2234 return False
2235 2235
2236 2236
2237 2237 class HasRepoPermissionAny(PermsFunction):
2238 2238 def __call__(self, repo_name=None, check_location='', user=None):
2239 2239 self.repo_name = repo_name
2240 2240 return super(HasRepoPermissionAny, self).__call__(check_location, user)
2241 2241
2242 2242 def _get_repo_name(self):
2243 2243 if not self.repo_name:
2244 2244 _request = self._get_request()
2245 2245 self.repo_name = get_repo_slug(_request)
2246 2246 return self.repo_name
2247 2247
2248 2248 def check_permissions(self, user):
2249 2249 self.repo_name = self._get_repo_name()
2250 2250 perms = user.permissions
2251 2251 try:
2252 2252 user_perms = {perms['repositories'][self.repo_name]}
2253 2253 except KeyError:
2254 2254 return False
2255 2255 if self.required_perms.intersection(user_perms):
2256 2256 return True
2257 2257 return False
2258 2258
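# The check functions are meant to be instantiated and called, never evaluated as
# a bare truthy value (see __bool__ above). A usage sketch with made-up names:
#
#   _perm = HasRepoPermissionAny('repository.write', 'repository.admin')
#   if _perm(repo_name='some/repo', check_location='my-feature', user=auth_user):
#       ...  # user can write to (or administer) the repository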
2259 2259
2260 2260 class HasRepoGroupPermissionAny(PermsFunction):
2261 2261
2262 2262 def __call__(self, group_name=None, check_location='', user=None):
2263 2263 self.repo_group_name = group_name
2264 2264 return super(HasRepoGroupPermissionAny, self).__call__(check_location, user)
2265 2265
2266 2266 def check_permissions(self, user):
2267 2267 perms = user.permissions
2268 2268 try:
2269 2269 user_perms = {perms['repositories_groups'][self.repo_group_name]}
2270 2270 except KeyError:
2271 2271 return False
2272 2272 if self.required_perms.intersection(user_perms):
2273 2273 return True
2274 2274 return False
2275 2275
2276 2276
2277 2277 class HasRepoGroupPermissionAll(PermsFunction):
2278 2278 def __call__(self, group_name=None, check_location='', user=None):
2279 2279 self.repo_group_name = group_name
2280 2280 return super(HasRepoGroupPermissionAll, self).__call__(check_location, user)
2281 2281
2282 2282 def check_permissions(self, user):
2283 2283 perms = user.permissions
2284 2284 try:
2285 2285 user_perms = {perms['repositories_groups'][self.repo_group_name]}
2286 2286 except KeyError:
2287 2287 return False
2288 2288 if self.required_perms.issubset(user_perms):
2289 2289 return True
2290 2290 return False
2291 2291
2292 2292
2293 2293 class HasUserGroupPermissionAny(PermsFunction):
2294 2294 def __call__(self, user_group_name=None, check_location='', user=None):
2295 2295 self.user_group_name = user_group_name
2296 2296 return super(HasUserGroupPermissionAny, self).__call__(check_location, user)
2297 2297
2298 2298 def check_permissions(self, user):
2299 2299 perms = user.permissions
2300 2300 try:
2301 2301 user_perms = {perms['user_groups'][self.user_group_name]}
2302 2302 except KeyError:
2303 2303 return False
2304 2304 if self.required_perms.intersection(user_perms):
2305 2305 return True
2306 2306 return False
2307 2307
2308 2308
2309 2309 class HasUserGroupPermissionAll(PermsFunction):
2310 2310 def __call__(self, user_group_name=None, check_location='', user=None):
2311 2311 self.user_group_name = user_group_name
2312 2312 return super(HasUserGroupPermissionAll, self).__call__(check_location, user)
2313 2313
2314 2314 def check_permissions(self, user):
2315 2315 perms = user.permissions
2316 2316 try:
2317 2317 user_perms = {perms['user_groups'][self.user_group_name]}
2318 2318 except KeyError:
2319 2319 return False
2320 2320 if self.required_perms.issubset(user_perms):
2321 2321 return True
2322 2322 return False
2323 2323
2324 2324
2325 2325 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
2326 2326 class HasPermissionAnyMiddleware(object):
2327 2327 def __init__(self, *perms):
2328 2328 self.required_perms = set(perms)
2329 2329
2330 2330 def __call__(self, auth_user, repo_name):
2331 2331 # # repo_name MUST be unicode, since we handle keys in permission
2332 2332 # # dict by unicode
2333 2333 #TODO: verify
2334 2334 # repo_name = safe_str(repo_name)
2335 2335
2336 2336 log.debug(
2337 2337 'Checking VCS protocol permissions %s for user:%s repo:`%s`',
2338 2338 self.required_perms, auth_user, repo_name)
2339 2339
2340 2340 if self.check_permissions(auth_user, repo_name):
2341 2341 log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
2342 2342 repo_name, auth_user, 'PermissionMiddleware')
2343 2343 return True
2344 2344
2345 2345 else:
2346 2346 log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
2347 2347 repo_name, auth_user, 'PermissionMiddleware')
2348 2348 return False
2349 2349
2350 2350 def check_permissions(self, user, repo_name):
2351 2351 perms = user.permissions_with_scope({'repo_name': repo_name})
2352 2352
2353 2353 try:
2354 2354 user_perms = {perms['repositories'][repo_name]}
2355 2355 except Exception:
2356 2356 log.exception('Error while accessing user permissions')
2357 2357 return False
2358 2358
2359 2359 if self.required_perms.intersection(user_perms):
2360 2360 return True
2361 2361 return False
2362 2362
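# Sketch of how the VCS middleware is expected to call this class; the perm set
# is the usual write/admin pair, the surrounding middleware code is assumed:
#
#   _can_push = HasPermissionAnyMiddleware('repository.write', 'repository.admin')
#   if not _can_push(auth_user, repo_name):
#       ...  # reject the push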
2363 2363
2364 2364 # SPECIAL VERSION TO HANDLE API AUTH
2365 2365 class _BaseApiPerm(object):
2366 2366 def __init__(self, *perms):
2367 2367 self.required_perms = set(perms)
2368 2368
2369 2369 def __call__(self, check_location=None, user=None, repo_name=None,
2370 2370 group_name=None, user_group_name=None):
2371 2371 cls_name = self.__class__.__name__
2372 2372 check_scope = 'global:%s' % (self.required_perms,)
2373 2373 if repo_name:
2374 2374 check_scope += ', repo_name:%s' % (repo_name,)
2375 2375
2376 2376 if group_name:
2377 2377 check_scope += ', repo_group_name:%s' % (group_name,)
2378 2378
2379 2379 if user_group_name:
2380 2380 check_scope += ', user_group_name:%s' % (user_group_name,)
2381 2381
2382 2382 log.debug('checking cls:%s %s %s @ %s',
2383 2383 cls_name, self.required_perms, check_scope, check_location)
2384 2384 if not user:
2385 2385 log.debug('Empty User passed into arguments')
2386 2386 return False
2387 2387
2388 2388 # process user
2389 2389 if not isinstance(user, AuthUser):
2390 2390 user = AuthUser(user.user_id)
2391 2391 if not check_location:
2392 2392 check_location = 'unspecified'
2393 2393 if self.check_permissions(user.permissions, repo_name, group_name,
2394 2394 user_group_name):
2395 2395 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
2396 2396 check_scope, user, check_location)
2397 2397 return True
2398 2398
2399 2399 else:
2400 2400 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
2401 2401 check_scope, user, check_location)
2402 2402 return False
2403 2403
2404 2404 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2405 2405 user_group_name=None):
2406 2406 """
2407 2407 Implement in a child class; should return True if permissions are OK,
2408 2408 False otherwise.
2409 2409
2410 2410 :param perm_defs: dict with permission definitions
2411 2411 :param repo_name: repo name
2412 2412 """
2413 2413 raise NotImplementedError()
2414 2414
2415 2415
2416 2416 class HasPermissionAllApi(_BaseApiPerm):
2417 2417 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2418 2418 user_group_name=None):
2419 2419 if self.required_perms.issubset(perm_defs.get('global')):
2420 2420 return True
2421 2421 return False
2422 2422
2423 2423
2424 2424 class HasPermissionAnyApi(_BaseApiPerm):
2425 2425 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2426 2426 user_group_name=None):
2427 2427 if self.required_perms.intersection(perm_defs.get('global')):
2428 2428 return True
2429 2429 return False
2430 2430
2431 2431
2432 2432 class HasRepoPermissionAllApi(_BaseApiPerm):
2433 2433 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2434 2434 user_group_name=None):
2435 2435 try:
2436 2436 _user_perms = {perm_defs['repositories'][repo_name]}
2437 2437 except KeyError:
2438 2438 log.warning(traceback.format_exc())
2439 2439 return False
2440 2440 if self.required_perms.issubset(_user_perms):
2441 2441 return True
2442 2442 return False
2443 2443
2444 2444
2445 2445 class HasRepoPermissionAnyApi(_BaseApiPerm):
2446 2446 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2447 2447 user_group_name=None):
2448 2448 try:
2449 2449 _user_perms = {perm_defs['repositories'][repo_name]}
2450 2450 except KeyError:
2451 2451 log.warning(traceback.format_exc())
2452 2452 return False
2453 2453 if self.required_perms.intersection(_user_perms):
2454 2454 return True
2455 2455 return False
2456 2456
2457 2457
2458 2458 class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
2459 2459 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2460 2460 user_group_name=None):
2461 2461 try:
2462 2462 _user_perms = {perm_defs['repositories_groups'][group_name]}
2463 2463 except KeyError:
2464 2464 log.warning(traceback.format_exc())
2465 2465 return False
2466 2466 if self.required_perms.intersection(_user_perms):
2467 2467 return True
2468 2468 return False
2469 2469
2470 2470
2471 2471 class HasRepoGroupPermissionAllApi(_BaseApiPerm):
2472 2472 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2473 2473 user_group_name=None):
2474 2474 try:
2475 2475 _user_perms = {perm_defs['repositories_groups'][group_name]}
2476 2476 except KeyError:
2477 2477 log.warning(traceback.format_exc())
2478 2478 return False
2479 2479 if self.required_perms.issubset(_user_perms):
2480 2480 return True
2481 2481 return False
2482 2482
2483 2483
2484 2484 class HasUserGroupPermissionAnyApi(_BaseApiPerm):
2485 2485 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2486 2486 user_group_name=None):
2487 2487 try:
2488 2488 _user_perms = {perm_defs['user_groups'][user_group_name]}
2489 2489 except KeyError:
2490 2490 log.warning(traceback.format_exc())
2491 2491 return False
2492 2492 if self.required_perms.intersection(_user_perms):
2493 2493 return True
2494 2494 return False
2495 2495
2496 2496
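The checkers above are meant to be invoked from API code with keyword arguments matching _BaseApiPerm.__call__. A minimal, hypothetical usage sketch follows; the `apiuser` object and the repository name are assumptions, not values from this module:

# Hypothetical usage sketch; `apiuser` is assumed to be a User/AuthUser-like
# object available at the API call site, and the repo name is made up.
from rhodecode.lib.auth import HasRepoPermissionAnyApi

has_read = HasRepoPermissionAnyApi(
    'repository.read', 'repository.write', 'repository.admin')(
        user=apiuser, repo_name='some-group/some-repo',
        check_location='example_api_call')
if not has_read:
    raise PermissionError('repository access denied')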
2497 2497 def check_ip_access(source_ip, allowed_ips=None):
2498 2498 """
2499 2499 Checks if source_ip is a subnet of any of allowed_ips.
2500 2500
2501 2501 :param source_ip:
2502 2502 :param allowed_ips: list of allowed ips together with mask
2503 2503 """
2504 2504 log.debug('checking if ip:%s is subnet of %s', source_ip, allowed_ips)
2505 2505 source_ip_address = ipaddress.ip_address(source_ip)
2506 2506 if isinstance(allowed_ips, (tuple, list, set)):
2507 2507 for ip in allowed_ips:
2508 2508 #TODO: verify
2509 2509 #ip = safe_str(ip)
2510 2510 try:
2511 2511 network_address = ipaddress.ip_network(ip, strict=False)
2512 2512 if source_ip_address in network_address:
2513 2513 log.debug('IP %s is network %s', source_ip_address, network_address)
2514 2514 return True
2515 2515 # for any case we cannot determine the IP, don't crash just
2516 2516 # skip it and log as error, we want to say forbidden still when
2517 2517 # sending bad IP
2518 2518 except Exception:
2519 2519 log.error(traceback.format_exc())
2520 2520 continue
2521 2521 return False
2522 2522
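For illustration, a small self-contained call to check_ip_access with an allow-list; the addresses below are arbitrary example values:

# Example only; the IPs and networks are arbitrary.
allowed = ['192.168.0.0/24', '10.0.0.1']
assert check_ip_access('192.168.0.42', allowed) is True
assert check_ip_access('172.16.0.5', allowed) is False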
2523 2523
2524 2524 def get_cython_compat_decorator(wrapper, func):
2525 2525 """
2526 2526 Creates a cython compatible decorator. The previously used
2527 2527 decorator.decorator() function seems to be incompatible with cython.
2528 2528
2529 2529 :param wrapper: __wrapper method of the decorator class
2530 2530 :param func: decorated function
2531 2531 """
2532 2532 @wraps(func)
2533 2533 def local_wrapper(*args, **kwds):
2534 2534 return wrapper(func, *args, **kwds)
2535 2535 local_wrapper.__wrapped__ = func
2536 2536 return local_wrapper
2537 2537
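A hedged sketch of how a wrapper/function pair is wired through this helper; the names below are invented for illustration:

# Invented example: `_log_calls_wrapper` plays the role of a decorator
# class' __wrapper method, receiving the decorated function first.
def _log_calls_wrapper(func, *args, **kwargs):
    print(f'calling {func.__name__}')
    return func(*args, **kwargs)

def greet(name):
    return f'hello {name}'

greet = get_cython_compat_decorator(_log_calls_wrapper, greet)
print(greet('world'))  # prints "calling greet", then "hello world"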
2538 2538
@@ -1,689 +1,689 b''
1 1
2 2
3 3 # Copyright (C) 2014-2023 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SimpleVCS middleware for handling protocol requests (push/clone etc.).
23 23 It's implemented with a basic auth function.
24 24 """
25 25
26 26 import os
27 27 import re
28 28 import io
29 29 import logging
30 30 import importlib
31 31 from functools import wraps
32 32 from lxml import etree
33 33
34 34 import time
35 35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
36 36
37 37 from pyramid.httpexceptions import (
38 38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
39 39 from zope.cachedescriptors.property import Lazy as LazyProperty
40 40
41 41 import rhodecode
42 42 from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin
43 43 from rhodecode.lib import rc_cache
44 44 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
45 45 from rhodecode.lib.base import (
46 46 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
47 47 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
48 48 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
49 49 from rhodecode.lib.middleware import appenlight
50 50 from rhodecode.lib.middleware.utils import scm_app_http
51 51 from rhodecode.lib.str_utils import safe_bytes
52 52 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
53 53 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool
54 54 from rhodecode.lib.vcs.conf import settings as vcs_settings
55 55 from rhodecode.lib.vcs.backends import base
56 56
57 57 from rhodecode.model import meta
58 58 from rhodecode.model.db import User, Repository, PullRequest
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.pull_request import PullRequestModel
61 61 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 def extract_svn_txn_id(acl_repo_name, data):
67 67 """
68 68 Helper method for extracting the svn txn_id from XML data submitted during
69 69 POST operations.
70 70 """
71 71 try:
72 72 root = etree.fromstring(data)
73 73 pat = re.compile(r'/txn/(?P<txn_id>.*)')
74 74 for el in root:
75 75 if el.tag == '{DAV:}source':
76 76 for sub_el in el:
77 77 if sub_el.tag == '{DAV:}href':
78 78 match = pat.search(sub_el.text)
79 79 if match:
80 80 svn_tx_id = match.groupdict()['txn_id']
81 81 txn_id = rc_cache.utils.compute_key_from_params(
82 82 acl_repo_name, svn_tx_id)
83 83 return txn_id
84 84 except Exception:
85 85 log.exception('Failed to extract txn_id')
86 86
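A rough, assumed example of the kind of DAV MERGE body this helper parses; real Subversion payloads carry more elements, so treat the XML below as illustrative only:

# Assumed, simplified MERGE body; the repo name and txn id are made up.
sample_body = (
    b'<?xml version="1.0" encoding="utf-8"?>'
    b'<D:merge xmlns:D="DAV:">'
    b'<D:source><D:href>/repo/!svn/txn/120-4f</D:href></D:source>'
    b'</D:merge>'
)
txn_key = extract_svn_txn_id('some-group/some-repo', sample_body)
# txn_key is a sha1-based cache key derived from (repo name, '120-4f'),
# or None when no txn id can be found in the body.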
87 87
88 88 def initialize_generator(factory):
89 89 """
90 90 Initializes the returned generator by draining its first element.
91 91
92 92 This can be used to give a generator an initializer, which is the code
93 93 up to the first yield statement. This decorator enforces that the first
94 94 produced element has the value ``"__init__"`` to make its special
95 95 purpose very explicit in the using code.
96 96 """
97 97
98 98 @wraps(factory)
99 99 def wrapper(*args, **kwargs):
100 100 gen = factory(*args, **kwargs)
101 101 try:
102 102 init = next(gen)
103 103 except StopIteration:
104 104 raise ValueError('Generator must yield at least one element.')
105 105 if init != "__init__":
106 106 raise ValueError('First yielded element must be "__init__".')
107 107 return gen
108 108 return wrapper
109 109
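A minimal, illustrative use of the decorator (the generator below is made up):

@initialize_generator
def numbers(limit):
    # everything before the first yield acts as the initializer
    print('opening resources')
    yield "__init__"
    for i in range(limit):
        yield i

gen = numbers(3)   # "opening resources" is printed here already
print(list(gen))   # [0, 1, 2]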
110 110
111 111 class SimpleVCS(object):
112 112 """Common functionality for SCM HTTP handlers."""
113 113
114 114 SCM = 'unknown'
115 115
116 116 acl_repo_name = None
117 117 url_repo_name = None
118 118 vcs_repo_name = None
119 119 rc_extras = {}
120 120
121 121 # We have to handle requests to shadow repositories differently than
122 122 # requests to normal repositories, so we have to distinguish them. To do
123 123 # this we use this regex, which matches only URLs that point to shadow
124 124 # repositories.
125 125 shadow_repo_re = re.compile(
126 126 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
127 127 '(?P<target>{slug_pat})/' # target repo
128 128 'pull-request/(?P<pr_id>\\d+)/' # pull request
129 129 'repository$' # shadow repo
130 130 .format(slug_pat=SLUG_RE.pattern))
131 131
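For illustration, a URL this pattern is meant to recognize; the group/repo names and the pull request id are assumptions:

# Illustrative only; names and pr id are made up.
m = SimpleVCS.shadow_repo_re.match(
    'RepoGroup/MyRepo/pull-request/3/repository')
assert m and m.group('target') == 'MyRepo' and m.group('pr_id') == '3'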
132 132 def __init__(self, config, registry):
133 133 self.registry = registry
134 134 self.config = config
135 135 # re-populated by specialized middleware
136 136 self.repo_vcs_config = base.Config()
137 137
138 138 rc_settings = SettingsModel().get_all_settings(cache=True, from_request=False)
139 139 realm = rc_settings.get('rhodecode_realm') or 'RhodeCode AUTH'
140 140
141 141 # authenticate this VCS request using authfunc
142 142 auth_ret_code_detection = \
143 143 str2bool(self.config.get('auth_ret_code_detection', False))
144 144 self.authenticate = BasicAuth(
145 145 '', authenticate, registry, config.get('auth_ret_code'),
146 146 auth_ret_code_detection, rc_realm=realm)
147 147 self.ip_addr = '0.0.0.0'
148 148
149 149 @LazyProperty
150 150 def global_vcs_config(self):
151 151 try:
152 152 return VcsSettingsModel().get_ui_settings_as_config_obj()
153 153 except Exception:
154 154 return base.Config()
155 155
156 156 @property
157 157 def base_path(self):
158 158 settings_path = self.repo_vcs_config.get(*VcsSettingsModel.PATH_SETTING)
159 159
160 160 if not settings_path:
161 161 settings_path = self.global_vcs_config.get(*VcsSettingsModel.PATH_SETTING)
162 162
163 163 if not settings_path:
164 164 # try, maybe we passed in explicitly as config option
165 165 settings_path = self.config.get('base_path')
166 166
167 167 if not settings_path:
168 168 raise ValueError('FATAL: base_path is empty')
169 169 return settings_path
170 170
171 171 def set_repo_names(self, environ):
172 172 """
173 173 This will populate the attributes acl_repo_name, url_repo_name,
174 174 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
175 175 shadow) repositories all names are equal. In case of requests to a
176 176 shadow repository the acl-name points to the target repo of the pull
177 177 request and the vcs-name points to the shadow repo file system path.
178 178 The url-name is always the URL used by the vcs client program.
179 179
180 180 Example in case of a shadow repo:
181 181 acl_repo_name = RepoGroup/MyRepo
182 182 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
183 183 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
184 184 """
185 185 # First we set the repo name from URL for all attributes. This is the
186 186 # default if handling normal (non shadow) repo requests.
187 187 self.url_repo_name = self._get_repository_name(environ)
188 188 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
189 189 self.is_shadow_repo = False
190 190
191 191 # Check if this is a request to a shadow repository.
192 192 match = self.shadow_repo_re.match(self.url_repo_name)
193 193 if match:
194 194 match_dict = match.groupdict()
195 195
196 196 # Build acl repo name from regex match.
197 197 acl_repo_name = safe_str('{groups}{target}'.format(
198 198 groups=match_dict['groups'] or '',
199 199 target=match_dict['target']))
200 200
201 201 # Retrieve pull request instance by ID from regex match.
202 202 pull_request = PullRequest.get(match_dict['pr_id'])
203 203
204 204 # Only proceed if we got a pull request and if acl repo name from
205 205 # URL equals the target repo name of the pull request.
206 206 if pull_request and (acl_repo_name == pull_request.target_repo.repo_name):
207 207
208 208 # Get file system path to shadow repository.
209 209 workspace_id = PullRequestModel()._workspace_id(pull_request)
210 210 vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)
211 211
212 212 # Store names for later usage.
213 213 self.vcs_repo_name = vcs_repo_name
214 214 self.acl_repo_name = acl_repo_name
215 215 self.is_shadow_repo = True
216 216
217 217 log.debug('Setting all VCS repository names: %s', {
218 218 'acl_repo_name': self.acl_repo_name,
219 219 'url_repo_name': self.url_repo_name,
220 220 'vcs_repo_name': self.vcs_repo_name,
221 221 })
222 222
223 223 @property
224 224 def scm_app(self):
225 225 custom_implementation = self.config['vcs.scm_app_implementation']
226 226 if custom_implementation == 'http':
227 227 log.debug('Using HTTP implementation of scm app.')
228 228 scm_app_impl = scm_app_http
229 229 else:
230 230 log.debug('Using custom implementation of scm_app: "{}"'.format(
231 231 custom_implementation))
232 232 scm_app_impl = importlib.import_module(custom_implementation)
233 233 return scm_app_impl
234 234
235 235 def _get_by_id(self, repo_name):
236 236 """
237 237 Gets the special pattern _<ID> from the clone URL and tries to replace it
238 238 with the repository name, to support permanent _<ID> URLs.
239 239 """
240 240
241 241 data = repo_name.split('/')
242 242 if len(data) >= 2:
243 243 from rhodecode.model.repo import RepoModel
244 244 by_id_match = RepoModel().get_repo_by_id(repo_name)
245 245 if by_id_match:
246 246 data[1] = by_id_match.repo_name
247 247
248 248 # Because PEP-3333 WSGI uses bytes tunneled in latin-1 as PATH_INFO,
249 249 # and we use this data, re-encode the path back the same way
250 250 maybe_new_path = '/'.join(data)
251 251 return safe_bytes(maybe_new_path).decode('latin1')
252 252
253 253 def _invalidate_cache(self, repo_name):
254 254 """
255 255 Sets the cache for this repository to be invalidated on next access.
256 256
257 257 :param repo_name: full repo name, also a cache key
258 258 """
259 259 ScmModel().mark_for_invalidation(repo_name)
260 260
261 261 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
262 262 db_repo = Repository.get_by_repo_name(repo_name)
263 263 if not db_repo:
264 264 log.debug('Repository `%s` not found inside the database.',
265 265 repo_name)
266 266 return False
267 267
268 268 if db_repo.repo_type != scm_type:
269 269 log.warning(
270 270 'Repository `%s` has incorrect scm_type, expected %s got %s',
271 271 repo_name, db_repo.repo_type, scm_type)
272 272 return False
273 273
274 274 config = db_repo._config
275 275 config.set('extensions', 'largefiles', '')
276 276 return is_valid_repo(
277 277 repo_name, base_path,
278 278 explicit_scm=scm_type, expect_scm=scm_type, config=config)
279 279
280 280 def valid_and_active_user(self, user):
281 281 """
282 282 Checks that the user is not empty and, if it is an actual object, that
283 283 the user is active.
284 284
285 285 :param user: user object or None
286 286 :return: boolean
287 287 """
288 288 if user is None:
289 289 return False
290 290
291 291 elif user.active:
292 292 return True
293 293
294 294 return False
295 295
296 296 @property
297 297 def is_shadow_repo_dir(self):
298 298 return os.path.isdir(self.vcs_repo_name)
299 299
300 300 def _check_permission(self, action, user, auth_user, repo_name, ip_addr=None,
301 301 plugin_id='', plugin_cache_active=False, cache_ttl=0):
302 302 """
303 303 Checks permissions using the action (push/pull), user and repository
304 304 name. If plugin_cache_active and cache_ttl are set, the plugin which
305 305 authenticated the user is used to store the cached permissions result
306 306 for cache_ttl seconds.
307 307
308 308 :param action: push or pull action
309 309 :param user: user instance
310 310 :param repo_name: repository name
311 311 """
312 312
313 313 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
314 314 plugin_id, plugin_cache_active, cache_ttl)
315 315
316 316 user_id = user.user_id
317 cache_namespace_uid = 'cache_user_auth.{}'.format(user_id)
317 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}'
318 318 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
319 319
320 320 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
321 321 expiration_time=cache_ttl,
322 322 condition=plugin_cache_active)
323 323 def compute_perm_vcs(
324 324 cache_name, plugin_id, action, user_id, repo_name, ip_addr):
325 325
326 326 log.debug('auth: calculating permission access now...')
327 327 # check IP
328 328 inherit = user.inherit_default_permissions
329 329 ip_allowed = AuthUser.check_ip_allowed(
330 330 user_id, ip_addr, inherit_from_default=inherit)
331 331 if ip_allowed:
332 332 log.info('Access for IP:%s allowed', ip_addr)
333 333 else:
334 334 return False
335 335
336 336 if action == 'push':
337 337 perms = ('repository.write', 'repository.admin')
338 338 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
339 339 return False
340 340
341 341 else:
342 342 # any other action need at least read permission
343 343 perms = (
344 344 'repository.read', 'repository.write', 'repository.admin')
345 345 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
346 346 return False
347 347
348 348 return True
349 349
350 350 start = time.time()
351 351 log.debug('Running plugin `%s` permissions check', plugin_id)
352 352
353 353 # for environ based auth, password can be empty, but then the validation is
354 354 # on the server that fills in the env data needed for authentication
355 355 perm_result = compute_perm_vcs(
356 356 'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr)
357 357
358 358 auth_time = time.time() - start
359 359 log.debug('Permissions for plugin `%s` completed in %.4fs, '
360 360 'expiration time of fetched cache %.1fs.',
361 361 plugin_id, auth_time, cache_ttl)
362 362
363 363 return perm_result
364 364
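To make the effect of the namespace change above concrete, a hedged sketch of how the versioned uid feeds the per-user permissions cache region; the user id and the resulting key are example values:

# Sketch only; user_id is an example value.
from rhodecode.lib import rc_cache

user_id = 42
cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}'
# e.g. 'cache_user_auth.v2.42'; bumping PERMISSIONS_CACHE_VER moves all new
# entries to a fresh namespace, so stale permission results are never reused.
region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)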
365 365 def _get_http_scheme(self, environ):
366 366 try:
367 367 return environ['wsgi.url_scheme']
368 368 except Exception:
369 369 log.exception('Failed to read http scheme')
370 370 return 'http'
371 371
372 372 def _check_ssl(self, environ, start_response):
373 373 """
374 374 Checks the SSL flag and returns False if SSL is required but not
375 375 present, True otherwise.
376 376 """
377 377 org_proto = environ['wsgi._org_proto']
378 378 # check if we have SSL required ! if not it's a bad request !
379 379 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
380 380 if require_ssl and org_proto == 'http':
381 381 log.debug(
382 382 'Bad request: detected protocol is `%s` and '
383 383 'SSL/HTTPS is required.', org_proto)
384 384 return False
385 385 return True
386 386
387 387 def _get_default_cache_ttl(self):
388 388 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
389 389 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
390 390 plugin_settings = plugin.get_settings()
391 391 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
392 392 plugin_settings) or (False, 0)
393 393 return plugin_cache_active, cache_ttl
394 394
395 395 def __call__(self, environ, start_response):
396 396 try:
397 397 return self._handle_request(environ, start_response)
398 398 except Exception:
399 399 log.exception("Exception while handling request")
400 400 appenlight.track_exception(environ)
401 401 return HTTPInternalServerError()(environ, start_response)
402 402 finally:
403 403 meta.Session.remove()
404 404
405 405 def _handle_request(self, environ, start_response):
406 406 if not self._check_ssl(environ, start_response):
407 407 reason = ('SSL required, while RhodeCode was unable '
408 408 'to detect this as SSL request')
409 409 log.debug('User not allowed to proceed, %s', reason)
410 410 return HTTPNotAcceptable(reason)(environ, start_response)
411 411
412 412 if not self.url_repo_name:
413 413 log.warning('Repository name is empty: %s', self.url_repo_name)
414 414 # failed to get repo name, we fail now
415 415 return HTTPNotFound()(environ, start_response)
416 416 log.debug('Extracted repo name is %s', self.url_repo_name)
417 417
418 418 ip_addr = get_ip_addr(environ)
419 419 user_agent = get_user_agent(environ)
420 420 username = None
421 421
422 422 # skip passing error to error controller
423 423 environ['pylons.status_code_redirect'] = True
424 424
425 425 # ======================================================================
426 426 # GET ACTION PULL or PUSH
427 427 # ======================================================================
428 428 action = self._get_action(environ)
429 429
430 430 # ======================================================================
431 431 # Check if this is a request to a shadow repository of a pull request.
432 432 # In this case only pull action is allowed.
433 433 # ======================================================================
434 434 if self.is_shadow_repo and action != 'pull':
435 435 reason = 'Only pull action is allowed for shadow repositories.'
436 436 log.debug('User not allowed to proceed, %s', reason)
437 437 return HTTPNotAcceptable(reason)(environ, start_response)
438 438
439 439 # Check if the shadow repo actually exists, in case someone refers
440 440 # to it, and it has been deleted because of successful merge.
441 441 if self.is_shadow_repo and not self.is_shadow_repo_dir:
442 442 log.debug(
443 443 'Shadow repo detected, and shadow repo dir `%s` is missing',
444 444 self.vcs_repo_name)
445 445 return HTTPNotFound()(environ, start_response)
446 446
447 447 # ======================================================================
448 448 # CHECK ANONYMOUS PERMISSION
449 449 # ======================================================================
450 450 detect_force_push = False
451 451 check_branch_perms = False
452 452 if action in ['pull', 'push']:
453 453 user_obj = anonymous_user = User.get_default_user()
454 454 auth_user = user_obj.AuthUser()
455 455 username = anonymous_user.username
456 456 if anonymous_user.active:
457 457 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
458 458 # ONLY check permissions if the user is activated
459 459 anonymous_perm = self._check_permission(
460 460 action, anonymous_user, auth_user, self.acl_repo_name, ip_addr,
461 461 plugin_id='anonymous_access',
462 462 plugin_cache_active=plugin_cache_active,
463 463 cache_ttl=cache_ttl,
464 464 )
465 465 else:
466 466 anonymous_perm = False
467 467
468 468 if not anonymous_user.active or not anonymous_perm:
469 469 if not anonymous_user.active:
470 470 log.debug('Anonymous access is disabled, running '
471 471 'authentication')
472 472
473 473 if not anonymous_perm:
474 474 log.debug('Not enough credentials to access repo: `%s` '
475 475 'repository as anonymous user', self.acl_repo_name)
476 476
477 477
478 478 username = None
479 479 # ==============================================================
480 480 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
481 481 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
482 482 # ==============================================================
483 483
484 484 # try to auth based on environ, container auth methods
485 485 log.debug('Running PRE-AUTH for container|headers based authentication')
486 486
487 487 # headers auth, by just reading special headers and bypass the auth with user/passwd
488 488 pre_auth = authenticate(
489 489 '', '', environ, VCS_TYPE, registry=self.registry,
490 490 acl_repo_name=self.acl_repo_name)
491 491
492 492 if pre_auth and pre_auth.get('username'):
493 493 username = pre_auth['username']
494 494 log.debug('PRE-AUTH got `%s` as username', username)
495 495 if pre_auth:
496 496 log.debug('PRE-AUTH successful from %s',
497 497 pre_auth.get('auth_data', {}).get('_plugin'))
498 498
499 499 # If not authenticated by the container, running basic auth
500 500 # before inject the calling repo_name for special scope checks
501 501 self.authenticate.acl_repo_name = self.acl_repo_name
502 502
503 503 plugin_cache_active, cache_ttl = False, 0
504 504 plugin = None
505 505
506 506 # regular auth chain
507 507 if not username:
508 508 self.authenticate.realm = self.authenticate.get_rc_realm()
509 509
510 510 try:
511 511 auth_result = self.authenticate(environ)
512 512 except (UserCreationError, NotAllowedToCreateUserError) as e:
513 513 log.error(e)
514 514 reason = safe_str(e)
515 515 return HTTPNotAcceptable(reason)(environ, start_response)
516 516
517 517 if isinstance(auth_result, dict):
518 518 AUTH_TYPE.update(environ, 'basic')
519 519 REMOTE_USER.update(environ, auth_result['username'])
520 520 username = auth_result['username']
521 521 plugin = auth_result.get('auth_data', {}).get('_plugin')
522 522 log.info(
523 523 'MAIN-AUTH successful for user `%s` from %s plugin',
524 524 username, plugin)
525 525
526 526 plugin_cache_active, cache_ttl = auth_result.get(
527 527 'auth_data', {}).get('_ttl_cache') or (False, 0)
528 528 else:
529 529 return auth_result.wsgi_application(environ, start_response)
530 530
531 531 # ==============================================================
532 532 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
533 533 # ==============================================================
534 534 user = User.get_by_username(username)
535 535 if not self.valid_and_active_user(user):
536 536 return HTTPForbidden()(environ, start_response)
537 537 username = user.username
538 538 user_id = user.user_id
539 539
540 540 # check user attributes for password change flag
541 541 user_obj = user
542 542 auth_user = user_obj.AuthUser()
543 543 if user_obj and user_obj.username != User.DEFAULT_USER and \
544 544 user_obj.user_data.get('force_password_change'):
545 545 reason = 'password change required'
546 546 log.debug('User not allowed to authenticate, %s', reason)
547 547 return HTTPNotAcceptable(reason)(environ, start_response)
548 548
549 549 # check permissions for this repository
550 550 perm = self._check_permission(
551 551 action, user, auth_user, self.acl_repo_name, ip_addr,
552 552 plugin, plugin_cache_active, cache_ttl)
553 553 if not perm:
554 554 return HTTPForbidden()(environ, start_response)
555 555 environ['rc_auth_user_id'] = str(user_id)
556 556
557 557 if action == 'push':
558 558 perms = auth_user.get_branch_permissions(self.acl_repo_name)
559 559 if perms:
560 560 check_branch_perms = True
561 561 detect_force_push = True
562 562
563 563 # extras are injected into UI object and later available
564 564 # in hooks executed by RhodeCode
565 565 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
566 566
567 567 extras = vcs_operation_context(
568 568 environ, repo_name=self.acl_repo_name, username=username,
569 569 action=action, scm=self.SCM, check_locking=check_locking,
570 570 is_shadow_repo=self.is_shadow_repo, check_branch_perms=check_branch_perms,
571 571 detect_force_push=detect_force_push
572 572 )
573 573
574 574 # ======================================================================
575 575 # REQUEST HANDLING
576 576 # ======================================================================
577 577 repo_path = os.path.join(
578 578 safe_str(self.base_path), safe_str(self.vcs_repo_name))
579 579 log.debug('Repository path is %s', repo_path)
580 580
581 581 fix_PATH()
582 582
583 583 log.info(
584 584 '%s action on %s repo "%s" by "%s" from %s %s',
585 585 action, self.SCM, safe_str(self.url_repo_name),
586 586 safe_str(username), ip_addr, user_agent)
587 587
588 588 return self._generate_vcs_response(
589 589 environ, start_response, repo_path, extras, action)
590 590
591 591 @initialize_generator
592 592 def _generate_vcs_response(
593 593 self, environ, start_response, repo_path, extras, action):
594 594 """
595 595 Returns a generator for the response content.
596 596
597 597 This method is implemented as a generator, so that it can trigger
598 598 the cache validation after all content has been sent back to the client. It
599 599 also handles the locking exceptions which will be triggered when
600 600 the first chunk is produced by the underlying WSGI application.
601 601 """
602 602 txn_id = ''
603 603 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
604 604 # case for SVN, we want to re-use the callback daemon port
605 605 # so we use the txn_id, for this we peek the body, and still save
606 606 # it as wsgi.input
607 607 data = environ['wsgi.input'].read()
608 608 environ['wsgi.input'] = io.StringIO(data)
609 609 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
610 610
611 611 callback_daemon, extras = self._prepare_callback_daemon(
612 612 extras, environ, action, txn_id=txn_id)
613 613 log.debug('HOOKS extras is %s', extras)
614 614
615 615 http_scheme = self._get_http_scheme(environ)
616 616
617 617 config = self._create_config(extras, self.acl_repo_name, scheme=http_scheme)
618 618 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
619 619 with callback_daemon:
620 620 app.rc_extras = extras
621 621
622 622 try:
623 623 response = app(environ, start_response)
624 624 finally:
625 625 # This statement works together with the decorator
626 626 # "initialize_generator" above. The decorator ensures that
627 627 # we hit the first yield statement before the generator is
628 628 # returned back to the WSGI server. This is needed to
629 629 # ensure that the call to "app" above triggers the
630 630 # needed callback to "start_response" before the
631 631 # generator is actually used.
632 632 yield "__init__"
633 633
634 634 # iter content
635 635 for chunk in response:
636 636 yield chunk
637 637
638 638 try:
639 639 # invalidate cache on push
640 640 if action == 'push':
641 641 self._invalidate_cache(self.url_repo_name)
642 642 finally:
643 643 meta.Session.remove()
644 644
645 645 def _get_repository_name(self, environ):
646 646 """Get repository name out of the environmnent
647 647
648 648 :param environ: WSGI environment
649 649 """
650 650 raise NotImplementedError()
651 651
652 652 def _get_action(self, environ):
653 653 """Map request commands into a pull or push command.
654 654
655 655 :param environ: WSGI environment
656 656 """
657 657 raise NotImplementedError()
658 658
659 659 def _create_wsgi_app(self, repo_path, repo_name, config):
660 660 """Return the WSGI app that will finally handle the request."""
661 661 raise NotImplementedError()
662 662
663 663 def _create_config(self, extras, repo_name, scheme='http'):
664 664 """Create a safe config representation."""
665 665 raise NotImplementedError()
666 666
667 667 def _should_use_callback_daemon(self, extras, environ, action):
668 668 if extras.get('is_shadow_repo'):
669 669 # we don't want to execute hooks, and callback daemon for shadow repos
670 670 return False
671 671 return True
672 672
673 673 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
674 674 direct_calls = vcs_settings.HOOKS_DIRECT_CALLS
675 675 if not self._should_use_callback_daemon(extras, environ, action):
676 676 # disable callback daemon for actions that don't require it
677 677 direct_calls = True
678 678
679 679 return prepare_callback_daemon(
680 680 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
681 681 host=vcs_settings.HOOKS_HOST, use_direct_calls=direct_calls, txn_id=txn_id)
682 682
683 683
684 684 def _should_check_locking(query_string):
685 685 # this is kind of hacky, but because of how mercurial handles the
686 686 # client-server exchange, the server sees commit-related operations
687 687 # (bookmarks, phases and obsolescence markers) in separate transactions,
688 688 # so we don't want to check locking on those
689 689 return query_string not in ['cmd=listkeys']
@@ -1,120 +1,120 b''
1 1 # Copyright (C) 2015-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20 import threading
21 21
22 22 from dogpile.cache import register_backend
23 23
24 24 from . import region_meta
25 25 from .utils import (
26 26 ActiveRegionCache,
27 27 FreshRegionCache,
28 28 InvalidationContext,
29 29 backend_key_generator,
30 30 clear_cache_namespace,
31 31 get_default_cache_settings,
32 32 get_or_create_region,
33 33 make_region,
34 34 str2bool,
35 35 )
36 36
37 37 module_name = 'rhodecode'
38 38
39 39 register_backend(
40 40 "dogpile.cache.rc.memory_lru", f"{module_name}.lib.rc_cache.backends",
41 41 "LRUMemoryBackend")
42 42
43 43 register_backend(
44 44 "dogpile.cache.rc.file_namespace", f"{module_name}.lib.rc_cache.backends",
45 45 "FileNamespaceBackend")
46 46
47 47 register_backend(
48 48 "dogpile.cache.rc.redis", f"{module_name}.lib.rc_cache.backends",
49 49 "RedisPickleBackend")
50 50
51 51 register_backend(
52 52 "dogpile.cache.rc.redis_msgpack", f"{module_name}.lib.rc_cache.backends",
53 53 "RedisMsgPackBackend")
54 54
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 FILE_TREE_CACHE_VER = 'v4'
60 LICENSE_CACHE_VER = 'v2'
61
59 FILE_TREE_CACHE_VER = 'v5'
60 LICENSE_CACHE_VER = 'v3'
61 PERMISSIONS_CACHE_VER = 'v2'
62 62
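The intent of these version markers, as a hedged illustration: code that builds a cache namespace embeds the marker, so bumping the constant abandons previously written entries without explicit deletion. The namespace format below is invented purely for illustration:

# Invented example namespace; only the version-embedding pattern matters.
def example_namespace(repo_id: int) -> str:
    return f'cache_repo.{FILE_TREE_CACHE_VER}.{repo_id}'

assert example_namespace(7) == 'cache_repo.v5.7'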
63 63 CLEAR_DELETE = 'delete'
64 64 CLEAR_INVALIDATE = 'invalidate'
65 65
66 66
67 67 def async_creation_runner(cache, somekey, creator, mutex):
68 68
69 69 def runner():
70 70 try:
71 71 value = creator()
72 72 cache.set(somekey, value)
73 73 finally:
74 74 mutex.release()
75 75
76 76 thread = threading.Thread(target=runner)
77 77 thread.start()
78 78
79 79
80 80 def configure_dogpile_cache(settings):
81 81 cache_dir = settings.get('cache_dir')
82 82 if cache_dir:
83 83 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
84 84
85 85 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
86 86
87 87 # inspect available namespaces
88 88 avail_regions = set()
89 89 for key in rc_cache_data.keys():
90 90 namespace_name = key.split('.', 1)[0]
91 91 if namespace_name in avail_regions:
92 92 continue
93 93
94 94 avail_regions.add(namespace_name)
95 95 log.debug('dogpile: found following cache regions: %s', namespace_name)
96 96
97 97 new_region = make_region(
98 98 name=namespace_name,
99 99 function_key_generator=None,
100 100 async_creation_runner=None
101 101 )
102 102
103 103 new_region.configure_from_config(settings, f'rc_cache.{namespace_name}.')
104 104 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
105 105
106 106 async_creator = str2bool(settings.pop(f'rc_cache.{namespace_name}.async_creator', 'false'))
107 107 if async_creator:
108 108 log.debug('configuring region %s with async creator', new_region)
109 109 new_region.async_creation_runner = async_creation_runner
110 110
111 111 if log.isEnabledFor(logging.DEBUG):
112 112 region_args = dict(backend=new_region.actual_backend,
113 113 region_invalidator=new_region.region_invalidator.__class__)
114 log.debug('dogpile: registering a new region `%s` %s', namespace_name, region_args)
114 log.debug('dogpile: registering a new region key=`%s` args=%s', namespace_name, region_args)
115 115
116 116 region_meta.dogpile_cache_regions[namespace_name] = new_region
117 117
118 118
119 119 def includeme(config):
120 120 configure_dogpile_cache(config.registry.settings)
@@ -1,405 +1,406 b''
1 1 # Copyright (C) 2015-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import functools
20 20 import logging
21 21 import os
22 22 import threading
23 23 import time
24 24
25 25 import decorator
26 26 from dogpile.cache import CacheRegion
27 27
28 28 import rhodecode
29 29 from rhodecode.lib.hash_utils import sha1
30 30 from rhodecode.lib.str_utils import safe_bytes
31 from rhodecode.lib.type_utils import str2bool
31 from rhodecode.lib.type_utils import str2bool # noqa :required by imports from .utils
32 32
33 33 from . import region_meta, cache_key_meta
34 34
35 35 log = logging.getLogger(__name__)
36 36
37 37
38 38 def isCython(func):
39 39 """
40 40 Private helper that checks if a function is a cython function.
41 41 """
42 42 return func.__class__.__name__ == 'cython_function_or_method'
43 43
44 44
45 45 class RhodeCodeCacheRegion(CacheRegion):
46 46
47 47 def __repr__(self):
48 48 return f'{self.__class__}(name={self.name})'
49 49
50 50 def conditional_cache_on_arguments(
51 51 self, namespace=None,
52 52 expiration_time=None,
53 53 should_cache_fn=None,
54 54 to_str=str,
55 55 function_key_generator=None,
56 56 condition=True):
57 57 """
58 58 Custom conditional decorator that will not touch any dogpile internals if
59 59 the condition isn't met. This works a bit differently from should_cache_fn,
60 60 and it's faster in cases where we never want to compute cached values.
61 61 """
62 62 expiration_time_is_callable = callable(expiration_time)
63 63 if not namespace:
64 64 namespace = getattr(self, '_default_namespace', None)
65 65
66 66 if function_key_generator is None:
67 67 function_key_generator = self.function_key_generator
68 68
69 69 def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):
70 70
71 71 if not condition:
72 72 log.debug('Calling un-cached method:%s', user_func.__name__)
73 73 start = time.time()
74 74 result = user_func(*arg, **kw)
75 75 total = time.time() - start
76 76 log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
77 77 return result
78 78
79 79 key = func_key_generator(*arg, **kw)
80 80
81 81 timeout = expiration_time() if expiration_time_is_callable \
82 82 else expiration_time
83 83
84 84 log.debug('Calling cached method:`%s`', user_func.__name__)
85 85 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
86 86
87 87 def cache_decorator(user_func):
88 88 if to_str is str:
89 89 # backwards compatible
90 90 key_generator = function_key_generator(namespace, user_func)
91 91 else:
92 92 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
93 93
94 94 def refresh(*arg, **kw):
95 95 """
96 96 Like invalidate, but regenerates the value instead
97 97 """
98 98 key = key_generator(*arg, **kw)
99 99 value = user_func(*arg, **kw)
100 100 self.set(key, value)
101 101 return value
102 102
103 103 def invalidate(*arg, **kw):
104 104 key = key_generator(*arg, **kw)
105 105 self.delete(key)
106 106
107 107 def set_(value, *arg, **kw):
108 108 key = key_generator(*arg, **kw)
109 109 self.set(key, value)
110 110
111 111 def get(*arg, **kw):
112 112 key = key_generator(*arg, **kw)
113 113 return self.get(key)
114 114
115 115 user_func.set = set_
116 116 user_func.invalidate = invalidate
117 117 user_func.get = get
118 118 user_func.refresh = refresh
119 119 user_func.key_generator = key_generator
120 120 user_func.original = user_func
121 121
122 122 # Use `decorate` to preserve the signature of :param:`user_func`.
123 123 return decorator.decorate(user_func, functools.partial(
124 124 get_or_create_for_user_func, key_generator))
125 125
126 126 return cache_decorator
127 127
128 128
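A short, assumed usage sketch of the conditional decorator; the region and namespace names are placeholders and would need to exist in the dogpile configuration:

# Sketch under assumptions: region/namespace names are placeholders.
region = get_or_create_region('cache_general', 'example_namespace')

@region.conditional_cache_on_arguments(namespace='example_namespace',
                                       expiration_time=60,
                                       condition=True)
def expensive(cache_name, x, y):
    return x + y

expensive('example_call', 1, 2)             # computed, then cached
expensive('example_call', 1, 2)             # served from the cache
expensive.invalidate('example_call', 1, 2)  # drop this particular key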
129 129 def make_region(*arg, **kw):
130 130 return RhodeCodeCacheRegion(*arg, **kw)
131 131
132 132
133 133 def get_default_cache_settings(settings, prefixes=None):
134 134 prefixes = prefixes or []
135 135 cache_settings = {}
136 136 for key in settings.keys():
137 137 for prefix in prefixes:
138 138 if key.startswith(prefix):
139 139 name = key.split(prefix)[1].strip()
140 140 val = settings[key]
141 141 if isinstance(val, str):
142 142 val = val.strip()
143 143 cache_settings[name] = val
144 144 return cache_settings
145 145
146 146
147 147 def compute_key_from_params(*args):
148 148 """
149 149 Helper to compute key from given params to be used in cache manager
150 150 """
151 151 return sha1(safe_bytes("_".join(map(str, args))))
152 152
153 153
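For reference, the key is simply a sha1 over the stringified, underscore-joined arguments:

key = compute_key_from_params('some-repo', 'file-tree', 42)
# equivalent to sha1(b'some-repo_file-tree_42')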
154 154 def custom_key_generator(backend, namespace, fn):
155 155 func_name = fn.__name__
156 156
157 157 def generate_key(*args):
158 158 backend_pref = getattr(backend, 'key_prefix', None) or 'backend_prefix'
159 159 namespace_pref = namespace or 'default_namespace'
160 160 arg_key = compute_key_from_params(*args)
161 161 final_key = f"{backend_pref}:{namespace_pref}:{func_name}_{arg_key}"
162 162
163 163 return final_key
164 164
165 165 return generate_key
166 166
167 167
168 168 def backend_key_generator(backend):
169 169 """
170 170 Special wrapper that also sends over the backend to the key generator
171 171 """
172 172 def wrapper(namespace, fn):
173 173 return custom_key_generator(backend, namespace, fn)
174 174 return wrapper
175 175
176 176
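A hedged illustration of the final key shape produced by this generator chain; the backend stand-in and the namespace below are example values only:

class _FakeBackend:
    # stand-in object; real backends expose a configured key_prefix
    key_prefix = 'example_prefix'

def load_data(repo_id):
    return repo_id

keygen = backend_key_generator(_FakeBackend())('cache_repo.v5.7', load_data)
print(keygen(7))
# -> 'example_prefix:cache_repo.v5.7:load_data_<sha1 of "7">'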
177 177 def get_or_create_region(region_name, region_namespace: str = None, use_async_runner=False):
178 178 from .backends import FileNamespaceBackend
179 179 from . import async_creation_runner
180 180
181 181 region_obj = region_meta.dogpile_cache_regions.get(region_name)
182 182 if not region_obj:
183 183 reg_keys = list(region_meta.dogpile_cache_regions.keys())
184 184 raise OSError(f'Region `{region_name}` not in configured regions: {reg_keys}.')
185 185
186 186 region_uid_name = f'{region_name}:{region_namespace}'
187 187
188 # Special case for ONLY the FileNamespaceBackend backend. We register one-file-per-region
188 189 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
189 190 if not region_namespace:
190 191 raise ValueError(f'{FileNamespaceBackend} used requires to specify region_namespace param')
191 192
192 193 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
193 194 if region_exist:
194 195 log.debug('Using already configured region: %s', region_namespace)
195 196 return region_exist
196 197
197 198 expiration_time = region_obj.expiration_time
198 199
199 200 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
200 201 namespace_cache_dir = cache_dir
201 202
202 203 # we default the namespace_cache_dir to our default cache dir.
203 # however if this backend is configured with filename= param, we prioritize that
204 # however, if this backend is configured with filename= param, we prioritize that
204 205 # so all caches within that particular region, even those namespaced end up in the same path
205 206 if region_obj.actual_backend.filename:
206 207 namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)
207 208
208 209 if not os.path.isdir(namespace_cache_dir):
209 210 os.makedirs(namespace_cache_dir)
210 211 new_region = make_region(
211 212 name=region_uid_name,
212 213 function_key_generator=backend_key_generator(region_obj.actual_backend)
213 214 )
214 215
215 216 namespace_filename = os.path.join(
216 217 namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db")
217 218 # special type that allows 1db per namespace
218 219 new_region.configure(
219 220 backend='dogpile.cache.rc.file_namespace',
220 221 expiration_time=expiration_time,
221 222 arguments={"filename": namespace_filename}
222 223 )
223 224
224 225 # create and save in region caches
225 226 log.debug('configuring new region: %s', region_uid_name)
226 227 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
227 228
228 229 region_obj._default_namespace = region_namespace
229 230 if use_async_runner:
230 231 region_obj.async_creation_runner = async_creation_runner
231 232 return region_obj
232 233
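A short sketch of the file-backend behaviour described above; the region and namespace names are placeholders and assume a region configured with the dogpile.cache.rc.file_namespace backend:

# Placeholder names; assumes 'cache_repo' uses the file_namespace backend.
region = get_or_create_region('cache_repo', region_namespace='repo.7')
# the backing file ends up roughly at: <cache_dir>/cache_repo_repo.7.cache_db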
233 234
234 235 def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str):
235 236 from . import CLEAR_DELETE, CLEAR_INVALIDATE
236 237
237 238 if not isinstance(cache_region, RhodeCodeCacheRegion):
238 239 cache_region = get_or_create_region(cache_region, cache_namespace_uid)
239 240 log.debug('clearing cache region: %s with method=%s', cache_region, method)
240 241
241 242 num_affected_keys = None
242 243
243 244 if method == CLEAR_INVALIDATE:
244 245 # NOTE: The CacheRegion.invalidate() method’s default mode of
245 246 # operation is to set a timestamp local to this CacheRegion in this Python process only.
246 247 # It does not impact other Python processes or regions as the timestamp is only stored locally in memory.
247 248 cache_region.invalidate(hard=True)
248 249
249 250 if method == CLEAR_DELETE:
250 251 cache_keys = cache_region.backend.list_keys(prefix=cache_namespace_uid)
251 252 num_affected_keys = len(cache_keys)
252 253 if num_affected_keys:
253 254 cache_region.delete_multi(cache_keys)
254 255
255 256 return num_affected_keys
256 257
257 258
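A brief usage sketch for the two clearing modes; region and namespace names are placeholders:

# Placeholder region/namespace. CLEAR_DELETE physically removes matching keys
# and returns their count; CLEAR_INVALIDATE only marks the region stale for
# the current process.
from rhodecode.lib import rc_cache

deleted = rc_cache.clear_cache_namespace(
    'cache_repo', 'repo.7', method=rc_cache.CLEAR_DELETE)
rc_cache.clear_cache_namespace(
    'cache_repo', 'repo.7', method=rc_cache.CLEAR_INVALIDATE)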
258 259 class ActiveRegionCache(object):
259 260 def __init__(self, context, cache_data):
260 261 self.context = context
261 262 self.cache_data = cache_data
262 263
263 264 def should_invalidate(self):
264 265 return False
265 266
266 267
267 268 class FreshRegionCache(object):
268 269 def __init__(self, context, cache_data):
269 270 self.context = context
270 271 self.cache_data = cache_data
271 272
272 273 def should_invalidate(self):
273 274 return True
274 275
275 276
276 277 class InvalidationContext(object):
277 278 """
278 279 usage::
279 280
280 281 from rhodecode.lib import rc_cache
281 282
282 283 cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
283 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
284 region = rc_cache.get_or_create_region('some_region', cache_namespace_uid)
284 285
285 286 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
286 287 def heavy_compute(cache_name, param1, param2):
287 288 print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
288 289
289 290 # invalidation namespace is shared namespace key for all process caches
290 291 # we use it to send a global signal
291 292 invalidation_namespace = 'repo_cache:1'
292 293
293 294 inv_context_manager = rc_cache.InvalidationContext(
294 295 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
295 296 with inv_context_manager as invalidation_context:
296 297 args = ('one', 'two')
297 298 # re-compute and store cache if we get invalidate signal
298 299 if invalidation_context.should_invalidate():
299 300 result = heavy_compute.refresh(*args)
300 301 else:
301 302 result = heavy_compute(*args)
302 303
303 304 compute_time = inv_context_manager.compute_time
304 305 log.debug('result computed in %.4fs', compute_time)
305 306
306 307 # To send global invalidation signal, simply run
307 308 CacheKey.set_invalidate(invalidation_namespace)
308 309
309 310 """
310 311
311 312 def __repr__(self):
312 313 return f'<InvalidationContext:{self.cache_key}[{self.uid}]>'
313 314
314 315 def __init__(self, uid, invalidation_namespace='',
315 316 raise_exception=False, thread_scoped=None):
316 317 self.uid = uid
317 318 self.invalidation_namespace = invalidation_namespace
318 319 self.raise_exception = raise_exception
319 320 self.proc_id = rhodecode.CONFIG.get('instance_id') or 'DEFAULT'
320 321 self.thread_id = 'global'
321 322
322 323 if thread_scoped is None:
323 324 # if we set "default" we can override this via .ini settings
324 325 thread_scoped = rhodecode.ConfigGet().get_bool('cache_thread_scoped')
325 326
326 327 # Append the thread id to the cache key if this invalidation context
327 328 # should be scoped to the current thread.
328 329 if thread_scoped is True:
329 330 self.thread_id = threading.current_thread().ident
330 331
331 332 self.cache_key = compute_key_from_params(uid)
332 333 self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
333 334 self.proc_id, self.thread_id, self.cache_key)
334 335 self.proc_key = f'proc:{self.proc_id}'
335 336 self.compute_time = 0
336 337
337 338 def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''):
338 339 from rhodecode.model.db import CacheKey
339 340
340 341 invalidation_namespace = invalidation_namespace or self.invalidation_namespace
341 342 # fetch all cache keys for this namespace and convert them to a map, to find if we
342 343 # have a specific cache_key object registered. We do this because we want a
343 344 # consistent cache_state_uid for newly registered objects
344 345 cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace)
345 346 cache_obj = cache_obj_map.get(self.cache_key)
346 347 log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)
347 348
348 349 if not cache_obj:
349 350 new_cache_args = invalidation_namespace
350 351 first_cache_obj = next(iter(cache_obj_map.values())) if cache_obj_map else None
351 352 cache_state_uid = None
352 353 if first_cache_obj:
353 354 cache_state_uid = first_cache_obj.cache_state_uid
354 355 cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args,
355 356 cache_state_uid=cache_state_uid)
356 357 cache_key_meta.cache_keys_by_pid.add(self.proc_key)
357 358
358 359 return cache_obj
359 360
360 361 def __enter__(self):
361 362 """
362 363 Test if current object is valid, and return CacheRegion function
363 364 that does invalidation and calculation
364 365 """
365 366 log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
366 367 # register or get a new key based on uid
367 368 self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid)
368 369 cache_data = self.cache_obj.get_dict()
369 370 self._start_time = time.time()
370 371 if self.cache_obj.cache_active:
371 372 # means our cache obj exists and is marked as active, i.e. its
372 373 # cache is not outdated, so we return ActiveRegionCache
373 374 self.skip_cache_active_change = True
374 375
375 376 return ActiveRegionCache(context=self, cache_data=cache_data)
376 377
377 378 # the key is either not existing or set to False, we return
378 379 # the real invalidator which re-computes value. We additionally set
379 380 # the flag to actually update the Database objects
380 381 self.skip_cache_active_change = False
381 382 return FreshRegionCache(context=self, cache_data=cache_data)
382 383
383 384 def __exit__(self, exc_type, exc_val, exc_tb):
384 385 from rhodecode.model.db import IntegrityError, Session
385 386
386 387 # save compute time
387 388 self.compute_time = time.time() - self._start_time
388 389
389 390 if self.skip_cache_active_change:
390 391 return
391 392
392 393 try:
393 394 self.cache_obj.cache_active = True
394 395 Session().add(self.cache_obj)
395 396 Session().commit()
396 397 except IntegrityError:
397 398 # if we catch an integrity error, it means this object was already inserted;
398 399 # the assumption is that it's really an edge race-condition case and
399 400 # it's safe to skip it
400 401 Session().rollback()
401 402 except Exception:
402 403 log.exception('Failed to commit on cache key update')
403 404 Session().rollback()
404 405 if self.raise_exception:
405 406 raise