##// END OF EJS Templates
fix(LFS): fix the pull_changes method to support a new sync_large_objects flag; update all call sites to pass the new flag. Fixes: RCCE-8
ilin.s -
r5256:6b054b38 default
parent child Browse files
Show More
@@ -1,2533 +1,2535 b''
1 1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20 import time
21 21
22 22 import rhodecode
23 23 from rhodecode.api import (
24 24 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
25 25 from rhodecode.api.utils import (
26 26 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
27 27 get_user_group_or_error, get_user_or_error, validate_repo_permissions,
28 28 get_perm_or_error, parse_args, get_origin, build_commit_data,
29 29 validate_set_owner_permissions)
30 30 from rhodecode.lib import audit_logger, rc_cache, channelstream
31 31 from rhodecode.lib import repo_maintenance
32 32 from rhodecode.lib.auth import (
33 33 HasPermissionAnyApi, HasUserGroupPermissionAnyApi,
34 34 HasRepoPermissionAnyApi)
35 35 from rhodecode.lib.celerylib.utils import get_task_id
36 36 from rhodecode.lib.utils2 import (
37 37 str2bool, time_to_datetime, safe_str, safe_int)
38 38 from rhodecode.lib.ext_json import json
39 39 from rhodecode.lib.exceptions import (
40 40 StatusChangeOnClosedPullRequestError, CommentVersionMismatch)
41 41 from rhodecode.lib.vcs import RepositoryError
42 42 from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
43 43 from rhodecode.model.changeset_status import ChangesetStatusModel
44 44 from rhodecode.model.comment import CommentsModel
45 45 from rhodecode.model.db import (
46 46 Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
47 47 ChangesetComment)
48 48 from rhodecode.model.permission import PermissionModel
49 49 from rhodecode.model.pull_request import PullRequestModel
50 50 from rhodecode.model.repo import RepoModel
51 51 from rhodecode.model.scm import ScmModel, RepoList
52 52 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
53 53 from rhodecode.model import validation_schema
54 54 from rhodecode.model.validation_schema.schemas import repo_schema
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
@jsonrpc_method()
def get_repo(request, apiuser, repoid, cache=Optional(True)):
    """
    Fetch a single repository by its name or repository_id.

    The returned data includes the repository metadata plus the users and
    user groups with permissions on it, and its list of followers.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param cache: use the cached value for last changeset
    :type: cache: Optional(bool)
    """

    repo = get_repo_or_error(repoid)
    use_cache = Optional.extract(cache)

    # super-admins also receive secret fields (e.g. api keys) in the output
    include_secrets = has_superadmin_permission(apiuser)
    if not include_secrets:
        # everyone else needs at least read permission on this repo
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    permissions = []
    for _user in repo.permissions():
        permissions.append({
            'name': _user.username,
            'permission': _user.permission,
            'origin': get_origin(_user),
            'type': "user",
        })

    for _user_group in repo.permission_user_groups():
        permissions.append({
            'name': _user_group.users_group_name,
            'permission': _user_group.permission,
            'origin': get_origin(_user_group),
            'type': "user_group",
        })

    following_users = [
        follower.user.get_api_data(include_secrets=include_secrets)
        for follower in repo.followers]

    if not use_cache:
        # refresh cached last-commit data before serializing
        repo.update_commit_cache()

    data = repo.get_api_data(include_secrets=include_secrets)
    data['permissions'] = permissions
    data['followers'] = following_users

    return data
207 207
208 208
@jsonrpc_method()
def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
    """
    List existing repositories, optionally restricted to a root
    repository group.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param root: specify root repository group to fetch repositories.
        filters the returned repositories to be members of given root group.
    :type root: Optional(None)
    :param traverse: traverse given root into subrepositories. With this flag
        set to False, it will only return top-level repositories from `root`.
        if root is empty it will return just top-level repositories.
    :type traverse: Optional(True)
    """

    include_secrets = has_superadmin_permission(apiuser)
    _perms = ('repository.read', 'repository.write', 'repository.admin',)
    extras = {'user': apiuser}

    root = Optional.extract(root)
    traverse = Optional.extract(traverse, binary=True)

    if root:
        # verify that the parent group exists, otherwise report an error
        parent = RepoGroup.get_by_group_name(root)
        if not parent:
            raise JSONRPCError(
                f'Root repository group `{root}` does not exist')

        if traverse:
            repos = RepoModel().get_repos_for_root(root=root, traverse=traverse)
        else:
            repos = RepoModel().get_repos_for_root(root=parent)
    else:
        if traverse:
            repos = RepoModel().get_all()
        else:
            # no root given and no traversal: just the top level
            repos = RepoModel().get_repos_for_root(root=None)

    repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras)
    return [repo.get_api_data(include_secrets=include_secrets)
            for repo in repo_list]
282 282
283 283
@jsonrpc_method()
def get_repo_changeset(request, apiuser, repoid, revision,
                       details=Optional('basic')):
    """
    Return information about a single changeset.

    The `details` parameter controls how much data is returned.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id
    :type repoid: str or int
    :param revision: revision for which listing should be done
    :type revision: str
    :param details: details can be 'basic|extended|full' full gives diff
        info details like the diff itself, and number of changed files etc.
    :type details: Optional(str)
    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        raise JSONRPCError(
            'ret_type must be one of %s' % (
                ','.join(_changes_details_types)))

    vcs_repo = repo.scm_instance()
    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    try:
        commit = repo.get_commit(commit_id=revision, pre_load=pre_load)
    except TypeError as e:
        raise JSONRPCError(safe_str(e))

    _cs_json = commit.__json__()
    _cs_json['diff'] = build_commit_data(vcs_repo, commit, changes_details)
    if changes_details == 'full':
        # full detail additionally exposes the commit refs
        _cs_json['refs'] = commit._get_refs()
    return _cs_json
332 332
333 333
@jsonrpc_method()
def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
                        details=Optional('basic')):
    """
    Return a set of commits starting from `start_rev`, limited by `limit`.

    The `details` parameter controls how much data is returned per commit.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param start_rev: The starting revision from where to get changesets.
    :type start_rev: str
    :param limit: Limit the number of commits to this amount
    :type limit: str or int
    :param details: Set the level of detail returned. Valid option are:
        ``basic``, ``extended`` and ``full``.
    :type details: Optional(str)

    .. note::

       Setting `details` to ``full`` is extensive and returns details
       like the diff itself and the number of changed files.
    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        raise JSONRPCError(
            'ret_type must be one of %s' % (
                ','.join(_changes_details_types)))

    limit = int(limit)
    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    vcs_repo = repo.scm_instance()
    # SVN needs a special case to distinguish its index and commit id
    if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
        start_rev = vcs_repo.commit_ids[0]

    try:
        commits = vcs_repo.get_commits(
            start_id=start_rev, pre_load=pre_load, translate_tags=False)
    except TypeError as e:
        raise JSONRPCError(safe_str(e))
    except Exception:
        log.exception('Fetching of commits failed')
        raise JSONRPCError('Error occurred during commit fetching')

    ret = []
    for cnt, commit in enumerate(commits):
        # a limit of -1 means "no limit"
        if limit != -1 and cnt >= limit:
            break
        _cs_json = commit.__json__()
        _cs_json['diff'] = build_commit_data(vcs_repo, commit, changes_details)
        if changes_details == 'full':
            _cs_json['refs'] = {
                'branches': [commit.branch],
                'bookmarks': getattr(commit, 'bookmarks', []),
                'tags': commit.tags
            }
        ret.append(_cs_json)
    return ret
410 410
411 411
@jsonrpc_method()
def get_repo_nodes(request, apiuser, repoid, revision, root_path,
                   ret_type=Optional('all'), details=Optional('basic'),
                   max_file_bytes=Optional(None)):
    """
    Return a flat list of nodes and children for `root_path` at the
    given revision.

    `ret_type` restricts the result to only `files` or `dirs`.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param revision: The revision for which listing should be done.
    :type revision: str
    :param root_path: The path from which to start displaying.
    :type root_path: str
    :param ret_type: Set the return type. Valid options are
        ``all`` (default), ``files`` and ``dirs``.
    :type ret_type: Optional(str)
    :param details: Returns extended information about nodes, such as
        md5, binary, and or content.
        The valid options are ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content under this file size bytes
    :type details: Optional(int)
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    ret_type = Optional.extract(ret_type)
    details = Optional.extract(details)
    max_file_bytes = Optional.extract(max_file_bytes)

    _extended_types = ['basic', 'full']
    if details not in _extended_types:
        ret_types = ','.join(_extended_types)
        raise JSONRPCError(f'ret_type must be one of {ret_types}')

    # 'basic' adds extended node info; 'full' additionally includes content
    extended_info = details in ('basic', 'full')
    content = details == 'full'

    _map = {}
    try:
        # quick exit for empty repositories
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return []

        _d, _f = ScmModel().get_nodes(
            repo, revision, root_path, flat=False,
            extended_info=extended_info, content=content,
            max_file_bytes=max_file_bytes)

        _map = {
            'all': _d + _f,
            'files': _f,
            'dirs': _d,
        }

        return _map[ret_type]
    except KeyError:
        # raised by the _map lookup when ret_type is not a valid key
        keys = ','.join(sorted(_map.keys()))
        raise JSONRPCError(f'ret_type must be one of {keys}')
    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` nodes')
512 512
513 513
@jsonrpc_method()
def get_repo_file(request, apiuser, repoid, commit_id, file_path,
                  max_file_bytes=Optional(0), details=Optional('basic'),
                  cache=Optional(True)):
    """
    Returns a single file from repository at given revision.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param commit_id: The revision for which listing should be done.
    :type commit_id: str
    :param file_path: The path from which to start displaying.
    :type file_path: str
    :param details: Returns different set of information about nodes.
        The valid options are ``minimal`` ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content under this file size bytes
    :type max_file_bytes: Optional(int)
    :param cache: Use internal caches for fetching files. If disabled fetching
        files is slower but more memory efficient
    :type cache: Optional(bool)
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    cache = Optional.extract(cache, binary=True)
    details = Optional.extract(details)
    max_file_bytes = Optional.extract(max_file_bytes)

    _extended_types = ['minimal', 'minimal+search', 'basic', 'full']
    if details not in _extended_types:
        ret_types = ','.join(_extended_types)
        # FIX: the previous message mixed a literal `%s` into an f-string,
        # placed the valid-types list after "got", and passed `details` as a
        # stray second positional argument to JSONRPCError.
        raise JSONRPCError(f'ret_type must be one of {ret_types}, got {details}')

    # 'minimal' (and 'minimal+search') return the bare node;
    # 'basic' adds extended info; 'full' additionally includes content.
    extended_info = False
    content = False

    if details == 'minimal':
        extended_info = False

    elif details == 'basic':
        extended_info = True

    elif details == 'full':
        extended_info = content = True

    file_path = safe_str(file_path)
    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return None

        node = ScmModel().get_node(
            repo, commit_id, file_path, extended_info=extended_info,
            content=content, max_file_bytes=max_file_bytes, cache=cache)

    except NodeDoesNotExistError:
        raise JSONRPCError(
            f'There is no file in repo: `{repo.repo_name}` at path `{file_path}` for commit: `{commit_id}`')
    except Exception:
        log.exception("Exception occurred while trying to get repo %s file",
                      repo.repo_name)
        raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` file at path {file_path}')

    return node
605 605
606 606
@jsonrpc_method()
def get_repo_fts_tree(request, apiuser, repoid, commit_id, root_path):
    """
    Returns a list of tree nodes for path at given revision. This api is built
    strictly for usage in full text search building, and shouldn't be consumed

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param commit_id: commit (or a pointer such as a branch name) to resolve
    :type commit_id: str
    :param root_path: path from which the tree is collected
    :type root_path: str
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    repo_id = repo.repo_id
    cache_seconds = rhodecode.ConfigGet().get_int('rc_cache.cache_repo.expiration_time')
    cache_on = cache_seconds > 0

    cache_namespace_uid = f'repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}'
    rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

    def compute_fts_tree(repo_id, commit_id, root_path):
        # delegate to ScmModel for the actual FTS payload
        return ScmModel().get_fts_data(repo_id, commit_id, root_path)

    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if not _scm or _scm.is_empty():
            return []
    except RepositoryError:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` nodes')

    try:
        # we need to resolve commit_id to a FULL sha for cache to work correctly.
        # sending 'master' is a pointer that needs to be translated to current commit.
        commit_id = _scm.get_commit(commit_id=commit_id).raw_id
        # FIX: pass arguments lazily to log.debug instead of eagerly
        # %-formatting the message string.
        log.debug(
            'Computing FTS REPO TREE for repo_id %s commit_id `%s` '
            'with caching: %s[TTL: %ss]',
            repo_id, commit_id, cache_on, cache_seconds or 0)

        tree_files = compute_fts_tree(repo_id, commit_id, root_path)

        return tree_files

    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` nodes')
658 658
659 659
@jsonrpc_method()
def get_repo_refs(request, apiuser, repoid):
    """
    Return the current references of a repository: bookmarks, branches,
    closed branches, and tags.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    try:
        # delegate to the vcs layer; any backend failure is wrapped below
        vcs_instance = repo.scm_instance()
        return vcs_instance.refs()
    except Exception:
        log.exception("Exception occurred while trying to get repo refs")
        raise JSONRPCError(
            'failed to get repo: `%s` references' % repo.repo_name
        )
716 716
717 717
@jsonrpc_method()
def create_repo(
        request, apiuser, repo_name, repo_type,
        owner=Optional(OAttr('apiuser')),
        description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None),
        push_uri=Optional(None),
        landing_rev=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False),
        copy_permissions=Optional(False)):
    """
    Create a repository.

    A `repo_name` containing "/" creates the repository inside the given
    (possibly nested) repository groups: "foo/bar/repo1" creates |repo|
    `repo1` inside group "foo/bar". Access and write permission on the
    last group ("bar" in this example) is required.

    This command can only be run using an |authtoken| with at least
    permissions to create repositories, or write permissions to
    parent repository groups.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repo_name: Set the repository name.
    :type repo_name: str
    :param repo_type: Set the repository type; 'hg','git', or 'svn'.
    :type repo_type: str
    :param owner: user_id or username
    :type owner: Optional(str)
    :param description: Set the repository description.
    :type description: Optional(str)
    :param private: set repository as private
    :type private: bool
    :param clone_uri: set clone_uri
    :type clone_uri: str
    :param push_uri: set push_uri
    :type push_uri: str
    :param landing_rev: <rev_type>:<rev>, e.g branch:default, book:dev, rev:abcd
    :type landing_rev: str
    :param enable_locking:
    :type enable_locking: bool
    :param enable_downloads:
    :type enable_downloads: bool
    :param enable_statistics:
    :type enable_statistics: bool
    :param copy_permissions: Copy permission from group in which the
        repository is being created.
    :type copy_permissions: bool

    On success returns ``{"msg": ..., "success": true, "task": <celery
    task id or None if done sync>}``; creation may run asynchronously.
    """

    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)
    push_uri = Optional.extract(push_uri)

    # fill unspecified flags from the instance-wide repository defaults
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    if isinstance(private, Optional):
        private = defs.get('repo_private') or Optional.extract(private)
    if isinstance(repo_type, Optional):
        # NOTE(review): repo_type is a required positional argument, so this
        # branch looks unreachable — kept for behavioral parity; confirm.
        repo_type = defs.get('repo_type')
    if isinstance(enable_statistics, Optional):
        enable_statistics = defs.get('repo_enable_statistics')
    if isinstance(enable_locking, Optional):
        enable_locking = defs.get('repo_enable_locking')
    if isinstance(enable_downloads, Optional):
        enable_downloads = defs.get('repo_enable_downloads')

    landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
    ref_choices = list(set(ref_choices + [landing_ref]))

    landing_commit_ref = Optional.extract(landing_rev) or landing_ref

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo_type,
        # user caller
        user=apiuser)

    try:
        schema_data = schema.deserialize(dict(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_push_uri=push_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions,
            repo_enable_statistics=enable_statistics,
            repo_enable_downloads=enable_downloads,
            repo_enable_locking=enable_locking))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        form_data = {
            'owner': owner,
            'repo_name': schema_data['repo_group']['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': schema_data['repo_group']['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'repo_description': schema_data['repo_description'],
            'repo_private': schema_data['repo_private'],
            'clone_uri': schema_data['repo_clone_uri'],
            'push_uri': schema_data['repo_push_uri'],
            'repo_landing_rev': schema_data['repo_landing_commit_ref'],
            'enable_statistics': schema_data['repo_enable_statistics'],
            'enable_locking': schema_data['repo_enable_locking'],
            'enable_downloads': schema_data['repo_enable_downloads'],
            'repo_copy_permissions': schema_data['repo_copy_permissions'],
        }

        task = RepoModel().create(form_data=form_data, cur_user=owner.user_id)
        task_id = get_task_id(task)
        # no commit, it's done in RepoModel, or async via celery
        return {
            'msg': "Created new repository `{}`".format(schema_data['repo_name']),
            'success': True,  # cannot return the repo data here since fork
            # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            "Exception while trying to create the repository %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to create repository `{}`'.format(schema_data['repo_name']))
881 881
882 882
@jsonrpc_method()
def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
                      description=Optional('')):
    """
    Adds an extra field to a repository.

    This command can only be run using an |authtoken| with at least
    write permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository id.
    :type repoid: str or int
    :param key: Create a unique field key for this repository.
    :type key: str
    :param label: Display label for the field; defaults to the key itself.
    :type label: Optional(str)
    :param description: Free-form description of the field.
    :type description: Optional(str)
    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # only repository admins may manage extra fields
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    field_label = Optional.extract(label) or key
    field_desc = Optional.extract(description)

    # refuse to redefine an already-existing field with the same key
    if RepositoryField.get_by_key_name(key, repo):
        raise JSONRPCError(f'Field with key `{key}` exists for repo `{repoid}`')

    try:
        RepoModel().add_repo_field(
            repo, key, field_label=field_label, field_desc=field_desc)
        Session().commit()
    except Exception:
        log.exception("Exception occurred while trying to add field to repo")
        raise JSONRPCError(
            f'failed to create new field for repository `{repoid}`')

    return {
        'msg': f"Added new repository field `{key}`",
        'success': True,
    }
927 927
928 928
@jsonrpc_method()
def remove_field_from_repo(request, apiuser, repoid, key):
    """
    Removes an extra field from a repository.

    This command can only be run using an |authtoken| with at least
    write permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param key: Set the unique field key for this repository.
    :type key: str
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # only repository admins may manage extra fields
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    field = RepositoryField.get_by_key_name(key, repo)
    if not field:
        # fixed grammar in the user-facing message ("does not exists" ->
        # "does not exist") and switched to f-string like the rest of the module
        raise JSONRPCError(
            f'Field with key `{key}` does not exist for repo `{repoid}`')

    try:
        RepoModel().delete_repo_field(repo, field_key=key)
        Session().commit()
        return {
            'msg': f"Deleted repository field `{key}`",
            'success': True,
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to delete field from repo")
        raise JSONRPCError(
            f'failed to delete field for repository `{repoid}`')
967 967
968 968
@jsonrpc_method()
def update_repo(
        request, apiuser, repoid, repo_name=Optional(None),
        owner=Optional(OAttr('apiuser')), description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None), push_uri=Optional(None),
        landing_rev=Optional(None), fork_of=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False), fields=Optional('')):
    r"""
    Updates a repository with the given information.

    Every parameter wrapped in ``Optional`` acts as a "not provided" sentinel:
    arguments the caller omits keep the repository's current value.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    * If the repository name contains "/", repository will be updated
      accordingly with a repository group or nested repository groups

      For example repoid=repo-test name="foo/bar/repo-test" will update |repo|
      called "repo-test" and place it inside group "foo/bar".
      You have to have permissions to access and write to the last repository
      group ("bar" in this example)

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: repository name or repository ID.
    :type repoid: str or int
    :param repo_name: Update the |repo| name, including the
        repository group it's in.
    :type repo_name: str
    :param owner: Set the |repo| owner.
    :type owner: str
    :param fork_of: Set the |repo| as fork of another |repo|.
    :type fork_of: str
    :param description: Update the |repo| description.
    :type description: str
    :param private: Set the |repo| as private. (True | False)
    :type private: bool
    :param clone_uri: Update the |repo| clone URI.
    :type clone_uri: str
    :param push_uri: Update the |repo| push URI.
    :type push_uri: str
    :param landing_rev: Set the |repo| landing revision. e.g branch:default, book:dev, rev:abcd
    :type landing_rev: str
    :param enable_statistics: Enable statistics on the |repo|, (True | False).
    :type enable_statistics: bool
    :param enable_locking: Enable |repo| locking.
    :type enable_locking: bool
    :param enable_downloads: Enable downloads from the |repo|, (True | False).
    :type enable_downloads: bool
    :param fields: Add extra fields to the |repo|. Use the following
        example format: ``field_key=field_val,field_key2=fieldval2``.
        Escape ', ' with \,
    :type fields: str
    """

    repo = get_repo_or_error(repoid)

    # super-admins additionally get secret values (e.g. credentials embedded
    # in clone/push URIs) back in the response
    include_secrets = False
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)
    else:
        include_secrets = True

    # merge caller-supplied values with the repo's current state: for every
    # argument still wrapped in Optional (i.e. not passed) fall back to the
    # existing attribute on the repository
    updates = dict(
        repo_name=repo_name
        if not isinstance(repo_name, Optional) else repo.repo_name,

        fork_id=fork_of
        if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None,

        user=owner
        if not isinstance(owner, Optional) else repo.user.username,

        repo_description=description
        if not isinstance(description, Optional) else repo.description,

        repo_private=private
        if not isinstance(private, Optional) else repo.private,

        clone_uri=clone_uri
        if not isinstance(clone_uri, Optional) else repo.clone_uri,

        push_uri=push_uri
        if not isinstance(push_uri, Optional) else repo.push_uri,

        repo_landing_rev=landing_rev
        if not isinstance(landing_rev, Optional) else repo._landing_revision,

        repo_enable_statistics=enable_statistics
        if not isinstance(enable_statistics, Optional) else repo.enable_statistics,

        repo_enable_locking=enable_locking
        if not isinstance(enable_locking, Optional) else repo.enable_locking,

        repo_enable_downloads=enable_downloads
        if not isinstance(enable_downloads, Optional) else repo.enable_downloads)

    # landing_rev must validate against the refs that actually exist in the
    # repo, plus the backend's default landing ref
    landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(
        request.translate, repo=repo)
    ref_choices = list(set(ref_choices + [landing_ref]))

    old_values = repo.get_api_data()
    repo_type = repo.repo_type
    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo_type,
        # user caller
        user=apiuser,
        old_values=old_values)
    try:
        schema_data = schema.deserialize(dict(
            # we save old value, users cannot change type
            repo_type=repo_type,

            repo_name=updates['repo_name'],
            repo_owner=updates['user'],
            repo_description=updates['repo_description'],
            repo_clone_uri=updates['clone_uri'],
            repo_push_uri=updates['push_uri'],
            repo_fork_of=updates['fork_id'],
            repo_private=updates['repo_private'],
            repo_landing_commit_ref=updates['repo_landing_rev'],
            repo_enable_statistics=updates['repo_enable_statistics'],
            repo_enable_downloads=updates['repo_enable_downloads'],
            repo_enable_locking=updates['repo_enable_locking']))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    # save validated data back into the updates dict
    validated_updates = dict(
        repo_name=schema_data['repo_group']['repo_name_without_group'],
        repo_group=schema_data['repo_group']['repo_group_id'],

        user=schema_data['repo_owner'],
        repo_description=schema_data['repo_description'],
        repo_private=schema_data['repo_private'],
        clone_uri=schema_data['repo_clone_uri'],
        push_uri=schema_data['repo_push_uri'],
        repo_landing_rev=schema_data['repo_landing_commit_ref'],
        repo_enable_statistics=schema_data['repo_enable_statistics'],
        repo_enable_locking=schema_data['repo_enable_locking'],
        repo_enable_downloads=schema_data['repo_enable_downloads'],
    )

    # the schema resolves fork_of to a repo name; translate it to a repo_id
    # as expected by RepoModel().update
    if schema_data['repo_fork_of']:
        fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
        validated_updates['fork_id'] = fork_repo.repo_id

    # extra fields
    fields = parse_args(Optional.extract(fields), key_prefix='ex_')
    if fields:
        validated_updates.update(fields)

    try:
        RepoModel().update(repo, **validated_updates)
        audit_logger.store_api(
            'repo.edit', action_data={'old_data': old_values},
            user=apiuser, repo=repo)
        Session().commit()
        return {
            'msg': f'updated repo ID:{repo.repo_id} {repo.repo_name}',
            'repository': repo.get_api_data(include_secrets=include_secrets)
        }
    except Exception:
        log.exception(
            "Exception while trying to update the repository %s",
            repoid)
        raise JSONRPCError('failed to update repo `%s`' % repoid)
1140 1140
1141 1141
@jsonrpc_method()
def fork_repo(request, apiuser, repoid, fork_name,
              owner=Optional(OAttr('apiuser')),
              description=Optional(''),
              private=Optional(False),
              clone_uri=Optional(None),
              landing_rev=Optional(None),
              copy_permissions=Optional(False)):
    """
    Creates a fork of the specified |repo|.

    * If the fork_name contains "/", fork will be created inside
      a repository group or nested repository groups

      For example "foo/bar/fork-repo" will create fork called "fork-repo"
      inside group "foo/bar". You have to have permissions to access and
      write to the last repository group ("bar" in this example)

    This command can only be run using an |authtoken| with minimum
    read permissions of the forked repo, create fork permissions for an user.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set repository name or repository ID.
    :type repoid: str or int
    :param fork_name: Set the fork name, including it's repository group membership.
    :type fork_name: str
    :param owner: Set the fork owner.
    :type owner: str
    :param description: Set the fork description.
    :type description: str
    :param copy_permissions: Copy permissions from parent |repo|. The
        default is False.
    :type copy_permissions: bool
    :param private: Make the fork private. The default is False.
    :type private: bool
    :param landing_rev: Set the landing revision. E.g branch:default, book:dev, rev:abcd

    Example output:

    .. code-block:: bash

        id : <id_for_response>
        api_key : "<api_key>"
        args: {
            "repoid" : "<reponame or repo_id>",
            "fork_name": "<forkname>",
            "owner": "<username or user_id = Optional(=apiuser)>",
            "description": "<description>",
            "copy_permissions": "<bool>",
            "private": "<bool>",
            "landing_rev": "<landing_rev>"
        }

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "Created fork of `<reponame>` as `<forkname>`",
            "success": true,
            "task": "<celery task id or None if done sync>"
        }
        error: null

    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name

    if not has_superadmin_permission(apiuser):
        # check if we have at least read permission for
        # this repo that we fork !
        _perms = ('repository.admin', 'repository.write', 'repository.read')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

        # check if the regular user has at least fork permissions as well
        if not HasPermissionAnyApi(PermissionModel.FORKING_ENABLED)(user=apiuser):
            raise JSONRPCForbidden()

    # check if user can set owner parameter
    owner = validate_set_owner_permissions(apiuser, owner)

    # unwrap the Optional sentinels into plain values / defaults
    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)

    # landing_rev must validate against known refs; fall back to the
    # backend's default landing ref when the caller did not pass one
    landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
    ref_choices = list(set(ref_choices + [landing_ref]))
    landing_commit_ref = Optional.extract(landing_rev) or landing_ref

    private = Optional.extract(private)

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo.repo_type,
        # user caller
        user=apiuser)

    try:
        schema_data = schema.deserialize(dict(
            repo_name=fork_name,
            repo_type=repo.repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        data = {
            'fork_parent_id': repo.repo_id,

            'repo_name': schema_data['repo_group']['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': schema_data['repo_group']['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'description': schema_data['repo_description'],
            'private': schema_data['repo_private'],
            'copy_permissions': schema_data['repo_copy_permissions'],
            'landing_rev': schema_data['repo_landing_commit_ref'],
        }

        task = RepoModel().create_fork(data, cur_user=owner.user_id)
        # no commit, it's done in RepoModel, or async via celery
        task_id = get_task_id(task)

        return {
            'msg': 'Created fork of `{}` as `{}`'.format(
                repo.repo_name, schema_data['repo_name']),
            'success': True,  # cannot return the repo data here since fork
            # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            "Exception while trying to create fork %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to fork repository `{}` as `{}`'.format(
                repo_name, schema_data['repo_name']))
1289 1289
1290 1290
@jsonrpc_method()
def delete_repo(request, apiuser, repoid, forks=Optional('')):
    """
    Deletes a repository.

    * When the `forks` parameter is set it's possible to detach or delete
      forks of deleted repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param forks: Set to `detach` or `delete` forks from the |repo|.
    :type forks: Optional(str)

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "Deleted repository `<reponame>`",
            "success": true
        }
        error: null
    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        handle_forks = Optional.extract(forks)
        _forks_msg = ''
        _forks = list(repo.forks)
        if handle_forks == 'detach':
            _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
        elif handle_forks == 'delete':
            _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
        elif _forks:
            # neither detach nor delete requested, but forks exist: refuse
            raise JSONRPCError(
                'Cannot delete `%s` it still contains attached forks' %
                (repo.repo_name,)
            )
        old_data = repo.get_api_data()
        # pass the extracted value, not the raw argument: when the caller
        # omits `forks` the raw value is still an Optional wrapper, not a
        # plain 'detach'/'delete' string
        RepoModel().delete(repo, forks=handle_forks)

        repo = audit_logger.RepoWrap(repo_id=None,
                                     repo_name=repo.repo_name)

        audit_logger.store_api(
            'repo.delete', action_data={'old_data': old_data},
            user=apiuser, repo=repo)

        # drop all cache keys for the deleted repository
        ScmModel().mark_for_invalidation(repo_name, delete=True)
        Session().commit()
        return {
            'msg': f'Deleted repository `{repo_name}`{_forks_msg}',
            'success': True
        }
    except Exception:
        log.exception("Exception occurred while trying to delete repo")
        raise JSONRPCError(
            f'failed to delete repository `{repo_name}`'
        )
1361 1361
1362 1362
1363 1363 #TODO: marcink, change name ?
@jsonrpc_method()
def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
    """
    Invalidates the cache for the specified repository.

    This command can only be run using an |authtoken| with admin or write
    rights to the specified repository.

    :param apiuser: This is filled automatically from |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param delete_keys: This deletes the invalidated keys instead of
        just flagging them.
    :type delete_keys: Optional(``True`` | ``False``)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
          'msg': Cache for repository `<repository name>` was invalidated,
          'repository': <repository name>
        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
           'Error occurred during cache invalidation action'
        }

    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # write access is sufficient to flush repository caches
        validate_repo_permissions(
            apiuser, repoid, repo, ('repository.admin', 'repository.write',))

    delete_flag = Optional.extract(delete_keys)
    try:
        ScmModel().mark_for_invalidation(repo.repo_name, delete=delete_flag)
        return {
            'msg': f'Cache for repository `{repoid}` was invalidated',
            'repository': repo.repo_name
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to invalidate repo cache")
        raise JSONRPCError(
            'Error occurred during cache invalidation action'
        )
1423 1423
1424 1424
1425 1425 #TODO: marcink, change name ?
@jsonrpc_method()
def lock(request, apiuser, repoid, locked=Optional(None),
         userid=Optional(OAttr('apiuser'))):
    """
    Sets the lock state of the specified |repo| by the given user.
    From more information, see :ref:`repo-locking`.

    * If the ``userid`` option is not set, the repository is locked to the
      user who called the method.
    * If the ``locked`` parameter is not set, the current lock state of the
      repository is displayed.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param locked: Sets the lock state.
    :type locked: Optional(``True`` | ``False``)
    :param userid: Set the repository lock to this user.
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
          'repo': '<reponame>',
          'locked': <bool: lock state>,
          'locked_since': <int: lock timestamp>,
          'locked_by': <username of person who made the lock>,
          'lock_reason': <str: reason for locking>,
          'lock_state_changed': <bool: True if lock state has been changed in this request>,
          'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
          or
          'msg': 'Repo `<repository name>` not locked.'
          or
          'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
            'Error occurred locking repository `<reponame>`'
        }
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # check if we have at least write permission for this repo !
        _perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # make sure normal user does not pass someone else userid,
    # he is not allowed to do that
    if not isinstance(userid, Optional) and userid != apiuser.user_id:
        raise JSONRPCError('userid is not the same as your user')

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)

    # no explicit `locked` flag given: read-only query of the current state
    if isinstance(locked, Optional):
        lockobj = repo.locked

        # lockobj is a (user_id, timestamp, reason) triple; first slot None
        # means the repository is currently unlocked
        if lockobj[0] is None:
            _d = {
                'repo': repo.repo_name,
                'locked': False,
                'locked_since': None,
                'locked_by': None,
                'lock_reason': None,
                'lock_state_changed': False,
                'msg': 'Repo `%s` not locked.' % repo.repo_name
            }
            return _d
        else:
            _user_id, _time, _reason = lockobj
            # NOTE(review): this re-fetches the same userid resolved into
            # `user` above; presumably it should use lockobj's _user_id —
            # left as-is, confirm intended behavior before changing
            lock_user = get_user_or_error(userid)
            _d = {
                'repo': repo.repo_name,
                'locked': True,
                'locked_since': _time,
                'locked_by': lock_user.username,
                'lock_reason': _reason,
                'lock_state_changed': False,
                'msg': ('Repo `%s` locked by `%s` on `%s`.'
                        % (repo.repo_name, lock_user.username,
                           json.dumps(time_to_datetime(_time))))
            }
            return _d

    # force locked state through a flag
    else:
        locked = str2bool(locked)
        lock_reason = Repository.LOCK_API
        try:
            if locked:
                lock_time = time.time()
                Repository.lock(repo, user.user_id, lock_time, lock_reason)
            else:
                lock_time = None
                Repository.unlock(repo)
            _d = {
                'repo': repo.repo_name,
                'locked': locked,
                'locked_since': lock_time,
                'locked_by': user.username,
                'lock_reason': lock_reason,
                'lock_state_changed': True,
                'msg': ('User `%s` set lock state for repo `%s` to `%s`'
                        % (user.username, repo.repo_name, locked))
            }
            return _d
        except Exception:
            log.exception(
                "Exception occurred while trying to lock repository")
            raise JSONRPCError(
                'Error occurred locking repository `%s`' % repo.repo_name
            )
1557 1557
1558 1558
@jsonrpc_method()
def comment_commit(
        request, apiuser, repoid, commit_id, message, status=Optional(None),
        comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
        resolves_comment_id=Optional(None), extra_recipients=Optional([]),
        userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
    """
    Set a commit comment, and optionally change the status of the commit.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param commit_id: Specify the commit_id for which to set a comment.
    :type commit_id: str
    :param message: The comment text.
    :type message: str
    :param status: (**Optional**) status of commit, one of: 'not_reviewed',
        'approved', 'rejected', 'under_review'
    :type status: str
    :param comment_type: Comment type, one of: 'note', 'todo'
    :type comment_type: Optional(str), default: 'note'
    :param resolves_comment_id: id of comment which this one will resolve
    :type resolves_comment_id: Optional(int)
    :param extra_recipients: list of user ids or usernames to add
        notifications for this comment. Acts like a CC for notification
    :type extra_recipients: Optional(list)
    :param userid: Set the user name of the comment creator.
    :type userid: Optional(str or int)
    :param send_email: Define if this comment should also send email notification
    :type send_email: Optional(bool)

    Example error output:

    .. code-block:: bash

        {
            "id" : <id_given_in_input>,
            "result" : {
                "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
                "status_change": null or <status>,
                "success": true
            },
            "error" :  null
        }

    """
    _ = request.translate

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # commenting requires at least read access
        _perms = ('repository.read', 'repository.write', 'repository.admin')
        validate_repo_permissions(apiuser, repoid, repo, _perms)
    db_repo_name = repo.repo_name

    # resolve a possibly-short commit reference to the full raw id
    try:
        commit = repo.scm_instance().get_commit(commit_id=commit_id)
        commit_id = commit.raw_id
    except Exception as e:
        log.exception('Failed to fetch commit')
        raise JSONRPCError(safe_str(e))

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)
    status = Optional.extract(status)
    comment_type = Optional.extract(comment_type)
    resolves_comment_id = Optional.extract(resolves_comment_id)
    extra_recipients = Optional.extract(extra_recipients)
    send_email = Optional.extract(send_email, binary=True)

    allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
    if status and status not in allowed_statuses:
        # fixed typo in user-facing message: "must be on of" -> "must be one of"
        raise JSONRPCError('Bad status, must be one '
                           'of %s got %s' % (allowed_statuses, status,))

    # a comment can only resolve an existing TODO-type comment
    if resolves_comment_id:
        comment = ChangesetComment.get(resolves_comment_id)
        if not comment:
            raise JSONRPCError(
                'Invalid resolves_comment_id `%s` for this commit.'
                % resolves_comment_id)
        if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
            raise JSONRPCError(
                'Comment `%s` is wrong type for setting status to resolved.'
                % resolves_comment_id)

    try:
        rc_config = SettingsModel().get_all_settings()
        renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
        status_change_label = ChangesetStatus.get_status_lbl(status)
        comment = CommentsModel().create(
            message, repo, user, commit_id=commit_id,
            status_change=status_change_label,
            status_change_type=status,
            renderer=renderer,
            comment_type=comment_type,
            resolves_comment_id=resolves_comment_id,
            auth_user=apiuser,
            extra_recipients=extra_recipients,
            send_email=send_email
        )
        is_inline = comment.is_inline

        if status:
            # also do a status change
            try:
                ChangesetStatusModel().set_status(
                    repo, status, user, comment, revision=commit_id,
                    dont_allow_on_closed_pull_request=True
                )
            except StatusChangeOnClosedPullRequestError:
                log.exception(
                    "Exception occurred while trying to change repo commit status")
                msg = ('Changing status on a commit associated with '
                       'a closed pull request is not allowed')
                raise JSONRPCError(msg)

        CommentsModel().trigger_commit_comment_hook(
            repo, apiuser, 'create',
            data={'comment': comment, 'commit': commit})

        Session().commit()

        # push a live notification to users watching this commit's channel
        comment_broadcast_channel = channelstream.comment_channel(
            db_repo_name, commit_obj=commit)

        comment_data = {'comment': comment, 'comment_id': comment.comment_id}
        comment_type = 'inline' if is_inline else 'general'
        channelstream.comment_channelstream_push(
            request, comment_broadcast_channel, apiuser,
            _('posted a new {} comment').format(comment_type),
            comment_data=comment_data)

        return {
            'msg': (
                'Commented on commit `{}` for repository `{}`'.format(
                    comment.revision, repo.repo_name)),
            'status_change': status,
            'success': True,
        }
    except JSONRPCError:
        # catch any inside errors, and re-raise them to prevent from
        # below global catch to silence them
        raise
    except Exception:
        log.exception("Exception occurred while trying to comment on commit")
        raise JSONRPCError(
            f'failed to set comment on repository `{repo.repo_name}`'
        )
1710 1710
1711 1711
@jsonrpc_method()
def get_repo_comments(request, apiuser, repoid,
                      commit_id=Optional(None), comment_type=Optional(None),
                      userid=Optional(None)):
    """
    Get all comments for a repository

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param commit_id: Optionally filter the comments by the commit_id
    :type commit_id: Optional(str), default: None
    :param comment_type: Optionally filter the comments by the comment_type
        one of: 'note', 'todo'
    :type comment_type: Optional(str), default: None
    :param userid: Optionally filter the comments by the author of comment
    :type userid: Optional(str or int), Default: None

    Example error output:

    .. code-block:: bash

      {
        "id" : <id_given_in_input>,
        "result" : [
          {
            "comment_author": <USER_DETAILS>,
            "comment_created_on": "2017-02-01T14:38:16.309",
            "comment_f_path": "file.txt",
            "comment_id": 282,
            "comment_lineno": "n1",
            "comment_resolved_by": null,
            "comment_status": [],
            "comment_text": "This file needs a header",
            "comment_type": "todo",
            "comment_last_version: 0
          }
        ],
        "error" : null
      }

    """
    repo = get_repo_or_error(repoid)

    # any read-level access on the repo is enough to list its comments
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.read', 'repository.write', 'repository.admin'))

    commit_id = Optional.extract(commit_id)

    # resolve the optional author filter to a User object (or no filter)
    userid = Optional.extract(userid)
    user = get_user_or_error(userid) if userid else None

    comment_type = Optional.extract(comment_type)
    if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES:
        raise JSONRPCError(
            'comment_type must be one of `{}` got {}'.format(
                ChangesetComment.COMMENT_TYPES, comment_type)
        )

    return CommentsModel().get_repository_comments(
        repo=repo, comment_type=comment_type, user=user, commit_id=commit_id)
1778 1778
1779 1779
@jsonrpc_method()
def get_comment(request, apiuser, comment_id):
    """
    Get single comment from repository or pull_request

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param comment_id: comment id found in the URL of comment
    :type comment_id: str or int

    Example error output:

    .. code-block:: bash

      {
        "id" : <id_given_in_input>,
        "result" : {
            "comment_author": <USER_DETAILS>,
            "comment_created_on": "2017-02-01T14:38:16.309",
            "comment_f_path": "file.txt",
            "comment_id": 282,
            "comment_lineno": "n1",
            "comment_resolved_by": null,
            "comment_status": [],
            "comment_text": "This file needs a header",
            "comment_type": "todo",
            "comment_last_version: 0
          },
        "error" : null
      }

    """

    comment = ChangesetComment.get(comment_id)
    if not comment:
        raise JSONRPCError(f'comment `{comment_id}` does not exist')

    # require at least read access on the repo the comment belongs to;
    # report "does not exist" rather than "forbidden" to avoid leaking
    # the existence of comments in repos the caller cannot see
    perm_checker = HasRepoPermissionAnyApi(
        'repository.read', 'repository.write', 'repository.admin')
    if not perm_checker(user=apiuser, repo_name=comment.repo.repo_name):
        raise JSONRPCError(f'comment `{comment_id}` does not exist')

    return comment
1825 1825
1826 1826
@jsonrpc_method()
def edit_comment(request, apiuser, message, comment_id, version,
                 userid=Optional(OAttr('apiuser'))):
    """
    Edit comment on the pull request or commit,
    specified by the `comment_id` and version. Initially version should be 0

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param comment_id: Specify the comment_id for editing
    :type comment_id: int
    :param version: version of the comment that will be created, starts from 0
    :type version: int
    :param message: The text content of the comment.
    :type message: str
    :param userid: Comment on the pull request as this user
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            "comment": "<comment data>",
            "version": "<Integer>",
          },
        error :  null
    """

    # auth_user is who the edit is attributed to; starts as the caller and
    # may be switched below via the optional `userid` impersonation
    auth_user = apiuser
    comment = ChangesetComment.get(comment_id)
    if not comment:
        raise JSONRPCError(f'comment `{comment_id}` does not exist')

    # compute admin rights BEFORE possibly swapping apiuser to the
    # impersonated user below — rights are checked against the caller
    is_super_admin = has_superadmin_permission(apiuser)
    is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
        (user=apiuser, repo_name=comment.repo.repo_name)

    # `userid` left as Optional means "act as the caller"; only admins may
    # edit on behalf of another user
    if not isinstance(userid, Optional):
        if is_super_admin or is_repo_admin:
            apiuser = get_user_or_error(userid)
            auth_user = apiuser.AuthUser()
        else:
            raise JSONRPCError('userid is not the same as your user')

    # authorship is evaluated against the (possibly impersonated) auth_user
    comment_author = comment.author.user_id == auth_user.user_id

    if comment.immutable:
        raise JSONRPCError("Immutable comment cannot be edited")

    if not (is_super_admin or is_repo_admin or comment_author):
        raise JSONRPCError("you don't have access to edit this comment")

    try:
        comment_history = CommentsModel().edit(
            comment_id=comment_id,
            text=message,
            auth_user=auth_user,
            version=version,
        )
        Session().commit()
    except CommentVersionMismatch:
        # optimistic-concurrency failure: caller's `version` is stale
        raise JSONRPCError(
            f'comment ({comment_id}) version ({version}) mismatch'
        )
    # CommentsModel().edit returns a falsy history when nothing changed;
    # an empty message with no resulting history entry is rejected
    if not comment_history and not message:
        raise JSONRPCError(
            f"comment ({comment_id}) can't be changed with empty string"
        )

    # fire the matching edit hook depending on where the comment lives
    if comment.pull_request:
        pull_request = comment.pull_request
        PullRequestModel().trigger_pull_request_hook(
            pull_request, apiuser, 'comment_edit',
            data={'comment': comment})
    else:
        db_repo = comment.repo
        commit_id = comment.revision
        commit = db_repo.get_commit(commit_id)
        CommentsModel().trigger_commit_comment_hook(
            db_repo, apiuser, 'edit',
            data={'comment': comment, 'commit': commit})

    data = {
        'comment': comment,
        'version': comment_history.version if comment_history else None,
    }
    return data
1916 1916
1917 1917
1918 1918 # TODO(marcink): write this with all required logic for deleting a comments in PR or commits
1919 1919 # @jsonrpc_method()
1920 1920 # def delete_comment(request, apiuser, comment_id):
1921 1921 # auth_user = apiuser
1922 1922 #
1923 1923 # comment = ChangesetComment.get(comment_id)
1924 1924 # if not comment:
1925 1925 # raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1926 1926 #
1927 1927 # is_super_admin = has_superadmin_permission(apiuser)
1928 1928 # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1929 1929 # (user=apiuser, repo_name=comment.repo.repo_name)
1930 1930 #
1931 1931 # comment_author = comment.author.user_id == auth_user.user_id
1932 1932 # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
1933 1933 # raise JSONRPCError("you don't have access to edit this comment")
1934 1934
@jsonrpc_method()
def grant_user_permission(request, apiuser, repoid, userid, perm):
    """
    Grant permissions for the specified user on the given repository,
    or update existing permissions if found.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param userid: Set the user name.
    :type userid: str
    :param perm: Set the user permissions, using the following format
        ``(repository.(none|read|write|admin))``
    :type perm: str

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
                "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
                "success": true
              }
      error:  null
    """

    repo = get_repo_or_error(repoid)
    user = get_user_or_error(userid)
    perm = get_perm_or_error(perm)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    try:
        changes = RepoModel().update_permissions(
            repo=repo,
            perm_additions=[[user.user_id, perm.permission_name, "user"]],
            cur_user=apiuser)

        # audit-log only the delta reported by the permission update
        audit_logger.store_api(
            'repo.edit.permissions',
            action_data={
                'added': changes['added'],
                'updated': changes['updated'],
                'deleted': changes['deleted'],
            },
            user=apiuser, repo=repo)
        Session().commit()
        PermissionModel().flush_user_permission_caches(changes)

        msg = 'Granted perm: `{}` for user: `{}` in repo: `{}`'.format(
            perm.permission_name, user.username, repo.repo_name)
        return {'msg': msg, 'success': True}
    except Exception:
        log.exception("Exception occurred while trying edit permissions for repo")
        raise JSONRPCError(
            'failed to edit permission for user: `{}` in repo: `{}`'.format(
                userid, repoid
            )
        )
2001 2001
2002 2002
@jsonrpc_method()
def revoke_user_permission(request, apiuser, repoid, userid):
    """
    Revoke permission for a user on the specified repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param userid: Set the user name of revoked user.
    :type userid: str or int

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
                "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
                "success": true
              }
      error:  null
    """

    repo = get_repo_or_error(repoid)
    user = get_user_or_error(userid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    perm_deletions = [[user.user_id, None, "user"]]
    try:
        # FIX: attribute the change to the acting user (apiuser), consistent
        # with grant_user_permission and with the audit entry below; previously
        # the *revoked* user was passed as cur_user.
        changes = RepoModel().update_permissions(
            repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)

        action_data = {
            'added': changes['added'],
            'updated': changes['updated'],
            'deleted': changes['deleted'],
        }
        audit_logger.store_api(
            'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
        Session().commit()
        PermissionModel().flush_user_permission_caches(changes)

        return {
            'msg': 'Revoked perm for user: `{}` in repo: `{}`'.format(
                user.username, repo.repo_name
            ),
            'success': True
        }
    except Exception:
        log.exception("Exception occurred while trying revoke permissions to repo")
        raise JSONRPCError(
            'failed to edit permission for user: `{}` in repo: `{}`'.format(
                userid, repoid
            )
        )
2064 2064
2065 2065
@jsonrpc_method()
def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
    """
    Grant permission for a user group on the specified repository,
    or update existing permissions.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param usergroupid: Specify the ID of the user group.
    :type usergroupid: str or int
    :param perm: Set the user group permissions using the following
        format: (repository.(none|read|write|admin))
    :type perm: str

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
          "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
          "success": true

        }
        error :  null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
          "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
        }

    """

    repo = get_repo_or_error(repoid)
    perm = get_perm_or_error(perm)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    user_group = get_user_group_or_error(usergroupid)
    if not has_superadmin_permission(apiuser):
        # check if we have at least read permission for this user group !
        # report "does not exist" to avoid leaking group existence
        group_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
        can_see_group = HasUserGroupPermissionAnyApi(*group_perms)(
            user=apiuser, user_group_name=user_group.users_group_name)
        if not can_see_group:
            raise JSONRPCError(
                f'user group `{usergroupid}` does not exist')

    try:
        changes = RepoModel().update_permissions(
            repo=repo,
            perm_additions=[
                [user_group.users_group_id, perm.permission_name, "user_group"]],
            cur_user=apiuser)
        audit_logger.store_api(
            'repo.edit.permissions',
            action_data={
                'added': changes['added'],
                'updated': changes['updated'],
                'deleted': changes['deleted'],
            },
            user=apiuser, repo=repo)
        Session().commit()
        PermissionModel().flush_user_permission_caches(changes)

        return {
            'msg': 'Granted perm: `%s` for user group: `%s` in '
                   'repo: `%s`' % (
                       perm.permission_name, user_group.users_group_name,
                       repo.repo_name
                   ),
            'success': True
        }
    except Exception:
        log.exception(
            "Exception occurred while trying change permission on repo")
        raise JSONRPCError(
            'failed to edit permission for user group: `%s` in '
            'repo: `%s`' % (
                usergroupid, repo.repo_name
            )
        )
2155 2155
2156 2156
@jsonrpc_method()
def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
    """
    Revoke the permissions of a user group on a given repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param usergroupid: Specify the user group ID.
    :type usergroupid: str or int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
                  "success": true
                }
        error:  null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    user_group = get_user_group_or_error(usergroupid)
    if not has_superadmin_permission(apiuser):
        # check if we have at least read permission for this user group !
        # report "does not exist" to avoid leaking group existence
        group_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
        can_see_group = HasUserGroupPermissionAnyApi(*group_perms)(
            user=apiuser, user_group_name=user_group.users_group_name)
        if not can_see_group:
            raise JSONRPCError(
                f'user group `{usergroupid}` does not exist')

    try:
        changes = RepoModel().update_permissions(
            repo=repo,
            perm_deletions=[[user_group.users_group_id, None, "user_group"]],
            cur_user=apiuser)
        audit_logger.store_api(
            'repo.edit.permissions',
            action_data={
                'added': changes['added'],
                'updated': changes['updated'],
                'deleted': changes['deleted'],
            },
            user=apiuser, repo=repo)
        Session().commit()
        PermissionModel().flush_user_permission_caches(changes)

        msg = 'Revoked perm for user group: `{}` in repo: `{}`'.format(
            user_group.users_group_name, repo.repo_name)
        return {'msg': msg, 'success': True}
    except Exception:
        log.exception("Exception occurred while trying revoke "
                      "user group permission on repo")
        raise JSONRPCError(
            'failed to edit permission for user group: `%s` in '
            'repo: `%s`' % (
                user_group.users_group_name, repo.repo_name
            )
        )
2227 2227
2228 2228
@jsonrpc_method()
def pull(request, apiuser, repoid, remote_uri=Optional(None), sync_large_objects=Optional(False)):
    """
    Triggers a pull on the given repository from a remote location. You
    can use this to keep remote repositories up-to-date.

    This command can only be run using an |authtoken| with admin
    rights to the specified repository. For more information,
    see :ref:`config-token-ref`.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param remote_uri: Optional remote URI to pass in for pull
    :type remote_uri: str
    :param sync_large_objects: Optional flag for pulling LFS objects.
    :type sync_large_objects: bool

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        "msg": "Pulled from url `<remote_url>` on repo `<repository name>`"
        "repository": "<repository name>"
      }
      error :  null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error :  {
         "Unable to push changes from `<remote_url>`"
      }

    """

    repo = get_repo_or_error(repoid)
    remote_uri = Optional.extract(remote_uri)
    # FIX: unwrap the Optional marker. Without extraction the raw
    # Optional(False) instance is forwarded to pull_changes, and being a
    # truthy object it would behave as if LFS syncing was always requested.
    sync_large_objects = Optional.extract(sync_large_objects)
    remote_uri_display = remote_uri or repo.clone_uri_hidden
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        ScmModel().pull_changes(
            repo.repo_name, apiuser.username, remote_uri=remote_uri, sync_large_objects=sync_large_objects)
        return {
            'msg': 'Pulled from url `{}` on repo `{}`'.format(
                remote_uri_display, repo.repo_name),
            'repository': repo.repo_name
        }
    except Exception:
        log.exception("Exception occurred while trying to "
                      "pull changes from remote location")
        raise JSONRPCError(
            'Unable to pull changes from `%s`' % remote_uri_display
        )
2292 2294
2293 2295
@jsonrpc_method()
def strip(request, apiuser, repoid, revision, branch):
    """
    Strips the given revision from the specified repository.

    * This will remove the revision and all of its decendants.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param revision: The revision you wish to strip.
    :type revision: str
    :param branch: The branch from which to strip the revision.
    :type branch: str

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
        "repository": "<repository name>"
      }
      error :  null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error :  {
         "Unable to strip commit <commit_hash> from repo `<repository name>`"
      }

    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    try:
        ScmModel().strip(repo, revision, branch)
        # record the destructive operation in the audit log (commit=True
        # persists the audit entry immediately)
        audit_logger.store_api(
            'repo.commit.strip', action_data={'commit_id': revision},
            repo=repo,
            user=apiuser, commit=True)

        msg = 'Stripped commit {} from repo `{}`'.format(
            revision, repo.repo_name)
        return {'msg': msg, 'repository': repo.repo_name}
    except Exception:
        log.exception("Exception while trying to strip")
        raise JSONRPCError(
            'Unable to strip commit {} from repo `{}`'.format(
                revision, repo.repo_name)
        )
2361 2363
2362 2364
@jsonrpc_method()
def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
    """
    Returns all settings for a repository. If key is given it only returns the
    setting identified by the key or null.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param key: Key of the setting to return.
    :type: key: Optional(str)

    Example output:

    .. code-block:: bash

        {
            "error": null,
            "id": 237,
            "result": {
                "extensions_largefiles": true,
                "extensions_evolve": true,
                "hooks_changegroup_push_logger": true,
                "hooks_changegroup_repo_size": false,
                "hooks_outgoing_pull_logger": true,
                "phases_publish": "True",
                "rhodecode_hg_use_rebase_for_merging": true,
                "rhodecode_pr_merge_enabled": true,
                "rhodecode_use_outdated_comments": true
            }
        }
    """

    # Restrict access to this api method to super-admins, and repo admins only.
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    try:
        vcs_settings = VcsSettingsModel(repo=repo)
        # repo-level settings override the global defaults
        settings = vcs_settings.get_global_settings()
        settings.update(vcs_settings.get_repo_settings())

        # If only a single setting is requested fetch it from all settings.
        key = Optional.extract(key)
        if key is not None:
            settings = settings.get(key, None)
    except Exception:
        msg = f'Failed to fetch settings for repository `{repoid}`'
        log.exception(msg)
        raise JSONRPCError(msg)

    return settings
2418 2420
2419 2421
@jsonrpc_method()
def set_repo_settings(request, apiuser, repoid, settings):
    """
    Update repository settings. Returns true on success.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param settings: The new settings for the repository.
    :type: settings: dict

    Example output:

    .. code-block:: bash

        {
            "error": null,
            "id": 237,
            "result": true
        }
    """
    # Restrict access to this api method to super-admins, and repo admins only.
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # FIX: use isinstance instead of an exact type() check so dict
    # subclasses (e.g. OrderedDict produced by some JSON layers) are
    # accepted as well.
    if not isinstance(settings, dict):
        raise JSONRPCError('Settings have to be a JSON Object.')

    try:
        settings_model = VcsSettingsModel(repo=repoid)

        # Merge global, repo and incoming settings.
        new_settings = settings_model.get_global_settings()
        new_settings.update(settings_model.get_repo_settings())
        new_settings.update(settings)

        # Update the settings.
        inherit_global_settings = new_settings.get(
            'inherit_global_settings', False)
        settings_model.create_or_update_repo_settings(
            new_settings, inherit_global_settings=inherit_global_settings)
        Session().commit()
    except Exception:
        msg = f'Failed to update settings for repository `{repoid}`'
        log.exception(msg)
        raise JSONRPCError(msg)

    # Indicate success.
    return True
2472 2474
2473 2475
@jsonrpc_method()
def maintenance(request, apiuser, repoid):
    """
    Triggers a maintenance on the given repository.

    This command can only be run using an |authtoken| with admin
    rights to the specified repository. For more information,
    see :ref:`config-token-ref`.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        "msg": "executed maintenance command",
        "executed_actions": [
           <action_message>, <action_message2>...
        ],
        "repository": "<repository name>"
      }
      error :  null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error :  {
         "Unable to execute maintenance on `<reponame>`"
      }

    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    try:
        # delegate the actual work to the maintenance engine and report
        # back which actions it ran
        runner = repo_maintenance.RepoMaintenance()
        executed_actions = runner.execute(repo)

        return {
            'msg': 'executed maintenance command',
            'executed_actions': executed_actions,
            'repository': repo.repo_name
        }
    except Exception:
        log.exception("Exception occurred while trying to run maintenance")
        raise JSONRPCError(
            'Unable to execute maintenance on `%s`' % repo.repo_name)
@@ -1,62 +1,62 b''
1 1 # Copyright (C) 2017-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20
21 21 from pyramid.httpexceptions import HTTPFound
22 22
23 23
24 24 from rhodecode.apps._base import RepoAppView
25 25 from rhodecode.lib import helpers as h
26 26 from rhodecode.lib.auth import (
27 27 LoginRequired, CSRFRequired, HasRepoPermissionAnyDecorator)
28 28 from rhodecode.model.scm import ScmModel
29 29
30 30 log = logging.getLogger(__name__)
31 31
32 32
class RepoSettingsRemoteView(RepoAppView):
    """
    Admin views for the repository "remote" settings page: renders the edit
    form and triggers a pull from the configured remote location.
    """

    def load_default_context(self):
        c = self._get_local_tmpl_context()
        return c

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.admin')
    def repo_remote_edit_form(self):
        # GET view: just render the "remote" settings tab
        c = self.load_default_context()
        c.active = 'remote'

        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.admin')
    @CSRFRequired()
    def repo_remote_pull_changes(self):
        # POST view: pull from the remote, then always redirect back to the
        # settings page (success or failure is reported via flash messages)
        _ = self.request.translate
        self.load_default_context()

        try:
            # sync_large_objects=True makes the pull also synchronize
            # large/LFS objects along with the regular changesets
            ScmModel().pull_changes(
                self.db_repo_name, self._rhodecode_user.username, sync_large_objects=True)
            h.flash(_('Pulled from remote location'), category='success')
        except Exception:
            log.exception("Exception during pull from remote")
            h.flash(_('An error occurred during pull from remote location'),
                    category='error')
        raise HTTPFound(
            h.route_path('edit_repo_remote', repo_name=self.db_repo_name))
@@ -1,1053 +1,1053 b''
1 1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 GIT repository module
21 21 """
22 22
23 23 import logging
24 24 import os
25 25 import re
26 26
27 27 from zope.cachedescriptors.property import Lazy as LazyProperty
28 28
29 29 from collections import OrderedDict
30 30 from rhodecode.lib.datelib import (
31 31 utcdate_fromtimestamp, makedate, date_astimestamp)
32 32 from rhodecode.lib.hash_utils import safe_str
33 33 from rhodecode.lib.utils2 import CachedProperty
34 34 from rhodecode.lib.vcs import connection, path as vcspath
35 35 from rhodecode.lib.vcs.backends.base import (
36 36 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 37 MergeFailureReason, Reference)
38 38 from rhodecode.lib.vcs.backends.git.commit import GitCommit
39 39 from rhodecode.lib.vcs.backends.git.diff import GitDiff
40 40 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
41 41 from rhodecode.lib.vcs.exceptions import (
42 42 CommitDoesNotExistError, EmptyRepositoryError,
43 43 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
44 44
45 45
46 46 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
47 47
48 48 log = logging.getLogger(__name__)
49 49
50 50
51 51 class GitRepository(BaseRepository):
52 52 """
53 53 Git repository backend.
54 54 """
55 55 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
56 56 DEFAULT_REF = f'branch:{DEFAULT_BRANCH_NAME}'
57 57
58 58 contact = BaseRepository.DEFAULT_CONTACT
59 59
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Initialize (and optionally create/clone) the git repository backend.

        :param repo_path: filesystem path of the repository
        :param config: optional Config object; defaults to ``get_default_config()``
        :param create: create a new repository at ``repo_path``
        :param src_url: optional source URL to clone/fetch from on creation
        :param do_workspace_checkout: checkout a working copy after pulling
        :param with_wire: vcsserver wire options; caching is off by default
        :param bare: create/treat the repository as bare (no working copy)
        """
        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        self._init_repo(create, src_url, do_workspace_checkout, bare)

        # caches: commit_id -> index lookup, rebuilt by _rebuild_cache()
        self._commit_ids = {}
71 71
    @LazyProperty
    def _remote(self):
        # lazy handle to the vcsserver-side git implementation for this repo
        repo_id = self.path
        return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
76 76
    @LazyProperty
    def bare(self):
        # True when the repository has no working copy (asked once, then cached)
        return self._remote.bare()
80 80
    @LazyProperty
    def head(self):
        # sha the repository HEAD currently points at (cached)
        return self._remote.head()
84 84
    @CachedProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being lazy
        attribute allows external tools to inject commit ids from cache.
        """
        commit_ids = self._get_all_commit_ids()
        # keep the id -> index lookup in sync with the freshly loaded list
        self._rebuild_cache(commit_ids)
        return commit_ids
94 94
95 95 def _rebuild_cache(self, commit_ids):
96 96 self._commit_ids = {commit_id: index
97 97 for index, commit_id in enumerate(commit_ids)}
98 98
    def run_git_command(self, cmd, **opts):
        """
        Runs given ``cmd`` as git command and returns tuple
        (stdout, stderr).

        :param cmd: git command to be executed, as a list of arguments
        :param opts: env options to pass into Subprocess command
        :raises ValueError: when ``cmd`` is not a list
        """
        if not isinstance(cmd, list):
            raise ValueError(f'cmd must be a list, got {type(cmd)} instead')

        # callers can silence the stderr logging for commands where stderr
        # output is expected (e.g. progress messages)
        skip_stderr_log = opts.pop('skip_stderr_log', False)
        out, err = self._remote.run_git_command(cmd, **opts)
        if err and not skip_stderr_log:
            log.debug('Stderr output of git command "%s":\n%s', cmd, err)
        return out, err
115 115
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happened that git will issue basic
        auth request that can cause whole API to hang when used from python
        or other external calls.

        On failures it'll raise an HTTPError; an exception is also raised
        when the return code is non 200.
        """
        # local paths are always considered valid, no remote check needed
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # strip scheme prefixes like "git+https" down to the real scheme
        if '+' in url.split('://', 1)[0]:
            url = url.split('+', 1)[1]

        # Request the _remote to verify the url
        return connection.Git.check_url(url, config.serialize())
136 136
137 137 @staticmethod
138 138 def is_valid_repository(path):
139 139 if os.path.isdir(os.path.join(path, '.git')):
140 140 return True
141 141 # check case of bare repository
142 142 try:
143 143 GitRepository(path)
144 144 return True
145 145 except VCSError:
146 146 pass
147 147 return False
148 148
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
                   bare=False):
        """
        Create or validate the on-disk repository backing this instance.

        :param create: create a fresh repository (bare or with a working copy)
        :param src_url: optional URL to clone from; validated before any action
        :param do_workspace_checkout: checkout the working copy after pulling
        :param bare: initialize as a bare repository
        :raises RepositoryError: on conflicting options, a pre-existing path,
            or when ``self.path`` is not a git repository
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                f"Cannot create repository at {self.path}, location already exist")

        if bare and do_workspace_checkout:
            raise RepositoryError("Cannot update a bare repository")
        try:

            if src_url:
                # check URL before any actions
                GitRepository.check_url(src_url, self.config)

            if create:
                if bare:
                    self._remote.init_bare()
                else:
                    self._remote.init()

                if src_url and bare:
                    # bare repository only allows a fetch and checkout is not allowed
                    self.fetch(src_url, commit_ids=None)
                elif src_url:
                    self.pull(src_url, commit_ids=None,
                              update_after=do_workspace_checkout)

            else:
                # not creating: verify the path really is a git repository
                if not self._remote.assert_correct_path():
                    raise RepositoryError(
                        f'Path "{self.path}" does not contain a Git repository')

        # TODO: johbo: check if we have to translate the OSError here
        except OSError as err:
            raise RepositoryError(err)
184 184
    def _get_all_commit_ids(self):
        # full list of commit shas known to the vcsserver for this repo
        return self._remote.get_all_commit_ids()
187 187
    def _get_commit_ids(self, filters=None):
        """
        Return commit shas via ``git rev-list``, oldest first, optionally
        restricted by a ``filters`` dict with keys ``since``, ``until`` and
        ``branch_name``.
        """
        # we must check if this repo is not empty, since later command
        # fails if it is. And it's cheaper to ask than throw the subprocess
        # errors

        head = self._remote.head(show_exc=False)

        if not head:
            return []

        rev_filter = ['--branches', '--tags']
        extra_filter = []

        if filters:
            if filters.get('since'):
                extra_filter.append('--since=%s' % (filters['since']))
            if filters.get('until'):
                extra_filter.append('--until=%s' % (filters['until']))
            if filters.get('branch_name'):
                # a concrete branch replaces the default --branches/--tags scope
                rev_filter = []
                extra_filter.append(filters['branch_name'])
            rev_filter.extend(extra_filter)

        # if filters.get('start') or filters.get('end'):
        #     # skip is offset, max-count is limit
        #     if filters.get('start'):
        #         extra_filter += ' --skip=%s' % filters['start']
        #     if filters.get('end'):
        #         extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))

        cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
        try:
            output, __ = self.run_git_command(cmd)
        except RepositoryError:
            # Can be raised for empty repositories
            return []
        return output.splitlines()
225 225
    def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
        """
        Resolve a commit reference (sha, tag/branch name, numeric index, or a
        null-ish/HEAD alias) into a full commit sha.

        :raises CommitDoesNotExistError: when the reference cannot be resolved
        """

        def is_null(value):
            # true for all-zero placeholder ids: every character is '0'
            return len(value) == commit_id_or_idx.count('0')

        # HEAD-like aliases resolve to the most recent commit
        if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
            return self.commit_ids[-1]

        commit_missing_err = "Commit {} does not exist for `{}`".format(
            *map(safe_str, [commit_id_or_idx, self.name]))

        is_bstr = isinstance(commit_id_or_idx, str)
        is_branch = reference_obj and reference_obj.branch

        lookup_ok = False
        if is_bstr:
            # Need to call remote to translate id for tagging scenarios,
            # or branch that are numeric
            try:
                remote_data = self._remote.get_object(commit_id_or_idx,
                                                      maybe_unreachable=maybe_unreachable)
                commit_id_or_idx = remote_data["commit_id"]
                lookup_ok = True
            except (CommitDoesNotExistError,):
                lookup_ok = False

        if lookup_ok is False:
            # fall back to interpreting the value as a numeric index into
            # commit_ids (short digit-strings or real ints), or a null id
            is_numeric_idx = \
                (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
                or isinstance(commit_id_or_idx, int)
            if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
                try:
                    commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
                    lookup_ok = True
                except Exception:
                    raise CommitDoesNotExistError(commit_missing_err)

        # we failed regular lookup, and by integer number lookup
        if lookup_ok is False:
            raise CommitDoesNotExistError(commit_missing_err)

        # Ensure we return full id
        if not SHA_PATTERN.match(str(commit_id_or_idx)):
            raise CommitDoesNotExistError(
                "Given commit id %s not recognized" % commit_id_or_idx)
        return commit_id_or_idx
272 272
273 273 def get_hook_location(self):
274 274 """
275 275 returns absolute path to location where hooks are stored
276 276 """
277 277 loc = os.path.join(self.path, 'hooks')
278 278 if not self.bare:
279 279 loc = os.path.join(self.path, '.git', 'hooks')
280 280 return loc
281 281
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # empty/unreadable repo: fall back to filesystem mtime
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
293 293
294 294 def _get_fs_mtime(self):
295 295 idx_loc = '' if self.bare else '.git'
296 296 # fallback to filesystem
297 297 in_path = os.path.join(self.path, idx_loc, "index")
298 298 he_path = os.path.join(self.path, idx_loc, "HEAD")
299 299 if os.path.exists(in_path):
300 300 return os.stat(in_path).st_mtime
301 301 else:
302 302 return os.stat(he_path).st_mtime
303 303
    @LazyProperty
    def description(self):
        # repository description from git, falling back to the backend default
        description = self._remote.get_description()
        return safe_str(description or self.DEFAULT_DESCRIPTION)
308 308
309 309 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
310 310 if self.is_empty():
311 311 return OrderedDict()
312 312
313 313 result = []
314 314 for ref, sha in self._refs.items():
315 315 if ref.startswith(prefix):
316 316 ref_name = ref
317 317 if strip_prefix:
318 318 ref_name = ref[len(prefix):]
319 319 result.append((safe_str(ref_name), sha))
320 320
321 321 def get_name(entry):
322 322 return entry[0]
323 323
324 324 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
325 325
    def _get_branches(self):
        # branch name -> sha mapping, derived from refs/heads/*
        return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
328 328
    @CachedProperty
    def branches(self):
        # cached branch name -> sha mapping
        return self._get_branches()
332 332
    @CachedProperty
    def branches_closed(self):
        # always empty: git has no closed-branch concept; kept for backend
        # interface parity (presumably with the Mercurial backend)
        return {}
336 336
    @CachedProperty
    def bookmarks(self):
        # always empty: bookmarks are not a git concept; interface parity only
        return {}
340 340
341 341 @CachedProperty
342 342 def branches_all(self):
343 343 all_branches = {}
344 344 all_branches.update(self.branches)
345 345 all_branches.update(self.branches_closed)
346 346 return all_branches
347 347
    @CachedProperty
    def tags(self):
        # cached tag name -> sha mapping
        return self._get_tags()
351 351
    def _get_tags(self):
        # tag name -> sha mapping from refs/tags/*, sorted name-descending
        return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
354 354
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        # TODO: fix this method to apply annotated tags correct with message
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        .. note:: ``user`` and ``date`` are currently not used by the body
           (lightweight ref only) — see the TODO above.

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        message = message or f"Added tag {name} for commit {commit.raw_id}"

        self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)

        # cached ref data is now stale
        self._invalidate_prop_cache('tags')
        self._invalidate_prop_cache('_refs')

        return commit
380 380
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        .. note:: ``user``, ``message`` and ``date`` are accepted for
           interface parity but not used by this implementation.

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        self._remote.tag_remove(name)
        # cached ref data is now stale
        self._invalidate_prop_cache('tags')
        self._invalidate_prop_cache('_refs')
398 398
    def _get_refs(self):
        # full ref name -> sha mapping straight from the vcsserver
        return self._remote.get_refs()
401 401
    @CachedProperty
    def _refs(self):
        # cached copy of all refs; invalidated whenever refs are mutated
        return self._get_refs()
405 405
406 406 @property
407 407 def _ref_tree(self):
408 408 node = tree = {}
409 409 for ref, sha in self._refs.items():
410 410 path = ref.split('/')
411 411 for bit in path[:-1]:
412 412 node = node.setdefault(bit, {})
413 413 node[path[-1]] = sha
414 414 node = tree
415 415 return tree
416 416
417 417 def get_remote_ref(self, ref_name):
418 418 ref_key = f'refs/remotes/origin/{safe_str(ref_name)}'
419 419 try:
420 420 return self._refs[ref_key]
421 421 except Exception:
422 422 return
423 423
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=True, maybe_unreachable=False, reference_obj=None):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.

        :param commit_id: commit sha / symbolic reference
        :param commit_idx: numeric position in ``commit_ids`` (may be negative)
        :param pre_load: commit attributes to pre-load in bulk
        :param translate_tag: resolve tags/branches via ``_lookup_commit``
        :raises EmptyRepositoryError: when the repository has no commits
        """

        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return GitCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize negative indexes to their positive position
                    commit_idx = self.commit_ids.index(_commit_id)
                return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        if translate_tag:
            commit_id = self._lookup_commit(
                commit_id, maybe_unreachable=maybe_unreachable,
                reference_obj=reference_obj)

        try:
            idx = self._commit_ids[commit_id]
        except KeyError:
            # not in the cached list (e.g. unreachable object): unknown index
            idx = -1

        return GitCommit(self, commit_id, idx, pre_load=pre_load)
466 466
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
        """
        Returns generator of `GitCommit` objects from start to end (both
        are inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
          `end` could not be found.

        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # translate ids into positions within commit_ids
        start_raw_id = self._lookup_commit(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._lookup_commit(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end position inclusive for slicing
            end_pos += 1

        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        # if start_pos or end_pos:
        #     filter_.append({'start': start_pos})
        #     filter_.append({'end': end_pos})

        if filter_:
            # date/branch filters require a fresh rev-list query
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_commit_ids(filters=revfilters)

        else:
            commit_ids = self.commit_ids

        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load,
                                   translate_tag=translate_tags)
546 546
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        ``commit2`` since ``commit1``.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until ``commit2``
        :param commit2: Until which commits changes should be shown.
        :param path: optional path to restrict the diff to
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: must equal ``path`` when given (diffing two different
          paths is not supported by this backend)
        :raises ValueError: when ``path1`` differs from ``path``
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = path
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_ignorews=ignore_whitespace,
            context=context)

        return GitDiff(diff)
580 580
    def strip(self, commit_id, branch_name):
        """
        Remove ``commit_id`` from ``branch_name`` by resetting the branch
        head to the commit's first parent.

        :returns: the new total number of commits in the repository
        :raises Exception: when ``commit_id`` is a merge commit
        """
        commit = self.get_commit(commit_id=commit_id)
        if commit.merge:
            raise Exception('Cannot reset to merge commit')

        if not branch_name:
            raise ValueError(f'git strip requires a valid branch name, got {branch_name} instead')

        # parent is going to be the new head now
        commit = commit.parents[0]
        self._remote.update_refs(f'refs/heads/{branch_name}', commit.raw_id)

        # clear cached properties
        self._invalidate_prop_cache('commit_ids')
        self._invalidate_prop_cache('_refs')
        self._invalidate_prop_cache('branches')

        return len(self.commit_ids)
599 599
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return the sha of the common ancestor of ``commit_id1`` (in this repo)
        and ``commit_id2`` (in ``repo2``), or None when one cannot be found.
        """
        log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
                  self, commit_id1, repo2, commit_id2)

        if commit_id1 == commit_id2:
            return commit_id1

        if self != repo2:
            # cross-repository case: walk the missing revs reported by the remote
            commits = self._remote.get_missing_revs(
                commit_id1, commit_id2, repo2.path)
            if commits:
                commit = repo2.get_commit(commits[-1])
                if commit.parents:
                    ancestor_id = commit.parents[0].raw_id
                else:
                    ancestor_id = None
            else:
                # no commits from other repo, ancestor_id is the commit_id2
                ancestor_id = commit_id2
        else:
            # same repository: git can answer directly
            output, __ = self.run_git_command(
                ['merge-base', commit_id1, commit_id2])
            ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]

        log.debug('Found common ancestor with sha: %s', ancestor_id)

        return ancestor_id
627 627
628 628 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
629 629 repo1 = self
630 630 ancestor_id = None
631 631
632 632 if commit_id1 == commit_id2:
633 633 commits = []
634 634 elif repo1 != repo2:
635 635 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
636 636 repo2.path)
637 637 commits = [
638 638 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
639 639 for commit_id in reversed(missing_ids)]
640 640 else:
641 641 output, __ = repo1.run_git_command(
642 642 ['log', '--reverse', '--pretty=format: %H', '-s',
643 643 f'{commit_id1}..{commit_id2}'])
644 644 commits = [
645 645 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
646 646 for commit_id in self.COMMIT_ID_PAT.findall(output)]
647 647
648 648 return commits
649 649
    @LazyProperty
    def in_memory_commit(self):
        """
        Returns ``GitInMemoryCommit`` object for this repository.
        """
        return GitInMemoryCommit(self)
656 656
    def pull(self, url, commit_ids=None, update_after=False):
        """
        Pull changes from external location. Pull is different in GIT
        that fetch since it's doing a checkout

        :param url: source location to pull from
        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        :param update_after: checkout the working copy after pulling
        """
        refs = None
        if commit_ids is not None:
            # translate wanted commit ids into the remote refs pointing at them
            remote_refs = self._remote.get_remote_refs(url)
            refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
        self._remote.pull(url, refs=refs, update_after=update_after)
        # local state changed; drop the vcsserver-side cache
        self._remote.invalidate_vcs_cache()
671 671
    def fetch(self, url, commit_ids=None, **kwargs):
        """
        Fetch all git objects from external location.

        :param url: source location to fetch from
        :param commit_ids: optional list of refs/commit ids to restrict the fetch
        :param kwargs: extra options forwarded verbatim to the vcsserver
            ``sync_fetch`` call (e.g. the large-objects/LFS sync flag)
        """
        self._remote.sync_fetch(url, refs=commit_ids, **kwargs)
        # local object store changed; drop the vcsserver-side cache
        self._remote.invalidate_vcs_cache()
678 678
679 679 def push(self, url):
680 680 refs = None
681 681 self._remote.sync_push(url, refs=refs)
682 682
    def set_refs(self, ref_name, commit_id):
        """Point ref ``ref_name`` at ``commit_id`` and drop the cached refs."""
        self._remote.set_refs(ref_name, commit_id)
        self._invalidate_prop_cache('_refs')
686 686
    def remove_ref(self, ref_name):
        """Delete ref ``ref_name`` and drop the cached refs."""
        self._remote.remove_ref(ref_name)
        self._invalidate_prop_cache('_refs')
690 690
691 691 def run_gc(self, prune=True):
692 692 cmd = ['gc', '--aggressive']
693 693 if prune:
694 694 cmd += ['--prune=now']
695 695 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
696 696 return stderr
697 697
    def _update_server_info(self):
        """
        runs gits update-server-info command in this repo instance
        """
        self._remote.update_server_info()
703 703
    def _current_branch(self):
        """
        Return the name of the current branch.

        It only works for non bare repositories (i.e. repositories with a
        working copy)

        :raises RepositoryError: for bare repositories
        :returns: branch name, or None when the repository is empty
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
        return stdout.strip()
719 719
720 720 def _checkout(self, branch_name, create=False, force=False):
721 721 """
722 722 Checkout a branch in the working directory.
723 723
724 724 It tries to create the branch if create is True, failing if the branch
725 725 already exists.
726 726
727 727 It only works for non bare repositories (i.e. repositories with a
728 728 working copy)
729 729 """
730 730 if self.bare:
731 731 raise RepositoryError('Cannot checkout branches in a bare git repo')
732 732
733 733 cmd = ['checkout']
734 734 if force:
735 735 cmd.append('-f')
736 736 if create:
737 737 cmd.append('-b')
738 738 cmd.append(branch_name)
739 739 self.run_git_command(cmd, fail_on_stderr=False)
740 740
    def _create_branch(self, branch_name, commit_id):
        """
        creates a branch in a GIT repo
        """
        self._remote.create_branch(branch_name, commit_id)
746 746
    def _identify(self):
        """
        Return the current state of the working directory as a commit sha.

        :raises RepositoryError: for bare repositories
        :returns: sha of HEAD, or None when the repository is empty
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
        return stdout.strip()
759 759
    def _local_clone(self, clone_path, branch_name, source_branch=None):
        """
        Create a local clone of the current repo.

        :param clone_path: destination directory for the clone
        :param branch_name: branch to clone/checkout
        :param source_branch: optional extra branch to fetch into the clone
        """
        # N.B.(skreft): the --branch option is required as otherwise the shallow
        # clone will only fetch the active branch.
        cmd = ['clone', '--branch', branch_name,
               self.path, os.path.abspath(clone_path)]

        self.run_git_command(cmd, fail_on_stderr=False)

        # if we get the different source branch, make sure we also fetch it for
        # merge conditions
        if source_branch and source_branch != branch_name:
            # check if the ref exists.
            shadow_repo = GitRepository(os.path.abspath(clone_path))
            if shadow_repo.get_remote_ref(source_branch):
                cmd = ['fetch', self.path, source_branch]
                self.run_git_command(cmd, fail_on_stderr=False)
779 779
    def _local_fetch(self, repository_path, branch_name, use_origin=False):
        """
        Fetch a branch from a local repository.

        :param repository_path: path of the repository to fetch from
        :param branch_name: branch to fetch
        :param use_origin: when True, force-update the local branch of the
            same name (``+src:refs/heads/src`` refspec)
        :raises ValueError: when fetching from this same repository
        """
        repository_path = os.path.abspath(repository_path)
        if repository_path == self.path:
            raise ValueError('Cannot fetch from the same repository')

        if use_origin:
            # '+' allows a non-fast-forward update of the local branch ref
            branch_name = '+{branch}:refs/heads/{branch}'.format(
                branch=branch_name)

        cmd = ['fetch', '--no-tags', '--update-head-ok',
               repository_path, branch_name]
        self.run_git_command(cmd, fail_on_stderr=False)
795 795
796 796 def _local_reset(self, branch_name):
797 797 branch_name = f'{branch_name}'
798 798 cmd = ['reset', '--hard', branch_name, '--']
799 799 self.run_git_command(cmd, fail_on_stderr=False)
800 800
    def _last_fetch_heads(self):
        """
        Return the last fetched heads that need merging.

        The algorithm is defined at
        https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
        """
        # FETCH_HEAD lives under .git/ for non-bare repos, top-level for bare
        if not self.bare:
            fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
        else:
            fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')

        heads = []
        with open(fetch_heads_path) as f:
            for line in f:
                if '    not-for-merge    ' in line:
                    continue
                # keep only the leading sha, dropping everything after the tab
                line = re.sub('\t.*', '', line, flags=re.DOTALL)
                heads.append(line)

        return heads
822 822
    def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
        # backend instance for a shadow (merge-workspace) repository;
        # `enable_hooks` is accepted for interface parity but not used here
        return GitRepository(shadow_repository_path, with_wire={"cache": cache})
825 825
826 826 def _local_pull(self, repository_path, branch_name, ff_only=True):
827 827 """
828 828 Pull a branch from a local repository.
829 829 """
830 830 if self.bare:
831 831 raise RepositoryError('Cannot pull into a bare git repository')
832 832 # N.B.(skreft): The --ff-only option is to make sure this is a
833 833 # fast-forward (i.e., we are only pulling new changes and there are no
834 834 # conflicts with our current branch)
835 835 # Additionally, that option needs to go before --no-tags, otherwise git
836 836 # pull complains about it being an unknown flag.
837 837 cmd = ['pull']
838 838 if ff_only:
839 839 cmd.append('--ff-only')
840 840 cmd.extend(['--no-tags', repository_path, branch_name])
841 841 self.run_git_command(cmd, fail_on_stderr=False)
842 842
    def _local_merge(self, merge_message, user_name, user_email, heads):
        """
        Merge the given head into the checked out branch.

        It will force a merge commit.

        Currently it raises an error if the repo is empty, as it is not possible
        to create a merge commit in an empty repo.

        :param merge_message: The message to use for the merge commit.
        :param user_name: name recorded as the author of the merge commit.
        :param user_email: email recorded as the author of the merge commit.
        :param heads: the heads to merge.
        :raises RepositoryError: if the repo is bare/empty or the merge fails
            for a reason other than file conflicts.
        :raises UnresolvedFilesInRepo: if the merge left conflicted files.
        """
        if self.bare:
            raise RepositoryError('Cannot merge into a bare git repository')

        if not heads:
            # nothing to merge
            return

        if self.is_empty():
            # TODO(skreft): do something more robust in this case.
            raise RepositoryError('Do not know how to merge into empty repositories yet')
        unresolved = None

        # N.B.(skreft): the --no-ff option is used to enforce the creation of a
        # commit message. We also specify the user who is doing the merge.
        cmd = ['-c', f'user.name="{user_name}"',
               '-c', f'user.email={user_email}',
               'merge', '--no-ff', '-m', safe_str(merge_message)]

        merge_cmd = cmd + heads

        try:
            self.run_git_command(merge_cmd, fail_on_stderr=False)
        except RepositoryError:
            # collect the files git left in conflicted (U) state
            files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
                                         fail_on_stderr=False)[0].splitlines()
            # NOTE(marcink): we add U notation for consistent with HG backend output
            unresolved = [f'U {f}' for f in files]

            # Cleanup any merge leftovers
            self._remote.invalidate_vcs_cache()
            self.run_git_command(['merge', '--abort'], fail_on_stderr=False)

            if unresolved:
                raise UnresolvedFilesInRepo(unresolved)
            else:
                raise
890 890
891 891 def _local_push(
892 892 self, source_branch, repository_path, target_branch,
893 893 enable_hooks=False, rc_scm_data=None):
894 894 """
895 895 Push the source_branch to the given repository and target_branch.
896 896
897 897 Currently it if the target_branch is not master and the target repo is
898 898 empty, the push will work, but then GitRepository won't be able to find
899 899 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
900 900 pointing to master, which does not exist).
901 901
902 902 It does not run the hooks in the target repo.
903 903 """
904 904 # TODO(skreft): deal with the case in which the target repo is empty,
905 905 # and the target_branch is not master.
906 906 target_repo = GitRepository(repository_path)
907 907 if (not target_repo.bare and
908 908 target_repo._current_branch() == target_branch):
909 909 # Git prevents pushing to the checked out branch, so simulate it by
910 910 # pulling into the target repository.
911 911 target_repo._local_pull(self.path, source_branch)
912 912 else:
913 913 cmd = ['push', os.path.abspath(repository_path),
914 914 f'{source_branch}:{target_branch}']
915 915 gitenv = {}
916 916 if rc_scm_data:
917 917 gitenv.update({'RC_SCM_DATA': rc_scm_data})
918 918
919 919 if not enable_hooks:
920 920 gitenv['RC_SKIP_HOOKS'] = '1'
921 921 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
922 922
923 923 def _get_new_pr_branch(self, source_branch, target_branch):
924 924 prefix = f'pr_{source_branch}-{target_branch}_'
925 925 pr_branches = []
926 926 for branch in self.branches:
927 927 if branch.startswith(prefix):
928 928 pr_branches.append(int(branch[len(prefix):]))
929 929
930 930 if not pr_branches:
931 931 branch_id = 0
932 932 else:
933 933 branch_id = max(pr_branches) + 1
934 934
935 935 return '%s%d' % (prefix, branch_id)
936 936
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        """
        Ensure a shadow repository exists for the given merge workspace and
        return its path; it is cloned from this repository on first use.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(
                shadow_repository_path, target_ref.name, source_ref.name)
            log.debug('Prepared %s shadow repository in %s',
                      self.alias, shadow_repository_path)

        return shadow_repository_path
948 948
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Attempt (and optionally perform) a merge of `source_ref` from
        `source_repo` into `target_ref` of this repository, using a shadow
        repository as a scratch area.  Returns a ``MergeResponse`` describing
        whether the merge is possible and whether it was pushed back.
        With ``dry_run`` the merge is tested in the shadow repo only.
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)

        # bail out early if target moved since the caller looked at it
        if target_ref.commit_id != self.branches[target_ref.name]:
            log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
                        target_ref.commit_id, self.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        # checkout source, if it's different. Otherwise we could not
        # fetch proper commits for merge testing
        if source_ref.name != target_ref.name:
            if shadow_repo.get_remote_ref(source_ref.name):
                shadow_repo._checkout(source_ref.name, force=True)

        # checkout target, and fetch changes
        shadow_repo._checkout(target_ref.name, force=True)

        # fetch/reset pull the target, in case it is changed
        # this handles even force changes
        shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
        shadow_repo._local_reset(target_ref.name)

        # Need to reload repo to invalidate the cache, or otherwise we cannot
        # retrieve the last target commit.
        shadow_repo = self.get_shadow_instance(shadow_repository_path)
        if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
            log.warning('Shadow Target ref %s commit mismatch %s vs %s',
                        target_ref, target_ref.commit_id,
                        shadow_repo.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        # calculate new branch
        pr_branch = shadow_repo._get_new_pr_branch(
            source_ref.name, target_ref.name)
        log.debug('using pull-request merge branch: `%s`', pr_branch)
        # checkout to temp branch, and fetch changes
        shadow_repo._checkout(pr_branch, create=True)
        try:
            shadow_repo._local_fetch(source_repo.path, source_ref.name)
        except RepositoryError:
            log.exception('Failure when doing local fetch on '
                          'shadow repo: %s', shadow_repo)
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}
        try:
            shadow_repo._local_merge(merge_message, merger_name, merger_email,
                                     [source_ref.commit_id])
            merge_possible = True

            # Need to invalidate the cache, or otherwise we
            # cannot retrieve the merge commit.
            shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
            merge_commit_id = shadow_repo.branches[pr_branch]

            # Set a reference pointing to the merge commit. This reference may
            # be used to easily identify the last successful merge commit in
            # the shadow repository.
            shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
            merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
        except RepositoryError as e:
            log.exception('Failure when doing local merge on git shadow repo')
            if isinstance(e, UnresolvedFilesInRepo):
                metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))

            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED

        if merge_possible and not dry_run:
            try:
                shadow_repo._local_push(
                    pr_branch, self.path, target_ref.name, enable_hooks=True,
                    rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
                merge_succeeded = True
            except RepositoryError:
                log.exception(
                    'Failure when doing local push from the shadow '
                    'repository to the target repository at %s.', self.path)
                merge_succeeded = False
                merge_failure_reason = MergeFailureReason.PUSH_FAILED
                metadata['target'] = 'git shadow repo'
                metadata['merge_commit'] = pr_branch
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
@@ -1,1024 +1,1024 b''
1 1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 HG repository module
21 21 """
22 22 import os
23 23 import logging
24 24 import binascii
25 25 import configparser
26 26 import urllib.request
27 27 import urllib.parse
28 28 import urllib.error
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from collections import OrderedDict
33 33 from rhodecode.lib.datelib import (
34 34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 35 from rhodecode.lib.str_utils import safe_str
36 36 from rhodecode.lib.utils2 import CachedProperty
37 37 from rhodecode.lib.vcs import connection, exceptions
38 38 from rhodecode.lib.vcs.backends.base import (
39 39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47 47
48 48 hexlify = binascii.hexlify
49 49 nullid = "\0" * 20
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Raises RepositoryError if repository could not be find at the given
        ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
            it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param do_workspace_checkout=False: sets update of working copy after
            making a clone
        :param with_wire=None: wire-protocol options, e.g. ``{"cache": True}``
        :param bare: not used, compatible with other VCS
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present
        # because sometimes we init the repos with config we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '')])

        # NOTE(marcink): since python3 hgsubversion is deprecated.
        # From old installations we might still have this set enabled
        # we explicitly remove this now here to make sure it wont propagate further
        if config and config.get('extensions', 'hgsubversion') is not None:
            config.drop_option('extensions', 'hgsubversion')

        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        # may clone/create on disk depending on create/src_url
        self._init_repo(create, src_url, do_workspace_checkout)

        # caches: commit id -> index mapping, filled lazily by commit_ids
        self._commit_ids = {}
95 95
    @LazyProperty
    def _remote(self):
        # RPC proxy to the vcsserver for this repository, created once on
        # first access; the repo path doubles as the remote repo id.
        repo_id = self.path
        return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
100 100
101 101 @CachedProperty
102 102 def commit_ids(self):
103 103 """
104 104 Returns list of commit ids, in ascending order. Being lazy
105 105 attribute allows external tools to inject shas from cache.
106 106 """
107 107 commit_ids = self._get_all_commit_ids()
108 108 self._rebuild_cache(commit_ids)
109 109 return commit_ids
110 110
111 111 def _rebuild_cache(self, commit_ids):
112 112 self._commit_ids = {commit_id: index
113 113 for index, commit_id in enumerate(commit_ids)}
114 114
    @CachedProperty
    def branches(self):
        # active (not closed) branches only; name -> head commit id
        return self._get_branches()
118 118
    @CachedProperty
    def branches_closed(self):
        # closed branches only; name -> head commit id
        return self._get_branches(active=False, closed=True)
122 122
123 123 @CachedProperty
124 124 def branches_all(self):
125 125 all_branches = {}
126 126 all_branches.update(self.branches)
127 127 all_branches.update(self.branches_closed)
128 128 return all_branches
129 129
130 130 def _get_branches(self, active=True, closed=False):
131 131 """
132 132 Gets branches for this repository
133 133 Returns only not closed active branches by default
134 134
135 135 :param active: return also active branches
136 136 :param closed: return also closed branches
137 137
138 138 """
139 139 if self.is_empty():
140 140 return {}
141 141
142 142 def get_name(ctx):
143 143 return ctx[0]
144 144
145 145 _branches = [(n, h,) for n, h in
146 146 self._remote.branches(active, closed).items()]
147 147
148 148 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
149 149
    @CachedProperty
    def tags(self):
        """
        Gets tags for this repository; mapping of tag name -> commit id.
        """
        return self._get_tags()
156 156
157 157 def _get_tags(self):
158 158 if self.is_empty():
159 159 return {}
160 160
161 161 def get_name(ctx):
162 162 return ctx[0]
163 163
164 164 _tags = [(n, h,) for n, h in
165 165 self._remote.tags().items()]
166 166
167 167 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
168 168
    def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit
        :param kwargs: ``local`` (bool, default False) makes the tag local-only

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)

        commit = self.get_commit(commit_id=commit_id)
        local = kwargs.setdefault('local', False)

        if message is None:
            message = f"Added tag {name} for commit {commit.short_id}"

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
        # tagging creates a commit, so the remote-side cache is stale now
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self._invalidate_prop_cache('tags')
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)
200 200
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        if message is None:
            message = "Removed tag %s" % name
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        # re-tagging to the null revision is how Mercurial removes a tag
        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        self._invalidate_prop_cache('tags')
224 224
    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository; mapping of bookmark name -> commit id.
        """
        return self._get_bookmarks()
231 231
232 232 def _get_bookmarks(self):
233 233 if self.is_empty():
234 234 return {}
235 235
236 236 def get_name(ctx):
237 237 return ctx[0]
238 238
239 239 _bookmarks = [
240 240 (n, h) for n, h in
241 241 self._remote.bookmarks().items()]
242 242
243 243 return OrderedDict(sorted(_bookmarks, key=get_name))
244 244
    def _get_all_commit_ids(self):
        # 'visible' filter name — presumably excludes hidden/obsolete
        # changesets (hg repo filter views); confirm against vcsserver API
        return self._remote.get_all_commit_ids('visible')
247 247
248 248 def get_diff(
249 249 self, commit1, commit2, path='', ignore_whitespace=False,
250 250 context=3, path1=None):
251 251 """
252 252 Returns (git like) *diff*, as plain text. Shows changes introduced by
253 253 `commit2` since `commit1`.
254 254
255 255 :param commit1: Entry point from which diff is shown. Can be
256 256 ``self.EMPTY_COMMIT`` - in this case, patch showing all
257 257 the changes since empty state of the repository until `commit2`
258 258 :param commit2: Until which commit changes should be shown.
259 259 :param ignore_whitespace: If set to ``True``, would not show whitespace
260 260 changes. Defaults to ``False``.
261 261 :param context: How many lines before/after changed lines should be
262 262 shown. Defaults to ``3``.
263 263 """
264 264 self._validate_diff_commits(commit1, commit2)
265 265 if path1 is not None and path1 != path:
266 266 raise ValueError("Diff of two different paths not supported.")
267 267
268 268 if path:
269 269 file_filter = [self.path, path]
270 270 else:
271 271 file_filter = None
272 272
273 273 diff = self._remote.diff(
274 274 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
275 275 opt_git=True, opt_ignorews=ignore_whitespace,
276 276 context=context)
277 277 return MercurialDiff(diff)
278 278
279 279 def strip(self, commit_id, branch=None):
280 280 self._remote.strip(commit_id, update=False, backup=False)
281 281
282 282 self._remote.invalidate_vcs_cache()
283 283 # clear cache
284 284 self._invalidate_prop_cache('commit_ids')
285 285
286 286 return len(self.commit_ids)
287 287
288 288 def verify(self):
289 289 verify = self._remote.verify()
290 290
291 291 self._remote.invalidate_vcs_cache()
292 292 return verify
293 293
294 294 def hg_update_cache(self):
295 295 update_cache = self._remote.hg_update_cache()
296 296
297 297 self._remote.invalidate_vcs_cache()
298 298 return update_cache
299 299
300 300 def hg_rebuild_fn_cache(self):
301 301 update_cache = self._remote.hg_rebuild_fn_cache()
302 302
303 303 self._remote.invalidate_vcs_cache()
304 304 return update_cache
305 305
306 306 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
307 307 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
308 308 self, commit_id1, repo2, commit_id2)
309 309
310 310 if commit_id1 == commit_id2:
311 311 return commit_id1
312 312
313 313 ancestors = self._remote.revs_from_revspec(
314 314 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
315 315 other_path=repo2.path)
316 316
317 317 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
318 318
319 319 log.debug('Found common ancestor with sha: %s', ancestor_id)
320 320 return ancestor_id
321 321
322 322 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
323 323 if commit_id1 == commit_id2:
324 324 commits = []
325 325 else:
326 326 if merge:
327 327 indexes = self._remote.revs_from_revspec(
328 328 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
329 329 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
330 330 else:
331 331 indexes = self._remote.revs_from_revspec(
332 332 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
333 333 commit_id1, other_path=repo2.path)
334 334
335 335 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
336 336 for idx in indexes]
337 337
338 338 return commits
339 339
340 340 @staticmethod
341 341 def check_url(url, config):
342 342 """
343 343 Function will check given url and try to verify if it's a valid
344 344 link. Sometimes it may happened that mercurial will issue basic
345 345 auth request that can cause whole API to hang when used from python
346 346 or other external calls.
347 347
348 348 On failures it'll raise urllib2.HTTPError, exception is also thrown
349 349 when the return code is non 200
350 350 """
351 351 # check first if it's not an local url
352 352 if os.path.isdir(url) or url.startswith('file:'):
353 353 return True
354 354
355 355 # Request the _remote to verify the url
356 356 return connection.Hg.check_url(url, config.serialize())
357 357
358 358 @staticmethod
359 359 def is_valid_repository(path):
360 360 return os.path.isdir(os.path.join(path, '.hg'))
361 361
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `do_workspace_checkout` flag.

        :raises RepositoryError: when `create` is requested but the path
            already exists.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                f"Cannot create repository at {self.path}, location already exist")

        if src_url:
            url = str(self._get_url(src_url))
            # verify remote is reachable before attempting the clone
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)

        self._remote.localrepository(create)
390 390
    @LazyProperty
    def in_memory_commit(self):
        # in-memory commit builder bound to this repository
        return MercurialInMemoryCommit(self)
394 394
    @LazyProperty
    def description(self):
        # repository description from hgrc ``web.description``, or the default
        description = self._remote.get_config_value(
            'web', 'description', untrusted=True)
        return safe_str(description or self.DEFAULT_DESCRIPTION)
400 400
    @LazyProperty
    def contact(self):
        # contact info from ``web.contact``, falling back to ``ui.username``,
        # then the class default
        contact = (
            self._remote.get_config_value("web", "contact") or
            self._remote.get_config_value("ui", "username"))
        return safe_str(contact or self.DEFAULT_CONTACT)
407 407
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # no usable commits — fall back to filesystem modification time
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
419 419
420 420 def _get_fs_mtime(self):
421 421 # fallback to filesystem
422 422 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
423 423 st_path = os.path.join(self.path, '.hg', "store")
424 424 if os.path.exists(cl_path):
425 425 return os.stat(cl_path).st_mtime
426 426 else:
427 427 return os.stat(st_path).st_mtime
428 428
429 429 def _get_url(self, url):
430 430 """
431 431 Returns normalized url. If schema is not given, would fall
432 432 to filesystem
433 433 (``file:///``) schema.
434 434 """
435 435 if url != 'default' and '://' not in url:
436 436 url = "file:" + urllib.request.pathname2url(url)
437 437 return url
438 438
    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        (for Mercurial, hooks live in the repo's ``.hg/.hgrc`` file)
        """
        return os.path.join(self.path, '.hg', '.hgrc')
444 444
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False, reference_obj=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        When neither is given, the ``tip`` commit is returned.

        :raises EmptyRepositoryError: when the repository has no commits.
        :raises CommitDoesNotExistError: when the lookup fails.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize a negative index to its positive position
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        # case here is no cached version, do an actual lookup instead
        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit {} does not exist for `{}`".format(
                *map(safe_str, [commit_id, self.name]))
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
485 485
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
          ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # slicing is right-exclusive while the API is inclusive
            end_pos += 1

        # build a Mercurial revset expression from the requested filters
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append(f'branch("{branch_name}")')
        elif branch_name and branch_ancestors:
            commit_filter.append(f'ancestors(branch("{branch_name}"))')

        if start_date and not end_date:
            commit_filter.append(f'date(">{start_date}")')
        if end_date and not start_date:
            commit_filter.append(f'date("<{end_date}")')
        if start_date and end_date:
            commit_filter.append(
                f'date(">{start_date}") and date("<{end_date}")')

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
568 568
569 569 def pull(self, url, commit_ids=None):
570 570 """
571 571 Pull changes from external location.
572 572
573 573 :param commit_ids: Optional. Can be set to a list of commit ids
574 574 which shall be pulled from the other repository.
575 575 """
576 576 url = self._get_url(url)
577 577 self._remote.pull(url, commit_ids=commit_ids)
578 578 self._remote.invalidate_vcs_cache()
579 579
580 def fetch(self, url, commit_ids=None):
580 def fetch(self, url, commit_ids=None, **kwargs):
581 581 """
582 582 Backward compatibility with GIT fetch==pull
583 583 """
584 584 return self.pull(url, commit_ids=commit_ids)
585 585
586 586 def push(self, url):
587 587 url = self._get_url(url)
588 588 self._remote.sync_push(url)
589 589
590 590 def _local_clone(self, clone_path):
591 591 """
592 592 Create a local clone of the current repo.
593 593 """
594 594 self._remote.clone(self.path, clone_path, update_after_clone=True,
595 595 hooks=False)
596 596
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param clean: discard uncommitted changes when updating.
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)
603 603
    def _identify(self):
        """
        Return the current state of the working directory.

        Strips the trailing ``+`` marker that Mercurial appends when the
        working directory has uncommitted changes.
        """
        return self._remote.identify().strip().rstrip('+')
609 609
    def _heads(self, branch=None):
        """
        Return the commit ids of the repository heads.

        :param branch: optional branch name to restrict the head lookup to.
        :return: list of commit-id strings (space-separated server response).
        """
        return self._remote.heads(branch=branch).strip().split(' ')
615 615
    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.
        """
        return self._remote.ancestor(revision1, revision2)
621 621
    def _local_push(
            self, revision, repository_path, push_branches=False,
            enable_hooks=False):
        """
        Push the given revision to the specified repository.

        :param revision: single revision to push.
        :param repository_path: path of the target repository.
        :param push_branches: allow to create branches in the target repo.
        :param enable_hooks: when True, run hooks on the receiving side.
        """
        self._remote.push(
            [revision], repository_path, hooks=enable_hooks,
            push_branches=push_branches)
633 633
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param target_ref: reference to merge into (working dir is updated
            to it first).
        :param merge_message: commit message used for the merge commit.
        :param user_name: committer name for the merge commit.
        :param user_email: committer email for the merge commit.
        :param source_ref: reference that is merged into ``target_ref``.
        :param use_rebase: rebase source onto target instead of merging.
        :param close_commit_id: optional branch-close commit that replaces
            the source tip.
        :param dry_run: passed by callers; this method itself commits into
            the (shadow) repo it runs in.
        :raises UnresolvedFilesInRepo: when merge/rebase leaves conflicts.
        """

        source_ref_commit_id = source_ref.commit_id
        target_ref_commit_id = target_ref.commit_id

        # update our workdir to target ref, for proper merge
        self._update(target_ref_commit_id, clean=True)

        ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if close_commit_id:
            # NOTE(marcink): if we get the close commit, this is our new source
            # which will include the close commit itself.
            source_ref_commit_id = close_commit_id

        if ancestor == source_ref_commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref_commit_id, False

        elif ancestor == target_ref_commit_id and is_the_same_branch:
            # Fast-forward case: source already contains target.
            # In this case we should force a commit message
            return source_ref_commit_id, True

        unresolved = None
        if use_rebase:
            try:
                # temporary bookmark marks the rebased head so we can check
                # it out and identify the result afterwards
                bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref_commit_id, dest=target_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str(f'{user_name} <{user_email}>'))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
715 715
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.

        :param target_ref: unused here; kept for call-site symmetry with
            ``_local_merge``.
        :param user_name: committer name for the close commit.
        :param user_email: committer email for the close commit.
        :param source_ref: reference whose branch gets closed.
        :param close_message: optional custom close-commit message.
        """
        self._update(source_ref.commit_id)
        message = close_message or f"Closing branch: `{source_ref.name}`"
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str(f'{user_name} <{user_email}>'),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
737 737
738 738 def _is_the_same_branch(self, target_ref, source_ref):
739 739 return (
740 740 self._get_branch_name(target_ref) ==
741 741 self._get_branch_name(source_ref))
742 742
    def _get_branch_name(self, ref):
        """
        Return the branch name for ``ref``: directly for branch refs,
        otherwise resolved from the commit's changeset context.
        """
        if ref.type == 'branch':
            return ref.name
        return self._remote.ctx_branch(ref.commit_id)
747 747
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
        """
        Create the shadow repository for a merge workspace if it does not
        exist yet, and return its path.

        :param repo_id: id of the repository, used in the shadow path.
        :param workspace_id: merge workspace identifier.
        :param unused_target_ref: unused; kept for interface compatibility.
        :param unused_source_ref: unused; kept for interface compatibility.
        :return: filesystem path of the shadow repository.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path
758 758
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` of ``source_repo`` into ``target_ref`` of this
        repository via a shadow repository, and return a ``MergeResponse``.

        Workflow: validate the target head, prepare/reuse the shadow repo,
        pull both references into it, optionally close the source branch,
        perform the merge (or rebase), and — unless ``dry_run`` — push the
        result back into this repository with hooks enabled.

        :param dry_run: perform the merge in the shadow repo only, never
            push back.
        :param use_rebase: rebase instead of merge inside the shadow repo.
        :param close_branch: close the source branch as part of the merge
            (only honoured for branch refs different from the target).
        """

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)

        # the merge target must be a current head, otherwise the push back
        # would create new heads
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                heads_all = self._heads(target_ref.name)
                max_heads = 10
                if len(heads_all) > max_heads:
                    heads = '\n,'.join(
                        heads_all[:max_heads] +
                        [f'and {len(heads_all)-max_heads} more.'])
                else:
                    heads = '\n,'.join(heads_all)
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:

            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase,
                    close_commit_id=close_commit_id, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                metadata['unresolved_files'] = 'no unresolved files found'

                if isinstance(e, UnresolvedFilesInRepo):
                    all_conflicts = list(e.args[0])
                    max_conflicts = 20
                    if len(all_conflicts) > max_conflicts:
                        conflicts = all_conflicts[:max_conflicts] \
                                    + [f'and {len(all_conflicts)-max_conflicts} more.']
                    else:
                        conflicts = all_conflicts
                    metadata['unresolved_files'] = \
                        '\n* conflict: ' + \
                        ('\n * conflict: '.join(conflicts))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
930 930
    def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
        """
        Return a ``MercurialRepository`` for the shadow repo at the given path.

        :param shadow_repository_path: filesystem path of the shadow repo.
        :param enable_hooks: when False (default) the ``hooks`` config
            section is cleared so no hooks fire inside the shadow repo.
        :param cache: passed through as the vcsserver wire cache flag.
        """
        config = self.config.copy()
        if not enable_hooks:
            config.clear_section('hooks')
        return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
936 936
    def _validate_pull_reference(self, reference):
        """
        Ensure ``reference`` exists as a bookmark, branch or commit in this
        repository, raising ``CommitDoesNotExistError`` otherwise.
        """
        if not (reference.name in self.bookmarks or
                reference.name in self.branches or
                self.get_commit(reference.commit_id)):
            raise CommitDoesNotExistError(
                'Unknown branch, bookmark or commit id')
943 943
944 944 def _local_pull(self, repository_path, reference):
945 945 """
946 946 Fetch a branch, bookmark or commit from a local repository.
947 947 """
948 948 repository_path = os.path.abspath(repository_path)
949 949 if repository_path == self.path:
950 950 raise ValueError('Cannot pull from the same repository')
951 951
952 952 reference_type_to_option_name = {
953 953 'book': 'bookmark',
954 954 'branch': 'branch',
955 955 }
956 956 option_name = reference_type_to_option_name.get(
957 957 reference.type, 'revision')
958 958
959 959 if option_name == 'revision':
960 960 ref = reference.commit_id
961 961 else:
962 962 ref = reference.name
963 963
964 964 options = {option_name: [ref]}
965 965 self._remote.pull_cmd(repository_path, hooks=False, **options)
966 966 self._remote.invalidate_vcs_cache()
967 967
    def bookmark(self, bookmark, revision=None):
        """
        Set the given bookmark, optionally on a specific revision.

        :param bookmark: bookmark name.
        :param revision: optional revision to place the bookmark on.
        """
        # NOTE(review): safe_str() on a value that is already str is a no-op,
        # so this guard looks like a py2 leftover — confirm before removing.
        if isinstance(bookmark, str):
            bookmark = safe_str(bookmark)
        self._remote.bookmark(bookmark, revision=revision)
        self._remote.invalidate_vcs_cache()
973 973
    def get_path_permissions(self, username):
        """
        Build a path permission checker from the repo's ``.hg/hgacl`` file.

        :param username: user whose include/exclude patterns are looked up
            (falling back to the ``default`` entries).
        :return: a path permission checker, or None when no hgacl file exists
            or no patterns match.
        :raises RepositoryRequirementError: when the hgacl file cannot be
            parsed.
        """
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            # Collect patterns from the first matching (section, option) pair;
            # both 'narrowacl' and the legacy 'narrowhgacl' sections are tried.
            svalue = None
            for section, option in [
                    ('narrowacl', username + suffix),
                    ('narrowacl', 'default' + suffix),
                    ('narrowhgacl', username + suffix),
                    ('narrowhgacl', 'default' + suffix)
            ]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                # non-glob patterns also match everything below themselves
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None
1014 1014
1015 1015
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """Collection generator whose elements may be numeric commit indices."""

    def _commit_factory(self, commit_id):
        """Resolve ``commit_id`` as an index when int, as a hash otherwise."""
        keyword = 'commit_idx' if isinstance(commit_id, int) else 'commit_id'
        return self.repo.get_commit(pre_load=self.pre_load, **{keyword: commit_id})
@@ -1,1044 +1,1044 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Scm model for RhodeCode
21 21 """
22 22
23 23 import os.path
24 24 import traceback
25 25 import logging
26 26 import io
27 27
28 28 from sqlalchemy import func
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 import rhodecode
32 32 from rhodecode.lib.str_utils import safe_bytes
33 33 from rhodecode.lib.vcs import get_backend
34 34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 35 from rhodecode.lib.vcs.nodes import FileNode
36 36 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 37 from rhodecode.lib import helpers as h, rc_cache
38 38 from rhodecode.lib.auth import (
39 39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 40 HasUserGroupPermissionAny)
41 41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 42 from rhodecode.lib import hooks_utils
43 43 from rhodecode.lib.utils import (
44 44 get_filesystem_repos, make_db_config)
45 45 from rhodecode.lib.str_utils import safe_str
46 46 from rhodecode.lib.system_info import get_system_info
47 47 from rhodecode.model import BaseModel
48 48 from rhodecode.model.db import (
49 49 or_, false, null,
50 50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 51 PullRequest, FileStore)
52 52 from rhodecode.model.settings import VcsSettingsModel
53 53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
class UserTemp(object):
    """Lightweight stand-in object carrying only a user id."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return f"<{self.__class__.__name__}('id:{self.user_id}')>"
64 64
65 65
class RepoTemp(object):
    """Lightweight stand-in object carrying only a repository id."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return f"<{self.__class__.__name__}('id:{self.repo_id}')>"
72 72
73 73
class SimpleCachedRepoList(object):
    """
    Lighter version of iteration of repos without the scm initialisation,
    and with cache usage
    """
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        """
        :param db_repo_list: iterable of db Repository objects.
        :param repos_path: repositories root path.
        :param order_by: sort key; a leading ``-`` marks reversed order.
        :param perm_set: permissions required to include a repo; defaults to
            any read/write/admin repository permission.
        """
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        self.reversed = (order_by or '').startswith('-')
        if not perm_set:
            perm_set = ['repository.read', 'repository.write',
                        'repository.admin']
        self.perm_set = perm_set

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<{} ({})>'.format(self.__class__.__name__, self.__len__())

    def __iter__(self):
        # yield dicts for repos the current user has permission to see
        for dbr in self.db_repo_list:
            # check permission at this level
            has_perm = HasRepoPermissionAny(*self.perm_set)(
                dbr.repo_name, 'SimpleCachedRepoList check')
            if not has_perm:
                continue

            tmp_d = {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
            }
            yield tmp_d
109 109
110 110
class _PermCheckIterator(object):
    """
    Base iterator that yields only the db objects the current user has
    one of the given permissions on. Subclasses wire in the object
    attribute and permission checker for a concrete entity type.
    """

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Creates iterator from given list of objects, additionally
        checking permission for them from perm_set var

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        :param extra_kwargs: extra keyword arguments passed to the checker
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        self.perm_checker = perm_checker(*self.perm_set)
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<{} ({})>'.format(self.__class__.__name__, self.__len__())

    def __iter__(self):
        for db_obj in self.obj_list:
            # check permission at this level
            # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
            name = db_obj.__dict__.get(self.obj_attr, None)
            if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
                continue

            yield db_obj
146 146
147 147
class RepoList(_PermCheckIterator):
    """Permission-filtered iterator over db Repository objects."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        # default to any repository-level permission
        if not perm_set:
            perm_set = ['repository.read', 'repository.write', 'repository.admin']

        super().__init__(
            obj_list=db_repo_list,
            obj_attr='_repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
159 159
160 160
class RepoGroupList(_PermCheckIterator):
    """Permission-filtered iterator over db RepoGroup objects."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        # default to any repo-group-level permission
        if not perm_set:
            perm_set = ['group.read', 'group.write', 'group.admin']

        super().__init__(
            obj_list=db_repo_group_list,
            obj_attr='_group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
172 172
173 173
class UserGroupList(_PermCheckIterator):
    """Permission-filtered iterator over db UserGroup objects."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        # default to any user-group-level permission
        if not perm_set:
            perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']

        super().__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
185 185
186 186
187 187 class ScmModel(BaseModel):
188 188 """
189 189 Generic Scm Model
190 190 """
191 191
    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from database

        Cached on the instance after the first access (LazyProperty).
        """

        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()
200 200
    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects

        :param repos_path: path to directory containing repositories;
            defaults to the configured repositories root.
        :return: dict mapping normalized repo name -> backend instance.
        :raises RepositoryError: on duplicate repository names.
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name need to be decomposed and put back together using the /
            # since this is internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    backend = get_backend(path[0])
                    repos[name] = backend(path[1], config=config,
                                          with_wire={"cache": False})
            except OSError:
                # unreadable entry on disk; skip it
                continue
            except RepositoryError:
                log.exception('Failed to create a repo')
                continue

        log.debug('found %s paths with repositories', len(repos))
        return repos
239 239
    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from db and for each repo create it's
        backend instance and fill that backed with information from database

        :param all_repos: list of repository names as strings
            give specific repositories list, good for filtering

        :param sort_key: initial sorting of repositories
        :return: a ``SimpleCachedRepoList`` over the matching repositories.
        """
        if all_repos is None:
            # default: all top-level (ungrouped) repositories, name-sorted
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == null())\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter
257 257
258 258 def get_repo_groups(self, all_groups=None):
259 259 if all_groups is None:
260 260 all_groups = RepoGroup.query()\
261 261 .filter(RepoGroup.group_parent_id == null()).all()
262 262 return [x for x in RepoGroupList(all_groups)]
263 263
    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them, and also purge caches used by the dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
                repo_id=repo.repo_id)
            CacheKey.set_invalidate(invalidation_namespace, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            config.set('extensions', 'largefiles', '')
            # refresh the cached commit data for this repo
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                cache_namespace_uid = f'cache_repo.{repo_id}'
                rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
288 288
    def toggle_following_repo(self, follow_repo_id, user_id):
        """
        Toggle the following state of a repository for a user: delete the
        existing UserFollowing row if present, otherwise create one.

        :param follow_repo_id: id of the repository to (un)follow.
        :param user_id: id of the following user.
        """

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            # already following -> unfollow
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise
311 311
    def toggle_following_user(self, follow_user_id, user_id):
        """
        Toggle the following state of a user for another user: delete the
        existing UserFollowing row if present, otherwise create one.

        :param follow_user_id: id of the user being (un)followed.
        :param user_id: id of the following user.
        """
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            # already following -> unfollow
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise
333 333
    def is_following_repo(self, repo_name, user_id, cache=False):
        """
        Return True when ``user_id`` follows the repository ``repo_name``.

        :param cache: unused; kept for interface compatibility.
        """
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None
343 343
    def is_following_user(self, username, user_id, cache=False):
        """
        Return True when ``user_id`` follows the user ``username``.

        :param cache: unused; kept for interface compatibility.
        """
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None
352 352
    def get_followers(self, repo):
        """Return the number of users following the given repository."""
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == repo).count()
358 358
    def get_forks(self, repo):
        """Return the number of forks of the given repository."""
        repo = self._get_repo(repo)
        return self.sa.query(Repository)\
            .filter(Repository.fork == repo).count()
363 363
    def get_pull_requests(self, repo):
        """Return the number of non-closed pull requests targeting ``repo``."""
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
            .filter(PullRequest.target_repo == repo)\
            .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
369 369
    def get_artifacts(self, repo):
        """Return the number of visible (non-hidden) artifacts of ``repo``."""
        repo = self._get_repo(repo)
        return self.sa.query(FileStore)\
            .filter(FileStore.repo == repo)\
            .filter(or_(FileStore.hidden == null(), FileStore.hidden == false())).count()
375 375
    def mark_as_fork(self, repo, fork, user):
        """
        Mark ``repo`` as a fork of ``fork`` (or clear the fork relation when
        ``fork`` is falsy) and return the updated db repository.

        :param user: unused; kept for interface compatibility.
        :raises Exception: when a repo would be set as a fork of itself.
        :raises RepositoryError: when the repo types differ.
        """
        repo = self._get_repo(repo)
        fork = self._get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")

        if fork and repo.repo_type != fork.repo_type:
            raise RepositoryError(
                "Cannot set repository as fork of repository with other type")

        repo.fork = fork
        self.sa.add(repo)
        return repo
389 389
390 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
390 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True, **kwargs):
391 391 dbrepo = self._get_repo(repo)
392 392 remote_uri = remote_uri or dbrepo.clone_uri
393 393 if not remote_uri:
394 394 raise Exception("This repository doesn't have a clone uri")
395 395
396 396 repo = dbrepo.scm_instance(cache=False)
397 397 repo.config.clear_section('hooks')
398 398
399 399 try:
400 400 # NOTE(marcink): add extra validation so we skip invalid urls
401 401 # this is due this tasks can be executed via scheduler without
402 402 # proper validation of remote_uri
403 403 if validate_uri:
404 404 config = make_db_config(clear_session=False)
405 405 url_validator(remote_uri, dbrepo.repo_type, config)
406 406 except InvalidCloneUrl:
407 407 raise
408 408
409 409 repo_name = dbrepo.repo_name
410 410 try:
411 411 # TODO: we need to make sure those operations call proper hooks !
412 repo.fetch(remote_uri)
412 repo.fetch(remote_uri, **kwargs)
413 413
414 414 self.mark_for_invalidation(repo_name)
415 415 except Exception:
416 416 log.error(traceback.format_exc())
417 417 raise
418 418
419 419 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
420 420 dbrepo = self._get_repo(repo)
421 421 remote_uri = remote_uri or dbrepo.push_uri
422 422 if not remote_uri:
423 423 raise Exception("This repository doesn't have a clone uri")
424 424
425 425 repo = dbrepo.scm_instance(cache=False)
426 426 repo.config.clear_section('hooks')
427 427
428 428 try:
429 429 # NOTE(marcink): add extra validation so we skip invalid urls
430 430 # this is due this tasks can be executed via scheduler without
431 431 # proper validation of remote_uri
432 432 if validate_uri:
433 433 config = make_db_config(clear_session=False)
434 434 url_validator(remote_uri, dbrepo.repo_type, config)
435 435 except InvalidCloneUrl:
436 436 raise
437 437
438 438 try:
439 439 repo.push(remote_uri)
440 440 except Exception:
441 441 log.error(traceback.format_exc())
442 442 raise
443 443
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content: bytes, f_path: bytes, branch: str = None):
        """
        Commit a single file change on top of `commit`.

        :param repo: vcs repository instance (scm backend object)
        :param repo_name: repository name, used for cache invalidation
        :param commit: parent commit the change is based on
        :param user: RhodeCode User object or user_id, the committer
        :param author: author of the commit, may differ from the committer
        :param message: commit message
        :param content: new file content as bytes
        :param f_path: path of the changed file as bytes
        :param branch: optional target branch; defaults to the parent
            commit's branch
        :raises IMCCommitError: when the in-memory commit fails
        :returns: the newly created tip commit
        """
        user = self._get_user(user)

        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_str(message)
        author = safe_str(author)
        imc = repo.in_memory_commit
        # preserve the file mode the path had in the parent commit
        imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=branch or commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
        return tip
474 474
475 475 def _sanitize_path(self, f_path: bytes):
476 476 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
477 477 raise NonRelativePathError(b'%b is not an relative path' % f_path)
478 478 if f_path:
479 479 f_path = os.path.normpath(f_path)
480 480 return f_path
481 481
482 482 def get_dirnode_metadata(self, request, commit, dir_node):
483 483 if not dir_node.is_dir():
484 484 return []
485 485
486 486 data = []
487 487 for node in dir_node:
488 488 if not node.is_file():
489 489 # we skip file-nodes
490 490 continue
491 491
492 492 last_commit = node.last_commit
493 493 last_commit_date = last_commit.date
494 494 data.append({
495 495 'name': node.name,
496 496 'size': h.format_byte_size_binary(node.size),
497 497 'modified_at': h.format_date(last_commit_date),
498 498 'modified_ts': last_commit_date.isoformat(),
499 499 'revision': last_commit.revision,
500 500 'short_id': last_commit.short_id,
501 501 'message': h.escape(last_commit.message),
502 502 'author': h.escape(last_commit.author),
503 503 'user_profile': h.gravatar_with_user(
504 504 request, last_commit.author),
505 505 })
506 506
507 507 return data
508 508
509 509 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
510 510 extended_info=False, content=False, max_file_bytes=None):
511 511 """
512 512 recursive walk in root dir and return a set of all path in that dir
513 513 based on repository walk function
514 514
515 515 :param repo_name: name of repository
516 516 :param commit_id: commit id for which to list nodes
517 517 :param root_path: root path to list
518 518 :param flat: return as a list, if False returns a dict with description
519 519 :param extended_info: show additional info such as md5, binary, size etc
520 520 :param content: add nodes content to the return data
521 521 :param max_file_bytes: will not return file contents over this limit
522 522
523 523 """
524 524 _files = list()
525 525 _dirs = list()
526 526
527 527 try:
528 528 _repo = self._get_repo(repo_name)
529 529 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
530 530 root_path = root_path.lstrip('/')
531 531
532 532 # get RootNode, inject pre-load options before walking
533 533 top_node = commit.get_node(root_path)
534 534 extended_info_pre_load = []
535 535 if extended_info:
536 536 extended_info_pre_load += ['md5']
537 537 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
538 538
539 539 for __, dirs, files in commit.walk(top_node):
540 540
541 541 for f in files:
542 542 _content = None
543 543 _data = f_name = f.str_path
544 544
545 545 if not flat:
546 546 _data = {
547 547 "name": h.escape(f_name),
548 548 "type": "file",
549 549 }
550 550 if extended_info:
551 551 _data.update({
552 552 "md5": f.md5,
553 553 "binary": f.is_binary,
554 554 "size": f.size,
555 555 "extension": f.extension,
556 556 "mimetype": f.mimetype,
557 557 "lines": f.lines()[0]
558 558 })
559 559
560 560 if content:
561 561 over_size_limit = (max_file_bytes is not None
562 562 and f.size > max_file_bytes)
563 563 full_content = None
564 564 if not f.is_binary and not over_size_limit:
565 565 full_content = f.str_content
566 566
567 567 _data.update({
568 568 "content": full_content,
569 569 })
570 570 _files.append(_data)
571 571
572 572 for d in dirs:
573 573 _data = d_name = d.str_path
574 574 if not flat:
575 575 _data = {
576 576 "name": h.escape(d_name),
577 577 "type": "dir",
578 578 }
579 579 if extended_info:
580 580 _data.update({
581 581 "md5": "",
582 582 "binary": False,
583 583 "size": 0,
584 584 "extension": "",
585 585 })
586 586 if content:
587 587 _data.update({
588 588 "content": None
589 589 })
590 590 _dirs.append(_data)
591 591 except RepositoryError:
592 592 log.exception("Exception in get_nodes")
593 593 raise
594 594
595 595 return _dirs, _files
596 596
597 597 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
598 598 """
599 599 Generate files for quick filter in files view
600 600 """
601 601
602 602 _files = list()
603 603 _dirs = list()
604 604 try:
605 605 _repo = self._get_repo(repo_name)
606 606 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
607 607 root_path = root_path.lstrip('/')
608 608
609 609 top_node = commit.get_node(root_path)
610 610 top_node.default_pre_load = []
611 611
612 612 for __, dirs, files in commit.walk(top_node):
613 613 for f in files:
614 614
615 615 _data = {
616 616 "name": h.escape(f.str_path),
617 617 "type": "file",
618 618 }
619 619
620 620 _files.append(_data)
621 621
622 622 for d in dirs:
623 623
624 624 _data = {
625 625 "name": h.escape(d.str_path),
626 626 "type": "dir",
627 627 }
628 628
629 629 _dirs.append(_data)
630 630 except RepositoryError:
631 631 log.exception("Exception in get_quick_filter_nodes")
632 632 raise
633 633
634 634 return _dirs, _files
635 635
    def get_node(self, repo_name, commit_id, file_path,
                 extended_info=False, content=False, max_file_bytes=None, cache=True):
        """
        retrieve single node from commit

        :param repo_name: name of repository
        :param commit_id: commit id for which to fetch the node
        :param file_path: path of the file node; directories are rejected
        :param extended_info: include extension/mimetype details
        :param content: include the file content (subject to limits below)
        :param max_file_bytes: do not return content of files over this limit
        :param cache: when True use cached node metadata/content access
        :raises RepositoryError: if the path is a directory or lookup fails
        :returns: dict with file metadata (and content if requested)
        """

        try:

            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)

            file_node = commit.get_node(file_path)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')

            _content = None
            f_name = file_node.str_path

            file_data = {
                "name": h.escape(f_name),
                "type": "file",
            }

            if extended_info:
                file_data.update({
                    "extension": file_node.extension,
                    "mimetype": file_node.mimetype,
                })

            if cache:
                # attribute access may be served from a cache layer
                md5 = file_node.md5
                is_binary = file_node.is_binary
                size = file_node.size
            else:
                # one uncached read also yields the content for reuse below
                is_binary, md5, size, _content = file_node.metadata_uncached()

            file_data.update({
                "md5": md5,
                "binary": is_binary,
                "size": size,
            })

            if content and cache:
                # get content + cache
                size = file_node.size
                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not file_node.is_binary and not over_size_limit:
                    full_content = safe_str(file_node.content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })
            elif content:
                # get content *without* cache
                if _content is None:
                    # cache=True path above did not populate _content
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not is_binary and not over_size_limit:
                    full_content = safe_str(_content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })

        except RepositoryError:
            log.exception("Exception in get_node")
            raise

        return file_data
714 714
715 715 def get_fts_data(self, repo_name, commit_id, root_path='/'):
716 716 """
717 717 Fetch node tree for usage in full text search
718 718 """
719 719
720 720 tree_info = list()
721 721
722 722 try:
723 723 _repo = self._get_repo(repo_name)
724 724 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
725 725 root_path = root_path.lstrip('/')
726 726 top_node = commit.get_node(root_path)
727 727 top_node.default_pre_load = []
728 728
729 729 for __, dirs, files in commit.walk(top_node):
730 730
731 731 for f in files:
732 732 is_binary, md5, size, _content = f.metadata_uncached()
733 733 _data = {
734 734 "name": f.str_path,
735 735 "md5": md5,
736 736 "extension": f.extension,
737 737 "binary": is_binary,
738 738 "size": size
739 739 }
740 740
741 741 tree_info.append(_data)
742 742
743 743 except RepositoryError:
744 744 log.exception("Exception in get_nodes")
745 745 raise
746 746
747 747 return tree_info
748 748
749 749 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
750 750 author=None, trigger_push_hook=True):
751 751 """
752 752 Commits given multiple nodes into repo
753 753
754 754 :param user: RhodeCode User object or user_id, the commiter
755 755 :param repo: RhodeCode Repository object
756 756 :param message: commit message
757 757 :param nodes: mapping {filename:{'content':content},...}
758 758 :param parent_commit: parent commit, can be empty than it's
759 759 initial commit
760 760 :param author: author of commit, cna be different that commiter
761 761 only for git
762 762 :param trigger_push_hook: trigger push hooks
763 763
764 764 :returns: new committed commit
765 765 """
766 766
767 767 user = self._get_user(user)
768 768 scm_instance = repo.scm_instance(cache=False)
769 769
770 770 message = safe_str(message)
771 771 commiter = user.full_contact
772 772 author = safe_str(author) if author else commiter
773 773
774 774 imc = scm_instance.in_memory_commit
775 775
776 776 if not parent_commit:
777 777 parent_commit = EmptyCommit(alias=scm_instance.alias)
778 778
779 779 if isinstance(parent_commit, EmptyCommit):
780 780 # EmptyCommit means we're editing empty repository
781 781 parents = None
782 782 else:
783 783 parents = [parent_commit]
784 784
785 785 upload_file_types = (io.BytesIO, io.BufferedRandom)
786 786 processed_nodes = []
787 787 for filename, content_dict in nodes.items():
788 788 if not isinstance(filename, bytes):
789 789 raise ValueError(f'filename key in nodes needs to be bytes , or {upload_file_types}')
790 790 content = content_dict['content']
791 791 if not isinstance(content, upload_file_types + (bytes,)):
792 792 raise ValueError('content key value in nodes needs to be bytes')
793 793
794 794 for f_path in nodes:
795 795 f_path = self._sanitize_path(f_path)
796 796 content = nodes[f_path]['content']
797 797
798 798 # decoding here will force that we have proper encoded values
799 799 # in any other case this will throw exceptions and deny commit
800 800
801 801 if isinstance(content, bytes):
802 802 pass
803 803 elif isinstance(content, upload_file_types):
804 804 content = content.read()
805 805 else:
806 806 raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
807 807 processed_nodes.append((f_path, content))
808 808
809 809 # add multiple nodes
810 810 for path, content in processed_nodes:
811 811 imc.add(FileNode(path, content=content))
812 812
813 813 # TODO: handle pre push scenario
814 814 tip = imc.commit(message=message,
815 815 author=author,
816 816 parents=parents,
817 817 branch=parent_commit.branch)
818 818
819 819 self.mark_for_invalidation(repo.repo_name)
820 820 if trigger_push_hook:
821 821 hooks_utils.trigger_post_push_hook(
822 822 username=user.username, action='push_local',
823 823 repo_name=repo.repo_name, repo_type=scm_instance.alias,
824 824 hook_type='post_push',
825 825 commit_ids=[tip.raw_id])
826 826 return tip
827 827
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Apply add/delete/modify operations on multiple nodes and commit.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {old_filename: {'op': 'add'|'del'|'mod',
            'filename': new_filename, 'content': ..., 'mode': ...}, ...}
        :param parent_commit: parent commit; when empty an initial commit
            is created
        :param author: author of commit, may differ from the committer
        :param trigger_push_hook: trigger post-push hooks when True
        :raises NodeNotChangedError: when a 'mod' op changes nothing
        :raises IMCCommitError: on any other commit failure
        :returns: new committed commit
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_str(message)
        commiter = user.full_contact
        author = safe_str(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        # schedule each node operation on the in-memory commit
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one, also sanitize
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']
            file_mode = data.get('mode')
            filenode = FileNode(old_filename, content=content, mode=file_mode)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficient, needs vcs lib changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content, mode=file_mode))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            # deliberate pass-through: callers handle "nothing changed"
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])

        return tip
892 892
    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes into `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit; can be empty, then an initial
            commit is created
        :param author: author of commit, can be different than committer
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            # NOTE(review): f_path is sanitized before the nodes[f_path]
            # lookup below; a path that normpath rewrites would KeyError —
            # confirm callers always pass already-normalized keys
            f_path = self._sanitize_path(f_path)
            # content can be empty but for compatibility it allows same dicts
            # structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((safe_bytes(f_path), content))

        message = safe_str(message)
        commiter = user.full_contact
        author = safe_str(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # schedule all removals on the in-memory commit
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
953 953
954 954 def strip(self, repo, commit_id, branch):
955 955 scm_instance = repo.scm_instance(cache=False)
956 956 scm_instance.config.clear_section('hooks')
957 957 scm_instance.strip(commit_id, branch)
958 958 self.mark_for_invalidation(repo.repo_name)
959 959
    def get_unread_journal(self):
        # NOTE(review): despite the name, this counts ALL UserLog rows;
        # presumably the "unread" filtering happens elsewhere — confirm
        # against callers
        return self.sa.query(UserLog).count()
962 962
963 963 @classmethod
964 964 def backend_landing_ref(cls, repo_type):
965 965 """
966 966 Return a default landing ref based on a repository type.
967 967 """
968 968
969 969 landing_ref = {
970 970 'hg': ('branch:default', 'default'),
971 971 'git': ('branch:master', 'master'),
972 972 'svn': ('rev:tip', 'latest tip'),
973 973 'default': ('rev:tip', 'latest tip'),
974 974 }
975 975
976 976 return landing_ref.get(repo_type) or landing_ref['default']
977 977
    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select option with tags branches and bookmarks (for hg only)
        grouped by type

        :param translator: translation function for group labels
        :param repo: repository name or object; when None, only the
            backend default landing ref is returned (NEW repo creation)
        :returns: tuple of (choices, ref_options) for a select widget
        """
        # NOTE(review): GitRepository import appears unused in this method —
        # confirm before removing
        from rhodecode.lib.vcs.backends.git import GitRepository

        _ = translator
        repo = self._get_repo(repo)

        if repo:
            repo_type = repo.repo_type
        else:
            repo_type = 'default'

        default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)

        default_ref_options = [
            [default_landing_ref, landing_ref_lbl]
        ]
        default_choices = [
            default_landing_ref
        ]

        if not repo:
            # presented at NEW repo creation
            return default_choices, default_ref_options

        repo = repo.scm_instance()

        ref_options = [(default_landing_ref, landing_ref_lbl)]
        choices = [default_landing_ref]

        # branches
        branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
        if not branch_group:
            # new repo, or without maybe a branch?
            branch_group = default_ref_options

        branches_group = (branch_group, _("Branches"))
        ref_options.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        # bookmarks for HG
        if repo.alias == 'hg':
            bookmarks_group = (
                [(f'book:{safe_str(b)}', safe_str(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            ref_options.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        # tags
        tags_group = (
            [(f'tag:{safe_str(t)}', safe_str(t))
             for t in repo.tags],
            _("Tags"))
        ref_options.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, ref_options
1041 1041
1042 1042 def get_server_info(self, environ=None):
1043 1043 server_info = get_system_info(environ)
1044 1044 return server_info
General Comments 0
You need to be logged in to leave comments. Login now