api: allow uncached content fetching....
marcink -
r3479:58288c09 default
@@ -1,2305 +1,2309 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import time
23 23
24 24 import rhodecode
25 25 from rhodecode.api import (
26 26 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
27 27 from rhodecode.api.utils import (
28 28 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
29 29 get_user_group_or_error, get_user_or_error, validate_repo_permissions,
30 30 get_perm_or_error, parse_args, get_origin, build_commit_data,
31 31 validate_set_owner_permissions)
32 32 from rhodecode.lib import audit_logger, rc_cache
33 33 from rhodecode.lib import repo_maintenance
34 34 from rhodecode.lib.auth import HasPermissionAnyApi, HasUserGroupPermissionAnyApi
35 35 from rhodecode.lib.celerylib.utils import get_task_id
36 36 from rhodecode.lib.utils2 import str2bool, time_to_datetime, safe_str, safe_int
37 37 from rhodecode.lib.ext_json import json
38 38 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
39 39 from rhodecode.lib.vcs import RepositoryError
40 40 from rhodecode.model.changeset_status import ChangesetStatusModel
41 41 from rhodecode.model.comment import CommentsModel
42 42 from rhodecode.model.db import (
43 43 Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
44 44 ChangesetComment)
45 45 from rhodecode.model.repo import RepoModel
46 46 from rhodecode.model.scm import ScmModel, RepoList
47 47 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
48 48 from rhodecode.model import validation_schema
49 49 from rhodecode.model.validation_schema.schemas import repo_schema
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 @jsonrpc_method()
55 55 def get_repo(request, apiuser, repoid, cache=Optional(True)):
56 56 """
57 57 Gets an existing repository by its name or repository_id.
58 58
59 59     The members section of the output returns the user groups or users
60 60     associated with that repository.
61 61
62 62 This command can only be run using an |authtoken| with admin rights,
63 63 or users with at least read rights to the |repo|.
64 64
65 65 :param apiuser: This is filled automatically from the |authtoken|.
66 66 :type apiuser: AuthUser
67 67 :param repoid: The repository name or repository id.
68 68 :type repoid: str or int
69 69 :param cache: use the cached value for last changeset
70 70     :type cache: Optional(bool)
71 71
72 72 Example output:
73 73
74 74 .. code-block:: bash
75 75
76 76 {
77 77 "error": null,
78 78 "id": <repo_id>,
79 79 "result": {
80 80 "clone_uri": null,
81 81 "created_on": "timestamp",
82 82 "description": "repo description",
83 83 "enable_downloads": false,
84 84 "enable_locking": false,
85 85 "enable_statistics": false,
86 86 "followers": [
87 87 {
88 88 "active": true,
89 89 "admin": false,
90 90 "api_key": "****************************************",
91 91 "api_keys": [
92 92 "****************************************"
93 93 ],
94 94 "email": "user@example.com",
95 95 "emails": [
96 96 "user@example.com"
97 97 ],
98 98 "extern_name": "rhodecode",
99 99 "extern_type": "rhodecode",
100 100 "firstname": "username",
101 101 "ip_addresses": [],
102 102 "language": null,
103 103 "last_login": "2015-09-16T17:16:35.854",
104 104 "lastname": "surname",
105 105 "user_id": <user_id>,
106 106 "username": "name"
107 107 }
108 108 ],
109 109 "fork_of": "parent-repo",
110 110 "landing_rev": [
111 111 "rev",
112 112 "tip"
113 113 ],
114 114 "last_changeset": {
115 115 "author": "User <user@example.com>",
116 116 "branch": "default",
117 117 "date": "timestamp",
118 118 "message": "last commit message",
119 119 "parents": [
120 120 {
121 121 "raw_id": "commit-id"
122 122 }
123 123 ],
124 124 "raw_id": "commit-id",
125 125 "revision": <revision number>,
126 126 "short_id": "short id"
127 127 },
128 128 "lock_reason": null,
129 129 "locked_by": null,
130 130 "locked_date": null,
131 131 "owner": "owner-name",
132 132 "permissions": [
133 133 {
134 134 "name": "super-admin-name",
135 135 "origin": "super-admin",
136 136 "permission": "repository.admin",
137 137 "type": "user"
138 138 },
139 139 {
140 140 "name": "owner-name",
141 141 "origin": "owner",
142 142 "permission": "repository.admin",
143 143 "type": "user"
144 144 },
145 145 {
146 146 "name": "user-group-name",
147 147 "origin": "permission",
148 148 "permission": "repository.write",
149 149 "type": "user_group"
150 150 }
151 151 ],
152 152 "private": true,
153 153 "repo_id": 676,
154 154 "repo_name": "user-group/repo-name",
155 155 "repo_type": "hg"
156 156 }
157 157 }
158 158 """
159 159
160 160 repo = get_repo_or_error(repoid)
161 161 cache = Optional.extract(cache)
162 162
163 163 include_secrets = False
164 164 if has_superadmin_permission(apiuser):
165 165 include_secrets = True
166 166 else:
167 167 # check if we have at least read permission for this repo !
168 168 _perms = (
169 169 'repository.admin', 'repository.write', 'repository.read',)
170 170 validate_repo_permissions(apiuser, repoid, repo, _perms)
171 171
172 172 permissions = []
173 173 for _user in repo.permissions():
174 174 user_data = {
175 175 'name': _user.username,
176 176 'permission': _user.permission,
177 177 'origin': get_origin(_user),
178 178 'type': "user",
179 179 }
180 180 permissions.append(user_data)
181 181
182 182 for _user_group in repo.permission_user_groups():
183 183 user_group_data = {
184 184 'name': _user_group.users_group_name,
185 185 'permission': _user_group.permission,
186 186 'origin': get_origin(_user_group),
187 187 'type': "user_group",
188 188 }
189 189 permissions.append(user_group_data)
190 190
191 191 following_users = [
192 192 user.user.get_api_data(include_secrets=include_secrets)
193 193 for user in repo.followers]
194 194
195 195 if not cache:
196 196 repo.update_commit_cache()
197 197 data = repo.get_api_data(include_secrets=include_secrets)
198 198 data['permissions'] = permissions
199 199 data['followers'] = following_users
200 200 return data
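
# Illustrative sketch only: how a client might invoke `get_repo` over the JSON-RPC
# API, passing `cache=False` to force a refresh of the cached last changeset.
# The endpoint URL and auth token below are placeholder assumptions, not values
# taken from this codebase.
def _example_get_repo_call():
    import requests  # assumed available in the client environment
    payload = {
        'id': 1,
        'auth_token': 'SECRET_TOKEN',  # placeholder
        'method': 'get_repo',
        'args': {'repoid': 'user-group/repo-name', 'cache': False},
    }
    return requests.post('https://code.example.com/_admin/api', json=payload).json()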
201 201
202 202
203 203 @jsonrpc_method()
204 204 def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
205 205 """
206 206 Lists all existing repositories.
207 207
208 208 This command can only be run using an |authtoken| with admin rights,
209 209 or users with at least read rights to |repos|.
210 210
211 211 :param apiuser: This is filled automatically from the |authtoken|.
212 212 :type apiuser: AuthUser
213 213     :param root: Specify the root repository group from which to fetch repositories.
214 214         Filters the returned repositories to members of the given root group.
215 215     :type root: Optional(None)
216 216     :param traverse: Traverse the given root into subrepositories. With this flag
217 217         set to False, only top-level repositories from `root` are returned.
218 218         If root is empty, only top-level repositories are returned.
219 219 :type traverse: Optional(True)
220 220
221 221
222 222 Example output:
223 223
224 224 .. code-block:: bash
225 225
226 226 id : <id_given_in_input>
227 227 result: [
228 228 {
229 229 "repo_id" : "<repo_id>",
230 230         "repo_name" : "<reponame>",
231 231 "repo_type" : "<repo_type>",
232 232 "clone_uri" : "<clone_uri>",
233 233         "private" : "<bool>",
234 234 "created_on" : "<datetimecreated>",
235 235 "description" : "<description>",
236 236 "landing_rev": "<landing_rev>",
237 237 "owner": "<repo_owner>",
238 238 "fork_of": "<name_of_fork_parent>",
239 239 "enable_downloads": "<bool>",
240 240 "enable_locking": "<bool>",
241 241 "enable_statistics": "<bool>",
242 242 },
243 243 ...
244 244 ]
245 245 error: null
246 246 """
247 247
248 248 include_secrets = has_superadmin_permission(apiuser)
249 249 _perms = ('repository.read', 'repository.write', 'repository.admin',)
250 250 extras = {'user': apiuser}
251 251
252 252 root = Optional.extract(root)
253 253 traverse = Optional.extract(traverse, binary=True)
254 254
255 255 if root:
256 256         # verify parent existence, if it's empty return an error
257 257 parent = RepoGroup.get_by_group_name(root)
258 258 if not parent:
259 259 raise JSONRPCError(
260 260 'Root repository group `{}` does not exist'.format(root))
261 261
262 262 if traverse:
263 263 repos = RepoModel().get_repos_for_root(root=root, traverse=traverse)
264 264 else:
265 265 repos = RepoModel().get_repos_for_root(root=parent)
266 266 else:
267 267 if traverse:
268 268 repos = RepoModel().get_all()
269 269 else:
270 270 # return just top-level
271 271 repos = RepoModel().get_repos_for_root(root=None)
272 272
273 273 repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras)
274 274 return [repo.get_api_data(include_secrets=include_secrets)
275 275 for repo in repo_list]
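
# Illustrative payload only (placeholder token): list just the top-level
# repositories of the `foo/bar` group by disabling `traverse`.
_EXAMPLE_GET_REPOS_PAYLOAD = {
    'id': 1,
    'auth_token': 'SECRET_TOKEN',  # placeholder
    'method': 'get_repos',
    'args': {'root': 'foo/bar', 'traverse': False},
}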
276 276
277 277
278 278 @jsonrpc_method()
279 279 def get_repo_changeset(request, apiuser, repoid, revision,
280 280 details=Optional('basic')):
281 281 """
282 282 Returns information about a changeset.
283 283
284 284     Additional parameters define the amount of detail returned by
285 285 this function.
286 286
287 287 This command can only be run using an |authtoken| with admin rights,
288 288 or users with at least read rights to the |repo|.
289 289
290 290 :param apiuser: This is filled automatically from the |authtoken|.
291 291 :type apiuser: AuthUser
292 292 :param repoid: The repository name or repository id
293 293 :type repoid: str or int
294 294 :param revision: revision for which listing should be done
295 295 :type revision: str
296 296     :param details: details can be 'basic', 'extended' or 'full'; 'full' gives diff
297 297         info details such as the diff itself and the number of changed files.
298 298 :type details: Optional(str)
299 299
300 300 """
301 301 repo = get_repo_or_error(repoid)
302 302 if not has_superadmin_permission(apiuser):
303 303 _perms = (
304 304 'repository.admin', 'repository.write', 'repository.read',)
305 305 validate_repo_permissions(apiuser, repoid, repo, _perms)
306 306
307 307 changes_details = Optional.extract(details)
308 308 _changes_details_types = ['basic', 'extended', 'full']
309 309 if changes_details not in _changes_details_types:
310 310 raise JSONRPCError(
311 311 'ret_type must be one of %s' % (
312 312 ','.join(_changes_details_types)))
313 313
314 314 pre_load = ['author', 'branch', 'date', 'message', 'parents',
315 315 'status', '_commit', '_file_paths']
316 316
317 317 try:
318 318 cs = repo.get_commit(commit_id=revision, pre_load=pre_load)
319 319 except TypeError as e:
320 320 raise JSONRPCError(safe_str(e))
321 321 _cs_json = cs.__json__()
322 322 _cs_json['diff'] = build_commit_data(cs, changes_details)
323 323 if changes_details == 'full':
324 324 _cs_json['refs'] = cs._get_refs()
325 325 return _cs_json
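
# Illustrative payload only (placeholder values): fetch a single commit with
# full details, which includes the diff and refs.
_EXAMPLE_GET_REPO_CHANGESET_PAYLOAD = {
    'id': 1,
    'auth_token': 'SECRET_TOKEN',  # placeholder
    'method': 'get_repo_changeset',
    'args': {'repoid': 'repo-name', 'revision': 'tip', 'details': 'full'},
}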
326 326
327 327
328 328 @jsonrpc_method()
329 329 def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
330 330 details=Optional('basic')):
331 331 """
332 332 Returns a set of commits limited by the number starting
333 333 from the `start_rev` option.
334 334
335 335 Additional parameters define the amount of details returned by this
336 336 function.
337 337
338 338 This command can only be run using an |authtoken| with admin rights,
339 339 or users with at least read rights to |repos|.
340 340
341 341 :param apiuser: This is filled automatically from the |authtoken|.
342 342 :type apiuser: AuthUser
343 343 :param repoid: The repository name or repository ID.
344 344 :type repoid: str or int
345 345 :param start_rev: The starting revision from where to get changesets.
346 346 :type start_rev: str
347 347 :param limit: Limit the number of commits to this amount
348 348 :type limit: str or int
349 349     :param details: Set the level of detail returned. Valid options are:
350 350 ``basic``, ``extended`` and ``full``.
351 351 :type details: Optional(str)
352 352
353 353 .. note::
354 354
355 355 Setting the parameter `details` to the value ``full`` is extensive
356 356 and returns details like the diff itself, and the number
357 357 of changed files.
358 358
359 359 """
360 360 repo = get_repo_or_error(repoid)
361 361 if not has_superadmin_permission(apiuser):
362 362 _perms = (
363 363 'repository.admin', 'repository.write', 'repository.read',)
364 364 validate_repo_permissions(apiuser, repoid, repo, _perms)
365 365
366 366 changes_details = Optional.extract(details)
367 367 _changes_details_types = ['basic', 'extended', 'full']
368 368 if changes_details not in _changes_details_types:
369 369 raise JSONRPCError(
370 370 'ret_type must be one of %s' % (
371 371 ','.join(_changes_details_types)))
372 372
373 373 limit = int(limit)
374 374 pre_load = ['author', 'branch', 'date', 'message', 'parents',
375 375 'status', '_commit', '_file_paths']
376 376
377 377 vcs_repo = repo.scm_instance()
378 378 # SVN needs a special case to distinguish its index and commit id
379 379 if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
380 380 start_rev = vcs_repo.commit_ids[0]
381 381
382 382 try:
383 383 commits = vcs_repo.get_commits(
384 384 start_id=start_rev, pre_load=pre_load, translate_tags=False)
385 385 except TypeError as e:
386 386 raise JSONRPCError(safe_str(e))
387 387 except Exception:
388 388 log.exception('Fetching of commits failed')
389 389 raise JSONRPCError('Error occurred during commit fetching')
390 390
391 391 ret = []
392 392 for cnt, commit in enumerate(commits):
393 393 if cnt >= limit != -1:
394 394 break
395 395 _cs_json = commit.__json__()
396 396 _cs_json['diff'] = build_commit_data(commit, changes_details)
397 397 if changes_details == 'full':
398 398 _cs_json['refs'] = {
399 399 'branches': [commit.branch],
400 400 'bookmarks': getattr(commit, 'bookmarks', []),
401 401 'tags': commit.tags
402 402 }
403 403 ret.append(_cs_json)
404 404 return ret
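
# Illustrative payload only (placeholder values): fetch 20 commits starting
# from a given revision, with basic details.
_EXAMPLE_GET_REPO_CHANGESETS_PAYLOAD = {
    'id': 1,
    'auth_token': 'SECRET_TOKEN',  # placeholder
    'method': 'get_repo_changesets',
    'args': {'repoid': 'repo-name', 'start_rev': '0', 'limit': 20, 'details': 'basic'},
}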
405 405
406 406
407 407 @jsonrpc_method()
408 408 def get_repo_nodes(request, apiuser, repoid, revision, root_path,
409 409 ret_type=Optional('all'), details=Optional('basic'),
410 410 max_file_bytes=Optional(None)):
411 411 """
412 412 Returns a list of nodes and children in a flat list for a given
413 413 path at given revision.
414 414
415 415 It's possible to specify ret_type to show only `files` or `dirs`.
416 416
417 417 This command can only be run using an |authtoken| with admin rights,
418 418 or users with at least read rights to |repos|.
419 419
420 420 :param apiuser: This is filled automatically from the |authtoken|.
421 421 :type apiuser: AuthUser
422 422 :param repoid: The repository name or repository ID.
423 423 :type repoid: str or int
424 424 :param revision: The revision for which listing should be done.
425 425 :type revision: str
426 426 :param root_path: The path from which to start displaying.
427 427 :type root_path: str
428 428 :param ret_type: Set the return type. Valid options are
429 429 ``all`` (default), ``files`` and ``dirs``.
430 430 :type ret_type: Optional(str)
431 431 :param details: Returns extended information about nodes, such as
432 432         md5, binary, and/or content.
433 433 The valid options are ``basic`` and ``full``.
434 434 :type details: Optional(str)
435 435     :param max_file_bytes: Only return file content for files under this size in bytes
436 436     :type max_file_bytes: Optional(int)
437 437
438 438 Example output:
439 439
440 440 .. code-block:: bash
441 441
442 442 id : <id_given_in_input>
443 443 result: [
444 444 {
445 445 "binary": false,
446 446 "content": "File line\nLine2\n",
447 447 "extension": "md",
448 448 "lines": 2,
449 449 "md5": "059fa5d29b19c0657e384749480f6422",
450 450 "mimetype": "text/x-minidsrc",
451 451 "name": "file.md",
452 452 "size": 580,
453 453 "type": "file"
454 454 },
455 455 ...
456 456 ]
457 457 error: null
458 458 """
459 459
460 460 repo = get_repo_or_error(repoid)
461 461 if not has_superadmin_permission(apiuser):
462 462 _perms = ('repository.admin', 'repository.write', 'repository.read',)
463 463 validate_repo_permissions(apiuser, repoid, repo, _perms)
464 464
465 465 ret_type = Optional.extract(ret_type)
466 466 details = Optional.extract(details)
467 467 _extended_types = ['basic', 'full']
468 468 if details not in _extended_types:
469 469 raise JSONRPCError('ret_type must be one of %s' % (','.join(_extended_types)))
470 470 extended_info = False
471 471 content = False
472 472 if details == 'basic':
473 473 extended_info = True
474 474
475 475 if details == 'full':
476 476 extended_info = content = True
477 477
478 478 _map = {}
479 479 try:
480 480 # check if repo is not empty by any chance, skip quicker if it is.
481 481 _scm = repo.scm_instance()
482 482 if _scm.is_empty():
483 483 return []
484 484
485 485 _d, _f = ScmModel().get_nodes(
486 486 repo, revision, root_path, flat=False,
487 487 extended_info=extended_info, content=content,
488 488 max_file_bytes=max_file_bytes)
489 489 _map = {
490 490 'all': _d + _f,
491 491 'files': _f,
492 492 'dirs': _d,
493 493 }
494 494 return _map[ret_type]
495 495 except KeyError:
496 496 raise JSONRPCError(
497 497 'ret_type must be one of %s' % (','.join(sorted(_map.keys()))))
498 498 except Exception:
499 499 log.exception("Exception occurred while trying to get repo nodes")
500 500 raise JSONRPCError(
501 501 'failed to get repo: `%s` nodes' % repo.repo_name
502 502 )
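
# Illustrative payload only (placeholder values): list only the files under
# `docs/` at a given revision, with extended (but not full-content) info.
_EXAMPLE_GET_REPO_NODES_PAYLOAD = {
    'id': 1,
    'auth_token': 'SECRET_TOKEN',  # placeholder
    'method': 'get_repo_nodes',
    'args': {'repoid': 'repo-name', 'revision': 'tip', 'root_path': 'docs',
             'ret_type': 'files', 'details': 'basic'},
}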
503 503
504 504
505 505 @jsonrpc_method()
506 506 def get_repo_file(request, apiuser, repoid, commit_id, file_path,
507 max_file_bytes=Optional(None), details=Optional('basic')):
507 max_file_bytes=Optional(None), details=Optional('basic'),
508 cache=Optional(True)):
508 509 """
509 510 Returns a single file from repository at given revision.
510 511
511 512 This command can only be run using an |authtoken| with admin rights,
512 513 or users with at least read rights to |repos|.
513 514
514 515 :param apiuser: This is filled automatically from the |authtoken|.
515 516 :type apiuser: AuthUser
516 517 :param repoid: The repository name or repository ID.
517 518 :type repoid: str or int
518 519 :param commit_id: The revision for which listing should be done.
519 520 :type commit_id: str
520 521 :param file_path: The path from which to start displaying.
521 522 :type file_path: str
522 523 :param details: Returns different set of information about nodes.
523 524         The valid options are ``minimal``, ``basic`` and ``full``.
524 525 :type details: Optional(str)
525 526     :param max_file_bytes: Only return file content for files under this size in bytes
526 :type details: Optional(int)
527
527 :type max_file_bytes: Optional(int)
528     :param cache: Use internal caches for fetching files. If disabled, fetching
529         files is slower but more memory efficient.
530 :type cache: Optional(bool)
528 531 Example output:
529 532
530 533 .. code-block:: bash
531 534
532 535 id : <id_given_in_input>
533 536 result: {
534 537 "binary": false,
535 538 "extension": "py",
536 539 "lines": 35,
537 540 "content": "....",
538 541 "md5": "76318336366b0f17ee249e11b0c99c41",
539 542 "mimetype": "text/x-python",
540 543 "name": "python.py",
541 544 "size": 817,
542 545 "type": "file",
543 546 }
544 547 error: null
545 548 """
546 549
547 550 repo = get_repo_or_error(repoid)
548 551 if not has_superadmin_permission(apiuser):
549 552 _perms = ('repository.admin', 'repository.write', 'repository.read',)
550 553 validate_repo_permissions(apiuser, repoid, repo, _perms)
551 554
555 cache = Optional.extract(cache, binary=True)
552 556 details = Optional.extract(details)
553 557 _extended_types = ['minimal', 'minimal+search', 'basic', 'full']
554 558 if details not in _extended_types:
555 559 raise JSONRPCError(
556 560             'ret_type must be one of %s, got %s' % (','.join(_extended_types), details))
557 561 extended_info = False
558 562 content = False
559 563
560 564 if details == 'minimal':
561 565 extended_info = False
562 566
563 567 elif details == 'basic':
564 568 extended_info = True
565 569
566 570 elif details == 'full':
567 571 extended_info = content = True
568 572
569 573 try:
570 574 # check if repo is not empty by any chance, skip quicker if it is.
571 575 _scm = repo.scm_instance()
572 576 if _scm.is_empty():
573 577 return None
574 578
575 579 node = ScmModel().get_node(
576 580 repo, commit_id, file_path, extended_info=extended_info,
577 content=content, max_file_bytes=max_file_bytes)
581 content=content, max_file_bytes=max_file_bytes, cache=cache)
578 582
579 583 except Exception:
580 584 log.exception("Exception occurred while trying to get repo node")
581 585 raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)
582 586
583 587 return node
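
# Illustrative payload only (placeholder values): fetch a single file with the
# new `cache` flag disabled, trading speed for lower memory use on large files.
_EXAMPLE_GET_REPO_FILE_PAYLOAD = {
    'id': 1,
    'auth_token': 'SECRET_TOKEN',  # placeholder
    'method': 'get_repo_file',
    'args': {'repoid': 'repo-name', 'commit_id': 'tip', 'file_path': 'README.rst',
             'details': 'full', 'cache': False},
}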
584 588
585 589
586 590 @jsonrpc_method()
587 591 def get_repo_fts_tree(request, apiuser, repoid, commit_id, root_path):
588 592 """
589 593     Returns a list of tree nodes for path at given revision. This API is built
590 594     strictly for usage in full text search building, and shouldn't be consumed for other purposes.
591 595
592 596 This command can only be run using an |authtoken| with admin rights,
593 597 or users with at least read rights to |repos|.
594 598
595 599 """
596 600
597 601 repo = get_repo_or_error(repoid)
598 602 if not has_superadmin_permission(apiuser):
599 603 _perms = ('repository.admin', 'repository.write', 'repository.read',)
600 604 validate_repo_permissions(apiuser, repoid, repo, _perms)
601 605
602 606 repo_id = repo.repo_id
603 607 cache_seconds = safe_int(rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
604 608 cache_on = cache_seconds > 0
605 609
606 610 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
607 611 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
608 612
609 613 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
610 614 condition=cache_on)
611 615 def compute_fts_tree(repo_id, commit_id, root_path, cache_ver):
612 616 return ScmModel().get_fts_data(repo_id, commit_id, root_path)
613 617
614 618 try:
615 619 # check if repo is not empty by any chance, skip quicker if it is.
616 620 _scm = repo.scm_instance()
617 621 if _scm.is_empty():
618 622 return []
619 623 except RepositoryError:
620 624 log.exception("Exception occurred while trying to get repo nodes")
621 625 raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)
622 626
623 627 try:
624 628 # we need to resolve commit_id to a FULL sha for cache to work correctly.
625 629 # sending 'master' is a pointer that needs to be translated to current commit.
626 630 commit_id = _scm.get_commit(commit_id=commit_id).raw_id
627 631 log.debug(
628 632 'Computing FTS REPO TREE for repo_id %s commit_id `%s` '
629 633 'with caching: %s[TTL: %ss]' % (
630 634 repo_id, commit_id, cache_on, cache_seconds or 0))
631 635
632 636 tree_files = compute_fts_tree(repo_id, commit_id, root_path, 'v1')
633 637 return tree_files
634 638
635 639 except Exception:
636 640 log.exception("Exception occurred while trying to get repo nodes")
637 641 raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)
638 642
639 643
640 644 @jsonrpc_method()
641 645 def get_repo_refs(request, apiuser, repoid):
642 646 """
643 647 Returns a dictionary of current references. It returns
644 648 bookmarks, branches, closed_branches, and tags for given repository
645 649
646 650 It's possible to specify ret_type to show only `files` or `dirs`.
648 652 This command can only be run using an |authtoken| with admin rights,
649 653 or users with at least read rights to |repos|.
650 654
651 655 :param apiuser: This is filled automatically from the |authtoken|.
652 656 :type apiuser: AuthUser
653 657 :param repoid: The repository name or repository ID.
654 658 :type repoid: str or int
655 659
656 660 Example output:
657 661
658 662 .. code-block:: bash
659 663
660 664 id : <id_given_in_input>
661 665 "result": {
662 666 "bookmarks": {
663 667 "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
664 668 "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
665 669 },
666 670 "branches": {
667 671 "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
668 672 "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
669 673 },
670 674 "branches_closed": {},
671 675 "tags": {
672 676 "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
673 677 "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
674 678 "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
675 679 "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17",
676 680 }
677 681 }
678 682 error: null
679 683 """
680 684
681 685 repo = get_repo_or_error(repoid)
682 686 if not has_superadmin_permission(apiuser):
683 687 _perms = ('repository.admin', 'repository.write', 'repository.read',)
684 688 validate_repo_permissions(apiuser, repoid, repo, _perms)
685 689
686 690 try:
687 691 # check if repo is not empty by any chance, skip quicker if it is.
688 692 vcs_instance = repo.scm_instance()
689 693 refs = vcs_instance.refs()
690 694 return refs
691 695 except Exception:
692 696 log.exception("Exception occurred while trying to get repo refs")
693 697 raise JSONRPCError(
694 698 'failed to get repo: `%s` references' % repo.repo_name
695 699 )
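
# Illustrative payload only (placeholder token): dump all refs (branches,
# bookmarks, closed branches and tags) of a repository.
_EXAMPLE_GET_REPO_REFS_PAYLOAD = {
    'id': 1,
    'auth_token': 'SECRET_TOKEN',  # placeholder
    'method': 'get_repo_refs',
    'args': {'repoid': 'repo-name'},
}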
696 700
697 701
698 702 @jsonrpc_method()
699 703 def create_repo(
700 704 request, apiuser, repo_name, repo_type,
701 705 owner=Optional(OAttr('apiuser')),
702 706 description=Optional(''),
703 707 private=Optional(False),
704 708 clone_uri=Optional(None),
705 709 push_uri=Optional(None),
706 710 landing_rev=Optional('rev:tip'),
707 711 enable_statistics=Optional(False),
708 712 enable_locking=Optional(False),
709 713 enable_downloads=Optional(False),
710 714 copy_permissions=Optional(False)):
711 715 """
712 716 Creates a repository.
713 717
714 718     * If the repository name contains "/", the repository will be created inside
715 719 a repository group or nested repository groups
716 720
717 721     For example "foo/bar/repo1" will create a |repo| called "repo1" inside
718 722     group "foo/bar". You have to have permissions to access and write to
719 723     the last repository group ("bar" in this example).
720 724
721 725 This command can only be run using an |authtoken| with at least
722 726 permissions to create repositories, or write permissions to
723 727 parent repository groups.
724 728
725 729 :param apiuser: This is filled automatically from the |authtoken|.
726 730 :type apiuser: AuthUser
727 731 :param repo_name: Set the repository name.
728 732 :type repo_name: str
729 733     :param repo_type: Set the repository type; 'hg', 'git', or 'svn'.
730 734 :type repo_type: str
731 735 :param owner: user_id or username
732 736 :type owner: Optional(str)
733 737 :param description: Set the repository description.
734 738 :type description: Optional(str)
735 739 :param private: set repository as private
736 740 :type private: bool
737 741 :param clone_uri: set clone_uri
738 742 :type clone_uri: str
739 743 :param push_uri: set push_uri
740 744 :type push_uri: str
741 745 :param landing_rev: <rev_type>:<rev>
742 746 :type landing_rev: str
743 747 :param enable_locking:
744 748 :type enable_locking: bool
745 749 :param enable_downloads:
746 750 :type enable_downloads: bool
747 751 :param enable_statistics:
748 752 :type enable_statistics: bool
749 753 :param copy_permissions: Copy permission from group in which the
750 754 repository is being created.
751 755 :type copy_permissions: bool
752 756
753 757
754 758 Example output:
755 759
756 760 .. code-block:: bash
757 761
758 762 id : <id_given_in_input>
759 763 result: {
760 764 "msg": "Created new repository `<reponame>`",
761 765 "success": true,
762 766 "task": "<celery task id or None if done sync>"
763 767 }
764 768 error: null
765 769
766 770
767 771 Example error output:
768 772
769 773 .. code-block:: bash
770 774
771 775 id : <id_given_in_input>
772 776 result : null
773 777 error : {
774 778 'failed to create repository `<repo_name>`'
775 779 }
776 780
777 781 """
778 782
779 783 owner = validate_set_owner_permissions(apiuser, owner)
780 784
781 785 description = Optional.extract(description)
782 786 copy_permissions = Optional.extract(copy_permissions)
783 787 clone_uri = Optional.extract(clone_uri)
784 788 push_uri = Optional.extract(push_uri)
785 789 landing_commit_ref = Optional.extract(landing_rev)
786 790
787 791 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
788 792 if isinstance(private, Optional):
789 793 private = defs.get('repo_private') or Optional.extract(private)
790 794 if isinstance(repo_type, Optional):
791 795 repo_type = defs.get('repo_type')
792 796 if isinstance(enable_statistics, Optional):
793 797 enable_statistics = defs.get('repo_enable_statistics')
794 798 if isinstance(enable_locking, Optional):
795 799 enable_locking = defs.get('repo_enable_locking')
796 800 if isinstance(enable_downloads, Optional):
797 801 enable_downloads = defs.get('repo_enable_downloads')
798 802
799 803 schema = repo_schema.RepoSchema().bind(
800 804 repo_type_options=rhodecode.BACKENDS.keys(),
801 805 repo_type=repo_type,
802 806 # user caller
803 807 user=apiuser)
804 808
805 809 try:
806 810 schema_data = schema.deserialize(dict(
807 811 repo_name=repo_name,
808 812 repo_type=repo_type,
809 813 repo_owner=owner.username,
810 814 repo_description=description,
811 815 repo_landing_commit_ref=landing_commit_ref,
812 816 repo_clone_uri=clone_uri,
813 817 repo_push_uri=push_uri,
814 818 repo_private=private,
815 819 repo_copy_permissions=copy_permissions,
816 820 repo_enable_statistics=enable_statistics,
817 821 repo_enable_downloads=enable_downloads,
818 822 repo_enable_locking=enable_locking))
819 823 except validation_schema.Invalid as err:
820 824 raise JSONRPCValidationError(colander_exc=err)
821 825
822 826 try:
823 827 data = {
824 828 'owner': owner,
825 829 'repo_name': schema_data['repo_group']['repo_name_without_group'],
826 830 'repo_name_full': schema_data['repo_name'],
827 831 'repo_group': schema_data['repo_group']['repo_group_id'],
828 832 'repo_type': schema_data['repo_type'],
829 833 'repo_description': schema_data['repo_description'],
830 834 'repo_private': schema_data['repo_private'],
831 835 'clone_uri': schema_data['repo_clone_uri'],
832 836 'push_uri': schema_data['repo_push_uri'],
833 837 'repo_landing_rev': schema_data['repo_landing_commit_ref'],
834 838 'enable_statistics': schema_data['repo_enable_statistics'],
835 839 'enable_locking': schema_data['repo_enable_locking'],
836 840 'enable_downloads': schema_data['repo_enable_downloads'],
837 841 'repo_copy_permissions': schema_data['repo_copy_permissions'],
838 842 }
839 843
840 844 task = RepoModel().create(form_data=data, cur_user=owner.user_id)
841 845 task_id = get_task_id(task)
842 846 # no commit, it's done in RepoModel, or async via celery
843 847 return {
844 848 'msg': "Created new repository `%s`" % (schema_data['repo_name'],),
845 849 'success': True, # cannot return the repo data here since fork
846 850 # can be done async
847 851 'task': task_id
848 852 }
849 853 except Exception:
850 854 log.exception(
851 855 u"Exception while trying to create the repository %s",
852 856 schema_data['repo_name'])
853 857 raise JSONRPCError(
854 858 'failed to create repository `%s`' % (schema_data['repo_name'],))
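
# Illustrative payload only (placeholder values): create a private Mercurial
# repository nested inside the `foo/bar` repository group.
_EXAMPLE_CREATE_REPO_PAYLOAD = {
    'id': 1,
    'auth_token': 'SECRET_TOKEN',  # placeholder
    'method': 'create_repo',
    'args': {'repo_name': 'foo/bar/repo1', 'repo_type': 'hg',
             'description': 'example repo', 'private': True,
             'copy_permissions': True},
}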
855 859
856 860
857 861 @jsonrpc_method()
858 862 def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
859 863 description=Optional('')):
860 864 """
861 865 Adds an extra field to a repository.
862 866
863 867 This command can only be run using an |authtoken| with at least
864 868 write permissions to the |repo|.
865 869
866 870 :param apiuser: This is filled automatically from the |authtoken|.
867 871 :type apiuser: AuthUser
868 872 :param repoid: Set the repository name or repository id.
869 873 :type repoid: str or int
870 874 :param key: Create a unique field key for this repository.
871 875 :type key: str
872 876 :param label:
873 877 :type label: Optional(str)
874 878 :param description:
875 879 :type description: Optional(str)
876 880 """
877 881 repo = get_repo_or_error(repoid)
878 882 if not has_superadmin_permission(apiuser):
879 883 _perms = ('repository.admin',)
880 884 validate_repo_permissions(apiuser, repoid, repo, _perms)
881 885
882 886 label = Optional.extract(label) or key
883 887 description = Optional.extract(description)
884 888
885 889 field = RepositoryField.get_by_key_name(key, repo)
886 890 if field:
887 891 raise JSONRPCError('Field with key '
888 892 '`%s` exists for repo `%s`' % (key, repoid))
889 893
890 894 try:
891 895 RepoModel().add_repo_field(repo, key, field_label=label,
892 896 field_desc=description)
893 897 Session().commit()
894 898 return {
895 899 'msg': "Added new repository field `%s`" % (key,),
896 900 'success': True,
897 901 }
898 902 except Exception:
899 903 log.exception("Exception occurred while trying to add field to repo")
900 904 raise JSONRPCError(
901 905 'failed to create new field for repository `%s`' % (repoid,))
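
# Illustrative payload only (placeholder values): attach an extra metadata
# field to a repository.
_EXAMPLE_ADD_FIELD_PAYLOAD = {
    'id': 1,
    'auth_token': 'SECRET_TOKEN',  # placeholder
    'method': 'add_field_to_repo',
    'args': {'repoid': 'repo-name', 'key': 'release_channel',
             'label': 'Release channel', 'description': 'Deployment channel'},
}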
902 906
903 907
904 908 @jsonrpc_method()
905 909 def remove_field_from_repo(request, apiuser, repoid, key):
906 910 """
907 911 Removes an extra field from a repository.
908 912
909 913 This command can only be run using an |authtoken| with at least
910 914 write permissions to the |repo|.
911 915
912 916 :param apiuser: This is filled automatically from the |authtoken|.
913 917 :type apiuser: AuthUser
914 918 :param repoid: Set the repository name or repository ID.
915 919 :type repoid: str or int
916 920 :param key: Set the unique field key for this repository.
917 921 :type key: str
918 922 """
919 923
920 924 repo = get_repo_or_error(repoid)
921 925 if not has_superadmin_permission(apiuser):
922 926 _perms = ('repository.admin',)
923 927 validate_repo_permissions(apiuser, repoid, repo, _perms)
924 928
925 929 field = RepositoryField.get_by_key_name(key, repo)
926 930 if not field:
927 931 raise JSONRPCError('Field with key `%s` does not '
928 932 'exists for repo `%s`' % (key, repoid))
929 933
930 934 try:
931 935 RepoModel().delete_repo_field(repo, field_key=key)
932 936 Session().commit()
933 937 return {
934 938 'msg': "Deleted repository field `%s`" % (key,),
935 939 'success': True,
936 940 }
937 941 except Exception:
938 942 log.exception(
939 943 "Exception occurred while trying to delete field from repo")
940 944 raise JSONRPCError(
941 945 'failed to delete field for repository `%s`' % (repoid,))
942 946
943 947
944 948 @jsonrpc_method()
945 949 def update_repo(
946 950 request, apiuser, repoid, repo_name=Optional(None),
947 951 owner=Optional(OAttr('apiuser')), description=Optional(''),
948 952 private=Optional(False),
949 953 clone_uri=Optional(None), push_uri=Optional(None),
950 954 landing_rev=Optional('rev:tip'), fork_of=Optional(None),
951 955 enable_statistics=Optional(False),
952 956 enable_locking=Optional(False),
953 957 enable_downloads=Optional(False), fields=Optional('')):
954 958 """
955 959 Updates a repository with the given information.
956 960
957 961 This command can only be run using an |authtoken| with at least
958 962 admin permissions to the |repo|.
959 963
960 964     * If the repository name contains "/", the repository will be updated
961 965 accordingly with a repository group or nested repository groups
962 966
963 967     For example repoid=repo-test name="foo/bar/repo-test" will update the |repo|
964 968 called "repo-test" and place it inside group "foo/bar".
965 969 You have to have permissions to access and write to the last repository
966 970 group ("bar" in this example)
967 971
968 972 :param apiuser: This is filled automatically from the |authtoken|.
969 973 :type apiuser: AuthUser
970 974 :param repoid: repository name or repository ID.
971 975 :type repoid: str or int
972 976 :param repo_name: Update the |repo| name, including the
973 977 repository group it's in.
974 978 :type repo_name: str
975 979 :param owner: Set the |repo| owner.
976 980 :type owner: str
977 981 :param fork_of: Set the |repo| as fork of another |repo|.
978 982 :type fork_of: str
979 983 :param description: Update the |repo| description.
980 984 :type description: str
981 985 :param private: Set the |repo| as private. (True | False)
982 986 :type private: bool
983 987 :param clone_uri: Update the |repo| clone URI.
984 988 :type clone_uri: str
985 989 :param landing_rev: Set the |repo| landing revision. Default is ``rev:tip``.
986 990 :type landing_rev: str
987 991 :param enable_statistics: Enable statistics on the |repo|, (True | False).
988 992 :type enable_statistics: bool
989 993 :param enable_locking: Enable |repo| locking.
990 994 :type enable_locking: bool
991 995 :param enable_downloads: Enable downloads from the |repo|, (True | False).
992 996 :type enable_downloads: bool
993 997 :param fields: Add extra fields to the |repo|. Use the following
994 998 example format: ``field_key=field_val,field_key2=fieldval2``.
995 999 Escape ', ' with \,
996 1000 :type fields: str
997 1001 """
998 1002
999 1003 repo = get_repo_or_error(repoid)
1000 1004
1001 1005 include_secrets = False
1002 1006 if not has_superadmin_permission(apiuser):
1003 1007 validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))
1004 1008 else:
1005 1009 include_secrets = True
1006 1010
1007 1011 updates = dict(
1008 1012 repo_name=repo_name
1009 1013 if not isinstance(repo_name, Optional) else repo.repo_name,
1010 1014
1011 1015 fork_id=fork_of
1012 1016 if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None,
1013 1017
1014 1018 user=owner
1015 1019 if not isinstance(owner, Optional) else repo.user.username,
1016 1020
1017 1021 repo_description=description
1018 1022 if not isinstance(description, Optional) else repo.description,
1019 1023
1020 1024 repo_private=private
1021 1025 if not isinstance(private, Optional) else repo.private,
1022 1026
1023 1027 clone_uri=clone_uri
1024 1028 if not isinstance(clone_uri, Optional) else repo.clone_uri,
1025 1029
1026 1030 push_uri=push_uri
1027 1031 if not isinstance(push_uri, Optional) else repo.push_uri,
1028 1032
1029 1033 repo_landing_rev=landing_rev
1030 1034 if not isinstance(landing_rev, Optional) else repo._landing_revision,
1031 1035
1032 1036 repo_enable_statistics=enable_statistics
1033 1037 if not isinstance(enable_statistics, Optional) else repo.enable_statistics,
1034 1038
1035 1039 repo_enable_locking=enable_locking
1036 1040 if not isinstance(enable_locking, Optional) else repo.enable_locking,
1037 1041
1038 1042 repo_enable_downloads=enable_downloads
1039 1043 if not isinstance(enable_downloads, Optional) else repo.enable_downloads)
1040 1044
1041 1045 ref_choices, _labels = ScmModel().get_repo_landing_revs(
1042 1046 request.translate, repo=repo)
1043 1047
1044 1048 old_values = repo.get_api_data()
1045 1049 repo_type = repo.repo_type
1046 1050 schema = repo_schema.RepoSchema().bind(
1047 1051 repo_type_options=rhodecode.BACKENDS.keys(),
1048 1052 repo_ref_options=ref_choices,
1049 1053 repo_type=repo_type,
1050 1054 # user caller
1051 1055 user=apiuser,
1052 1056 old_values=old_values)
1053 1057 try:
1054 1058 schema_data = schema.deserialize(dict(
1055 1059 # we save old value, users cannot change type
1056 1060 repo_type=repo_type,
1057 1061
1058 1062 repo_name=updates['repo_name'],
1059 1063 repo_owner=updates['user'],
1060 1064 repo_description=updates['repo_description'],
1061 1065 repo_clone_uri=updates['clone_uri'],
1062 1066 repo_push_uri=updates['push_uri'],
1063 1067 repo_fork_of=updates['fork_id'],
1064 1068 repo_private=updates['repo_private'],
1065 1069 repo_landing_commit_ref=updates['repo_landing_rev'],
1066 1070 repo_enable_statistics=updates['repo_enable_statistics'],
1067 1071 repo_enable_downloads=updates['repo_enable_downloads'],
1068 1072 repo_enable_locking=updates['repo_enable_locking']))
1069 1073 except validation_schema.Invalid as err:
1070 1074 raise JSONRPCValidationError(colander_exc=err)
1071 1075
1072 1076 # save validated data back into the updates dict
1073 1077 validated_updates = dict(
1074 1078 repo_name=schema_data['repo_group']['repo_name_without_group'],
1075 1079 repo_group=schema_data['repo_group']['repo_group_id'],
1076 1080
1077 1081 user=schema_data['repo_owner'],
1078 1082 repo_description=schema_data['repo_description'],
1079 1083 repo_private=schema_data['repo_private'],
1080 1084 clone_uri=schema_data['repo_clone_uri'],
1081 1085 push_uri=schema_data['repo_push_uri'],
1082 1086 repo_landing_rev=schema_data['repo_landing_commit_ref'],
1083 1087 repo_enable_statistics=schema_data['repo_enable_statistics'],
1084 1088 repo_enable_locking=schema_data['repo_enable_locking'],
1085 1089 repo_enable_downloads=schema_data['repo_enable_downloads'],
1086 1090 )
1087 1091
1088 1092 if schema_data['repo_fork_of']:
1089 1093 fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
1090 1094 validated_updates['fork_id'] = fork_repo.repo_id
1091 1095
1092 1096 # extra fields
1093 1097 fields = parse_args(Optional.extract(fields), key_prefix='ex_')
1094 1098 if fields:
1095 1099 validated_updates.update(fields)
1096 1100
1097 1101 try:
1098 1102 RepoModel().update(repo, **validated_updates)
1099 1103 audit_logger.store_api(
1100 1104 'repo.edit', action_data={'old_data': old_values},
1101 1105 user=apiuser, repo=repo)
1102 1106 Session().commit()
1103 1107 return {
1104 1108 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
1105 1109 'repository': repo.get_api_data(include_secrets=include_secrets)
1106 1110 }
1107 1111 except Exception:
1108 1112 log.exception(
1109 1113 u"Exception while trying to update the repository %s",
1110 1114 repoid)
1111 1115 raise JSONRPCError('failed to update repo `%s`' % repoid)
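
# Illustrative payload only (placeholder values): move a repository into the
# `foo/bar` group and enable downloads; omitted parameters keep their current values.
_EXAMPLE_UPDATE_REPO_PAYLOAD = {
    'id': 1,
    'auth_token': 'SECRET_TOKEN',  # placeholder
    'method': 'update_repo',
    'args': {'repoid': 'repo-test', 'repo_name': 'foo/bar/repo-test',
             'enable_downloads': True},
}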
1112 1116
1113 1117
1114 1118 @jsonrpc_method()
1115 1119 def fork_repo(request, apiuser, repoid, fork_name,
1116 1120 owner=Optional(OAttr('apiuser')),
1117 1121 description=Optional(''),
1118 1122 private=Optional(False),
1119 1123 clone_uri=Optional(None),
1120 1124 landing_rev=Optional('rev:tip'),
1121 1125 copy_permissions=Optional(False)):
1122 1126 """
1123 1127 Creates a fork of the specified |repo|.
1124 1128
1125 1129     * If the fork_name contains "/", the fork will be created inside
1126 1130 a repository group or nested repository groups
1127 1131
1128 1132     For example "foo/bar/fork-repo" will create a fork called "fork-repo"
1129 1133 inside group "foo/bar". You have to have permissions to access and
1130 1134 write to the last repository group ("bar" in this example)
1131 1135
1132 1136     This command can only be run using an |authtoken| with at least read
1133 1137     permissions on the repository being forked, plus fork permissions for the user.
1134 1138
1135 1139 :param apiuser: This is filled automatically from the |authtoken|.
1136 1140 :type apiuser: AuthUser
1137 1141 :param repoid: Set repository name or repository ID.
1138 1142 :type repoid: str or int
1139 1143     :param fork_name: Set the fork name, including its repository group membership.
1140 1144 :type fork_name: str
1141 1145 :param owner: Set the fork owner.
1142 1146 :type owner: str
1143 1147 :param description: Set the fork description.
1144 1148 :type description: str
1145 1149 :param copy_permissions: Copy permissions from parent |repo|. The
1146 1150 default is False.
1147 1151 :type copy_permissions: bool
1148 1152 :param private: Make the fork private. The default is False.
1149 1153 :type private: bool
1150 1154 :param landing_rev: Set the landing revision. The default is tip.
1151 1155
1152 1156 Example output:
1153 1157
1154 1158 .. code-block:: bash
1155 1159
1156 1160 id : <id_for_response>
1157 1161 api_key : "<api_key>"
1158 1162 args: {
1159 1163 "repoid" : "<reponame or repo_id>",
1160 1164 "fork_name": "<forkname>",
1161 1165 "owner": "<username or user_id = Optional(=apiuser)>",
1162 1166 "description": "<description>",
1163 1167 "copy_permissions": "<bool>",
1164 1168 "private": "<bool>",
1165 1169 "landing_rev": "<landing_rev>"
1166 1170 }
1167 1171
1168 1172 Example error output:
1169 1173
1170 1174 .. code-block:: bash
1171 1175
1172 1176 id : <id_given_in_input>
1173 1177 result: {
1174 1178 "msg": "Created fork of `<reponame>` as `<forkname>`",
1175 1179 "success": true,
1176 1180 "task": "<celery task id or None if done sync>"
1177 1181 }
1178 1182 error: null
1179 1183
1180 1184 """
1181 1185
1182 1186 repo = get_repo_or_error(repoid)
1183 1187 repo_name = repo.repo_name
1184 1188
1185 1189 if not has_superadmin_permission(apiuser):
1186 1190 # check if we have at least read permission for
1187 1191 # this repo that we fork !
1188 1192 _perms = (
1189 1193 'repository.admin', 'repository.write', 'repository.read')
1190 1194 validate_repo_permissions(apiuser, repoid, repo, _perms)
1191 1195
1192 1196 # check if the regular user has at least fork permissions as well
1193 1197 if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser):
1194 1198 raise JSONRPCForbidden()
1195 1199
1196 1200 # check if user can set owner parameter
1197 1201 owner = validate_set_owner_permissions(apiuser, owner)
1198 1202
1199 1203 description = Optional.extract(description)
1200 1204 copy_permissions = Optional.extract(copy_permissions)
1201 1205 clone_uri = Optional.extract(clone_uri)
1202 1206 landing_commit_ref = Optional.extract(landing_rev)
1203 1207 private = Optional.extract(private)
1204 1208
1205 1209 schema = repo_schema.RepoSchema().bind(
1206 1210 repo_type_options=rhodecode.BACKENDS.keys(),
1207 1211 repo_type=repo.repo_type,
1208 1212 # user caller
1209 1213 user=apiuser)
1210 1214
1211 1215 try:
1212 1216 schema_data = schema.deserialize(dict(
1213 1217 repo_name=fork_name,
1214 1218 repo_type=repo.repo_type,
1215 1219 repo_owner=owner.username,
1216 1220 repo_description=description,
1217 1221 repo_landing_commit_ref=landing_commit_ref,
1218 1222 repo_clone_uri=clone_uri,
1219 1223 repo_private=private,
1220 1224 repo_copy_permissions=copy_permissions))
1221 1225 except validation_schema.Invalid as err:
1222 1226 raise JSONRPCValidationError(colander_exc=err)
1223 1227
1224 1228 try:
1225 1229 data = {
1226 1230 'fork_parent_id': repo.repo_id,
1227 1231
1228 1232 'repo_name': schema_data['repo_group']['repo_name_without_group'],
1229 1233 'repo_name_full': schema_data['repo_name'],
1230 1234 'repo_group': schema_data['repo_group']['repo_group_id'],
1231 1235 'repo_type': schema_data['repo_type'],
1232 1236 'description': schema_data['repo_description'],
1233 1237 'private': schema_data['repo_private'],
1234 1238 'copy_permissions': schema_data['repo_copy_permissions'],
1235 1239 'landing_rev': schema_data['repo_landing_commit_ref'],
1236 1240 }
1237 1241
1238 1242 task = RepoModel().create_fork(data, cur_user=owner.user_id)
1239 1243 # no commit, it's done in RepoModel, or async via celery
1240 1244 task_id = get_task_id(task)
1241 1245
1242 1246 return {
1243 1247 'msg': 'Created fork of `%s` as `%s`' % (
1244 1248 repo.repo_name, schema_data['repo_name']),
1245 1249 'success': True, # cannot return the repo data here since fork
1246 1250 # can be done async
1247 1251 'task': task_id
1248 1252 }
1249 1253 except Exception:
1250 1254 log.exception(
1251 1255 u"Exception while trying to create fork %s",
1252 1256 schema_data['repo_name'])
1253 1257 raise JSONRPCError(
1254 1258 'failed to fork repository `%s` as `%s`' % (
1255 1259 repo_name, schema_data['repo_name']))
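
# Illustrative payload only (placeholder values): fork a repository into the
# `foo/bar` group, copying the parent's permissions.
_EXAMPLE_FORK_REPO_PAYLOAD = {
    'id': 1,
    'auth_token': 'SECRET_TOKEN',  # placeholder
    'method': 'fork_repo',
    'args': {'repoid': 'repo-name', 'fork_name': 'foo/bar/fork-repo',
             'copy_permissions': True, 'private': False},
}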
1256 1260
1257 1261
1258 1262 @jsonrpc_method()
1259 1263 def delete_repo(request, apiuser, repoid, forks=Optional('')):
1260 1264 """
1261 1265 Deletes a repository.
1262 1266
1263 1267     * When the `forks` parameter is set, it's possible to detach or delete
1264 1268       forks of the deleted repository.
1265 1269
1266 1270 This command can only be run using an |authtoken| with admin
1267 1271 permissions on the |repo|.
1268 1272
1269 1273 :param apiuser: This is filled automatically from the |authtoken|.
1270 1274 :type apiuser: AuthUser
1271 1275 :param repoid: Set the repository name or repository ID.
1272 1276 :type repoid: str or int
1273 1277 :param forks: Set to `detach` or `delete` forks from the |repo|.
1274 1278 :type forks: Optional(str)
1275 1279
1276 1280     Example output:
1277 1281
1278 1282 .. code-block:: bash
1279 1283
1280 1284 id : <id_given_in_input>
1281 1285 result: {
1282 1286 "msg": "Deleted repository `<reponame>`",
1283 1287 "success": true
1284 1288 }
1285 1289 error: null
1286 1290 """
1287 1291
1288 1292 repo = get_repo_or_error(repoid)
1289 1293 repo_name = repo.repo_name
1290 1294 if not has_superadmin_permission(apiuser):
1291 1295 _perms = ('repository.admin',)
1292 1296 validate_repo_permissions(apiuser, repoid, repo, _perms)
1293 1297
1294 1298 try:
1295 1299 handle_forks = Optional.extract(forks)
1296 1300 _forks_msg = ''
1297 1301 _forks = [f for f in repo.forks]
1298 1302 if handle_forks == 'detach':
1299 1303 _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
1300 1304 elif handle_forks == 'delete':
1301 1305 _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
1302 1306 elif _forks:
1303 1307 raise JSONRPCError(
1304 1308 'Cannot delete `%s` it still contains attached forks' %
1305 1309 (repo.repo_name,)
1306 1310 )
1307 1311 old_data = repo.get_api_data()
1308 1312 RepoModel().delete(repo, forks=forks)
1309 1313
1310 1314 repo = audit_logger.RepoWrap(repo_id=None,
1311 1315 repo_name=repo.repo_name)
1312 1316
1313 1317 audit_logger.store_api(
1314 1318 'repo.delete', action_data={'old_data': old_data},
1315 1319 user=apiuser, repo=repo)
1316 1320
1317 1321 ScmModel().mark_for_invalidation(repo_name, delete=True)
1318 1322 Session().commit()
1319 1323 return {
1320 1324 'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg),
1321 1325 'success': True
1322 1326 }
1323 1327 except Exception:
1324 1328 log.exception("Exception occurred while trying to delete repo")
1325 1329 raise JSONRPCError(
1326 1330 'failed to delete repository `%s`' % (repo_name,)
1327 1331 )
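
# Illustrative payload only (placeholder values): delete a repository and
# detach (rather than delete) any forks pointing at it.
_EXAMPLE_DELETE_REPO_PAYLOAD = {
    'id': 1,
    'auth_token': 'SECRET_TOKEN',  # placeholder
    'method': 'delete_repo',
    'args': {'repoid': 'repo-name', 'forks': 'detach'},
}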
1328 1332
1329 1333
1330 1334 #TODO: marcink, change name ?
1331 1335 @jsonrpc_method()
1332 1336 def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
1333 1337 """
1334 1338 Invalidates the cache for the specified repository.
1335 1339
1336 1340 This command can only be run using an |authtoken| with admin rights to
1337 1341 the specified repository.
1338 1342
1339 1343 This command takes the following options:
1340 1344
1341 1345 :param apiuser: This is filled automatically from |authtoken|.
1342 1346 :type apiuser: AuthUser
1343 1347 :param repoid: Sets the repository name or repository ID.
1344 1348 :type repoid: str or int
1345 1349 :param delete_keys: This deletes the invalidated keys instead of
1346 1350 just flagging them.
1347 1351 :type delete_keys: Optional(``True`` | ``False``)
1348 1352
1349 1353 Example output:
1350 1354
1351 1355 .. code-block:: bash
1352 1356
1353 1357 id : <id_given_in_input>
1354 1358 result : {
1355 1359 'msg': Cache for repository `<repository name>` was invalidated,
1356 1360 'repository': <repository name>
1357 1361 }
1358 1362 error : null
1359 1363
1360 1364 Example error output:
1361 1365
1362 1366 .. code-block:: bash
1363 1367
1364 1368 id : <id_given_in_input>
1365 1369 result : null
1366 1370 error : {
1367 1371 'Error occurred during cache invalidation action'
1368 1372 }
1369 1373
1370 1374 """
1371 1375
1372 1376 repo = get_repo_or_error(repoid)
1373 1377 if not has_superadmin_permission(apiuser):
1374 1378 _perms = ('repository.admin', 'repository.write',)
1375 1379 validate_repo_permissions(apiuser, repoid, repo, _perms)
1376 1380
1377 1381 delete = Optional.extract(delete_keys)
1378 1382 try:
1379 1383 ScmModel().mark_for_invalidation(repo.repo_name, delete=delete)
1380 1384 return {
1381 1385 'msg': 'Cache for repository `%s` was invalidated' % (repoid,),
1382 1386 'repository': repo.repo_name
1383 1387 }
1384 1388 except Exception:
1385 1389 log.exception(
1386 1390 "Exception occurred while trying to invalidate repo cache")
1387 1391 raise JSONRPCError(
1388 1392 'Error occurred during cache invalidation action'
1389 1393 )
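
# Illustrative payload only (placeholder values): invalidate the repository
# cache and delete the invalidated keys instead of just flagging them.
_EXAMPLE_INVALIDATE_CACHE_PAYLOAD = {
    'id': 1,
    'auth_token': 'SECRET_TOKEN',  # placeholder
    'method': 'invalidate_cache',
    'args': {'repoid': 'repo-name', 'delete_keys': True},
}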
1390 1394
1391 1395
1392 1396 #TODO: marcink, change name ?
1393 1397 @jsonrpc_method()
1394 1398 def lock(request, apiuser, repoid, locked=Optional(None),
1395 1399 userid=Optional(OAttr('apiuser'))):
1396 1400 """
1397 1401 Sets the lock state of the specified |repo| by the given user.
1398 1402     For more information, see :ref:`repo-locking`.
1399 1403
1400 1404 * If the ``userid`` option is not set, the repository is locked to the
1401 1405 user who called the method.
1402 1406 * If the ``locked`` parameter is not set, the current lock state of the
1403 1407 repository is displayed.
1404 1408
1405 1409 This command can only be run using an |authtoken| with admin rights to
1406 1410 the specified repository.
1407 1411
1408 1412 This command takes the following options:
1409 1413
1410 1414 :param apiuser: This is filled automatically from the |authtoken|.
1411 1415 :type apiuser: AuthUser
1412 1416 :param repoid: Sets the repository name or repository ID.
1413 1417 :type repoid: str or int
1414 1418 :param locked: Sets the lock state.
1415 1419 :type locked: Optional(``True`` | ``False``)
1416 1420 :param userid: Set the repository lock to this user.
1417 1421 :type userid: Optional(str or int)
1418 1422
1419 1423     Example output:
1420 1424
1421 1425 .. code-block:: bash
1422 1426
1423 1427 id : <id_given_in_input>
1424 1428 result : {
1425 1429 'repo': '<reponame>',
1426 1430 'locked': <bool: lock state>,
1427 1431 'locked_since': <int: lock timestamp>,
1428 1432 'locked_by': <username of person who made the lock>,
1429 1433 'lock_reason': <str: reason for locking>,
1430 1434 'lock_state_changed': <bool: True if lock state has been changed in this request>,
1431 1435 'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
1432 1436 or
1433 1437 'msg': 'Repo `<repository name>` not locked.'
1434 1438 or
1435 1439 'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
1436 1440 }
1437 1441 error : null
1438 1442
1439 1443 Example error output:
1440 1444
1441 1445 .. code-block:: bash
1442 1446
1443 1447 id : <id_given_in_input>
1444 1448 result : null
1445 1449 error : {
1446 1450 'Error occurred locking repository `<reponame>`'
1447 1451 }
1448 1452 """
1449 1453
1450 1454 repo = get_repo_or_error(repoid)
1451 1455 if not has_superadmin_permission(apiuser):
1452 1456 # check if we have at least write permission for this repo !
1453 1457 _perms = ('repository.admin', 'repository.write',)
1454 1458 validate_repo_permissions(apiuser, repoid, repo, _perms)
1455 1459
1456 1460     # make sure a normal user does not pass someone else's userid,
1457 1461     # they are not allowed to do that
1458 1462 if not isinstance(userid, Optional) and userid != apiuser.user_id:
1459 1463 raise JSONRPCError('userid is not the same as your user')
1460 1464
1461 1465 if isinstance(userid, Optional):
1462 1466 userid = apiuser.user_id
1463 1467
1464 1468 user = get_user_or_error(userid)
1465 1469
1466 1470 if isinstance(locked, Optional):
1467 1471 lockobj = repo.locked
1468 1472
1469 1473 if lockobj[0] is None:
1470 1474 _d = {
1471 1475 'repo': repo.repo_name,
1472 1476 'locked': False,
1473 1477 'locked_since': None,
1474 1478 'locked_by': None,
1475 1479 'lock_reason': None,
1476 1480 'lock_state_changed': False,
1477 1481 'msg': 'Repo `%s` not locked.' % repo.repo_name
1478 1482 }
1479 1483 return _d
1480 1484 else:
1481 1485 _user_id, _time, _reason = lockobj
1482 1486 lock_user = get_user_or_error(userid)
1483 1487 _d = {
1484 1488 'repo': repo.repo_name,
1485 1489 'locked': True,
1486 1490 'locked_since': _time,
1487 1491 'locked_by': lock_user.username,
1488 1492 'lock_reason': _reason,
1489 1493 'lock_state_changed': False,
1490 1494 'msg': ('Repo `%s` locked by `%s` on `%s`.'
1491 1495 % (repo.repo_name, lock_user.username,
1492 1496 json.dumps(time_to_datetime(_time))))
1493 1497 }
1494 1498 return _d
1495 1499
1496 1500 # force locked state through a flag
1497 1501 else:
1498 1502 locked = str2bool(locked)
1499 1503 lock_reason = Repository.LOCK_API
1500 1504 try:
1501 1505 if locked:
1502 1506 lock_time = time.time()
1503 1507 Repository.lock(repo, user.user_id, lock_time, lock_reason)
1504 1508 else:
1505 1509 lock_time = None
1506 1510 Repository.unlock(repo)
1507 1511 _d = {
1508 1512 'repo': repo.repo_name,
1509 1513 'locked': locked,
1510 1514 'locked_since': lock_time,
1511 1515 'locked_by': user.username,
1512 1516 'lock_reason': lock_reason,
1513 1517 'lock_state_changed': True,
1514 1518 'msg': ('User `%s` set lock state for repo `%s` to `%s`'
1515 1519 % (user.username, repo.repo_name, locked))
1516 1520 }
1517 1521 return _d
1518 1522 except Exception:
1519 1523 log.exception(
1520 1524 "Exception occurred while trying to lock repository")
1521 1525 raise JSONRPCError(
1522 1526 'Error occurred locking repository `%s`' % repo.repo_name
1523 1527 )
1524 1528
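
As a quick illustration of calling this method from a client, here is a minimal sketch using the JSON-RPC endpoint and the ``requests`` library; the URL, auth token and repository name are placeholders, and ``api_call`` is a hypothetical helper, not part of RhodeCode itself.

.. code-block:: python

    import requests

    API_URL = 'https://rhodecode.example.com/_admin/api'  # placeholder
    AUTH_TOKEN = '<token with admin or write rights>'     # placeholder

    def api_call(method, **args):
        """Hypothetical helper: POST a JSON-RPC payload and unwrap the result."""
        payload = {'id': 1, 'auth_token': AUTH_TOKEN, 'method': method, 'args': args}
        reply = requests.post(API_URL, json=payload).json()
        if reply.get('error'):
            raise RuntimeError(reply['error'])
        return reply['result']

    # query the current lock state (no `locked` flag passed)
    state = api_call('lock', repoid='my-repo')
    print(state['locked'], state['locked_by'])

    # explicitly lock the repository for the calling user
    result = api_call('lock', repoid='my-repo', locked=True)
    print(result['msg'], result['lock_state_changed'])
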
1525 1529
1526 1530 @jsonrpc_method()
1527 1531 def comment_commit(
1528 1532 request, apiuser, repoid, commit_id, message, status=Optional(None),
1529 1533 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
1530 1534 resolves_comment_id=Optional(None),
1531 1535 userid=Optional(OAttr('apiuser'))):
1532 1536 """
1533 1537 Set a commit comment, and optionally change the status of the commit.
1534 1538
1535 1539 :param apiuser: This is filled automatically from the |authtoken|.
1536 1540 :type apiuser: AuthUser
1537 1541 :param repoid: Set the repository name or repository ID.
1538 1542 :type repoid: str or int
1539 1543 :param commit_id: Specify the commit_id for which to set a comment.
1540 1544 :type commit_id: str
1541 1545 :param message: The comment text.
1542 1546 :type message: str
1543 1547 :param status: (**Optional**) status of commit, one of: 'not_reviewed',
1544 1548 'approved', 'rejected', 'under_review'
1545 1549 :type status: str
1546 1550 :param comment_type: Comment type, one of: 'note', 'todo'
1547 1551 :type comment_type: Optional(str), default: 'note'
1548 1552 :param userid: Set the user name of the comment creator.
1549 1553 :type userid: Optional(str or int)
1550 1554
1551 1555 Example output:
1552 1556
1553 1557 .. code-block:: bash
1554 1558
1555 1559 {
1556 1560 "id" : <id_given_in_input>,
1557 1561 "result" : {
1558 1562 "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
1559 1563 "status_change": null or <status>,
1560 1564 "success": true
1561 1565 },
1562 1566 "error" : null
1563 1567 }
1564 1568
1565 1569 """
1566 1570 repo = get_repo_or_error(repoid)
1567 1571 if not has_superadmin_permission(apiuser):
1568 1572 _perms = ('repository.read', 'repository.write', 'repository.admin')
1569 1573 validate_repo_permissions(apiuser, repoid, repo, _perms)
1570 1574
1571 1575 try:
1572 1576 commit_id = repo.scm_instance().get_commit(commit_id=commit_id).raw_id
1573 1577 except Exception as e:
1574 1578 log.exception('Failed to fetch commit')
1575 1579 raise JSONRPCError(safe_str(e))
1576 1580
1577 1581 if isinstance(userid, Optional):
1578 1582 userid = apiuser.user_id
1579 1583
1580 1584 user = get_user_or_error(userid)
1581 1585 status = Optional.extract(status)
1582 1586 comment_type = Optional.extract(comment_type)
1583 1587 resolves_comment_id = Optional.extract(resolves_comment_id)
1584 1588
1585 1589 allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
1586 1590 if status and status not in allowed_statuses:
1587 1591 raise JSONRPCError('Bad status, must be one '
1588 1592 'of %s got %s' % (allowed_statuses, status,))
1589 1593
1590 1594 if resolves_comment_id:
1591 1595 comment = ChangesetComment.get(resolves_comment_id)
1592 1596 if not comment:
1593 1597 raise JSONRPCError(
1594 1598 'Invalid resolves_comment_id `%s` for this commit.'
1595 1599 % resolves_comment_id)
1596 1600 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
1597 1601 raise JSONRPCError(
1598 1602 'Comment `%s` is wrong type for setting status to resolved.'
1599 1603 % resolves_comment_id)
1600 1604
1601 1605 try:
1602 1606 rc_config = SettingsModel().get_all_settings()
1603 1607 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
1604 1608 status_change_label = ChangesetStatus.get_status_lbl(status)
1605 1609 comment = CommentsModel().create(
1606 1610 message, repo, user, commit_id=commit_id,
1607 1611 status_change=status_change_label,
1608 1612 status_change_type=status,
1609 1613 renderer=renderer,
1610 1614 comment_type=comment_type,
1611 1615 resolves_comment_id=resolves_comment_id,
1612 1616 auth_user=apiuser
1613 1617 )
1614 1618 if status:
1615 1619 # also do a status change
1616 1620 try:
1617 1621 ChangesetStatusModel().set_status(
1618 1622 repo, status, user, comment, revision=commit_id,
1619 1623 dont_allow_on_closed_pull_request=True
1620 1624 )
1621 1625 except StatusChangeOnClosedPullRequestError:
1622 1626 log.exception(
1623 1627 "Exception occurred while trying to change repo commit status")
1624 1628 msg = ('Changing status on a changeset associated with '
1625 1629 'a closed pull request is not allowed')
1626 1630 raise JSONRPCError(msg)
1627 1631
1628 1632 Session().commit()
1629 1633 return {
1630 1634 'msg': (
1631 1635 'Commented on commit `%s` for repository `%s`' % (
1632 1636 comment.revision, repo.repo_name)),
1633 1637 'status_change': status,
1634 1638 'success': True,
1635 1639 }
1636 1640 except JSONRPCError:
1637 1641 # catch any inside errors, and re-raise them to prevent from
1638 1642 # below global catch to silence them
1639 1643 raise
1640 1644 except Exception:
1641 1645 log.exception("Exception occurred while trying to comment on commit")
1642 1646 raise JSONRPCError(
1643 1647 'failed to set comment on repository `%s`' % (repo.repo_name,)
1644 1648 )
1645 1649
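
A hedged example of posting a TODO comment that also flips the commit status; the endpoint URL, token, repository and commit id are placeholders, and the allowed ``status`` and ``comment_type`` values follow the docstring above.

.. code-block:: python

    import requests

    payload = {
        'id': 2,
        'auth_token': '<token>',                       # placeholder
        'method': 'comment_commit',
        'args': {
            'repoid': 'my-repo',                       # placeholder
            'commit_id': 'abcdef1234567890',           # placeholder hash
            'message': 'Please add a license header.',
            'status': 'under_review',                  # optional status change
            'comment_type': 'todo',
        },
    }
    reply = requests.post('https://rhodecode.example.com/_admin/api',
                          json=payload).json()
    print(reply['result'] or reply['error'])
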
1646 1650
1647 1651 @jsonrpc_method()
1648 1652 def get_repo_comments(request, apiuser, repoid,
1649 1653 commit_id=Optional(None), comment_type=Optional(None),
1650 1654 userid=Optional(None)):
1651 1655 """
1652 1656 Get all comments for a repository
1653 1657
1654 1658 :param apiuser: This is filled automatically from the |authtoken|.
1655 1659 :type apiuser: AuthUser
1656 1660 :param repoid: Set the repository name or repository ID.
1657 1661 :type repoid: str or int
1658 1662 :param commit_id: Optionally filter the comments by the commit_id
1659 1663 :type commit_id: Optional(str), default: None
1660 1664 :param comment_type: Optionally filter the comments by the comment_type
1661 1665 one of: 'note', 'todo'
1662 1666 :type comment_type: Optional(str), default: None
1663 1667 :param userid: Optionally filter the comments by the author of comment
1664 1668 :type userid: Optional(str or int), Default: None
1665 1669
1666 1670 Example output:
1667 1671
1668 1672 .. code-block:: bash
1669 1673
1670 1674 {
1671 1675 "id" : <id_given_in_input>,
1672 1676 "result" : [
1673 1677 {
1674 1678 "comment_author": <USER_DETAILS>,
1675 1679 "comment_created_on": "2017-02-01T14:38:16.309",
1676 1680 "comment_f_path": "file.txt",
1677 1681 "comment_id": 282,
1678 1682 "comment_lineno": "n1",
1679 1683 "comment_resolved_by": null,
1680 1684 "comment_status": [],
1681 1685 "comment_text": "This file needs a header",
1682 1686 "comment_type": "todo"
1683 1687 }
1684 1688 ],
1685 1689 "error" : null
1686 1690 }
1687 1691
1688 1692 """
1689 1693 repo = get_repo_or_error(repoid)
1690 1694 if not has_superadmin_permission(apiuser):
1691 1695 _perms = ('repository.read', 'repository.write', 'repository.admin')
1692 1696 validate_repo_permissions(apiuser, repoid, repo, _perms)
1693 1697
1694 1698 commit_id = Optional.extract(commit_id)
1695 1699
1696 1700 userid = Optional.extract(userid)
1697 1701 if userid:
1698 1702 user = get_user_or_error(userid)
1699 1703 else:
1700 1704 user = None
1701 1705
1702 1706 comment_type = Optional.extract(comment_type)
1703 1707 if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES:
1704 1708 raise JSONRPCError(
1705 1709 'comment_type must be one of `{}` got {}'.format(
1706 1710 ChangesetComment.COMMENT_TYPES, comment_type)
1707 1711 )
1708 1712
1709 1713 comments = CommentsModel().get_repository_comments(
1710 1714 repo=repo, comment_type=comment_type, user=user, commit_id=commit_id)
1711 1715 return comments
1712 1716
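
To show how the returned structure can be consumed, a small sketch that lists unresolved TODO comments; the field names match the example output above, while the URL, token and repository name are placeholders.

.. code-block:: python

    import requests

    payload = {'id': 3, 'auth_token': '<token>', 'method': 'get_repo_comments',
               'args': {'repoid': 'my-repo', 'comment_type': 'todo'}}  # placeholders
    comments = requests.post('https://rhodecode.example.com/_admin/api',
                             json=payload).json()['result']

    # print every TODO that nobody has resolved yet
    for comment in comments:
        if comment['comment_resolved_by'] is None:
            print(comment['comment_id'], comment['comment_f_path'],
                  comment['comment_lineno'], comment['comment_text'])
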
1713 1717
1714 1718 @jsonrpc_method()
1715 1719 def grant_user_permission(request, apiuser, repoid, userid, perm):
1716 1720 """
1717 1721 Grant permissions for the specified user on the given repository,
1718 1722 or update existing permissions if found.
1719 1723
1720 1724 This command can only be run using an |authtoken| with admin
1721 1725 permissions on the |repo|.
1722 1726
1723 1727 :param apiuser: This is filled automatically from the |authtoken|.
1724 1728 :type apiuser: AuthUser
1725 1729 :param repoid: Set the repository name or repository ID.
1726 1730 :type repoid: str or int
1727 1731 :param userid: Set the user name.
1728 1732 :type userid: str
1729 1733 :param perm: Set the user permissions, using the following format
1730 1734 ``(repository.(none|read|write|admin))``
1731 1735 :type perm: str
1732 1736
1733 1737 Example output:
1734 1738
1735 1739 .. code-block:: bash
1736 1740
1737 1741 id : <id_given_in_input>
1738 1742 result: {
1739 1743 "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
1740 1744 "success": true
1741 1745 }
1742 1746 error: null
1743 1747 """
1744 1748
1745 1749 repo = get_repo_or_error(repoid)
1746 1750 user = get_user_or_error(userid)
1747 1751 perm = get_perm_or_error(perm)
1748 1752 if not has_superadmin_permission(apiuser):
1749 1753 _perms = ('repository.admin',)
1750 1754 validate_repo_permissions(apiuser, repoid, repo, _perms)
1751 1755
1752 1756 perm_additions = [[user.user_id, perm.permission_name, "user"]]
1753 1757 try:
1754 1758 changes = RepoModel().update_permissions(
1755 1759 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
1756 1760
1757 1761 action_data = {
1758 1762 'added': changes['added'],
1759 1763 'updated': changes['updated'],
1760 1764 'deleted': changes['deleted'],
1761 1765 }
1762 1766 audit_logger.store_api(
1763 1767 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1764 1768
1765 1769 Session().commit()
1766 1770 return {
1767 1771 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
1768 1772 perm.permission_name, user.username, repo.repo_name
1769 1773 ),
1770 1774 'success': True
1771 1775 }
1772 1776 except Exception:
1773 1777 log.exception("Exception occurred while trying edit permissions for repo")
1774 1778 raise JSONRPCError(
1775 1779 'failed to edit permission for user: `%s` in repo: `%s`' % (
1776 1780 userid, repoid
1777 1781 )
1778 1782 )
1779 1783
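
The grant and revoke calls are symmetric, so one sketch covers both (the revoke methods follow below); ``rpc`` is a hypothetical helper and the URL, token, repository and username are placeholders. Valid ``perm`` values are ``repository.none``, ``repository.read``, ``repository.write`` and ``repository.admin``.

.. code-block:: python

    import requests

    def rpc(method, args, token='<token>',
            url='https://rhodecode.example.com/_admin/api'):   # placeholders
        payload = {'id': 4, 'auth_token': token, 'method': method, 'args': args}
        return requests.post(url, json=payload).json()

    # grant write access to a single user
    print(rpc('grant_user_permission',
              {'repoid': 'my-repo', 'userid': 'jane', 'perm': 'repository.write'}))

    # revoking uses the same call shape, just without `perm`
    print(rpc('revoke_user_permission', {'repoid': 'my-repo', 'userid': 'jane'}))
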
1780 1784
1781 1785 @jsonrpc_method()
1782 1786 def revoke_user_permission(request, apiuser, repoid, userid):
1783 1787 """
1784 1788 Revoke permission for a user on the specified repository.
1785 1789
1786 1790 This command can only be run using an |authtoken| with admin
1787 1791 permissions on the |repo|.
1788 1792
1789 1793 :param apiuser: This is filled automatically from the |authtoken|.
1790 1794 :type apiuser: AuthUser
1791 1795 :param repoid: Set the repository name or repository ID.
1792 1796 :type repoid: str or int
1793 1797 :param userid: Set the user name of the revoked user.
1794 1798 :type userid: str or int
1795 1799
1796 1800 Example output:
1797 1801
1798 1802 .. code-block:: bash
1799 1803
1800 1804 id : <id_given_in_input>
1801 1805 result: {
1802 1806 "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
1803 1807 "success": true
1804 1808 }
1805 1809 error: null
1806 1810 """
1807 1811
1808 1812 repo = get_repo_or_error(repoid)
1809 1813 user = get_user_or_error(userid)
1810 1814 if not has_superadmin_permission(apiuser):
1811 1815 _perms = ('repository.admin',)
1812 1816 validate_repo_permissions(apiuser, repoid, repo, _perms)
1813 1817
1814 1818 perm_deletions = [[user.user_id, None, "user"]]
1815 1819 try:
1816 1820 changes = RepoModel().update_permissions(
1817 1821 repo=repo, perm_deletions=perm_deletions, cur_user=user)
1818 1822
1819 1823 action_data = {
1820 1824 'added': changes['added'],
1821 1825 'updated': changes['updated'],
1822 1826 'deleted': changes['deleted'],
1823 1827 }
1824 1828 audit_logger.store_api(
1825 1829 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1826 1830
1827 1831 Session().commit()
1828 1832 return {
1829 1833 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
1830 1834 user.username, repo.repo_name
1831 1835 ),
1832 1836 'success': True
1833 1837 }
1834 1838 except Exception:
1835 1839 log.exception("Exception occurred while trying revoke permissions to repo")
1836 1840 raise JSONRPCError(
1837 1841 'failed to edit permission for user: `%s` in repo: `%s`' % (
1838 1842 userid, repoid
1839 1843 )
1840 1844 )
1841 1845
1842 1846
1843 1847 @jsonrpc_method()
1844 1848 def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
1845 1849 """
1846 1850 Grant permission for a user group on the specified repository,
1847 1851 or update existing permissions.
1848 1852
1849 1853 This command can only be run using an |authtoken| with admin
1850 1854 permissions on the |repo|.
1851 1855
1852 1856 :param apiuser: This is filled automatically from the |authtoken|.
1853 1857 :type apiuser: AuthUser
1854 1858 :param repoid: Set the repository name or repository ID.
1855 1859 :type repoid: str or int
1856 1860 :param usergroupid: Specify the ID of the user group.
1857 1861 :type usergroupid: str or int
1858 1862 :param perm: Set the user group permissions using the following
1859 1863 format: (repository.(none|read|write|admin))
1860 1864 :type perm: str
1861 1865
1862 1866 Example output:
1863 1867
1864 1868 .. code-block:: bash
1865 1869
1866 1870 id : <id_given_in_input>
1867 1871 result : {
1868 1872 "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
1869 1873 "success": true
1870 1874
1871 1875 }
1872 1876 error : null
1873 1877
1874 1878 Example error output:
1875 1879
1876 1880 .. code-block:: bash
1877 1881
1878 1882 id : <id_given_in_input>
1879 1883 result : null
1880 1884 error : {
1881 1885 "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
1882 1886 }
1883 1887
1884 1888 """
1885 1889
1886 1890 repo = get_repo_or_error(repoid)
1887 1891 perm = get_perm_or_error(perm)
1888 1892 if not has_superadmin_permission(apiuser):
1889 1893 _perms = ('repository.admin',)
1890 1894 validate_repo_permissions(apiuser, repoid, repo, _perms)
1891 1895
1892 1896 user_group = get_user_group_or_error(usergroupid)
1893 1897 if not has_superadmin_permission(apiuser):
1894 1898 # check if we have at least read permission for this user group !
1895 1899 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
1896 1900 if not HasUserGroupPermissionAnyApi(*_perms)(
1897 1901 user=apiuser, user_group_name=user_group.users_group_name):
1898 1902 raise JSONRPCError(
1899 1903 'user group `%s` does not exist' % (usergroupid,))
1900 1904
1901 1905 perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]]
1902 1906 try:
1903 1907 changes = RepoModel().update_permissions(
1904 1908 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
1905 1909 action_data = {
1906 1910 'added': changes['added'],
1907 1911 'updated': changes['updated'],
1908 1912 'deleted': changes['deleted'],
1909 1913 }
1910 1914 audit_logger.store_api(
1911 1915 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1912 1916
1913 1917 Session().commit()
1914 1918 return {
1915 1919 'msg': 'Granted perm: `%s` for user group: `%s` in '
1916 1920 'repo: `%s`' % (
1917 1921 perm.permission_name, user_group.users_group_name,
1918 1922 repo.repo_name
1919 1923 ),
1920 1924 'success': True
1921 1925 }
1922 1926 except Exception:
1923 1927 log.exception(
1924 1928 "Exception occurred while trying change permission on repo")
1925 1929 raise JSONRPCError(
1926 1930 'failed to edit permission for user group: `%s` in '
1927 1931 'repo: `%s`' % (
1928 1932 usergroupid, repo.repo_name
1929 1933 )
1930 1934 )
1931 1935
1932 1936
1933 1937 @jsonrpc_method()
1934 1938 def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
1935 1939 """
1936 1940 Revoke the permissions of a user group on a given repository.
1937 1941
1938 1942 This command can only be run using an |authtoken| with admin
1939 1943 permissions on the |repo|.
1940 1944
1941 1945 :param apiuser: This is filled automatically from the |authtoken|.
1942 1946 :type apiuser: AuthUser
1943 1947 :param repoid: Set the repository name or repository ID.
1944 1948 :type repoid: str or int
1945 1949 :param usergroupid: Specify the user group ID.
1946 1950 :type usergroupid: str or int
1947 1951
1948 1952 Example output:
1949 1953
1950 1954 .. code-block:: bash
1951 1955
1952 1956 id : <id_given_in_input>
1953 1957 result: {
1954 1958 "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
1955 1959 "success": true
1956 1960 }
1957 1961 error: null
1958 1962 """
1959 1963
1960 1964 repo = get_repo_or_error(repoid)
1961 1965 if not has_superadmin_permission(apiuser):
1962 1966 _perms = ('repository.admin',)
1963 1967 validate_repo_permissions(apiuser, repoid, repo, _perms)
1964 1968
1965 1969 user_group = get_user_group_or_error(usergroupid)
1966 1970 if not has_superadmin_permission(apiuser):
1967 1971 # check if we have at least read permission for this user group !
1968 1972 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
1969 1973 if not HasUserGroupPermissionAnyApi(*_perms)(
1970 1974 user=apiuser, user_group_name=user_group.users_group_name):
1971 1975 raise JSONRPCError(
1972 1976 'user group `%s` does not exist' % (usergroupid,))
1973 1977
1974 1978 perm_deletions = [[user_group.users_group_id, None, "user_group"]]
1975 1979 try:
1976 1980 changes = RepoModel().update_permissions(
1977 1981 repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
1978 1982 action_data = {
1979 1983 'added': changes['added'],
1980 1984 'updated': changes['updated'],
1981 1985 'deleted': changes['deleted'],
1982 1986 }
1983 1987 audit_logger.store_api(
1984 1988 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1985 1989
1986 1990 Session().commit()
1987 1991 return {
1988 1992 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % (
1989 1993 user_group.users_group_name, repo.repo_name
1990 1994 ),
1991 1995 'success': True
1992 1996 }
1993 1997 except Exception:
1994 1998 log.exception("Exception occurred while trying revoke "
1995 1999 "user group permission on repo")
1996 2000 raise JSONRPCError(
1997 2001 'failed to edit permission for user group: `%s` in '
1998 2002 'repo: `%s`' % (
1999 2003 user_group.users_group_name, repo.repo_name
2000 2004 )
2001 2005 )
2002 2006
2003 2007
2004 2008 @jsonrpc_method()
2005 2009 def pull(request, apiuser, repoid, remote_uri=Optional(None)):
2006 2010 """
2007 2011 Triggers a pull on the given repository from a remote location. You
2008 2012 can use this to keep remote repositories up-to-date.
2009 2013
2010 2014 This command can only be run using an |authtoken| with admin
2011 2015 rights to the specified repository. For more information,
2012 2016 see :ref:`config-token-ref`.
2013 2017
2014 2018 This command takes the following options:
2015 2019
2016 2020 :param apiuser: This is filled automatically from the |authtoken|.
2017 2021 :type apiuser: AuthUser
2018 2022 :param repoid: The repository name or repository ID.
2019 2023 :type repoid: str or int
2020 2024 :param remote_uri: Optional remote URI to pass in for pull
2021 2025 :type remote_uri: str
2022 2026
2023 2027 Example output:
2024 2028
2025 2029 .. code-block:: bash
2026 2030
2027 2031 id : <id_given_in_input>
2028 2032 result : {
2029 2033 "msg": "Pulled from url `<remote_url>` on repo `<repository name>`"
2030 2034 "repository": "<repository name>"
2031 2035 }
2032 2036 error : null
2033 2037
2034 2038 Example error output:
2035 2039
2036 2040 .. code-block:: bash
2037 2041
2038 2042 id : <id_given_in_input>
2039 2043 result : null
2040 2044 error : {
2041 2045 "Unable to push changes from `<remote_url>`"
2042 2046 }
2043 2047
2044 2048 """
2045 2049
2046 2050 repo = get_repo_or_error(repoid)
2047 2051 remote_uri = Optional.extract(remote_uri)
2048 2052 remote_uri_display = remote_uri or repo.clone_uri_hidden
2049 2053 if not has_superadmin_permission(apiuser):
2050 2054 _perms = ('repository.admin',)
2051 2055 validate_repo_permissions(apiuser, repoid, repo, _perms)
2052 2056
2053 2057 try:
2054 2058 ScmModel().pull_changes(
2055 2059 repo.repo_name, apiuser.username, remote_uri=remote_uri)
2056 2060 return {
2057 2061 'msg': 'Pulled from url `%s` on repo `%s`' % (
2058 2062 remote_uri_display, repo.repo_name),
2059 2063 'repository': repo.repo_name
2060 2064 }
2061 2065 except Exception:
2062 2066 log.exception("Exception occurred while trying to "
2063 2067 "pull changes from remote location")
2064 2068 raise JSONRPCError(
2065 2069 'Unable to pull changes from `%s`' % remote_uri_display
2066 2070 )
2067 2071
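
A sketch of triggering a pull, e.g. to keep a mirror in sync; the endpoint, token, repository and upstream URL are placeholders, and ``remote_uri`` is only needed when overriding the stored clone URI.

.. code-block:: python

    import requests

    API = 'https://rhodecode.example.com/_admin/api'   # placeholder
    TOKEN = '<admin token>'                            # placeholder

    def trigger_pull(repoid, remote_uri=None):
        args = {'repoid': repoid}
        if remote_uri:
            args['remote_uri'] = remote_uri            # override stored clone URI
        payload = {'id': 5, 'auth_token': TOKEN, 'method': 'pull', 'args': args}
        reply = requests.post(API, json=payload).json()
        if reply['error']:
            raise RuntimeError(reply['error'])
        return reply['result']['msg']

    print(trigger_pull('mirror/my-repo', 'https://upstream.example.com/my-repo'))
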
2068 2072
2069 2073 @jsonrpc_method()
2070 2074 def strip(request, apiuser, repoid, revision, branch):
2071 2075 """
2072 2076 Strips the given revision from the specified repository.
2073 2077
2074 2078 * This will remove the revision and all of its descendants.
2075 2079
2076 2080 This command can only be run using an |authtoken| with admin rights to
2077 2081 the specified repository.
2078 2082
2079 2083 This command takes the following options:
2080 2084
2081 2085 :param apiuser: This is filled automatically from the |authtoken|.
2082 2086 :type apiuser: AuthUser
2083 2087 :param repoid: The repository name or repository ID.
2084 2088 :type repoid: str or int
2085 2089 :param revision: The revision you wish to strip.
2086 2090 :type revision: str
2087 2091 :param branch: The branch from which to strip the revision.
2088 2092 :type branch: str
2089 2093
2090 2094 Example output:
2091 2095
2092 2096 .. code-block:: bash
2093 2097
2094 2098 id : <id_given_in_input>
2095 2099 result : {
2096 2100 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
2097 2101 "repository": "<repository name>"
2098 2102 }
2099 2103 error : null
2100 2104
2101 2105 Example error output:
2102 2106
2103 2107 .. code-block:: bash
2104 2108
2105 2109 id : <id_given_in_input>
2106 2110 result : null
2107 2111 error : {
2108 2112 "Unable to strip commit <commit_hash> from repo `<repository name>`"
2109 2113 }
2110 2114
2111 2115 """
2112 2116
2113 2117 repo = get_repo_or_error(repoid)
2114 2118 if not has_superadmin_permission(apiuser):
2115 2119 _perms = ('repository.admin',)
2116 2120 validate_repo_permissions(apiuser, repoid, repo, _perms)
2117 2121
2118 2122 try:
2119 2123 ScmModel().strip(repo, revision, branch)
2120 2124 audit_logger.store_api(
2121 2125 'repo.commit.strip', action_data={'commit_id': revision},
2122 2126 repo=repo,
2123 2127 user=apiuser, commit=True)
2124 2128
2125 2129 return {
2126 2130 'msg': 'Stripped commit %s from repo `%s`' % (
2127 2131 revision, repo.repo_name),
2128 2132 'repository': repo.repo_name
2129 2133 }
2130 2134 except Exception:
2131 2135 log.exception("Exception while trying to strip")
2132 2136 raise JSONRPCError(
2133 2137 'Unable to strip commit %s from repo `%s`' % (
2134 2138 revision, repo.repo_name)
2135 2139 )
2136 2140
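
Because stripping removes the commit and all of its descendants, it is worth double-checking the arguments; below is a hedged sketch with placeholder endpoint, token, repository, commit hash and branch.

.. code-block:: python

    import requests

    payload = {'id': 6, 'auth_token': '<admin token>', 'method': 'strip',
               'args': {'repoid': 'my-repo',
                        'revision': 'deadbeefcafe',    # commit to strip (placeholder)
                        'branch': 'default'}}          # branch holding that commit
    reply = requests.post('https://rhodecode.example.com/_admin/api',
                          json=payload).json()
    # on success, the commit and all of its descendants are gone
    print(reply['result'] or reply['error'])
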
2137 2141
2138 2142 @jsonrpc_method()
2139 2143 def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
2140 2144 """
2141 2145 Returns all settings for a repository. If key is given it only returns the
2142 2146 setting identified by the key or null.
2143 2147
2144 2148 :param apiuser: This is filled automatically from the |authtoken|.
2145 2149 :type apiuser: AuthUser
2146 2150 :param repoid: The repository name or repository id.
2147 2151 :type repoid: str or int
2148 2152 :param key: Key of the setting to return.
2149 2153 :type: key: Optional(str)
2150 2154
2151 2155 Example output:
2152 2156
2153 2157 .. code-block:: bash
2154 2158
2155 2159 {
2156 2160 "error": null,
2157 2161 "id": 237,
2158 2162 "result": {
2159 2163 "extensions_largefiles": true,
2160 2164 "extensions_evolve": true,
2161 2165 "hooks_changegroup_push_logger": true,
2162 2166 "hooks_changegroup_repo_size": false,
2163 2167 "hooks_outgoing_pull_logger": true,
2164 2168 "phases_publish": "True",
2165 2169 "rhodecode_hg_use_rebase_for_merging": true,
2166 2170 "rhodecode_pr_merge_enabled": true,
2167 2171 "rhodecode_use_outdated_comments": true
2168 2172 }
2169 2173 }
2170 2174 """
2171 2175
2172 2176 # Restrict access to this api method to admins only.
2173 2177 if not has_superadmin_permission(apiuser):
2174 2178 raise JSONRPCForbidden()
2175 2179
2176 2180 try:
2177 2181 repo = get_repo_or_error(repoid)
2178 2182 settings_model = VcsSettingsModel(repo=repo)
2179 2183 settings = settings_model.get_global_settings()
2180 2184 settings.update(settings_model.get_repo_settings())
2181 2185
2182 2186 # If only a single setting is requested fetch it from all settings.
2183 2187 key = Optional.extract(key)
2184 2188 if key is not None:
2185 2189 settings = settings.get(key, None)
2186 2190 except Exception:
2187 2191 msg = 'Failed to fetch settings for repository `{}`'.format(repoid)
2188 2192 log.exception(msg)
2189 2193 raise JSONRPCError(msg)
2190 2194
2191 2195 return settings
2192 2196
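
A sketch of reading either the full effective settings or a single key; the endpoint, token and repository name are placeholders, and the call requires a super-admin token as enforced above.

.. code-block:: python

    import requests

    def get_repo_setting(repoid, key=None):
        args = {'repoid': repoid}
        if key is not None:
            args['key'] = key                          # only this setting is returned
        payload = {'id': 7, 'auth_token': '<superadmin token>',   # placeholder
                   'method': 'get_repo_settings', 'args': args}
        return requests.post('https://rhodecode.example.com/_admin/api',
                             json=payload).json()['result']

    print(get_repo_setting('my-repo'))                                # full dict
    print(get_repo_setting('my-repo', 'rhodecode_pr_merge_enabled'))  # single value
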
2193 2197
2194 2198 @jsonrpc_method()
2195 2199 def set_repo_settings(request, apiuser, repoid, settings):
2196 2200 """
2197 2201 Update repository settings. Returns true on success.
2198 2202
2199 2203 :param apiuser: This is filled automatically from the |authtoken|.
2200 2204 :type apiuser: AuthUser
2201 2205 :param repoid: The repository name or repository id.
2202 2206 :type repoid: str or int
2203 2207 :param settings: The new settings for the repository.
2204 2208 :type: settings: dict
2205 2209
2206 2210 Example output:
2207 2211
2208 2212 .. code-block:: bash
2209 2213
2210 2214 {
2211 2215 "error": null,
2212 2216 "id": 237,
2213 2217 "result": true
2214 2218 }
2215 2219 """
2216 2220 # Restrict access to this api method to admins only.
2217 2221 if not has_superadmin_permission(apiuser):
2218 2222 raise JSONRPCForbidden()
2219 2223
2220 2224 if type(settings) is not dict:
2221 2225 raise JSONRPCError('Settings have to be a JSON Object.')
2222 2226
2223 2227 try:
2224 2228 settings_model = VcsSettingsModel(repo=repoid)
2225 2229
2226 2230 # Merge global, repo and incoming settings.
2227 2231 new_settings = settings_model.get_global_settings()
2228 2232 new_settings.update(settings_model.get_repo_settings())
2229 2233 new_settings.update(settings)
2230 2234
2231 2235 # Update the settings.
2232 2236 inherit_global_settings = new_settings.get(
2233 2237 'inherit_global_settings', False)
2234 2238 settings_model.create_or_update_repo_settings(
2235 2239 new_settings, inherit_global_settings=inherit_global_settings)
2236 2240 Session().commit()
2237 2241 except Exception:
2238 2242 msg = 'Failed to update settings for repository `{}`'.format(repoid)
2239 2243 log.exception(msg)
2240 2244 raise JSONRPCError(msg)
2241 2245
2242 2246 # Indicate success.
2243 2247 return True
2244 2248
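
Note that the update above is a merge rather than a replacement: incoming settings are layered over the per-repo settings, which are layered over the globals. A plain-dict sketch (setting names are illustrative only) of that precedence:

.. code-block:: python

    # precedence: incoming > per-repo > global, mirroring the update() chain above
    global_settings = {'phases_publish': 'True', 'rhodecode_pr_merge_enabled': True}
    repo_settings = {'rhodecode_pr_merge_enabled': False}
    incoming = {'phases_publish': 'False'}

    new_settings = {}
    new_settings.update(global_settings)
    new_settings.update(repo_settings)
    new_settings.update(incoming)

    print(new_settings)
    # {'phases_publish': 'False', 'rhodecode_pr_merge_enabled': False}
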
2245 2249
2246 2250 @jsonrpc_method()
2247 2251 def maintenance(request, apiuser, repoid):
2248 2252 """
2249 2253 Triggers a maintenance on the given repository.
2250 2254
2251 2255 This command can only be run using an |authtoken| with admin
2252 2256 rights to the specified repository. For more information,
2253 2257 see :ref:`config-token-ref`.
2254 2258
2255 2259 This command takes the following options:
2256 2260
2257 2261 :param apiuser: This is filled automatically from the |authtoken|.
2258 2262 :type apiuser: AuthUser
2259 2263 :param repoid: The repository name or repository ID.
2260 2264 :type repoid: str or int
2261 2265
2262 2266 Example output:
2263 2267
2264 2268 .. code-block:: bash
2265 2269
2266 2270 id : <id_given_in_input>
2267 2271 result : {
2268 2272 "msg": "executed maintenance command",
2269 2273 "executed_actions": [
2270 2274 <action_message>, <action_message2>...
2271 2275 ],
2272 2276 "repository": "<repository name>"
2273 2277 }
2274 2278 error : null
2275 2279
2276 2280 Example error output:
2277 2281
2278 2282 .. code-block:: bash
2279 2283
2280 2284 id : <id_given_in_input>
2281 2285 result : null
2282 2286 error : {
2283 2287 "Unable to execute maintenance on `<reponame>`"
2284 2288 }
2285 2289
2286 2290 """
2287 2291
2288 2292 repo = get_repo_or_error(repoid)
2289 2293 if not has_superadmin_permission(apiuser):
2290 2294 _perms = ('repository.admin',)
2291 2295 validate_repo_permissions(apiuser, repoid, repo, _perms)
2292 2296
2293 2297 try:
2294 2298 maintenance = repo_maintenance.RepoMaintenance()
2295 2299 executed_actions = maintenance.execute(repo)
2296 2300
2297 2301 return {
2298 2302 'msg': 'executed maintenance command',
2299 2303 'executed_actions': executed_actions,
2300 2304 'repository': repo.repo_name
2301 2305 }
2302 2306 except Exception:
2303 2307 log.exception("Exception occurred while trying to run maintenance")
2304 2308 raise JSONRPCError(
2305 2309 'Unable to execute maintenance on `%s`' % repo.repo_name)
@@ -1,841 +1,850 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Module holding everything related to vcs nodes, with vcs2 architecture.
23 23 """
24 24
25 25 import os
26 26 import stat
27 27
28 28 from zope.cachedescriptors.property import Lazy as LazyProperty
29 29
30 30 from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
31 31 from rhodecode.lib.utils import safe_unicode, safe_str
32 32 from rhodecode.lib.utils2 import md5
33 33 from rhodecode.lib.vcs import path as vcspath
34 34 from rhodecode.lib.vcs.backends.base import EmptyCommit, FILEMODE_DEFAULT
35 35 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
36 36 from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
37 37
38 38 LARGEFILE_PREFIX = '.hglf'
39 39
40 40
41 41 class NodeKind:
42 42 SUBMODULE = -1
43 43 DIR = 1
44 44 FILE = 2
45 45 LARGEFILE = 3
46 46
47 47
48 48 class NodeState:
49 49 ADDED = u'added'
50 50 CHANGED = u'changed'
51 51 NOT_CHANGED = u'not changed'
52 52 REMOVED = u'removed'
53 53
54 54
55 55 class NodeGeneratorBase(object):
56 56 """
57 57 Base class for removed, added and changed filenodes; it's a lazy generator
58 58 class that will create filenodes only on iteration or call
59 59
60 60 The len method doesn't need to create filenodes at all
61 61 """
62 62
63 63 def __init__(self, current_paths, cs):
64 64 self.cs = cs
65 65 self.current_paths = current_paths
66 66
67 67 def __call__(self):
68 68 return [n for n in self]
69 69
70 70 def __getslice__(self, i, j):
71 71 for p in self.current_paths[i:j]:
72 72 yield self.cs.get_node(p)
73 73
74 74 def __len__(self):
75 75 return len(self.current_paths)
76 76
77 77 def __iter__(self):
78 78 for p in self.current_paths:
79 79 yield self.cs.get_node(p)
80 80
81 81
82 82 class AddedFileNodesGenerator(NodeGeneratorBase):
83 83 """
84 84 Class holding added files for current commit
85 85 """
86 86
87 87
88 88 class ChangedFileNodesGenerator(NodeGeneratorBase):
89 89 """
90 90 Class holding changed files for current commit
91 91 """
92 92
93 93
94 94 class RemovedFileNodesGenerator(NodeGeneratorBase):
95 95 """
96 96 Class holding removed files for current commit
97 97 """
98 98 def __iter__(self):
99 99 for p in self.current_paths:
100 100 yield RemovedFileNode(path=p)
101 101
102 102 def __getslice__(self, i, j):
103 103 for p in self.current_paths[i:j]:
104 104 yield RemovedFileNode(path=p)
105 105
106 106
107 107 class Node(object):
108 108 """
109 109 Simplest class representing file or directory on repository. SCM backends
110 110 should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
111 111 directly.
112 112
113 113 Node's ``path`` cannot start with slash as we operate on *relative* paths
114 114 only. Moreover, every single node is identified by the ``path`` attribute,
115 115 so it cannot end with a slash either. Otherwise, the path could lead to mistakes.
116 116 """
117 117 RTLO_MARKER = u"\u202E" # RTLO marker allows swapping text, and certain
118 118 # security attacks could be used with this
119 119 commit = None
120 120
121 121 def __init__(self, path, kind):
122 122 self._validate_path(path) # can throw exception if path is invalid
123 123 self.path = safe_str(path.rstrip('/')) # we store paths as str
124 124 if path == '' and kind != NodeKind.DIR:
125 125 raise NodeError("Only DirNode and its subclasses may be "
126 126 "initialized with empty path")
127 127 self.kind = kind
128 128
129 129 if self.is_root() and not self.is_dir():
130 130 raise NodeError("Root node cannot be FILE kind")
131 131
132 132 def _validate_path(self, path):
133 133 if path.startswith('/'):
134 134 raise NodeError(
135 135 "Cannot initialize Node objects with slash at "
136 136 "the beginning as only relative paths are supported. "
137 137 "Got %s" % (path,))
138 138
139 139 @LazyProperty
140 140 def parent(self):
141 141 parent_path = self.get_parent_path()
142 142 if parent_path:
143 143 if self.commit:
144 144 return self.commit.get_node(parent_path)
145 145 return DirNode(parent_path)
146 146 return None
147 147
148 148 @LazyProperty
149 149 def unicode_path(self):
150 150 return safe_unicode(self.path)
151 151
152 152 @LazyProperty
153 153 def has_rtlo(self):
154 154 """Detects if a path has right-to-left-override marker"""
155 155 return self.RTLO_MARKER in self.unicode_path
156 156
157 157 @LazyProperty
158 158 def unicode_path_safe(self):
159 159 """
160 160 Special SAFE representation of path without the right-to-left-override.
161 161 This should be only used for "showing" the file, cannot be used for any
162 162 urls etc.
163 163 """
164 164 return safe_unicode(self.path).replace(self.RTLO_MARKER, '')
165 165
166 166 @LazyProperty
167 167 def dir_path(self):
168 168 """
169 169 Returns name of the directory from full path of this vcs node. Empty
170 170 string is returned if there's no directory in the path
171 171 """
172 172 _parts = self.path.rstrip('/').rsplit('/', 1)
173 173 if len(_parts) == 2:
174 174 return safe_unicode(_parts[0])
175 175 return u''
176 176
177 177 @LazyProperty
178 178 def name(self):
179 179 """
180 180 Returns the name of the node, i.e. only the last
181 181 part of its path.
182 182 """
183 183 return safe_unicode(self.path.rstrip('/').split('/')[-1])
184 184
185 185 @property
186 186 def kind(self):
187 187 return self._kind
188 188
189 189 @kind.setter
190 190 def kind(self, kind):
191 191 if hasattr(self, '_kind'):
192 192 raise NodeError("Cannot change node's kind")
193 193 else:
194 194 self._kind = kind
195 195 # Post setter check (path's trailing slash)
196 196 if self.path.endswith('/'):
197 197 raise NodeError("Node's path cannot end with slash")
198 198
199 199 def __cmp__(self, other):
200 200 """
201 201 Comparator using name of the node, needed for quick list sorting.
202 202 """
203 203
204 204 kind_cmp = cmp(self.kind, other.kind)
205 205 if kind_cmp:
206 206 if isinstance(self, SubModuleNode):
207 207 # we make submodules equal to dirnode for "sorting" purposes
208 208 return NodeKind.DIR
209 209 return kind_cmp
210 210 return cmp(self.name, other.name)
211 211
212 212 def __eq__(self, other):
213 213 for attr in ['name', 'path', 'kind']:
214 214 if getattr(self, attr) != getattr(other, attr):
215 215 return False
216 216 if self.is_file():
217 217 if self.content != other.content:
218 218 return False
219 219 else:
220 220 # For DirNode's check without entering each dir
221 221 self_nodes_paths = list(sorted(n.path for n in self.nodes))
222 222 other_nodes_paths = list(sorted(n.path for n in other.nodes))
223 223 if self_nodes_paths != other_nodes_paths:
224 224 return False
225 225 return True
226 226
227 227 def __ne__(self, other):
228 228 return not self.__eq__(other)
229 229
230 230 def __repr__(self):
231 231 return '<%s %r>' % (self.__class__.__name__, self.path)
232 232
233 233 def __str__(self):
234 234 return self.__repr__()
235 235
236 236 def __unicode__(self):
237 237 return self.name
238 238
239 239 def get_parent_path(self):
240 240 """
241 241 Returns node's parent path or empty string if node is root.
242 242 """
243 243 if self.is_root():
244 244 return ''
245 245 return vcspath.dirname(self.path.rstrip('/')) + '/'
246 246
247 247 def is_file(self):
248 248 """
249 249 Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
250 250 otherwise.
251 251 """
252 252 return self.kind == NodeKind.FILE
253 253
254 254 def is_dir(self):
255 255 """
256 256 Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
257 257 otherwise.
258 258 """
259 259 return self.kind == NodeKind.DIR
260 260
261 261 def is_root(self):
262 262 """
263 263 Returns ``True`` if node is a root node and ``False`` otherwise.
264 264 """
265 265 return self.kind == NodeKind.DIR and self.path == ''
266 266
267 267 def is_submodule(self):
268 268 """
269 269 Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
270 270 otherwise.
271 271 """
272 272 return self.kind == NodeKind.SUBMODULE
273 273
274 274 def is_largefile(self):
275 275 """
276 276 Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
277 277 otherwise
278 278 """
279 279 return self.kind == NodeKind.LARGEFILE
280 280
281 281 def is_link(self):
282 282 if self.commit:
283 283 return self.commit.is_link(self.path)
284 284 return False
285 285
286 286 @LazyProperty
287 287 def added(self):
288 288 return self.state is NodeState.ADDED
289 289
290 290 @LazyProperty
291 291 def changed(self):
292 292 return self.state is NodeState.CHANGED
293 293
294 294 @LazyProperty
295 295 def not_changed(self):
296 296 return self.state is NodeState.NOT_CHANGED
297 297
298 298 @LazyProperty
299 299 def removed(self):
300 300 return self.state is NodeState.REMOVED
301 301
302 302
303 303 class FileNode(Node):
304 304 """
305 305 Class representing file nodes.
306 306
307 307 :attribute: path: path to the node, relative to repository's root
308 308 :attribute: content: if given, explicitly sets the content of the file
309 309 :attribute: commit: if given, content is lazily fetched from it on first access
310 310 :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
311 311 """
312 312 _filter_pre_load = []
313 313
314 314 def __init__(self, path, content=None, commit=None, mode=None, pre_load=None):
315 315 """
316 316 Only one of ``content`` and ``commit`` may be given. Passing both
317 317 would raise ``NodeError`` exception.
318 318
319 319 :param path: relative path to the node
320 320 :param content: content may be passed to constructor
321 321 :param commit: if given, will use it to lazily fetch content
322 322 :param mode: ST_MODE (i.e. 0100644)
323 323 """
324 324 if content and commit:
325 325 raise NodeError("Cannot use both content and commit")
326 326 super(FileNode, self).__init__(path, kind=NodeKind.FILE)
327 327 self.commit = commit
328 328 self._content = content
329 329 self._mode = mode or FILEMODE_DEFAULT
330 330
331 331 self._set_bulk_properties(pre_load)
332 332
333 333 def _set_bulk_properties(self, pre_load):
334 334 if not pre_load:
335 335 return
336 336 pre_load = [entry for entry in pre_load
337 337 if entry not in self._filter_pre_load]
338 338 if not pre_load:
339 339 return
340 340
341 341 for attr_name in pre_load:
342 342 result = getattr(self, attr_name)
343 343 if callable(result):
344 344 result = result()
345 345 self.__dict__[attr_name] = result
346 346
347 347 @LazyProperty
348 348 def mode(self):
349 349 """
350 350 Returns lazily mode of the FileNode. If `commit` is not set, would
351 351 use value given at initialization or `FILEMODE_DEFAULT` (default).
352 352 """
353 353 if self.commit:
354 354 mode = self.commit.get_file_mode(self.path)
355 355 else:
356 356 mode = self._mode
357 357 return mode
358 358
359 359 @LazyProperty
360 360 def raw_bytes(self):
361 361 """
362 362 Returns lazily the raw bytes of the FileNode.
363 363 """
364 364 if self.commit:
365 365 if self._content is None:
366 366 self._content = self.commit.get_file_content(self.path)
367 367 content = self._content
368 368 else:
369 369 content = self._content
370 370 return content
371 371
372 372 @LazyProperty
373 373 def md5(self):
374 374 """
375 375 Returns md5 of the file node.
376 376 """
377 377 return md5(self.raw_bytes)
378 378
379 379 def metadata_uncached(self):
380 380 """
381 381 Returns binary flag, md5, size and raw content of the file node, without any cache usage.
382 382 """
383 383
384 if self.commit:
385 content = self.commit.get_file_content(self.path)
386 else:
387 content = self._content
384 content = self.content_uncached()
388 385
389 386 is_binary = content and '\0' in content
390 387 size = 0
391 388 if content:
392 389 size = len(content)
393 return is_binary, md5(content), size
390
391 return is_binary, md5(content), size, content
392
393 def content_uncached(self):
394 """
395 Returns the raw content of the FileNode straight from the commit (or the
396 value given at initialization), bypassing the cached ``content`` property.
397 """
398 if self.commit:
399 content = self.commit.get_file_content(self.path)
400 else:
401 content = self._content
402 return content
394 403
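
For clarity, a standalone sketch (Python 3 syntax, outside the Node classes) of the binary/md5/size computation that ``metadata_uncached`` performs on the raw, uncached content; the function name is illustrative only.

.. code-block:: python

    import hashlib

    def metadata_from_bytes(content):
        """Mirror of the uncached metadata computation: binary flag, md5, size."""
        is_binary = bool(content) and b'\0' in content
        size = len(content) if content else 0
        digest = hashlib.md5(content or b'').hexdigest()
        return is_binary, digest, size

    print(metadata_from_bytes(b'hello\n'))     # (False, '<md5 hex>', 6)
    print(metadata_from_bytes(b'\x00\x01'))    # (True, '<md5 hex>', 2)
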
395 404 @LazyProperty
396 405 def content(self):
397 406 """
398 407 Returns lazily content of the FileNode. If possible, would try to
399 408 decode content from UTF-8.
400 409 """
401 410 content = self.raw_bytes
402 411
403 412 if self.is_binary:
404 413 return content
405 414 return safe_unicode(content)
406 415
407 416 @LazyProperty
408 417 def size(self):
409 418 if self.commit:
410 419 return self.commit.get_file_size(self.path)
411 420 raise NodeError(
412 421 "Cannot retrieve size of the file without related "
413 422 "commit attribute")
414 423
415 424 @LazyProperty
416 425 def message(self):
417 426 if self.commit:
418 427 return self.last_commit.message
419 428 raise NodeError(
420 429 "Cannot retrieve message of the file without related "
421 430 "commit attribute")
422 431
423 432 @LazyProperty
424 433 def last_commit(self):
425 434 if self.commit:
426 435 pre_load = ["author", "date", "message"]
427 436 return self.commit.get_path_commit(self.path, pre_load=pre_load)
428 437 raise NodeError(
429 438 "Cannot retrieve last commit of the file without "
430 439 "related commit attribute")
431 440
432 441 def get_mimetype(self):
433 442 """
434 443 Mimetype is calculated based on the file's content. If ``_mimetype``
435 444 attribute is available, it will be returned (backends which store
436 445 mimetypes or can easily recognize them, should set this private
437 446 attribute to indicate that type should *NOT* be calculated).
438 447 """
439 448
440 449 if hasattr(self, '_mimetype'):
441 450 if (isinstance(self._mimetype, (tuple, list,)) and
442 451 len(self._mimetype) == 2):
443 452 return self._mimetype
444 453 else:
445 454 raise NodeError('given _mimetype attribute must be an 2 '
446 455 'element list or tuple')
447 456
448 457 db = get_mimetypes_db()
449 458 mtype, encoding = db.guess_type(self.name)
450 459
451 460 if mtype is None:
452 461 if self.is_binary:
453 462 mtype = 'application/octet-stream'
454 463 encoding = None
455 464 else:
456 465 mtype = 'text/plain'
457 466 encoding = None
458 467
459 468 # try with pygments
460 469 try:
461 470 from pygments.lexers import get_lexer_for_filename
462 471 mt = get_lexer_for_filename(self.name).mimetypes
463 472 except Exception:
464 473 mt = None
465 474
466 475 if mt:
467 476 mtype = mt[0]
468 477
469 478 return mtype, encoding
470 479
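
A rough standalone re-creation of the fallback chain used in ``get_mimetype`` (stdlib ``mimetypes`` first, then a generic default refined by a Pygments lookup); the function name and exact structure are illustrative, not part of the class.

.. code-block:: python

    import mimetypes

    def guess_mimetype(filename, is_binary=False):
        mtype, encoding = mimetypes.guess_type(filename)

        if mtype is None:
            # generic defaults, then let Pygments refine the guess
            mtype = 'application/octet-stream' if is_binary else 'text/plain'
            encoding = None
            try:
                from pygments.lexers import get_lexer_for_filename
                mt = get_lexer_for_filename(filename).mimetypes
            except Exception:
                mt = None
            if mt:
                mtype = mt[0]

        return mtype, encoding

    print(guess_mimetype('setup.py'))    # ('text/x-python', None) on most systems
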
471 480 @LazyProperty
472 481 def mimetype(self):
473 482 """
474 483 Wrapper around full mimetype info. It returns only type of fetched
475 484 mimetype without the encoding part. use get_mimetype function to fetch
476 485 full set of (type,encoding)
477 486 """
478 487 return self.get_mimetype()[0]
479 488
480 489 @LazyProperty
481 490 def mimetype_main(self):
482 491 return self.mimetype.split('/')[0]
483 492
484 493 @classmethod
485 494 def get_lexer(cls, filename, content=None):
486 495 from pygments import lexers
487 496
488 497 extension = filename.split('.')[-1]
489 498 lexer = None
490 499
491 500 try:
492 501 lexer = lexers.guess_lexer_for_filename(
493 502 filename, content, stripnl=False)
494 503 except lexers.ClassNotFound:
495 504 lexer = None
496 505
497 506 # try our EXTENSION_MAP
498 507 if not lexer:
499 508 try:
500 509 lexer_class = LANGUAGES_EXTENSIONS_MAP.get(extension)
501 510 if lexer_class:
502 511 lexer = lexers.get_lexer_by_name(lexer_class[0])
503 512 except lexers.ClassNotFound:
504 513 lexer = None
505 514
506 515 if not lexer:
507 516 lexer = lexers.TextLexer(stripnl=False)
508 517
509 518 return lexer
510 519
511 520 @LazyProperty
512 521 def lexer(self):
513 522 """
514 523 Returns pygment's lexer class. Would try to guess lexer taking file's
515 524 content, name and mimetype.
516 525 """
517 526 return self.get_lexer(self.name, self.content)
518 527
519 528 @LazyProperty
520 529 def lexer_alias(self):
521 530 """
522 531 Returns first alias of the lexer guessed for this file.
523 532 """
524 533 return self.lexer.aliases[0]
525 534
526 535 @LazyProperty
527 536 def history(self):
528 537 """
529 538 Returns a list of commit for this file in which the file was changed
530 539 """
531 540 if self.commit is None:
532 541 raise NodeError('Unable to get commit for this FileNode')
533 542 return self.commit.get_path_history(self.path)
534 543
535 544 @LazyProperty
536 545 def annotate(self):
537 546 """
538 547 Returns a list of three element tuples with lineno, commit and line
539 548 """
540 549 if self.commit is None:
541 550 raise NodeError('Unable to get commit for this FileNode')
542 551 pre_load = ["author", "date", "message"]
543 552 return self.commit.get_file_annotate(self.path, pre_load=pre_load)
544 553
545 554 @LazyProperty
546 555 def state(self):
547 556 if not self.commit:
548 557 raise NodeError(
549 558 "Cannot check state of the node if it's not "
550 559 "linked with commit")
551 560 elif self.path in (node.path for node in self.commit.added):
552 561 return NodeState.ADDED
553 562 elif self.path in (node.path for node in self.commit.changed):
554 563 return NodeState.CHANGED
555 564 else:
556 565 return NodeState.NOT_CHANGED
557 566
558 567 @LazyProperty
559 568 def is_binary(self):
560 569 """
561 570 Returns True if file has binary content.
562 571 """
563 572 _bin = self.raw_bytes and '\0' in self.raw_bytes
564 573 return _bin
565 574
566 575 @LazyProperty
567 576 def extension(self):
568 577 """Returns filenode extension"""
569 578 return self.name.split('.')[-1]
570 579
571 580 @property
572 581 def is_executable(self):
573 582 """
574 583 Returns ``True`` if file has executable flag turned on.
575 584 """
576 585 return bool(self.mode & stat.S_IXUSR)
577 586
578 587 def get_largefile_node(self):
579 588 """
580 589 Try to return a Mercurial FileNode from this node. It does internal
581 590 checks inside largefile store, if that file exist there it will
582 591 create special instance of LargeFileNode which can get content from
583 592 LF store.
584 593 """
585 594 if self.commit:
586 595 return self.commit.get_largefile_node(self.path)
587 596
588 597 def lines(self, count_empty=False):
589 598 all_lines, empty_lines = 0, 0
590 599
591 600 if not self.is_binary:
592 601 content = self.content
593 602 if count_empty:
594 603 all_lines = 0
595 604 empty_lines = 0
596 605 for line in content.splitlines(True):
597 606 if line == '\n':
598 607 empty_lines += 1
599 608 all_lines += 1
600 609
601 610 return all_lines, all_lines - empty_lines
602 611 else:
603 612 # fast method
604 613 empty_lines = all_lines = content.count('\n')
605 614 if all_lines == 0 and content:
606 615 # one-line without a newline
607 616 empty_lines = all_lines = 1
608 617
609 618 return all_lines, empty_lines
610 619
611 620 def __repr__(self):
612 621 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
613 622 getattr(self.commit, 'short_id', ''))
614 623
615 624
616 625 class RemovedFileNode(FileNode):
617 626 """
618 627 Dummy FileNode class - trying to access any public attribute except path,
619 628 name, kind or state (or methods/attributes checking those two) would raise
620 629 RemovedFileNodeError.
621 630 """
622 631 ALLOWED_ATTRIBUTES = [
623 632 'name', 'path', 'state', 'is_root', 'is_file', 'is_dir', 'kind',
624 633 'added', 'changed', 'not_changed', 'removed'
625 634 ]
626 635
627 636 def __init__(self, path):
628 637 """
629 638 :param path: relative path to the node
630 639 """
631 640 super(RemovedFileNode, self).__init__(path=path)
632 641
633 642 def __getattribute__(self, attr):
634 643 if attr.startswith('_') or attr in RemovedFileNode.ALLOWED_ATTRIBUTES:
635 644 return super(RemovedFileNode, self).__getattribute__(attr)
636 645 raise RemovedFileNodeError(
637 646 "Cannot access attribute %s on RemovedFileNode" % attr)
638 647
639 648 @LazyProperty
640 649 def state(self):
641 650 return NodeState.REMOVED
642 651
643 652
644 653 class DirNode(Node):
645 654 """
646 655 DirNode stores list of files and directories within this node.
647 656 Nodes may be used standalone but within repository context they
648 657 lazily fetch data within the same repository's commit.
649 658 """
650 659
651 660 def __init__(self, path, nodes=(), commit=None):
652 661 """
653 662 Only one of ``nodes`` and ``commit`` may be given. Passing both
654 663 would raise ``NodeError`` exception.
655 664
656 665 :param path: relative path to the node
657 666 :param nodes: content may be passed to constructor
658 667 :param commit: if given, will use it to lazily fetch content
659 668 """
660 669 if nodes and commit:
661 670 raise NodeError("Cannot use both nodes and commit")
662 671 super(DirNode, self).__init__(path, NodeKind.DIR)
663 672 self.commit = commit
664 673 self._nodes = nodes
665 674
666 675 @LazyProperty
667 676 def content(self):
668 677 raise NodeError(
669 678 "%s represents a dir and has no `content` attribute" % self)
670 679
671 680 @LazyProperty
672 681 def nodes(self):
673 682 if self.commit:
674 683 nodes = self.commit.get_nodes(self.path)
675 684 else:
676 685 nodes = self._nodes
677 686 self._nodes_dict = dict((node.path, node) for node in nodes)
678 687 return sorted(nodes)
679 688
680 689 @LazyProperty
681 690 def files(self):
682 691 return sorted((node for node in self.nodes if node.is_file()))
683 692
684 693 @LazyProperty
685 694 def dirs(self):
686 695 return sorted((node for node in self.nodes if node.is_dir()))
687 696
688 697 def __iter__(self):
689 698 for node in self.nodes:
690 699 yield node
691 700
692 701 def get_node(self, path):
693 702 """
694 703 Returns node from within this particular ``DirNode``, so it is not
695 704 allowed to fetch, i.e. node located at 'docs/api/index.rst' from node
696 705 'docs'. In order to access deeper nodes one must fetch nodes between
697 706 them first - this would work::
698 707
699 708 docs = root.get_node('docs')
700 709 docs.get_node('api').get_node('index.rst')
701 710
702 711 :param: path - relative to the current node
703 712
704 713 .. note::
705 714 To access lazily (as in the example above) the node has to be initialized
706 715 with related commit object - without it node is out of
707 716 context and may know nothing about anything else than nearest
708 717 (located at same level) nodes.
709 718 """
710 719 try:
711 720 path = path.rstrip('/')
712 721 if path == '':
713 722 raise NodeError("Cannot retrieve node without path")
714 723 self.nodes # access nodes first in order to set _nodes_dict
715 724 paths = path.split('/')
716 725 if len(paths) == 1:
717 726 if not self.is_root():
718 727 path = '/'.join((self.path, paths[0]))
719 728 else:
720 729 path = paths[0]
721 730 return self._nodes_dict[path]
722 731 elif len(paths) > 1:
723 732 if self.commit is None:
724 733 raise NodeError(
725 734 "Cannot access deeper nodes without commit")
726 735 else:
727 736 path1, path2 = paths[0], '/'.join(paths[1:])
728 737 return self.get_node(path1).get_node(path2)
729 738 else:
730 739 raise KeyError
731 740 except KeyError:
732 741 raise NodeError("Node does not exist at %s" % path)
733 742
734 743 @LazyProperty
735 744 def state(self):
736 745 raise NodeError("Cannot access state of DirNode")
737 746
738 747 @LazyProperty
739 748 def size(self):
740 749 size = 0
741 750 for root, dirs, files in self.commit.walk(self.path):
742 751 for f in files:
743 752 size += f.size
744 753
745 754 return size
746 755
747 756 @LazyProperty
748 757 def last_commit(self):
749 758 if self.commit:
750 759 pre_load = ["author", "date", "message"]
751 760 return self.commit.get_path_commit(self.path, pre_load=pre_load)
752 761 raise NodeError(
753 762 "Cannot retrieve last commit of the file without "
754 763 "related commit attribute")
755 764
756 765 def __repr__(self):
757 766 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
758 767 getattr(self.commit, 'short_id', ''))
759 768
760 769
761 770 class RootNode(DirNode):
762 771 """
763 772 DirNode being the root node of the repository.
764 773 """
765 774
766 775 def __init__(self, nodes=(), commit=None):
767 776 super(RootNode, self).__init__(path='', nodes=nodes, commit=commit)
768 777
769 778 def __repr__(self):
770 779 return '<%s>' % self.__class__.__name__
771 780
772 781
773 782 class SubModuleNode(Node):
774 783 """
775 784 represents a SubModule of Git or SubRepo of Mercurial
776 785 """
777 786 is_binary = False
778 787 size = 0
779 788
780 789 def __init__(self, name, url=None, commit=None, alias=None):
781 790 self.path = name
782 791 self.kind = NodeKind.SUBMODULE
783 792 self.alias = alias
784 793
785 794 # we have to use EmptyCommit here since this can point to svn/git/hg
786 795 # submodules we cannot get from repository
787 796 self.commit = EmptyCommit(str(commit), alias=alias)
788 797 self.url = url or self._extract_submodule_url()
789 798
790 799 def __repr__(self):
791 800 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
792 801 getattr(self.commit, 'short_id', ''))
793 802
794 803 def _extract_submodule_url(self):
795 804 # TODO: find a way to parse gits submodule file and extract the
796 805 # linking URL
797 806 return self.path
798 807
799 808 @LazyProperty
800 809 def name(self):
801 810 """
802 811 Returns the name of the node, i.e. only the last
803 812 part of its path.
804 813 """
805 814 org = safe_unicode(self.path.rstrip('/').split('/')[-1])
806 815 return u'%s @ %s' % (org, self.commit.short_id)
807 816
808 817
809 818 class LargeFileNode(FileNode):
810 819
811 820 def __init__(self, path, url=None, commit=None, alias=None, org_path=None):
812 821 self.path = path
813 822 self.org_path = org_path
814 823 self.kind = NodeKind.LARGEFILE
815 824 self.alias = alias
816 825
817 826 def _validate_path(self, path):
818 827 """
819 828 we override check since the LargeFileNode path is system absolute
820 829 """
821 830 pass
822 831
823 832 def __repr__(self):
824 833 return '<%s %r>' % (self.__class__.__name__, self.path)
825 834
826 835 @LazyProperty
827 836 def size(self):
828 837 return os.stat(self.path).st_size
829 838
830 839 @LazyProperty
831 840 def raw_bytes(self):
832 841 with open(self.path, 'rb') as f:
833 842 content = f.read()
834 843 return content
835 844
836 845 @LazyProperty
837 846 def name(self):
838 847 """
839 848 Overrides name to be the original largefile path
840 849 """
841 850 return self.org_path
@@ -1,918 +1,933 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import traceback
27 27 import logging
28 28 import cStringIO
29 29
30 30 from sqlalchemy import func
31 31 from zope.cachedescriptors.property import Lazy as LazyProperty
32 32
33 33 import rhodecode
34 34 from rhodecode.lib.vcs import get_backend
35 35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 36 from rhodecode.lib.vcs.nodes import FileNode
37 37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 38 from rhodecode.lib import helpers as h, rc_cache
39 39 from rhodecode.lib.auth import (
40 40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 41 HasUserGroupPermissionAny)
42 42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 43 from rhodecode.lib import hooks_utils
44 44 from rhodecode.lib.utils import (
45 45 get_filesystem_repos, make_db_config)
46 46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 47 from rhodecode.lib.system_info import get_system_info
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.db import (
50 50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 51 PullRequest)
52 52 from rhodecode.model.settings import VcsSettingsModel
53 53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 class UserTemp(object):
59 59 def __init__(self, user_id):
60 60 self.user_id = user_id
61 61
62 62 def __repr__(self):
63 63 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
64 64
65 65
66 66 class RepoTemp(object):
67 67 def __init__(self, repo_id):
68 68 self.repo_id = repo_id
69 69
70 70 def __repr__(self):
71 71 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
72 72
73 73
74 74 class SimpleCachedRepoList(object):
75 75 """
76 76 Lighter version of iteration over repos, without the scm initialisation
77 77 and with cache usage
78 78 """
79 79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 80 self.db_repo_list = db_repo_list
81 81 self.repos_path = repos_path
82 82 self.order_by = order_by
83 83 self.reversed = (order_by or '').startswith('-')
84 84 if not perm_set:
85 85 perm_set = ['repository.read', 'repository.write',
86 86 'repository.admin']
87 87 self.perm_set = perm_set
88 88
89 89 def __len__(self):
90 90 return len(self.db_repo_list)
91 91
92 92 def __repr__(self):
93 93 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
94 94
95 95 def __iter__(self):
96 96 for dbr in self.db_repo_list:
97 97 # check permission at this level
98 98 has_perm = HasRepoPermissionAny(*self.perm_set)(
99 99 dbr.repo_name, 'SimpleCachedRepoList check')
100 100 if not has_perm:
101 101 continue
102 102
103 103 tmp_d = {
104 104 'name': dbr.repo_name,
105 105 'dbrepo': dbr.get_dict(),
106 106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
107 107 }
108 108 yield tmp_d
109 109
110 110
111 111 class _PermCheckIterator(object):
112 112
113 113 def __init__(
114 114 self, obj_list, obj_attr, perm_set, perm_checker,
115 115 extra_kwargs=None):
116 116 """
117 117 Creates an iterator from a given list of objects, additionally
118 118 checking permissions for them against the perm_set var
119 119
120 120 :param obj_list: list of db objects
121 121 :param obj_attr: attribute of object to pass into perm_checker
122 122 :param perm_set: list of permissions to check
123 123 :param perm_checker: callable to check permissions against
124 124 """
125 125 self.obj_list = obj_list
126 126 self.obj_attr = obj_attr
127 127 self.perm_set = perm_set
128 128 self.perm_checker = perm_checker
129 129 self.extra_kwargs = extra_kwargs or {}
130 130
131 131 def __len__(self):
132 132 return len(self.obj_list)
133 133
134 134 def __repr__(self):
135 135 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
136 136
137 137 def __iter__(self):
138 138 checker = self.perm_checker(*self.perm_set)
139 139 for db_obj in self.obj_list:
140 140 # check permission at this level
141 141 name = getattr(db_obj, self.obj_attr, None)
142 142 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
143 143 continue
144 144
145 145 yield db_obj
146 146
147 147
148 148 class RepoList(_PermCheckIterator):
149 149
150 150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
151 151 if not perm_set:
152 152 perm_set = [
153 153 'repository.read', 'repository.write', 'repository.admin']
154 154
155 155 super(RepoList, self).__init__(
156 156 obj_list=db_repo_list,
157 157 obj_attr='repo_name', perm_set=perm_set,
158 158 perm_checker=HasRepoPermissionAny,
159 159 extra_kwargs=extra_kwargs)
160 160
161 161
162 162 class RepoGroupList(_PermCheckIterator):
163 163
164 164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
165 165 if not perm_set:
166 166 perm_set = ['group.read', 'group.write', 'group.admin']
167 167
168 168 super(RepoGroupList, self).__init__(
169 169 obj_list=db_repo_group_list,
170 170 obj_attr='group_name', perm_set=perm_set,
171 171 perm_checker=HasRepoGroupPermissionAny,
172 172 extra_kwargs=extra_kwargs)
173 173
174 174
175 175 class UserGroupList(_PermCheckIterator):
176 176
177 177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
178 178 if not perm_set:
179 179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
180 180
181 181 super(UserGroupList, self).__init__(
182 182 obj_list=db_user_group_list,
183 183 obj_attr='users_group_name', perm_set=perm_set,
184 184 perm_checker=HasUserGroupPermissionAny,
185 185 extra_kwargs=extra_kwargs)
186 186
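The _PermCheckIterator subclasses above filter database objects lazily, checking the permission set against each object name during iteration. A minimal, hypothetical sketch of how such a filtered iteration might be driven from calling code, assuming an authenticated request context and that Repository.query() is available as in the rest of the model layer (the permission names and usage below are illustrative, not part of this change):

    # hypothetical usage sketch: iterate only over repos the current
    # request user can administer; RepoList runs HasRepoPermissionAny
    # against each repo_name while iterating
    from rhodecode.model.db import Repository
    from rhodecode.model.scm import RepoList

    admin_repos = RepoList(
        Repository.query().all(),
        perm_set=['repository.admin'])
    for repo in admin_repos:
        print(repo.repo_name)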
187 187
188 188 class ScmModel(BaseModel):
189 189 """
190 190 Generic Scm Model
191 191 """
192 192
193 193 @LazyProperty
194 194 def repos_path(self):
195 195 """
196 196 Gets the repositories root path from database
197 197 """
198 198
199 199 settings_model = VcsSettingsModel(sa=self.sa)
200 200 return settings_model.get_repos_location()
201 201
202 202 def repo_scan(self, repos_path=None):
203 203 """
204 204 Listing of repositories in given path. This path should not be a
205 205 repository itself. Return a dictionary of repository objects
206 206
207 207 :param repos_path: path to directory containing repositories
208 208 """
209 209
210 210 if repos_path is None:
211 211 repos_path = self.repos_path
212 212
213 213 log.info('scanning for repositories in %s', repos_path)
214 214
215 215 config = make_db_config()
216 216 config.set('extensions', 'largefiles', '')
217 217 repos = {}
218 218
219 219 for name, path in get_filesystem_repos(repos_path, recursive=True):
220 220 # name needs to be decomposed and put back together using the /
221 221 # since this is the internal storage separator for rhodecode
222 222 name = Repository.normalize_repo_name(name)
223 223
224 224 try:
225 225 if name in repos:
226 226 raise RepositoryError('Duplicate repository name %s '
227 227 'found in %s' % (name, path))
228 228 elif path[0] in rhodecode.BACKENDS:
229 229 klass = get_backend(path[0])
230 230 repos[name] = klass(path[1], config=config)
231 231 except OSError:
232 232 continue
233 233 log.debug('found %s paths with repositories', len(repos))
234 234 return repos
235 235
236 236 def get_repos(self, all_repos=None, sort_key=None):
237 237 """
238 238 Get all repositories from db and for each repo create its
239 239 backend instance and fill that backend with information from the database
240 240
241 241 :param all_repos: list of repository names as strings
242 242 to give a specific list of repositories, good for filtering
243 243
244 244 :param sort_key: initial sorting of repositories
245 245 """
246 246 if all_repos is None:
247 247 all_repos = self.sa.query(Repository)\
248 248 .filter(Repository.group_id == None)\
249 249 .order_by(func.lower(Repository.repo_name)).all()
250 250 repo_iter = SimpleCachedRepoList(
251 251 all_repos, repos_path=self.repos_path, order_by=sort_key)
252 252 return repo_iter
253 253
254 254 def get_repo_groups(self, all_groups=None):
255 255 if all_groups is None:
256 256 all_groups = RepoGroup.query()\
257 257 .filter(RepoGroup.group_parent_id == None).all()
258 258 return [x for x in RepoGroupList(all_groups)]
259 259
260 260 def mark_for_invalidation(self, repo_name, delete=False):
261 261 """
262 262 Mark caches of this repo invalid in the database. `delete` flag
263 263 removes the cache entries
264 264
265 265 :param repo_name: the repo_name for which caches should be marked
266 266 invalid, or deleted
267 267 :param delete: delete the entry keys instead of setting bool
268 268 flag on them, and also purge caches used by the dogpile
269 269 """
270 270 repo = Repository.get_by_repo_name(repo_name)
271 271
272 272 if repo:
273 273 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
274 274 repo_id=repo.repo_id)
275 275 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
276 276
277 277 repo_id = repo.repo_id
278 278 config = repo._config
279 279 config.set('extensions', 'largefiles', '')
280 280 repo.update_commit_cache(config=config, cs_cache=None)
281 281 if delete:
282 282 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
283 283 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
284 284
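mark_for_invalidation is what the commit/file operations below rely on to keep cached commit data consistent. A minimal, hypothetical sketch of calling it after an out-of-band change to a repository (the repository name is a placeholder):

    # hypothetical sketch: invalidate cached data for a repo and, with
    # delete=True, also purge the dogpile cache namespace used by cached
    # SCM readers
    from rhodecode.model.scm import ScmModel

    ScmModel().mark_for_invalidation('some-group/some-repo', delete=True)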
285 285 def toggle_following_repo(self, follow_repo_id, user_id):
286 286
287 287 f = self.sa.query(UserFollowing)\
288 288 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
289 289 .filter(UserFollowing.user_id == user_id).scalar()
290 290
291 291 if f is not None:
292 292 try:
293 293 self.sa.delete(f)
294 294 return
295 295 except Exception:
296 296 log.error(traceback.format_exc())
297 297 raise
298 298
299 299 try:
300 300 f = UserFollowing()
301 301 f.user_id = user_id
302 302 f.follows_repo_id = follow_repo_id
303 303 self.sa.add(f)
304 304 except Exception:
305 305 log.error(traceback.format_exc())
306 306 raise
307 307
308 308 def toggle_following_user(self, follow_user_id, user_id):
309 309 f = self.sa.query(UserFollowing)\
310 310 .filter(UserFollowing.follows_user_id == follow_user_id)\
311 311 .filter(UserFollowing.user_id == user_id).scalar()
312 312
313 313 if f is not None:
314 314 try:
315 315 self.sa.delete(f)
316 316 return
317 317 except Exception:
318 318 log.error(traceback.format_exc())
319 319 raise
320 320
321 321 try:
322 322 f = UserFollowing()
323 323 f.user_id = user_id
324 324 f.follows_user_id = follow_user_id
325 325 self.sa.add(f)
326 326 except Exception:
327 327 log.error(traceback.format_exc())
328 328 raise
329 329
330 330 def is_following_repo(self, repo_name, user_id, cache=False):
331 331 r = self.sa.query(Repository)\
332 332 .filter(Repository.repo_name == repo_name).scalar()
333 333
334 334 f = self.sa.query(UserFollowing)\
335 335 .filter(UserFollowing.follows_repository == r)\
336 336 .filter(UserFollowing.user_id == user_id).scalar()
337 337
338 338 return f is not None
339 339
340 340 def is_following_user(self, username, user_id, cache=False):
341 341 u = User.get_by_username(username)
342 342
343 343 f = self.sa.query(UserFollowing)\
344 344 .filter(UserFollowing.follows_user == u)\
345 345 .filter(UserFollowing.user_id == user_id).scalar()
346 346
347 347 return f is not None
348 348
349 349 def get_followers(self, repo):
350 350 repo = self._get_repo(repo)
351 351
352 352 return self.sa.query(UserFollowing)\
353 353 .filter(UserFollowing.follows_repository == repo).count()
354 354
355 355 def get_forks(self, repo):
356 356 repo = self._get_repo(repo)
357 357 return self.sa.query(Repository)\
358 358 .filter(Repository.fork == repo).count()
359 359
360 360 def get_pull_requests(self, repo):
361 361 repo = self._get_repo(repo)
362 362 return self.sa.query(PullRequest)\
363 363 .filter(PullRequest.target_repo == repo)\
364 364 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
365 365
366 366 def mark_as_fork(self, repo, fork, user):
367 367 repo = self._get_repo(repo)
368 368 fork = self._get_repo(fork)
369 369 if fork and repo.repo_id == fork.repo_id:
370 370 raise Exception("Cannot set repository as fork of itself")
371 371
372 372 if fork and repo.repo_type != fork.repo_type:
373 373 raise RepositoryError(
374 374 "Cannot set repository as fork of repository with other type")
375 375
376 376 repo.fork = fork
377 377 self.sa.add(repo)
378 378 return repo
379 379
380 380 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
381 381 dbrepo = self._get_repo(repo)
382 382 remote_uri = remote_uri or dbrepo.clone_uri
383 383 if not remote_uri:
384 384 raise Exception("This repository doesn't have a clone uri")
385 385
386 386 repo = dbrepo.scm_instance(cache=False)
387 387 repo.config.clear_section('hooks')
388 388
389 389 try:
390 390 # NOTE(marcink): add extra validation so we skip invalid urls
391 391 # this is because these tasks can be executed via the scheduler without
392 392 # proper validation of remote_uri
393 393 if validate_uri:
394 394 config = make_db_config(clear_session=False)
395 395 url_validator(remote_uri, dbrepo.repo_type, config)
396 396 except InvalidCloneUrl:
397 397 raise
398 398
399 399 repo_name = dbrepo.repo_name
400 400 try:
401 401 # TODO: we need to make sure those operations call proper hooks !
402 402 repo.fetch(remote_uri)
403 403
404 404 self.mark_for_invalidation(repo_name)
405 405 except Exception:
406 406 log.error(traceback.format_exc())
407 407 raise
408 408
409 409 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
410 410 dbrepo = self._get_repo(repo)
411 411 remote_uri = remote_uri or dbrepo.push_uri
412 412 if not remote_uri:
413 413 raise Exception("This repository doesn't have a clone uri")
414 414
415 415 repo = dbrepo.scm_instance(cache=False)
416 416 repo.config.clear_section('hooks')
417 417
418 418 try:
419 419 # NOTE(marcink): add extra validation so we skip invalid urls
420 420 # this is because these tasks can be executed via the scheduler without
421 421 # proper validation of remote_uri
422 422 if validate_uri:
423 423 config = make_db_config(clear_session=False)
424 424 url_validator(remote_uri, dbrepo.repo_type, config)
425 425 except InvalidCloneUrl:
426 426 raise
427 427
428 428 try:
429 429 repo.push(remote_uri)
430 430 except Exception:
431 431 log.error(traceback.format_exc())
432 432 raise
433 433
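Both pull_changes and push_changes validate the remote URI with url_validator before touching the remote, so a scheduler-triggered sync with a broken URI fails early instead of erroring mid-operation. A hedged usage sketch, with the repository name, username and remote URI as placeholders:

    # hypothetical sketch: pull from an explicit remote, keeping URI validation on
    from rhodecode.model.scm import ScmModel

    ScmModel().pull_changes(
        'mirrors/upstream-project', username='admin',
        remote_uri='https://example.com/upstream.git', validate_uri=True)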
434 434 def commit_change(self, repo, repo_name, commit, user, author, message,
435 435 content, f_path):
436 436 """
437 437 Commits changes
438 438
439 439 :param repo: SCM instance
440 440
441 441 """
442 442 user = self._get_user(user)
443 443
444 444 # decoding here will ensure that we have properly encoded values
445 445 # in any other case this will throw exceptions and deny the commit
446 446 content = safe_str(content)
447 447 path = safe_str(f_path)
448 448 # message and author need to be unicode
449 449 # the proper backend should then translate that into the required type
450 450 message = safe_unicode(message)
451 451 author = safe_unicode(author)
452 452 imc = repo.in_memory_commit
453 453 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
454 454 try:
455 455 # TODO: handle pre-push action !
456 456 tip = imc.commit(
457 457 message=message, author=author, parents=[commit],
458 458 branch=commit.branch)
459 459 except Exception as e:
460 460 log.error(traceback.format_exc())
461 461 raise IMCCommitError(str(e))
462 462 finally:
463 463 # always clear caches, if commit fails we want fresh object also
464 464 self.mark_for_invalidation(repo_name)
465 465
466 466 # We trigger the post-push action
467 467 hooks_utils.trigger_post_push_hook(
468 468 username=user.username, action='push_local', hook_type='post_push',
469 469 repo_name=repo_name, repo_alias=repo.alias, commit_ids=[tip.raw_id])
470 470 return tip
471 471
472 472 def _sanitize_path(self, f_path):
473 473 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
474 474 raise NonRelativePathError('%s is not a relative path' % f_path)
475 475 if f_path:
476 476 f_path = os.path.normpath(f_path)
477 477 return f_path
478 478
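_sanitize_path rejects absolute paths and any attempt to climb out of the repository root before a path ever reaches the in-memory commit. A small illustrative sketch of which inputs pass and which raise (the example paths are hypothetical):

    # hypothetical examples of the sanitizer's behaviour
    from rhodecode.lib.exceptions import NonRelativePathError
    from rhodecode.model.scm import ScmModel

    scm = ScmModel()
    scm._sanitize_path('docs/readme.rst')       # ok, normalized relative path
    try:
        scm._sanitize_path('../etc/passwd')     # contains '../' -> rejected
    except NonRelativePathError:
        pass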
479 479 def get_dirnode_metadata(self, request, commit, dir_node):
480 480 if not dir_node.is_dir():
481 481 return []
482 482
483 483 data = []
484 484 for node in dir_node:
485 485 if not node.is_file():
486 486 # we skip anything that is not a file-node
487 487 continue
488 488
489 489 last_commit = node.last_commit
490 490 last_commit_date = last_commit.date
491 491 data.append({
492 492 'name': node.name,
493 493 'size': h.format_byte_size_binary(node.size),
494 494 'modified_at': h.format_date(last_commit_date),
495 495 'modified_ts': last_commit_date.isoformat(),
496 496 'revision': last_commit.revision,
497 497 'short_id': last_commit.short_id,
498 498 'message': h.escape(last_commit.message),
499 499 'author': h.escape(last_commit.author),
500 500 'user_profile': h.gravatar_with_user(
501 501 request, last_commit.author),
502 502 })
503 503
504 504 return data
505 505
506 506 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
507 507 extended_info=False, content=False, max_file_bytes=None):
508 508 """
509 509 recursive walk in the root dir and return a set of all paths in that dir,
510 510 based on the repository walk function
511 511
512 512 :param repo_name: name of repository
513 513 :param commit_id: commit id for which to list nodes
514 514 :param root_path: root path to list
515 515 :param flat: return as a list, if False returns a dict with description
516 516 :param extended_info: show additional info such as md5, binary, size etc
517 517 :param content: add nodes content to the return data
518 518 :param max_file_bytes: will not return file contents over this limit
519 519
520 520 """
521 521 _files = list()
522 522 _dirs = list()
523 523 try:
524 524 _repo = self._get_repo(repo_name)
525 525 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
526 526 root_path = root_path.lstrip('/')
527 527 for __, dirs, files in commit.walk(root_path):
528 528
529 529 for f in files:
530 530 _content = None
531 531 _data = f_name = f.unicode_path
532 532
533 533 if not flat:
534 534 _data = {
535 535 "name": h.escape(f_name),
536 536 "type": "file",
537 537 }
538 538 if extended_info:
539 539 _data.update({
540 540 "md5": f.md5,
541 541 "binary": f.is_binary,
542 542 "size": f.size,
543 543 "extension": f.extension,
544 544 "mimetype": f.mimetype,
545 545 "lines": f.lines()[0]
546 546 })
547 547
548 548 if content:
549 549 over_size_limit = (max_file_bytes is not None
550 550 and f.size > max_file_bytes)
551 551 full_content = None
552 552 if not f.is_binary and not over_size_limit:
553 553 full_content = safe_str(f.content)
554 554
555 555 _data.update({
556 556 "content": full_content,
557 557 })
558 558 _files.append(_data)
559 559
560 560 for d in dirs:
561 561 _data = d_name = d.unicode_path
562 562 if not flat:
563 563 _data = {
564 564 "name": h.escape(d_name),
565 565 "type": "dir",
566 566 }
567 567 if extended_info:
568 568 _data.update({
569 569 "md5": None,
570 570 "binary": None,
571 571 "size": None,
572 572 "extension": None,
573 573 })
574 574 if content:
575 575 _data.update({
576 576 "content": None
577 577 })
578 578 _dirs.append(_data)
579 579 except RepositoryError:
580 580 log.exception("Exception in get_nodes")
581 581 raise
582 582
583 583 return _dirs, _files
584 584
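get_nodes is what the API layer uses to list a tree; with flat=False and extended_info=True each entry carries per-file metadata instead of being a bare path. A hedged usage sketch, with the repository name, commit id and root path as placeholders:

    # hypothetical sketch: list a subtree with metadata, without file contents
    dirs, files = ScmModel().get_nodes(
        'some-repo', commit_id='tip', root_path='docs/',
        flat=False, extended_info=True, content=False)
    for f in files:
        print('%s %s %s' % (f['name'], f['size'], f['mimetype']))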
585 585 def get_node(self, repo_name, commit_id, file_path,
586 extended_info=False, content=False, max_file_bytes=None):
586 extended_info=False, content=False, max_file_bytes=None, cache=True):
587 587 """
588 588 retrieve single node from commit
589 589 """
590 590 try:
591 591
592 592 _repo = self._get_repo(repo_name)
593 593 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
594 594
595 595 file_node = commit.get_node(file_path)
596 596 if file_node.is_dir():
597 597 raise RepositoryError('The given path is a directory')
598 598
599 599 _content = None
600 600 f_name = file_node.unicode_path
601 601
602 602 file_data = {
603 603 "name": h.escape(f_name),
604 604 "type": "file",
605 605 }
606 606
607 607 if extended_info:
608 608 file_data.update({
609 "md5": file_node.md5,
610 "binary": file_node.is_binary,
611 "size": file_node.size,
612 609 "extension": file_node.extension,
613 610 "mimetype": file_node.mimetype,
614 "lines": file_node.lines()[0]
611 })
612
613 if cache:
614 md5 = file_node.md5
615 is_binary = file_node.is_binary
616 size = file_node.size
617 else:
618 is_binary, md5, size, _content = file_node.metadata_uncached()
619
620 file_data.update({
621 "md5": md5,
622 "binary": is_binary,
623 "size": size,
615 624 })
616 625
617 626 if content:
618 627 over_size_limit = (max_file_bytes is not None
619 628 and file_node.size > max_file_bytes)
620 629 full_content = None
621 630 if not file_node.is_binary and not over_size_limit:
622 full_content = safe_str(file_node.content)
631 if cache:
632 full_content = safe_str(file_node.content)
633 else:
634 if _content is None:
635 is_binary, md5, size, _content = \
636 file_node.metadata_uncached()
637 full_content = safe_str(_content)
623 638
624 639 file_data.update({
625 640 "content": full_content,
626 641 })
627 642
628 643 except RepositoryError:
629 644 log.exception("Exception in get_node")
630 645 raise
631 646
632 647 return file_data
633 648
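The new cache flag is the point of this change: with cache=True the node metadata and content come from the regular (possibly cached) lookups, while cache=False routes md5, binary flag, size and content through file_node.metadata_uncached(), so the API can serve freshly written content. A hedged usage sketch, with the repository name, commit id and file path as placeholders:

    # hypothetical sketch: fetch a file bypassing the cached code path,
    # e.g. right after the file was modified through the API
    file_data = ScmModel().get_node(
        'some-repo', commit_id='deadbeef', file_path='README.rst',
        extended_info=True, content=True, max_file_bytes=1024 * 1024,
        cache=False)
    print('%s %s %s' % (file_data['md5'], file_data['size'],
                        len(file_data['content'] or '')))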
634 649 def get_fts_data(self, repo_name, commit_id, root_path='/'):
635 650 """
636 651 Fetch node tree for usage in full text search
637 652 """
638 653
639 654 tree_info = list()
640 655
641 656 try:
642 657 _repo = self._get_repo(repo_name)
643 658 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
644 659 root_path = root_path.lstrip('/')
645 660 for __, dirs, files in commit.walk(root_path):
646 661
647 662 for f in files:
648 663 _content = None
649 664 _data = f_name = f.unicode_path
650 is_binary, md5, size = f.metadata_uncached()
665 is_binary, md5, size, _content = f.metadata_uncached()
651 666 _data = {
652 667 "name": h.escape(f_name),
653 668 "md5": md5,
654 669 "extension": f.extension,
655 670 "binary": is_binary,
656 671 "size": size
657 672 }
658 673
659 674 tree_info.append(_data)
660 675
661 676 except RepositoryError:
662 677 log.exception("Exception in get_nodes")
663 678 raise
664 679
665 680 return tree_info
666 681
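As this diff shows, metadata_uncached() now returns the raw content as a fourth element, so get_fts_data gets the binary flag, md5 and size in one uncached call and get_node can reuse the same call when cache=False. The tuple shape, as consumed here (file_node is a placeholder for any FileNode instance):

    # tuple shape returned by FileNode.metadata_uncached(), per this diff
    is_binary, md5, size, content = file_node.metadata_uncached()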
667 682 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
668 683 author=None, trigger_push_hook=True):
669 684 """
670 685 Commits given multiple nodes into repo
671 686
672 687 :param user: RhodeCode User object or user_id, the committer
673 688 :param repo: RhodeCode Repository object
674 689 :param message: commit message
675 690 :param nodes: mapping {filename:{'content':content},...}
676 691 :param parent_commit: parent commit; can be empty, then it is the
677 692 initial commit
678 693 :param author: author of the commit, can be different than the committer,
679 694 only for git
680 695 :param trigger_push_hook: trigger push hooks
681 696
682 697 :returns: new committed commit
683 698 """
684 699
685 700 user = self._get_user(user)
686 701 scm_instance = repo.scm_instance(cache=False)
687 702
688 703 processed_nodes = []
689 704 for f_path in nodes:
690 705 f_path = self._sanitize_path(f_path)
691 706 content = nodes[f_path]['content']
692 707 f_path = safe_str(f_path)
693 708 # decoding here will ensure that we have properly encoded values
694 709 # in any other case this will throw exceptions and deny the commit
695 710 if isinstance(content, (basestring,)):
696 711 content = safe_str(content)
697 712 elif isinstance(content, (file, cStringIO.OutputType,)):
698 713 content = content.read()
699 714 else:
700 715 raise Exception('Content is of unrecognized type %s' % (
701 716 type(content)
702 717 ))
703 718 processed_nodes.append((f_path, content))
704 719
705 720 message = safe_unicode(message)
706 721 commiter = user.full_contact
707 722 author = safe_unicode(author) if author else commiter
708 723
709 724 imc = scm_instance.in_memory_commit
710 725
711 726 if not parent_commit:
712 727 parent_commit = EmptyCommit(alias=scm_instance.alias)
713 728
714 729 if isinstance(parent_commit, EmptyCommit):
715 730 # EmptyCommit means we're editing an empty repository
716 731 parents = None
717 732 else:
718 733 parents = [parent_commit]
719 734 # add multiple nodes
720 735 for path, content in processed_nodes:
721 736 imc.add(FileNode(path, content=content))
722 737 # TODO: handle pre push scenario
723 738 tip = imc.commit(message=message,
724 739 author=author,
725 740 parents=parents,
726 741 branch=parent_commit.branch)
727 742
728 743 self.mark_for_invalidation(repo.repo_name)
729 744 if trigger_push_hook:
730 745 hooks_utils.trigger_post_push_hook(
731 746 username=user.username, action='push_local',
732 747 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
733 748 hook_type='post_push',
734 749 commit_ids=[tip.raw_id])
735 750 return tip
736 751
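create_nodes expects the nodes mapping keyed by path, with a 'content' entry per file that can be either a plain string or a file object, as the code above checks. A minimal hedged sketch of the structure (paths and contents are illustrative):

    # hypothetical nodes mapping for create_nodes
    nodes = {
        'docs/index.rst': {'content': 'Welcome\n'},                 # plain string
        'docs/logo.png': {'content': open('/tmp/logo.png', 'rb')},  # file object
    }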
737 752 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
738 753 author=None, trigger_push_hook=True):
739 754 user = self._get_user(user)
740 755 scm_instance = repo.scm_instance(cache=False)
741 756
742 757 message = safe_unicode(message)
743 758 commiter = user.full_contact
744 759 author = safe_unicode(author) if author else commiter
745 760
746 761 imc = scm_instance.in_memory_commit
747 762
748 763 if not parent_commit:
749 764 parent_commit = EmptyCommit(alias=scm_instance.alias)
750 765
751 766 if isinstance(parent_commit, EmptyCommit):
752 767 # EmptyCommit means we're editing an empty repository
753 768 parents = None
754 769 else:
755 770 parents = [parent_commit]
756 771
757 772 # add multiple nodes
758 773 for _filename, data in nodes.items():
759 774 # new filename, can be renamed from the old one, also sanitize
760 775 # the path for any hack around relative paths like ../../ etc.
761 776 filename = self._sanitize_path(data['filename'])
762 777 old_filename = self._sanitize_path(_filename)
763 778 content = data['content']
764 779 file_mode = data.get('mode')
765 780 filenode = FileNode(old_filename, content=content, mode=file_mode)
766 781 op = data['op']
767 782 if op == 'add':
768 783 imc.add(filenode)
769 784 elif op == 'del':
770 785 imc.remove(filenode)
771 786 elif op == 'mod':
772 787 if filename != old_filename:
773 788 # TODO: handle renames more efficiently, needs vcs lib changes
774 789 imc.remove(filenode)
775 790 imc.add(FileNode(filename, content=content, mode=file_mode))
776 791 else:
777 792 imc.change(filenode)
778 793
779 794 try:
780 795 # TODO: handle pre push scenario commit changes
781 796 tip = imc.commit(message=message,
782 797 author=author,
783 798 parents=parents,
784 799 branch=parent_commit.branch)
785 800 except NodeNotChangedError:
786 801 raise
787 802 except Exception as e:
788 803 log.exception("Unexpected exception during call to imc.commit")
789 804 raise IMCCommitError(str(e))
790 805 finally:
791 806 # always clear caches, if commit fails we want fresh object also
792 807 self.mark_for_invalidation(repo.repo_name)
793 808
794 809 if trigger_push_hook:
795 810 hooks_utils.trigger_post_push_hook(
796 811 username=user.username, action='push_local', hook_type='post_push',
797 812 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
798 813 commit_ids=[tip.raw_id])
799 814
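update_nodes drives its behaviour from the per-file 'op' key ('add', 'del' or 'mod'), with a rename expressed by a 'filename' value that differs from the mapping key. A hedged sketch of the expected mapping (paths and content are illustrative):

    # hypothetical nodes mapping for update_nodes
    nodes = {
        'old/name.txt': {                  # key is the existing filename
            'filename': 'new/name.txt',    # differing value means a rename
            'content': 'updated text\n',
            'op': 'mod',
        },
        'obsolete.cfg': {'filename': 'obsolete.cfg', 'content': '', 'op': 'del'},
    }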
800 815 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
801 816 author=None, trigger_push_hook=True):
802 817 """
803 818 Deletes given multiple nodes from `repo`
804 819
805 820 :param user: RhodeCode User object or user_id, the committer
806 821 :param repo: RhodeCode Repository object
807 822 :param message: commit message
808 823 :param nodes: mapping {filename:{'content':content},...}
809 824 :param parent_commit: parent commit; can be empty, then it is the initial
810 825 commit
811 826 :param author: author of the commit, can be different than the committer,
812 827 only for git
813 828 :param trigger_push_hook: trigger push hooks
814 829
815 830 :returns: new commit after deletion
816 831 """
817 832
818 833 user = self._get_user(user)
819 834 scm_instance = repo.scm_instance(cache=False)
820 835
821 836 processed_nodes = []
822 837 for f_path in nodes:
823 838 f_path = self._sanitize_path(f_path)
824 839 # content can be empty but for compatibility it allows the same dict
825 840 # structure as add_nodes
826 841 content = nodes[f_path].get('content')
827 842 processed_nodes.append((f_path, content))
828 843
829 844 message = safe_unicode(message)
830 845 commiter = user.full_contact
831 846 author = safe_unicode(author) if author else commiter
832 847
833 848 imc = scm_instance.in_memory_commit
834 849
835 850 if not parent_commit:
836 851 parent_commit = EmptyCommit(alias=scm_instance.alias)
837 852
838 853 if isinstance(parent_commit, EmptyCommit):
839 854 # EmptyCommit means we're editing an empty repository
840 855 parents = None
841 856 else:
842 857 parents = [parent_commit]
843 858 # add multiple nodes
844 859 for path, content in processed_nodes:
845 860 imc.remove(FileNode(path, content=content))
846 861
847 862 # TODO: handle pre push scenario
848 863 tip = imc.commit(message=message,
849 864 author=author,
850 865 parents=parents,
851 866 branch=parent_commit.branch)
852 867
853 868 self.mark_for_invalidation(repo.repo_name)
854 869 if trigger_push_hook:
855 870 hooks_utils.trigger_post_push_hook(
856 871 username=user.username, action='push_local', hook_type='post_push',
857 872 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
858 873 commit_ids=[tip.raw_id])
859 874 return tip
860 875
861 876 def strip(self, repo, commit_id, branch):
862 877 scm_instance = repo.scm_instance(cache=False)
863 878 scm_instance.config.clear_section('hooks')
864 879 scm_instance.strip(commit_id, branch)
865 880 self.mark_for_invalidation(repo.repo_name)
866 881
867 882 def get_unread_journal(self):
868 883 return self.sa.query(UserLog).count()
869 884
870 885 def get_repo_landing_revs(self, translator, repo=None):
871 886 """
872 887 Generates select options with tags, branches and bookmarks (for hg only)
873 888 grouped by type
874 889
875 890 :param repo:
876 891 """
877 892 _ = translator
878 893 repo = self._get_repo(repo)
879 894
880 895 hist_l = [
881 896 ['rev:tip', _('latest tip')]
882 897 ]
883 898 choices = [
884 899 'rev:tip'
885 900 ]
886 901
887 902 if not repo:
888 903 return choices, hist_l
889 904
890 905 repo = repo.scm_instance()
891 906
892 907 branches_group = (
893 908 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
894 909 for b in repo.branches],
895 910 _("Branches"))
896 911 hist_l.append(branches_group)
897 912 choices.extend([x[0] for x in branches_group[0]])
898 913
899 914 if repo.alias == 'hg':
900 915 bookmarks_group = (
901 916 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
902 917 for b in repo.bookmarks],
903 918 _("Bookmarks"))
904 919 hist_l.append(bookmarks_group)
905 920 choices.extend([x[0] for x in bookmarks_group[0]])
906 921
907 922 tags_group = (
908 923 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
909 924 for t in repo.tags],
910 925 _("Tags"))
911 926 hist_l.append(tags_group)
912 927 choices.extend([x[0] for x in tags_group[0]])
913 928
914 929 return choices, hist_l
915 930
916 931 def get_server_info(self, environ=None):
917 932 server_info = get_system_info(environ)
918 933 return server_info