##// END OF EJS Templates
commits: allow tag commit translation to be skipped for faster commit fetching in big chunks.
marcink -
r3468:7bc93053 default
parent child Browse files
Show More
@@ -1,2305 +1,2305 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import time
23 23
24 24 import rhodecode
25 25 from rhodecode.api import (
26 26 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
27 27 from rhodecode.api.utils import (
28 28 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
29 29 get_user_group_or_error, get_user_or_error, validate_repo_permissions,
30 30 get_perm_or_error, parse_args, get_origin, build_commit_data,
31 31 validate_set_owner_permissions)
32 32 from rhodecode.lib import audit_logger, rc_cache
33 33 from rhodecode.lib import repo_maintenance
34 34 from rhodecode.lib.auth import HasPermissionAnyApi, HasUserGroupPermissionAnyApi
35 35 from rhodecode.lib.celerylib.utils import get_task_id
36 36 from rhodecode.lib.utils2 import str2bool, time_to_datetime, safe_str, safe_int
37 37 from rhodecode.lib.ext_json import json
38 38 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
39 39 from rhodecode.lib.vcs import RepositoryError
40 40 from rhodecode.model.changeset_status import ChangesetStatusModel
41 41 from rhodecode.model.comment import CommentsModel
42 42 from rhodecode.model.db import (
43 43 Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
44 44 ChangesetComment)
45 45 from rhodecode.model.repo import RepoModel
46 46 from rhodecode.model.scm import ScmModel, RepoList
47 47 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
48 48 from rhodecode.model import validation_schema
49 49 from rhodecode.model.validation_schema.schemas import repo_schema
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
@jsonrpc_method()
def get_repo(request, apiuser, repoid, cache=Optional(True)):
    """
    Gets an existing repository by its name or repository_id.

    The result includes the users and user groups that hold permissions on
    the repository, as well as its list of followers.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param cache: use the cached value for last changeset
    :type: cache: Optional(bool)
    """
    repo = get_repo_or_error(repoid)
    cache = Optional.extract(cache)

    if has_superadmin_permission(apiuser):
        include_secrets = True
    else:
        include_secrets = False
        # non-admins need at least read permission for this repo !
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # collect user-level grants, then append user-group grants
    permissions = [
        {
            'name': member.username,
            'permission': member.permission,
            'origin': get_origin(member),
            'type': "user",
        }
        for member in repo.permissions()
    ]
    permissions.extend(
        {
            'name': member_group.users_group_name,
            'permission': member_group.permission,
            'origin': get_origin(member_group),
            'type': "user_group",
        }
        for member_group in repo.permission_user_groups()
    )

    followers = [
        follower.user.get_api_data(include_secrets=include_secrets)
        for follower in repo.followers]

    if not cache:
        # refresh the cached last-changeset data before reading it back
        repo.update_commit_cache()

    data = repo.get_api_data(include_secrets=include_secrets)
    data['permissions'] = permissions
    data['followers'] = followers
    return data
201 201
202 202
@jsonrpc_method()
def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
    """
    Lists all existing repositories.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param root: specify root repository group to fetch repositories.
        filters the returned repositories to be members of given root group.
    :type root: Optional(None)
    :param traverse: traverse given root into subrepositories. With this flag
        set to False, it will only return top-level repositories from `root`.
        if root is empty it will return just top-level repositories.
    :type traverse: Optional(True)
    """
    include_secrets = has_superadmin_permission(apiuser)
    _perms = ('repository.read', 'repository.write', 'repository.admin',)
    extras = {'user': apiuser}

    root = Optional.extract(root)
    traverse = Optional.extract(traverse, binary=True)

    if root:
        # verify parent existance, if it's empty return an error
        parent = RepoGroup.get_by_group_name(root)
        if not parent:
            raise JSONRPCError(
                'Root repository group `{}` does not exist'.format(root))

        repos = (
            RepoModel().get_repos_for_root(root=root, traverse=traverse)
            if traverse
            else RepoModel().get_repos_for_root(root=parent))
    else:
        # no root given: everything when traversing, only top-level otherwise
        repos = (
            RepoModel().get_all()
            if traverse
            else RepoModel().get_repos_for_root(root=None))

    repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras)
    return [repo.get_api_data(include_secrets=include_secrets)
            for repo in repo_list]
276 276
277 277
@jsonrpc_method()
def get_repo_changeset(request, apiuser, repoid, revision,
                       details=Optional('basic')):
    """
    Returns information about a changeset.

    Additionally parameters define the amount of details returned by
    this function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id
    :type repoid: str or int
    :param revision: revision for which listing should be done
    :type revision: str
    :param details: details can be 'basic|extended|full' full gives diff
        info details like the diff itself, and number of changed files etc.
    :type details: Optional(str)
    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        raise JSONRPCError(
            'ret_type must be one of %s' % (
                ','.join(_changes_details_types)))

    # attributes to pre-fetch in one go when loading the commit
    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    try:
        commit = repo.get_commit(commit_id=revision, pre_load=pre_load)
    except TypeError as e:
        raise JSONRPCError(safe_str(e))

    result = commit.__json__()
    result['diff'] = build_commit_data(commit, changes_details)
    if changes_details == 'full':
        result['refs'] = commit._get_refs()
    return result
326 326
327 327
@jsonrpc_method()
def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
                        details=Optional('basic')):
    """
    Returns a set of commits limited by the number starting
    from the `start_rev` option.

    Additional parameters define the amount of details returned by this
    function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param start_rev: The starting revision from where to get changesets.
    :type start_rev: str
    :param limit: Limit the number of commits to this amount
    :type limit: str or int
    :param details: Set the level of detail returned. Valid option are:
        ``basic``, ``extended`` and ``full``.
    :type details: Optional(str)

    .. note::

       Setting the parameter `details` to the value ``full`` is extensive
       and returns details like the diff itself, and the number
       of changed files.

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        raise JSONRPCError(
            'ret_type must be one of %s' % (
                ','.join(_changes_details_types)))

    limit = int(limit)
    # attributes pre-fetched in one go when loading each commit
    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    vcs_repo = repo.scm_instance()
    # SVN needs a special case to distinguish its index and commit id
    if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
        start_rev = vcs_repo.commit_ids[0]

    try:
        # translate_tags=False skips tag-commit translation, which makes
        # fetching commits in big chunks faster
        commits = vcs_repo.get_commits(
            start_id=start_rev, pre_load=pre_load, translate_tags=False)
    except TypeError as e:
        raise JSONRPCError(safe_str(e))
    except Exception:
        log.exception('Fetching of commits failed')
        raise JSONRPCError('Error occurred during commit fetching')

    ret = []
    for pos, commit in enumerate(commits):
        # limit == -1 means "no limit"
        if pos >= limit != -1:
            break
        commit_json = commit.__json__()
        commit_json['diff'] = build_commit_data(commit, changes_details)
        if changes_details == 'full':
            commit_json['refs'] = {
                'branches': [commit.branch],
                'bookmarks': getattr(commit, 'bookmarks', []),
                'tags': commit.tags
            }
        ret.append(commit_json)
    return ret
405 405
406 406
@jsonrpc_method()
def get_repo_nodes(request, apiuser, repoid, revision, root_path,
                   ret_type=Optional('all'), details=Optional('basic'),
                   max_file_bytes=Optional(None)):
    """
    Returns a list of nodes and children in a flat list for a given
    path at given revision.

    It's possible to specify ret_type to show only `files` or `dirs`.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param revision: The revision for which listing should be done.
    :type revision: str
    :param root_path: The path from which to start displaying.
    :type root_path: str
    :param ret_type: Set the return type. Valid options are
        ``all`` (default), ``files`` and ``dirs``.
    :type ret_type: Optional(str)
    :param details: Returns extended information about nodes, such as
        md5, binary, and or content.
        The valid options are ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content under this file size bytes
    :type max_file_bytes: Optional(int)
    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    ret_type = Optional.extract(ret_type)
    details = Optional.extract(details)
    _extended_types = ['basic', 'full']
    if details not in _extended_types:
        raise JSONRPCError('ret_type must be one of %s' % (','.join(_extended_types)))

    # after validation, both 'basic' and 'full' request extended info;
    # only 'full' also includes file content
    extended_info = True
    content = details == 'full'

    _map = {}
    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return []

        dirs, files = ScmModel().get_nodes(
            repo, revision, root_path, flat=False,
            extended_info=extended_info, content=content,
            max_file_bytes=max_file_bytes)
        _map = {
            'all': dirs + files,
            'files': files,
            'dirs': dirs,
        }
        return _map[ret_type]
    except KeyError:
        raise JSONRPCError(
            'ret_type must be one of %s' % (','.join(sorted(_map.keys()))))
    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError(
            'failed to get repo: `%s` nodes' % repo.repo_name
        )
503 503
504 504
@jsonrpc_method()
def get_repo_file(request, apiuser, repoid, commit_id, file_path,
                  max_file_bytes=Optional(None), details=Optional('basic')):
    """
    Returns a single file from repository at given revision.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param commit_id: The revision for which listing should be done.
    :type commit_id: str
    :param file_path: The path from which to start displaying.
    :type file_path: str
    :param details: Returns different set of information about nodes.
        The valid options are ``minimal`` ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content under this file size bytes
    :type max_file_bytes: Optional(int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "binary": false,
            "extension": "py",
            "lines": 35,
            "content": "....",
            "md5": "76318336366b0f17ee249e11b0c99c41",
            "mimetype": "text/x-python",
            "name": "python.py",
            "size": 817,
            "type": "file",
        }
        error: null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    details = Optional.extract(details)
    _extended_types = ['minimal', 'minimal+search', 'basic', 'full']
    if details not in _extended_types:
        # BUGFIX: `details` used to be passed as a second argument to
        # JSONRPCError instead of as a format operand, which left the `%`
        # operation one value short and raised a TypeError instead of the
        # intended validation error.
        raise JSONRPCError(
            'ret_type must be one of %s, got %s' % (
                ','.join(_extended_types), details))

    # map the requested detail level onto the ScmModel flags:
    # minimal/minimal+search -> neither, basic -> extended info,
    # full -> extended info plus file content
    extended_info = details in ('basic', 'full')
    content = details == 'full'

    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return None

        node = ScmModel().get_node(
            repo, commit_id, file_path, extended_info=extended_info,
            content=content, max_file_bytes=max_file_bytes)

    except Exception:
        log.exception("Exception occurred while trying to get repo node")
        raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)

    return node
584 584
585 585
@jsonrpc_method()
def get_repo_fts_tree(request, apiuser, repoid, commit_id, root_path):
    """
    Returns a list of tree nodes for path at given revision. This api is built
    strictly for usage in full text search building, and shouldn't be consumed

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param commit_id: Commit reference; resolved to a full sha before use so
        that per-commit caching works correctly.
    :type commit_id: str
    :param root_path: The path from which to start collecting tree nodes.
    :type root_path: str
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    repo_id = repo.repo_id
    # cache TTL comes from the ini config; a non-positive value disables caching
    cache_seconds = safe_int(rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
    cache_on = cache_seconds > 0

    # per-repo cache namespace so invalidation affects only this repository
    cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
    region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

    @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
                                           condition=cache_on)
    def compute_fts_tree(repo_id, commit_id, root_path, cache_ver):
        # cache_ver takes part in the cache key only; bumping it below
        # invalidates previously cached results
        return ScmModel().get_fts_data(repo_id, commit_id, root_path)

    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return []
    except RepositoryError:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)

    try:
        # we need to resolve commit_id to a FULL sha for cache to work correctly.
        # sending 'master' is a pointer that needs to be translated to current commit.
        commit_id = _scm.get_commit(commit_id=commit_id).raw_id
        log.debug(
            'Computing FTS REPO TREE for repo_id %s commit_id `%s` '
            'with caching: %s[TTL: %ss]' % (
                repo_id, commit_id, cache_on, cache_seconds or 0))

        tree_files = compute_fts_tree(repo_id, commit_id, root_path, 'v1')
        return tree_files

    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)
638 638
639 639
@jsonrpc_method()
def get_repo_refs(request, apiuser, repoid):
    """
    Returns a dictionary of current references. It returns
    bookmarks, branches, closed_branches, and tags for given repository

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        "result": {
            "bookmarks": {...},
            "branches": {...},
            "branches_closed": {},
            "tags": {...}
        }
        error: null
    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    try:
        return repo.scm_instance().refs()
    except Exception:
        log.exception("Exception occurred while trying to get repo refs")
        raise JSONRPCError(
            'failed to get repo: `%s` references' % repo.repo_name
        )
696 696
697 697
@jsonrpc_method()
def create_repo(
        request, apiuser, repo_name, repo_type,
        owner=Optional(OAttr('apiuser')),
        description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None),
        push_uri=Optional(None),
        landing_rev=Optional('rev:tip'),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False),
        copy_permissions=Optional(False)):
    """
    Creates a repository.

    * If the repository name contains "/", repository will be created inside
      a repository group or nested repository groups

      For example "foo/bar/repo1" will create |repo| called "repo1" inside
      group "foo/bar". You have to have permissions to access and write to
      the last repository group ("bar" in this example)

    This command can only be run using an |authtoken| with at least
    permissions to create repositories, or write permissions to
    parent repository groups.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repo_name: Set the repository name.
    :type repo_name: str
    :param repo_type: Set the repository type; 'hg','git', or 'svn'.
    :type repo_type: str
    :param owner: user_id or username
    :type owner: Optional(str)
    :param description: Set the repository description.
    :type description: Optional(str)
    :param private: set repository as private
    :type private: bool
    :param clone_uri: set clone_uri
    :type clone_uri: str
    :param push_uri: set push_uri
    :type push_uri: str
    :param landing_rev: <rev_type>:<rev>
    :type landing_rev: str
    :param enable_locking:
    :type enable_locking: bool
    :param enable_downloads:
    :type enable_downloads: bool
    :param enable_statistics:
    :type enable_statistics: bool
    :param copy_permissions: Copy permission from group in which the
        repository is being created.
    :type copy_permissions: bool


    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg": "Created new repository `<reponame>`",
                  "success": true,
                  "task": "<celery task id or None if done sync>"
                }
        error:  null


    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error :  {
           'failed to create repository `<repo_name>`'
        }

    """

    # resolves owner to a User object and checks apiuser may assign ownership
    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)
    push_uri = Optional.extract(push_uri)
    landing_commit_ref = Optional.extract(landing_rev)

    # any flag the caller left as Optional falls back to the instance-wide
    # default repo settings
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    if isinstance(private, Optional):
        private = defs.get('repo_private') or Optional.extract(private)
    if isinstance(repo_type, Optional):
        repo_type = defs.get('repo_type')
    if isinstance(enable_statistics, Optional):
        enable_statistics = defs.get('repo_enable_statistics')
    if isinstance(enable_locking, Optional):
        enable_locking = defs.get('repo_enable_locking')
    if isinstance(enable_downloads, Optional):
        enable_downloads = defs.get('repo_enable_downloads')

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_type=repo_type,
        # user caller
        user=apiuser)

    try:
        # colander schema validates names/refs and splits repo_name into the
        # target repo_group and the name without the group prefix
        schema_data = schema.deserialize(dict(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_push_uri=push_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions,
            repo_enable_statistics=enable_statistics,
            repo_enable_downloads=enable_downloads,
            repo_enable_locking=enable_locking))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        data = {
            'owner': owner,
            'repo_name': schema_data['repo_group']['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': schema_data['repo_group']['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'repo_description': schema_data['repo_description'],
            'repo_private': schema_data['repo_private'],
            'clone_uri': schema_data['repo_clone_uri'],
            'push_uri': schema_data['repo_push_uri'],
            'repo_landing_rev': schema_data['repo_landing_commit_ref'],
            'enable_statistics': schema_data['repo_enable_statistics'],
            'enable_locking': schema_data['repo_enable_locking'],
            'enable_downloads': schema_data['repo_enable_downloads'],
            'repo_copy_permissions': schema_data['repo_copy_permissions'],
        }

        task = RepoModel().create(form_data=data, cur_user=owner.user_id)
        task_id = get_task_id(task)
        # no commit, it's done in RepoModel, or async via celery
        return {
            'msg': "Created new repository `%s`" % (schema_data['repo_name'],),
            'success': True,  # cannot return the repo data here since fork
                              # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            u"Exception while trying to create the repository %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to create repository `%s`' % (schema_data['repo_name'],))
855 855
856 856
@jsonrpc_method()
def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
                      description=Optional('')):
    """
    Adds an extra field to a repository.

    This command can only be run using an |authtoken| with at least
    write permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository id.
    :type repoid: str or int
    :param key: Create a unique field key for this repository.
    :type key: str
    :param label: Display label for the field; defaults to the key.
    :type label: Optional(str)
    :param description: Description of the field.
    :type description: Optional(str)
    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    label = Optional.extract(label) or key
    description = Optional.extract(description)

    # keys must be unique per repository
    if RepositoryField.get_by_key_name(key, repo):
        raise JSONRPCError('Field with key '
                           '`%s` exists for repo `%s`' % (key, repoid))

    try:
        RepoModel().add_repo_field(repo, key, field_label=label,
                                   field_desc=description)
        Session().commit()
    except Exception:
        log.exception("Exception occurred while trying to add field to repo")
        raise JSONRPCError(
            'failed to create new field for repository `%s`' % (repoid,))

    return {
        'msg': "Added new repository field `%s`" % (key,),
        'success': True,
    }
902 902
903 903
@jsonrpc_method()
def remove_field_from_repo(request, apiuser, repoid, key):
    """
    Removes an extra field from a repository.

    This command can only be run using an |authtoken| with at least
    write permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param key: Set the unique field key for this repository.
    :type key: str
    """

    repo = get_repo_or_error(repoid)
    # non super-admins need explicit repository.admin on this repo
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    field = RepositoryField.get_by_key_name(key, repo)
    if not field:
        # NOTE: error message grammar fixed ("does not exists" -> "does not exist")
        raise JSONRPCError('Field with key `%s` does not '
                           'exist for repo `%s`' % (key, repoid))

    try:
        RepoModel().delete_repo_field(repo, field_key=key)
        Session().commit()
        return {
            'msg': "Deleted repository field `%s`" % (key,),
            'success': True,
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to delete field from repo")
        raise JSONRPCError(
            'failed to delete field for repository `%s`' % (repoid,))
942 942
943 943
@jsonrpc_method()
def update_repo(
        request, apiuser, repoid, repo_name=Optional(None),
        owner=Optional(OAttr('apiuser')), description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None), push_uri=Optional(None),
        landing_rev=Optional('rev:tip'), fork_of=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False), fields=Optional('')):
    """
    Updates a repository with the given information.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    * If the repository name contains "/", repository will be updated
      accordingly with a repository group or nested repository groups

      For example repoid=repo-test name="foo/bar/repo-test" will update |repo|
      called "repo-test" and place it inside group "foo/bar".
      You have to have permissions to access and write to the last repository
      group ("bar" in this example)

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: repository name or repository ID.
    :type repoid: str or int
    :param repo_name: Update the |repo| name, including the
        repository group it's in.
    :type repo_name: str
    :param owner: Set the |repo| owner.
    :type owner: str
    :param fork_of: Set the |repo| as fork of another |repo|.
    :type fork_of: str
    :param description: Update the |repo| description.
    :type description: str
    :param private: Set the |repo| as private. (True | False)
    :type private: bool
    :param clone_uri: Update the |repo| clone URI.
    :type clone_uri: str
    :param landing_rev: Set the |repo| landing revision. Default is ``rev:tip``.
    :type landing_rev: str
    :param enable_statistics: Enable statistics on the |repo|, (True | False).
    :type enable_statistics: bool
    :param enable_locking: Enable |repo| locking.
    :type enable_locking: bool
    :param enable_downloads: Enable downloads from the |repo|, (True | False).
    :type enable_downloads: bool
    :param fields: Add extra fields to the |repo|. Use the following
        example format: ``field_key=field_val,field_key2=fieldval2``.
        Escape ', ' with \,
    :type fields: str
    """

    repo = get_repo_or_error(repoid)

    include_secrets = False
    if has_superadmin_permission(apiuser):
        # super-admins also get secret values back in the response
        include_secrets = True
    else:
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    def given_or_current(new_value, current_value):
        # parameters the API caller did not send arrive wrapped in Optional;
        # in that case keep the value currently stored on the repository
        return current_value if isinstance(new_value, Optional) else new_value

    updates = dict(
        repo_name=given_or_current(repo_name, repo.repo_name),
        fork_id=given_or_current(
            fork_of, repo.fork.repo_name if repo.fork else None),
        user=given_or_current(owner, repo.user.username),
        repo_description=given_or_current(description, repo.description),
        repo_private=given_or_current(private, repo.private),
        clone_uri=given_or_current(clone_uri, repo.clone_uri),
        push_uri=given_or_current(push_uri, repo.push_uri),
        repo_landing_rev=given_or_current(landing_rev, repo._landing_revision),
        repo_enable_statistics=given_or_current(
            enable_statistics, repo.enable_statistics),
        repo_enable_locking=given_or_current(
            enable_locking, repo.enable_locking),
        repo_enable_downloads=given_or_current(
            enable_downloads, repo.enable_downloads))

    ref_choices, _labels = ScmModel().get_repo_landing_revs(
        request.translate, repo=repo)

    old_values = repo.get_api_data()
    repo_type = repo.repo_type
    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo_type,
        # user caller
        user=apiuser,
        old_values=old_values)
    try:
        schema_data = schema.deserialize(dict(
            # we save old value, users cannot change type
            repo_type=repo_type,

            repo_name=updates['repo_name'],
            repo_owner=updates['user'],
            repo_description=updates['repo_description'],
            repo_clone_uri=updates['clone_uri'],
            repo_push_uri=updates['push_uri'],
            repo_fork_of=updates['fork_id'],
            repo_private=updates['repo_private'],
            repo_landing_commit_ref=updates['repo_landing_rev'],
            repo_enable_statistics=updates['repo_enable_statistics'],
            repo_enable_downloads=updates['repo_enable_downloads'],
            repo_enable_locking=updates['repo_enable_locking']))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    # save validated data back into the updates dict
    validated_updates = dict(
        repo_name=schema_data['repo_group']['repo_name_without_group'],
        repo_group=schema_data['repo_group']['repo_group_id'],

        user=schema_data['repo_owner'],
        repo_description=schema_data['repo_description'],
        repo_private=schema_data['repo_private'],
        clone_uri=schema_data['repo_clone_uri'],
        push_uri=schema_data['repo_push_uri'],
        repo_landing_rev=schema_data['repo_landing_commit_ref'],
        repo_enable_statistics=schema_data['repo_enable_statistics'],
        repo_enable_locking=schema_data['repo_enable_locking'],
        repo_enable_downloads=schema_data['repo_enable_downloads'],
    )

    if schema_data['repo_fork_of']:
        # resolve the validated fork reference into a database id
        fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
        validated_updates['fork_id'] = fork_repo.repo_id

    # extra fields, sent as ``ex_<key>=<val>`` pairs
    extra_fields = parse_args(Optional.extract(fields), key_prefix='ex_')
    if extra_fields:
        validated_updates.update(extra_fields)

    try:
        RepoModel().update(repo, **validated_updates)
        audit_logger.store_api(
            'repo.edit', action_data={'old_data': old_values},
            user=apiuser, repo=repo)
        Session().commit()
        return {
            'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
            'repository': repo.get_api_data(include_secrets=include_secrets)
        }
    except Exception:
        log.exception(
            u"Exception while trying to update the repository %s",
            repoid)
        raise JSONRPCError('failed to update repo `%s`' % repoid)
1112 1112
1113 1113
@jsonrpc_method()
def fork_repo(request, apiuser, repoid, fork_name,
              owner=Optional(OAttr('apiuser')),
              description=Optional(''),
              private=Optional(False),
              clone_uri=Optional(None),
              landing_rev=Optional('rev:tip'),
              copy_permissions=Optional(False)):
    """
    Creates a fork of the specified |repo|.

    * If the fork_name contains "/", fork will be created inside
      a repository group or nested repository groups

      For example "foo/bar/fork-repo" will create fork called "fork-repo"
      inside group "foo/bar". You have to have permissions to access and
      write to the last repository group ("bar" in this example)

    This command can only be run using an |authtoken| with minimum
    read permissions of the forked repo, create fork permissions for an user.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set repository name or repository ID.
    :type repoid: str or int
    :param fork_name: Set the fork name, including it's repository group membership.
    :type fork_name: str
    :param owner: Set the fork owner.
    :type owner: str
    :param description: Set the fork description.
    :type description: str
    :param copy_permissions: Copy permissions from parent |repo|. The
        default is False.
    :type copy_permissions: bool
    :param private: Make the fork private. The default is False.
    :type private: bool
    :param landing_rev: Set the landing revision. The default is tip.

    Example output:

    .. code-block:: bash

        id : <id_for_response>
        api_key : "<api_key>"
        args:     {
                    "repoid" :          "<reponame or repo_id>",
                    "fork_name":        "<forkname>",
                    "owner":            "<username or user_id = Optional(=apiuser)>",
                    "description":      "<description>",
                    "copy_permissions": "<bool>",
                    "private":          "<bool>",
                    "landing_rev":      "<landing_rev>"
                  }

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg": "Created fork of `<reponame>` as `<forkname>`",
                  "success": true,
                  "task": "<celery task id or None if done sync>"
                }
        error:  null

    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name

    if not has_superadmin_permission(apiuser):
        # forking requires at least read access to the source repository
        _perms = (
            'repository.admin', 'repository.write', 'repository.read')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

        # a regular user additionally needs the global fork permission
        if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser):
            raise JSONRPCForbidden()

    # check if user can set owner parameter
    owner = validate_set_owner_permissions(apiuser, owner)

    # unwrap Optional defaults into plain values
    private = Optional.extract(private)
    description = Optional.extract(description)
    clone_uri = Optional.extract(clone_uri)
    copy_permissions = Optional.extract(copy_permissions)
    landing_commit_ref = Optional.extract(landing_rev)

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_type=repo.repo_type,
        # user caller
        user=apiuser)

    try:
        validated = schema.deserialize(dict(
            repo_name=fork_name,
            repo_type=repo.repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        fork_data = {
            'fork_parent_id': repo.repo_id,

            'repo_name': validated['repo_group']['repo_name_without_group'],
            'repo_name_full': validated['repo_name'],
            'repo_group': validated['repo_group']['repo_group_id'],
            'repo_type': validated['repo_type'],
            'description': validated['repo_description'],
            'private': validated['repo_private'],
            'copy_permissions': validated['repo_copy_permissions'],
            'landing_rev': validated['repo_landing_commit_ref'],
        }

        task = RepoModel().create_fork(fork_data, cur_user=owner.user_id)
        # no commit, it's done in RepoModel, or async via celery
        task_id = get_task_id(task)

        return {
            'msg': 'Created fork of `%s` as `%s`' % (
                repo.repo_name, validated['repo_name']),
            'success': True,  # cannot return the repo data here since fork
                              # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            u"Exception while trying to create fork %s",
            validated['repo_name'])
        raise JSONRPCError(
            'failed to fork repository `%s` as `%s`' % (
                repo_name, validated['repo_name']))
1256 1256
1257 1257
@jsonrpc_method()
def delete_repo(request, apiuser, repoid, forks=Optional('')):
    """
    Deletes a repository.

    * When the `forks` parameter is set it's possible to detach or delete
      forks of deleted repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param forks: Set to `detach` or `delete` forks from the |repo|.
    :type forks: Optional(str)

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg": "Deleted repository `<reponame>`",
                  "success": true
                }
        error:  null
    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        handle_forks = Optional.extract(forks)
        _forks_msg = ''
        _forks = list(repo.forks)
        if handle_forks == 'detach':
            _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
        elif handle_forks == 'delete':
            _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
        elif _forks:
            # refuse deletion when forks are attached and no strategy was given
            raise JSONRPCError(
                'Cannot delete `%s` it still contains attached forks' %
                (repo.repo_name,)
            )
        old_data = repo.get_api_data()
        # pass the extracted plain value ('detach'/'delete'/''), not the raw
        # Optional wrapper, so the model layer receives the documented type
        RepoModel().delete(repo, forks=handle_forks)

        repo = audit_logger.RepoWrap(repo_id=None,
                                     repo_name=repo.repo_name)

        audit_logger.store_api(
            'repo.delete', action_data={'old_data': old_data},
            user=apiuser, repo=repo)

        ScmModel().mark_for_invalidation(repo_name, delete=True)
        Session().commit()
        return {
            'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg),
            'success': True
        }
    except JSONRPCError:
        # re-raise specific errors (e.g. attached-forks refusal) instead of
        # masking them with the generic failure message below; same pattern
        # as used by comment_commit in this module
        raise
    except Exception:
        log.exception("Exception occurred while trying to delete repo")
        raise JSONRPCError(
            'failed to delete repository `%s`' % (repo_name,)
        )
1328 1328
1329 1329
# TODO: marcink, change name ?
@jsonrpc_method()
def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
    """
    Invalidates the cache for the specified repository.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param delete_keys: This deletes the invalidated keys instead of
        just flagging them.
    :type delete_keys: Optional(``True`` | ``False``)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
          'msg': Cache for repository `<repository name>` was invalidated,
          'repository': <repository name>
        }
        error :  null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
           'Error occurred during cache invalidation action'
        }

    """

    repo = get_repo_or_error(repoid)
    # repository.write is enough; super-admins bypass the check entirely
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(
            apiuser, repoid, repo, ('repository.admin', 'repository.write',))

    delete = Optional.extract(delete_keys)
    try:
        ScmModel().mark_for_invalidation(repo.repo_name, delete=delete)
    except Exception:
        log.exception(
            "Exception occurred while trying to invalidate repo cache")
        raise JSONRPCError(
            'Error occurred during cache invalidation action'
        )
    return {
        'msg': 'Cache for repository `%s` was invalidated' % (repoid,),
        'repository': repo.repo_name
    }
1390 1390
1391 1391
#TODO: marcink, change name ?
@jsonrpc_method()
def lock(request, apiuser, repoid, locked=Optional(None),
         userid=Optional(OAttr('apiuser'))):
    """
    Sets the lock state of the specified |repo| by the given user.
    From more information, see :ref:`repo-locking`.

    * If the ``userid`` option is not set, the repository is locked to the
      user who called the method.
    * If the ``locked`` parameter is not set, the current lock state of the
      repository is displayed.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param locked: Sets the lock state.
    :type locked: Optional(``True`` | ``False``)
    :param userid: Set the repository lock to this user.
    :type userid: Optional(str or int)

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
          'repo': '<reponame>',
          'locked': <bool: lock state>,
          'locked_since': <int: lock timestamp>,
          'locked_by': <username of person who made the lock>,
          'lock_reason': <str: reason for locking>,
          'lock_state_changed': <bool: True if lock state has been changed in this request>,
          'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
          or
          'msg': 'Repo `<repository name>` not locked.'
          or
          'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
        }
        error :  null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error :  {
            'Error occurred locking repository `<reponame>`'
        }
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # check if we have at least write permission for this repo !
        _perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # make sure normal user does not pass someone else userid,
    # he is not allowed to do that
    if not isinstance(userid, Optional) and userid != apiuser.user_id:
        raise JSONRPCError('userid is not the same as your user')

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)

    if isinstance(locked, Optional):
        # `locked` not given: report the current lock state, do not change it
        lockobj = repo.locked

        if lockobj[0] is None:
            _d = {
                'repo': repo.repo_name,
                'locked': False,
                'locked_since': None,
                'locked_by': None,
                'lock_reason': None,
                'lock_state_changed': False,
                'msg': 'Repo `%s` not locked.' % repo.repo_name
            }
            return _d
        else:
            _user_id, _time, _reason = lockobj
            # BUGFIX: resolve the user stored in the lock (_user_id), not the
            # API caller (userid) -- `locked_by` must report who made the lock,
            # as documented above
            lock_user = get_user_or_error(_user_id)
            _d = {
                'repo': repo.repo_name,
                'locked': True,
                'locked_since': _time,
                'locked_by': lock_user.username,
                'lock_reason': _reason,
                'lock_state_changed': False,
                'msg': ('Repo `%s` locked by `%s` on `%s`.'
                        % (repo.repo_name, lock_user.username,
                           json.dumps(time_to_datetime(_time))))
            }
            return _d

    # force locked state through a flag
    else:
        locked = str2bool(locked)
        lock_reason = Repository.LOCK_API
        try:
            if locked:
                lock_time = time.time()
                Repository.lock(repo, user.user_id, lock_time, lock_reason)
            else:
                lock_time = None
                Repository.unlock(repo)
            _d = {
                'repo': repo.repo_name,
                'locked': locked,
                'locked_since': lock_time,
                'locked_by': user.username,
                'lock_reason': lock_reason,
                'lock_state_changed': True,
                'msg': ('User `%s` set lock state for repo `%s` to `%s`'
                        % (user.username, repo.repo_name, locked))
            }
            return _d
        except Exception:
            log.exception(
                "Exception occurred while trying to lock repository")
            raise JSONRPCError(
                'Error occurred locking repository `%s`' % repo.repo_name
            )
1524 1524
1525 1525
@jsonrpc_method()
def comment_commit(
        request, apiuser, repoid, commit_id, message, status=Optional(None),
        comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
        resolves_comment_id=Optional(None),
        userid=Optional(OAttr('apiuser'))):
    """
    Set a commit comment, and optionally change the status of the commit.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param commit_id: Specify the commit_id for which to set a comment.
    :type commit_id: str
    :param message: The comment text.
    :type message: str
    :param status: (**Optional**) status of commit, one of: 'not_reviewed',
        'approved', 'rejected', 'under_review'
    :type status: str
    :param comment_type: Comment type, one of: 'note', 'todo'
    :type comment_type: Optional(str), default: 'note'
    :param userid: Set the user name of the comment creator.
    :type userid: Optional(str or int)

    Example error output:

    .. code-block:: bash

        {
            "id" : <id_given_in_input>,
            "result" : {
                "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
                "status_change": null or <status>,
                "success": true
            },
            "error" :  null
        }

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.read', 'repository.write', 'repository.admin')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        # normalize any commit reference (short hash, branch, etc.) to raw_id
        commit_id = repo.scm_instance().get_commit(commit_id=commit_id).raw_id
    except Exception as e:
        log.exception('Failed to fetch commit')
        raise JSONRPCError(safe_str(e))

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)
    status = Optional.extract(status)
    comment_type = Optional.extract(comment_type)
    resolves_comment_id = Optional.extract(resolves_comment_id)

    allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
    if status and status not in allowed_statuses:
        # NOTE: error message typo fixed ("must be on of" -> "must be one of")
        raise JSONRPCError('Bad status, must be one '
                           'of %s got %s' % (allowed_statuses, status,))

    if resolves_comment_id:
        # a resolving comment must point at an existing TODO-type comment
        comment = ChangesetComment.get(resolves_comment_id)
        if not comment:
            raise JSONRPCError(
                'Invalid resolves_comment_id `%s` for this commit.'
                % resolves_comment_id)
        if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
            raise JSONRPCError(
                'Comment `%s` is wrong type for setting status to resolved.'
                % resolves_comment_id)

    try:
        rc_config = SettingsModel().get_all_settings()
        renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
        status_change_label = ChangesetStatus.get_status_lbl(status)
        comment = CommentsModel().create(
            message, repo, user, commit_id=commit_id,
            status_change=status_change_label,
            status_change_type=status,
            renderer=renderer,
            comment_type=comment_type,
            resolves_comment_id=resolves_comment_id,
            auth_user=apiuser
        )
        if status:
            # also do a status change
            try:
                ChangesetStatusModel().set_status(
                    repo, status, user, comment, revision=commit_id,
                    dont_allow_on_closed_pull_request=True
                )
            except StatusChangeOnClosedPullRequestError:
                log.exception(
                    "Exception occurred while trying to change repo commit status")
                msg = ('Changing status on a changeset associated with '
                       'a closed pull request is not allowed')
                raise JSONRPCError(msg)

        Session().commit()
        return {
            'msg': (
                'Commented on commit `%s` for repository `%s`' % (
                    comment.revision, repo.repo_name)),
            'status_change': status,
            'success': True,
        }
    except JSONRPCError:
        # catch any inside errors, and re-raise them to prevent from
        # below global catch to silence them
        raise
    except Exception:
        log.exception("Exception occurred while trying to comment on commit")
        raise JSONRPCError(
            'failed to set comment on repository `%s`' % (repo.repo_name,)
        )
1645 1645
1646 1646
@jsonrpc_method()
def get_repo_comments(request, apiuser, repoid,
                      commit_id=Optional(None), comment_type=Optional(None),
                      userid=Optional(None)):
    """
    Get all comments for a repository

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param commit_id: Optionally filter the comments by the commit_id
    :type commit_id: Optional(str), default: None
    :param comment_type: Optionally filter the comments by the comment_type
        one of: 'note', 'todo'
    :type comment_type: Optional(str), default: None
    :param userid: Optionally filter the comments by the author of comment
    :type userid: Optional(str or int), Default: None

    Example error output:

    .. code-block:: bash

        {
            "id" : <id_given_in_input>,
            "result" : [
                {
                  "comment_author": <USER_DETAILS>,
                  "comment_created_on": "2017-02-01T14:38:16.309",
                  "comment_f_path": "file.txt",
                  "comment_id": 282,
                  "comment_lineno": "n1",
                  "comment_resolved_by": null,
                  "comment_status": [],
                  "comment_text": "This file needs a header",
                  "comment_type": "todo"
                }
            ],
            "error" :  null
        }

    """
    repo = get_repo_or_error(repoid)
    # read access is sufficient to list comments
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.read', 'repository.write', 'repository.admin'))

    # unwrap the optional filters; each defaults to "no filtering"
    commit_id = Optional.extract(commit_id)

    userid = Optional.extract(userid)
    user = get_user_or_error(userid) if userid else None

    comment_type = Optional.extract(comment_type)
    if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES:
        raise JSONRPCError(
            'comment_type must be one of `{}` got {}'.format(
                ChangesetComment.COMMENT_TYPES, comment_type)
        )

    return CommentsModel().get_repository_comments(
        repo=repo, comment_type=comment_type, user=user, commit_id=commit_id)
1712 1712
1713 1713
@jsonrpc_method()
def grant_user_permission(request, apiuser, repoid, userid, perm):
    """
    Grant permissions for the specified user on the given repository,
    or update existing permissions if found.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param userid: Set the user name.
    :type userid: str
    :param perm: Set the user permissions, using the following format
        ``(repository.(none|read|write|admin))``
    :type perm: str

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
                "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
                "success": true
              }
      error:  null
    """
    repo = get_repo_or_error(repoid)
    user = get_user_or_error(userid)
    perm = get_perm_or_error(perm)

    # only repository admins (or super-admins) may change permissions
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    try:
        changes = RepoModel().update_permissions(
            repo=repo,
            perm_additions=[[user.user_id, perm.permission_name, "user"]],
            cur_user=apiuser)

        # record the permission delta in the audit log
        audit_logger.store_api(
            'repo.edit.permissions',
            action_data={
                'added': changes['added'],
                'updated': changes['updated'],
                'deleted': changes['deleted'],
            },
            user=apiuser, repo=repo)

        Session().commit()
        msg = 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
            perm.permission_name, user.username, repo.repo_name
        )
        return {'msg': msg, 'success': True}
    except Exception:
        log.exception("Exception occurred while trying edit permissions for repo")
        raise JSONRPCError(
            'failed to edit permission for user: `%s` in repo: `%s`' % (
                userid, repoid
            )
        )
1779 1779
1780 1780
@jsonrpc_method()
def revoke_user_permission(request, apiuser, repoid, userid):
    """
    Revoke permission for a user on the specified repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param userid: Set the user name of revoked user.
    :type userid: str or int

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
                "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
                "success": true
              }
      error:  null
    """

    repo = get_repo_or_error(repoid)
    user = get_user_or_error(userid)

    # only repository admins (or super-admins) may change permissions
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    perm_deletions = [[user.user_id, None, "user"]]
    try:
        # NOTE: cur_user must be the API caller, not the revoked user —
        # it identifies who performed the change (matches
        # grant_user_permission, which passes apiuser here too).
        changes = RepoModel().update_permissions(
            repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)

        action_data = {
            'added': changes['added'],
            'updated': changes['updated'],
            'deleted': changes['deleted'],
        }
        audit_logger.store_api(
            'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)

        Session().commit()
        return {
            'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
                user.username, repo.repo_name
            ),
            'success': True
        }
    except Exception:
        log.exception("Exception occurred while trying revoke permissions to repo")
        raise JSONRPCError(
            'failed to edit permission for user: `%s` in repo: `%s`' % (
                userid, repoid
            )
        )
1841 1841
1842 1842
@jsonrpc_method()
def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
    """
    Grant permission for a user group on the specified repository,
    or update existing permissions.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param usergroupid: Specify the ID of the user group.
    :type usergroupid: str or int
    :param perm: Set the user group permissions using the following
        format: (repository.(none|read|write|admin))
    :type perm: str

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
          "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
          "success": true

        }
        error :  null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
          "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
        }

    """
    repo = get_repo_or_error(repoid)
    perm = get_perm_or_error(perm)

    is_superadmin = has_superadmin_permission(apiuser)
    if not is_superadmin:
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    user_group = get_user_group_or_error(usergroupid)
    if not is_superadmin:
        # caller needs at least read permission on the user group itself;
        # deny with "does not exist" to avoid leaking the group's presence
        group_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
        has_group_access = HasUserGroupPermissionAnyApi(*group_perms)(
            user=apiuser, user_group_name=user_group.users_group_name)
        if not has_group_access:
            raise JSONRPCError(
                'user group `%s` does not exist' % (usergroupid,))

    additions = [[user_group.users_group_id, perm.permission_name, "user_group"]]
    try:
        changes = RepoModel().update_permissions(
            repo=repo, perm_additions=additions, cur_user=apiuser)

        # record the permission delta in the audit log
        audit_logger.store_api(
            'repo.edit.permissions',
            action_data={
                'added': changes['added'],
                'updated': changes['updated'],
                'deleted': changes['deleted'],
            },
            user=apiuser, repo=repo)

        Session().commit()
        msg = 'Granted perm: `%s` for user group: `%s` in repo: `%s`' % (
            perm.permission_name, user_group.users_group_name, repo.repo_name
        )
        return {'msg': msg, 'success': True}
    except Exception:
        log.exception(
            "Exception occurred while trying change permission on repo")
        raise JSONRPCError(
            'failed to edit permission for user group: `%s` in repo: `%s`' % (
                usergroupid, repo.repo_name
            )
        )
1931 1931
1932 1932
@jsonrpc_method()
def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
    """
    Revoke the permissions of a user group on a given repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param usergroupid: Specify the user group ID.
    :type usergroupid: str or int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
                  "success": true
                }
        error:  null
    """
    repo = get_repo_or_error(repoid)

    is_superadmin = has_superadmin_permission(apiuser)
    if not is_superadmin:
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    user_group = get_user_group_or_error(usergroupid)
    if not is_superadmin:
        # caller needs at least read permission on the user group itself;
        # deny with "does not exist" to avoid leaking the group's presence
        group_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
        has_group_access = HasUserGroupPermissionAnyApi(*group_perms)(
            user=apiuser, user_group_name=user_group.users_group_name)
        if not has_group_access:
            raise JSONRPCError(
                'user group `%s` does not exist' % (usergroupid,))

    deletions = [[user_group.users_group_id, None, "user_group"]]
    try:
        changes = RepoModel().update_permissions(
            repo=repo, perm_deletions=deletions, cur_user=apiuser)

        # record the permission delta in the audit log
        audit_logger.store_api(
            'repo.edit.permissions',
            action_data={
                'added': changes['added'],
                'updated': changes['updated'],
                'deleted': changes['deleted'],
            },
            user=apiuser, repo=repo)

        Session().commit()
        msg = 'Revoked perm for user group: `%s` in repo: `%s`' % (
            user_group.users_group_name, repo.repo_name
        )
        return {'msg': msg, 'success': True}
    except Exception:
        log.exception("Exception occurred while trying revoke "
                      "user group permission on repo")
        raise JSONRPCError(
            'failed to edit permission for user group: `%s` in repo: `%s`' % (
                user_group.users_group_name, repo.repo_name
            )
        )
2002 2002
2003 2003
@jsonrpc_method()
def pull(request, apiuser, repoid, remote_uri=Optional(None)):
    """
    Triggers a pull on the given repository from a remote location. You
    can use this to keep remote repositories up-to-date.

    This command can only be run using an |authtoken| with admin
    rights to the specified repository. For more information,
    see :ref:`config-token-ref`.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param remote_uri: Optional remote URI to pass in for pull
    :type remote_uri: str

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        "msg": "Pulled from url `<remote_url>` on repo `<repository name>`"
        "repository": "<repository name>"
      }
      error :  null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error :  {
         "Unable to pull changes from `<remote_url>`"
      }

    """

    repo = get_repo_or_error(repoid)
    remote_uri = Optional.extract(remote_uri)
    # for display/error purposes fall back to the repo's (obfuscated) clone
    # uri when no explicit remote was supplied
    remote_uri_display = remote_uri or repo.clone_uri_hidden
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        ScmModel().pull_changes(
            repo.repo_name, apiuser.username, remote_uri=remote_uri)
        return {
            'msg': 'Pulled from url `%s` on repo `%s`' % (
                remote_uri_display, repo.repo_name),
            'repository': repo.repo_name
        }
    except Exception:
        log.exception("Exception occurred while trying to "
                      "pull changes from remote location")
        raise JSONRPCError(
            'Unable to pull changes from `%s`' % remote_uri_display
        )
2067 2067
2068 2068
@jsonrpc_method()
def strip(request, apiuser, repoid, revision, branch):
    """
    Strips the given revision from the specified repository.

    * This will remove the revision and all of its decendants.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param revision: The revision you wish to strip.
    :type revision: str
    :param branch: The branch from which to strip the revision.
    :type branch: str

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
        "repository": "<repository name>"
      }
      error :  null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error :  {
        "Unable to strip commit <commit_hash> from repo `<repository name>`"
      }

    """
    repo = get_repo_or_error(repoid)

    # destructive operation: restricted to repository admins / super-admins
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    try:
        ScmModel().strip(repo, revision, branch)
        # audit with commit=True so the log entry survives even though the
        # commit itself is gone
        audit_logger.store_api(
            'repo.commit.strip', action_data={'commit_id': revision},
            repo=repo,
            user=apiuser, commit=True)

        msg = 'Stripped commit %s from repo `%s`' % (revision, repo.repo_name)
        return {'msg': msg, 'repository': repo.repo_name}
    except Exception:
        log.exception("Exception while trying to strip")
        raise JSONRPCError(
            'Unable to strip commit %s from repo `%s`' % (
                revision, repo.repo_name)
        )
2136 2136
2137 2137
@jsonrpc_method()
def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
    """
    Returns all settings for a repository. If key is given it only returns the
    setting identified by the key or null.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param key: Key of the setting to return.
    :type: key: Optional(str)

    Example output:

    .. code-block:: bash

        {
            "error": null,
            "id": 237,
            "result": {
                "extensions_largefiles": true,
                "extensions_evolve": true,
                "hooks_changegroup_push_logger": true,
                "hooks_changegroup_repo_size": false,
                "hooks_outgoing_pull_logger": true,
                "phases_publish": "True",
                "rhodecode_hg_use_rebase_for_merging": true,
                "rhodecode_pr_merge_enabled": true,
                "rhodecode_use_outdated_comments": true
            }
        }
    """

    # Restrict access to this api method to admins only.
    if not has_superadmin_permission(apiuser):
        raise JSONRPCForbidden()

    try:
        repo = get_repo_or_error(repoid)
        settings_model = VcsSettingsModel(repo=repo)

        # repo-level settings override the global defaults
        settings = settings_model.get_global_settings()
        settings.update(settings_model.get_repo_settings())

        key = Optional.extract(key)
        if key is not None:
            # single-key lookup: missing keys yield null rather than an error
            settings = settings.get(key, None)
    except Exception:
        msg = 'Failed to fetch settings for repository `{}`'.format(repoid)
        log.exception(msg)
        raise JSONRPCError(msg)

    return settings
2192 2192
2193 2193
@jsonrpc_method()
def set_repo_settings(request, apiuser, repoid, settings):
    """
    Update repository settings. Returns true on success.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param settings: The new settings for the repository.
    :type: settings: dict

    Example output:

    .. code-block:: bash

        {
            "error": null,
            "id": 237,
            "result": true
        }
    """
    # Restrict access to this api method to admins only.
    if not has_superadmin_permission(apiuser):
        raise JSONRPCForbidden()

    # idiomatic type check (also accepts dict subclasses such as OrderedDict)
    if not isinstance(settings, dict):
        raise JSONRPCError('Settings have to be a JSON Object.')

    try:
        settings_model = VcsSettingsModel(repo=repoid)

        # Merge global, repo and incoming settings.
        new_settings = settings_model.get_global_settings()
        new_settings.update(settings_model.get_repo_settings())
        new_settings.update(settings)

        # Update the settings.
        inherit_global_settings = new_settings.get(
            'inherit_global_settings', False)
        settings_model.create_or_update_repo_settings(
            new_settings, inherit_global_settings=inherit_global_settings)
        Session().commit()
    except Exception:
        msg = 'Failed to update settings for repository `{}`'.format(repoid)
        log.exception(msg)
        raise JSONRPCError(msg)

    # Indicate success.
    return True
2244 2244
2245 2245
@jsonrpc_method()
def maintenance(request, apiuser, repoid):
    """
    Triggers a maintenance on the given repository.

    This command can only be run using an |authtoken| with admin
    rights to the specified repository. For more information,
    see :ref:`config-token-ref`.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        "msg": "executed maintenance command",
        "executed_actions": [
           <action_message>, <action_message2>...
        ],
        "repository": "<repository name>"
      }
      error :  null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error :  {
         "Unable to execute maintenance on `<reponame>`"
      }

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    try:
        # local name chosen so it does not shadow this function's own name
        runner = repo_maintenance.RepoMaintenance()
        executed_actions = runner.execute(repo)

        return {
            'msg': 'executed maintenance command',
            'executed_actions': executed_actions,
            'repository': repo.repo_name
        }
    except Exception:
        log.exception("Exception occurred while trying to run maintenance")
        raise JSONRPCError(
            'Unable to execute maintenance on `%s`' % repo.repo_name)
@@ -1,358 +1,358 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23
24 24 from pyramid.httpexceptions import HTTPNotFound, HTTPFound
25 25 from pyramid.view import view_config
26 26 from pyramid.renderers import render
27 27 from pyramid.response import Response
28 28
29 29 from rhodecode.apps._base import RepoAppView
30 30 import rhodecode.lib.helpers as h
31 31 from rhodecode.lib.auth import (
32 32 LoginRequired, HasRepoPermissionAnyDecorator)
33 33
34 34 from rhodecode.lib.ext_json import json
35 35 from rhodecode.lib.graphmod import _colored, _dagwalker
36 36 from rhodecode.lib.helpers import RepoPage
37 37 from rhodecode.lib.utils2 import safe_int, safe_str, str2bool
38 38 from rhodecode.lib.vcs.exceptions import (
39 39 RepositoryError, CommitDoesNotExistError,
40 40 CommitError, NodeDoesNotExistError, EmptyRepositoryError)
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44 DEFAULT_CHANGELOG_SIZE = 20
45 45
46 46
class RepoChangelogView(RepoAppView):
    """
    Pyramid view class rendering the repository changelog (commit list +
    DAG graph), both as a full page and as dynamically-loaded chunks.
    """

    def _get_commit_or_redirect(self, commit_id, redirect_after=True):
        """
        This is a safe way to get commit. If an error occurs it redirects to
        tip with proper message

        :param commit_id: id of commit to fetch
        :param redirect_after: toggle redirection
        """
        _ = self.request.translate

        try:
            return self.rhodecode_vcs_repo.get_commit(commit_id)
        except EmptyRepositoryError:
            if not redirect_after:
                return None

            h.flash(h.literal(
                _('There are no commits yet')), category='warning')
            raise HTTPFound(
                h.route_path('repo_summary', repo_name=self.db_repo_name))

        except (CommitDoesNotExistError, LookupError):
            msg = _('No such commit exists for this repository')
            h.flash(msg, category='error')
            raise HTTPNotFound()
        except RepositoryError as e:
            h.flash(safe_str(h.escape(e)), category='error')
            raise HTTPNotFound()

    def _graph(self, repo, commits, prev_data=None, next_data=None):
        """
        Generates a DAG graph for repo

        :param repo: repo instance
        :param commits: list of commits
        :param prev_data: serialized commits of the preceding chunk (for
            dynamic chunked loading), prepended before graph walking
        :param next_data: serialized commits of the following chunk, appended
        :returns: tuple of two JSON strings: (colored graph rows, serialized
            current commits)
        """
        if not commits:
            return json.dumps([]), json.dumps([])

        def serialize(commit, parents=True):
            # minimal dict representation of a commit for the JS graph;
            # parents are serialized one level deep only
            data = dict(
                raw_id=commit.raw_id,
                idx=commit.idx,
                branch=h.escape(commit.branch),
            )
            if parents:
                data['parents'] = [
                    serialize(x, parents=False) for x in commit.parents]
            return data

        prev_data = prev_data or []
        next_data = next_data or []

        current = [serialize(x) for x in commits]
        # neighbouring chunks are included so edges connect across pages
        commits = prev_data + current + next_data

        dag = _dagwalker(repo, commits)

        data = [[commit_id, vtx, edges, branch]
                for commit_id, vtx, edges, branch in _colored(dag)]
        return json.dumps(data), json.dumps(current)

    def _check_if_valid_branch(self, branch_name, repo_name, f_path):
        # unknown branch: flash a warning and redirect, treating the given
        # name as a commit_id in the file-changelog route
        if branch_name not in self.rhodecode_vcs_repo.branches_all:
            h.flash('Branch {} is not found.'.format(h.escape(branch_name)),
                    category='warning')
            redirect_url = h.route_path(
                'repo_changelog_file', repo_name=repo_name,
                commit_id=branch_name, f_path=f_path or '')
            raise HTTPFound(redirect_url)

    def _load_changelog_data(
            self, c, collection, page, chunk_size, branch_name=None,
            dynamic=False, f_path=None, commit_id=None):
        """
        Populate the template context ``c`` with pagination, comments and
        commit statuses for the given commit ``collection``.

        :param dynamic: True when loading chunks via ajax; suppresses the
            prev/next page link that was not explicitly requested
        """

        def url_generator(**kw):
            query_params = {}
            query_params.update(kw)
            if f_path:
                # changelog for file
                return h.route_path(
                    'repo_changelog_file',
                    repo_name=c.rhodecode_db_repo.repo_name,
                    commit_id=commit_id, f_path=f_path,
                    _query=query_params)
            else:
                return h.route_path(
                    'repo_changelog',
                    repo_name=c.rhodecode_db_repo.repo_name, _query=query_params)

        c.total_cs = len(collection)
        c.showing_commits = min(chunk_size, c.total_cs)
        c.pagination = RepoPage(collection, page=page, item_count=c.total_cs,
                                items_per_page=chunk_size, branch=branch_name,
                                url=url_generator)

        c.next_page = c.pagination.next_page
        c.prev_page = c.pagination.previous_page

        if dynamic:
            # only expose the page link in the direction actually requested
            if self.request.GET.get('chunk') != 'next':
                c.next_page = None
            if self.request.GET.get('chunk') != 'prev':
                c.prev_page = None

        # bulk-fetch comments and review statuses for the visible page
        page_commit_ids = [x.raw_id for x in c.pagination]
        c.comments = c.rhodecode_db_repo.get_comments(page_commit_ids)
        c.statuses = c.rhodecode_db_repo.statuses(page_commit_ids)

    def load_default_context(self):
        # base template context shared by all views in this class
        c = self._get_local_tmpl_context(include_app_defaults=True)

        c.rhodecode_repo = self.rhodecode_vcs_repo

        return c

    def _get_preload_attrs(self):
        # commit attributes fetched eagerly to avoid per-commit round trips
        pre_load = ['author', 'branch', 'date', 'message', 'parents',
                    'obsolete', 'phase', 'hidden']
        return pre_load

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_changelog', request_method='GET',
        renderer='rhodecode:templates/changelog/changelog.mako')
    @view_config(
        route_name='repo_changelog_file', request_method='GET',
        renderer='rhodecode:templates/changelog/changelog.mako')
    def repo_changelog(self):
        """
        Render the changelog page, either for the whole repository or for a
        single file's history (when ``f_path`` is present in the route).
        """
        c = self.load_default_context()

        commit_id = self.request.matchdict.get('commit_id')
        f_path = self._get_f_path(self.request.matchdict)
        show_hidden = str2bool(self.request.GET.get('evolve'))

        chunk_size = 20

        c.branch_name = branch_name = self.request.GET.get('branch') or ''
        c.book_name = book_name = self.request.GET.get('bookmark') or ''
        c.f_path = f_path
        c.commit_id = commit_id
        c.show_hidden = show_hidden

        hist_limit = safe_int(self.request.GET.get('limit')) or None

        p = safe_int(self.request.GET.get('page', 1), 1)

        c.selected_name = branch_name or book_name
        if not commit_id and branch_name:
            self._check_if_valid_branch(branch_name, self.db_repo_name, f_path)

        c.changelog_for_path = f_path
        pre_load = self._get_preload_attrs()

        partial_xhr = self.request.environ.get('HTTP_X_PARTIAL_XHR')
        try:
            if f_path:
                log.debug('generating changelog for path %s', f_path)
                # get the history for the file !
                base_commit = self.rhodecode_vcs_repo.get_commit(commit_id)

                try:
                    collection = base_commit.get_path_history(
                        f_path, limit=hist_limit, pre_load=pre_load)
                    if collection and partial_xhr:
                        # for ajax call we remove first one since we're looking
                        # at it right now in the context of a file commit
                        collection.pop(0)
                except (NodeDoesNotExistError, CommitError):
                    # this node is not present at tip!
                    try:
                        commit = self._get_commit_or_redirect(commit_id)
                        collection = commit.get_path_history(f_path)
                    except RepositoryError as e:
                        h.flash(safe_str(e), category='warning')
                        redirect_url = h.route_path(
                            'repo_changelog', repo_name=self.db_repo_name)
                        raise HTTPFound(redirect_url)
                collection = list(reversed(collection))
            else:
                # translate_tags=False skips tag-name translation for faster
                # bulk commit fetching
                collection = self.rhodecode_vcs_repo.get_commits(
                    branch_name=branch_name, show_hidden=show_hidden,
                    pre_load=pre_load, translate_tags=False)

            self._load_changelog_data(
                c, collection, p, chunk_size, c.branch_name,
                f_path=f_path, commit_id=commit_id)

        except EmptyRepositoryError as e:
            h.flash(safe_str(h.escape(e)), category='warning')
            raise HTTPFound(
                h.route_path('repo_summary', repo_name=self.db_repo_name))
        except HTTPFound:
            raise
        except (RepositoryError, CommitDoesNotExistError, Exception) as e:
            log.exception(safe_str(e))
            h.flash(safe_str(h.escape(e)), category='error')
            raise HTTPFound(
                h.route_path('repo_changelog', repo_name=self.db_repo_name))

        if partial_xhr or self.request.environ.get('HTTP_X_PJAX'):
            # case when loading dynamic file history in file view
            # loading from ajax, we don't want the first result, it's popped
            # in the code above
            html = render(
                'rhodecode:templates/changelog/changelog_file_history.mako',
                self._get_template_context(c), self.request)
            return Response(html)

        commit_ids = []
        if not f_path:
            # only load graph data when not in file history mode
            commit_ids = c.pagination

        c.graph_data, c.graph_commits = self._graph(
            self.rhodecode_vcs_repo, commit_ids)

        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_changelog_elements', request_method=('GET', 'POST'),
        renderer='rhodecode:templates/changelog/changelog_elements.mako',
        xhr=True)
    @view_config(
        route_name='repo_changelog_elements_file', request_method=('GET', 'POST'),
        renderer='rhodecode:templates/changelog/changelog_elements.mako',
        xhr=True)
    def repo_changelog_elements(self):
        """
        XHR endpoint returning a chunk of changelog rows for dynamic
        (infinite-scroll style) loading; errors are rendered inline as a
        table row rather than a redirect.
        """
        c = self.load_default_context()
        commit_id = self.request.matchdict.get('commit_id')
        f_path = self._get_f_path(self.request.matchdict)
        show_hidden = str2bool(self.request.GET.get('evolve'))

        chunk_size = 20
        hist_limit = safe_int(self.request.GET.get('limit')) or None

        def wrap_for_error(err):
            # inline error row injected into the existing table via ajax
            html = '<tr>' \
                   '<td colspan="9" class="alert alert-error">ERROR: {}</td>' \
                   '</tr>'.format(err)
            return Response(html)

        c.branch_name = branch_name = self.request.GET.get('branch') or ''
        c.book_name = book_name = self.request.GET.get('bookmark') or ''
        c.f_path = f_path
        c.commit_id = commit_id
        c.show_hidden = show_hidden

        c.selected_name = branch_name or book_name
        if branch_name and branch_name not in self.rhodecode_vcs_repo.branches_all:
            return wrap_for_error(
                safe_str('Branch: {} is not valid'.format(branch_name)))

        pre_load = self._get_preload_attrs()

        if f_path:
            try:
                base_commit = self.rhodecode_vcs_repo.get_commit(commit_id)
            except (RepositoryError, CommitDoesNotExistError, Exception) as e:
                log.exception(safe_str(e))
                raise HTTPFound(
                    h.route_path('repo_changelog', repo_name=self.db_repo_name))

            collection = base_commit.get_path_history(
                f_path, limit=hist_limit, pre_load=pre_load)
            collection = list(reversed(collection))
        else:
            # translate_tags=False skips tag-name translation for faster
            # bulk commit fetching
            collection = self.rhodecode_vcs_repo.get_commits(
                branch_name=branch_name, show_hidden=show_hidden, pre_load=pre_load,
                translate_tags=False)

        p = safe_int(self.request.GET.get('page', 1), 1)
        try:
            self._load_changelog_data(
                c, collection, p, chunk_size, dynamic=True,
                f_path=f_path, commit_id=commit_id)
        except EmptyRepositoryError as e:
            return wrap_for_error(safe_str(e))
        except (RepositoryError, CommitDoesNotExistError, Exception) as e:
            log.exception('Failed to fetch commits')
            return wrap_for_error(safe_str(e))

        prev_data = None
        next_data = None

        try:
            # graph state of the already-rendered neighbouring chunk, posted
            # back by the client so edges connect across chunk boundaries
            prev_graph = json.loads(self.request.POST.get('graph') or '{}')
        except json.JSONDecodeError:
            prev_graph = {}

        if self.request.GET.get('chunk') == 'prev':
            next_data = prev_graph
        elif self.request.GET.get('chunk') == 'next':
            prev_data = prev_graph

        commit_ids = []
        if not f_path:
            # only load graph data when not in file history mode
            commit_ids = c.pagination

        c.graph_data, c.graph_commits = self._graph(
            self.rhodecode_vcs_repo, commit_ids,
            prev_data=prev_data, next_data=next_data)

        return self._get_template_context(c)
@@ -1,502 +1,502 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23 import collections
24 24
25 25 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
26 26 from pyramid.view import view_config
27 27 from pyramid.renderers import render
28 28 from pyramid.response import Response
29 29
30 30 from rhodecode.apps._base import RepoAppView
31 31
32 32 from rhodecode.lib import diffs, codeblocks
33 33 from rhodecode.lib.auth import (
34 34 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
35 35
36 36 from rhodecode.lib.compat import OrderedDict
37 37 from rhodecode.lib.diffs import (
38 38 cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
39 39 get_diff_whitespace_flag)
40 40 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
41 41 import rhodecode.lib.helpers as h
42 42 from rhodecode.lib.utils2 import safe_unicode, str2bool
43 43 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 RepositoryError, CommitDoesNotExistError)
46 46 from rhodecode.model.db import ChangesetComment, ChangesetStatus
47 47 from rhodecode.model.changeset_status import ChangesetStatusModel
48 48 from rhodecode.model.comment import CommentsModel
49 49 from rhodecode.model.meta import Session
50 50 from rhodecode.model.settings import VcsSettingsModel
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
55 55 def _update_with_GET(params, request):
56 56 for k in ['diff1', 'diff2', 'diff']:
57 57 params[k] += request.GET.getall(k)
58 58
59 59
60 60
61 61
62 62
63 63 class RepoCommitsView(RepoAppView):
64 64 def load_default_context(self):
65 65 c = self._get_local_tmpl_context(include_app_defaults=True)
66 66 c.rhodecode_repo = self.rhodecode_vcs_repo
67 67
68 68 return c
69 69
70 70 def _is_diff_cache_enabled(self, target_repo):
71 71 caching_enabled = self._get_general_setting(
72 72 target_repo, 'rhodecode_diff_cache')
73 73 log.debug('Diff caching enabled: %s', caching_enabled)
74 74 return caching_enabled
75 75
    def _commit(self, commit_id_range, method):
        """Shared implementation behind the commit views.

        :param commit_id_range: a single commit id, or an ``<id>...<id>``
            range string selecting several commits.
        :param method: one of ``'show'``, ``'raw'``, ``'patch'`` or
            ``'download'`` — selects how the computed diff is returned.
        """
        _ = self.request.translate
        c = self.load_default_context()
        c.fulldiff = self.request.GET.get('fulldiff')

        # fetch global flags of ignore ws or context lines
        diff_context = get_diff_context(self.request)
        hide_whitespace_changes = get_diff_whitespace_flag(self.request)

        # diff_limit will cut off the whole diff if the limit is applied
        # otherwise it will just hide the big files from the front-end
        diff_limit = c.visual.cut_off_limit_diff
        file_limit = c.visual.cut_off_limit_file

        # get ranges of commit ids if preset
        commit_range = commit_id_range.split('...')[:2]

        try:
            pre_load = ['affected_files', 'author', 'branch', 'date',
                        'message', 'parents']

            if len(commit_range) == 2:
                # translate_tags=False skips tag translation for faster
                # fetching of commits in bigger chunks
                commits = self.rhodecode_vcs_repo.get_commits(
                    start_id=commit_range[0], end_id=commit_range[1],
                    pre_load=pre_load, translate_tags=False)
                commits = list(commits)
            else:
                commits = [self.rhodecode_vcs_repo.get_commit(
                    commit_id=commit_id_range, pre_load=pre_load)]

            c.commit_ranges = commits
            if not c.commit_ranges:
                raise RepositoryError(
                    'The commit range returned an empty result')
        except CommitDoesNotExistError:
            msg = _('No such commit exists for this repository')
            h.flash(msg, category='error')
            raise HTTPNotFound()
        except Exception:
            log.exception("General failure")
            raise HTTPNotFound()

        c.changes = OrderedDict()
        c.lines_added = 0
        c.lines_deleted = 0

        # auto collapse if we have more than limit
        collapse_limit = diffs.DiffProcessor._collapse_commits_over
        c.collapse_all_commits = len(c.commit_ranges) > collapse_limit

        c.commit_statuses = ChangesetStatus.STATUSES
        c.inline_comments = []
        c.files = []

        c.statuses = []
        c.comments = []
        c.unresolved_comments = []
        # comments/statuses are only loaded for the single-commit view
        if len(c.commit_ranges) == 1:
            commit = c.commit_ranges[0]
            c.comments = CommentsModel().get_comments(
                self.db_repo.repo_id,
                revision=commit.raw_id)
            c.statuses.append(ChangesetStatusModel().get_status(
                self.db_repo.repo_id, commit.raw_id))
            # comments from PR
            statuses = ChangesetStatusModel().get_statuses(
                self.db_repo.repo_id, commit.raw_id,
                with_revisions=True)
            prs = set(st.pull_request for st in statuses
                      if st.pull_request is not None)
            # from associated statuses, check the pull requests, and
            # show comments from them
            for pr in prs:
                c.comments.extend(pr.comments)

            c.unresolved_comments = CommentsModel()\
                .get_commit_unresolved_todos(commit.raw_id)

        diff = None
        # Iterate over ranges (default commit view is always one commit)
        for commit in c.commit_ranges:
            c.changes[commit.raw_id] = []

            commit2 = commit
            commit1 = commit.first_parent

            if method == 'show':
                inline_comments = CommentsModel().get_inline_comments(
                    self.db_repo.repo_id, revision=commit.raw_id)
                c.inline_cnt = CommentsModel().get_inline_comments_count(
                    inline_comments)
                c.inline_comments = inline_comments

                cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
                    self.db_repo)
                cache_file_path = diff_cache_exist(
                    cache_path, 'diff', commit.raw_id,
                    hide_whitespace_changes, diff_context, c.fulldiff)

                caching_enabled = self._is_diff_cache_enabled(self.db_repo)
                force_recache = str2bool(self.request.GET.get('force_recache'))

                cached_diff = None
                if caching_enabled:
                    cached_diff = load_cached_diff(cache_file_path)

                # only reuse the cache when it actually carries a diff and
                # no explicit recache was requested via ?force_recache
                has_proper_diff_cache = cached_diff and cached_diff.get('diff')
                if not force_recache and has_proper_diff_cache:
                    diffset = cached_diff['diff']
                else:
                    vcs_diff = self.rhodecode_vcs_repo.get_diff(
                        commit1, commit2,
                        ignore_whitespace=hide_whitespace_changes,
                        context=diff_context)

                    diff_processor = diffs.DiffProcessor(
                        vcs_diff, format='newdiff', diff_limit=diff_limit,
                        file_limit=file_limit, show_full_diff=c.fulldiff)

                    _parsed = diff_processor.prepare()

                    diffset = codeblocks.DiffSet(
                        repo_name=self.db_repo_name,
                        source_node_getter=codeblocks.diffset_node_getter(commit1),
                        target_node_getter=codeblocks.diffset_node_getter(commit2))

                    diffset = self.path_filter.render_patchset_filtered(
                        diffset, _parsed, commit1.raw_id, commit2.raw_id)

                    # save cached diff
                    if caching_enabled:
                        cache_diff(cache_file_path, diffset, None)

                c.limited_diff = diffset.limited_diff
                c.changes[commit.raw_id] = diffset
            else:
                # TODO(marcink): no cache usage here...
                _diff = self.rhodecode_vcs_repo.get_diff(
                    commit1, commit2,
                    ignore_whitespace=hide_whitespace_changes, context=diff_context)
                diff_processor = diffs.DiffProcessor(
                    _diff, format='newdiff', diff_limit=diff_limit,
                    file_limit=file_limit, show_full_diff=c.fulldiff)
                # downloads/raw we only need RAW diff nothing else
                diff = self.path_filter.get_raw_patch(diff_processor)
                c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]

        # sort comments by how they were generated
        c.comments = sorted(c.comments, key=lambda x: x.comment_id)

        if len(c.commit_ranges) == 1:
            c.commit = c.commit_ranges[0]
            c.parent_tmpl = ''.join(
                '# Parent %s\n' % x.raw_id for x in c.commit.parents)

        if method == 'download':
            response = Response(diff)
            response.content_type = 'text/plain'
            response.content_disposition = (
                'attachment; filename=%s.diff' % commit_id_range[:12])
            return response
        elif method == 'patch':
            c.diff = safe_unicode(diff)
            patch = render(
                'rhodecode:templates/changeset/patch_changeset.mako',
                self._get_template_context(c), self.request)
            response = Response(patch)
            response.content_type = 'text/plain'
            return response
        elif method == 'raw':
            response = Response(diff)
            response.content_type = 'text/plain'
            return response
        elif method == 'show':
            if len(c.commit_ranges) == 1:
                html = render(
                    'rhodecode:templates/changeset/changeset.mako',
                    self._get_template_context(c), self.request)
                return Response(html)
            else:
                c.ancestor = None
                c.target_repo = self.db_repo
                html = render(
                    'rhodecode:templates/changeset/changeset_range.mako',
                    self._get_template_context(c), self.request)
                return Response(html)

        raise HTTPBadRequest()
264 264
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit', request_method='GET',
        renderer=None)
    def repo_commit_show(self):
        """Render the full commit page for the commit id in the route."""
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='show')
274 274
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_raw', request_method='GET',
        renderer=None)
    @view_config(
        route_name='repo_commit_raw_deprecated', request_method='GET',
        renderer=None)
    def repo_commit_raw(self):
        """Return the raw text diff of the commit id in the route."""
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='raw')
287 287
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_patch', request_method='GET',
        renderer=None)
    def repo_commit_patch(self):
        """Return the commit rendered as a plain-text patch."""
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='patch')
297 297
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_download', request_method='GET',
        renderer=None)
    def repo_commit_download(self):
        """Serve the commit diff as a downloadable ``.diff`` attachment."""
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='download')
307 307
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='repo_commit_comment_create', request_method='POST',
        renderer='json_ext')
    def repo_commit_comment_create(self):
        """Create a comment (optionally with a status change) on one or
        several commits and return the rendered comment as JSON.

        POSTing a comma-separated ``commit_ids`` value comments on several
        commits at once; otherwise the route's ``commit_id`` is used.
        """
        _ = self.request.translate
        commit_id = self.request.matchdict['commit_id']

        c = self.load_default_context()
        status = self.request.POST.get('changeset_status', None)
        text = self.request.POST.get('text')
        comment_type = self.request.POST.get('comment_type')
        resolves_comment_id = self.request.POST.get('resolves_comment_id', None)

        # a status change with no text gets an auto-generated message
        if status:
            text = text or (_('Status change %(transition_icon)s %(status)s')
                            % {'transition_icon': '>',
                               'status': ChangesetStatus.get_status_lbl(status)})

        # de-duplicate posted commit ids, dropping empties and EmptyCommit
        multi_commit_ids = []
        for _commit_id in self.request.POST.get('commit_ids', '').split(','):
            if _commit_id not in ['', None, EmptyCommit.raw_id]:
                if _commit_id not in multi_commit_ids:
                    multi_commit_ids.append(_commit_id)

        commit_ids = multi_commit_ids or [commit_id]

        comment = None
        for current_id in filter(None, commit_ids):
            comment = CommentsModel().create(
                text=text,
                repo=self.db_repo.repo_id,
                user=self._rhodecode_db_user.user_id,
                commit_id=current_id,
                f_path=self.request.POST.get('f_path'),
                line_no=self.request.POST.get('line'),
                status_change=(ChangesetStatus.get_status_lbl(status)
                               if status else None),
                status_change_type=status,
                comment_type=comment_type,
                resolves_comment_id=resolves_comment_id,
                auth_user=self._rhodecode_user
            )

            # get status if set !
            if status:
                # if latest status was from pull request and it's closed
                # disallow changing status !
                # dont_allow_on_closed_pull_request = True !

                try:
                    ChangesetStatusModel().set_status(
                        self.db_repo.repo_id,
                        status,
                        self._rhodecode_db_user.user_id,
                        comment,
                        revision=current_id,
                        dont_allow_on_closed_pull_request=True
                    )
                except StatusChangeOnClosedPullRequestError:
                    msg = _('Changing the status of a commit associated with '
                            'a closed pull request is not allowed')
                    log.exception(msg)
                    h.flash(msg, category='warning')
                    raise HTTPFound(h.route_path(
                        'repo_commit', repo_name=self.db_repo_name,
                        commit_id=current_id))

        # finalize, commit and redirect
        Session().commit()

        data = {
            'target_id': h.safeid(h.safe_unicode(
                self.request.POST.get('f_path'))),
        }
        # NOTE: only the last created comment is rendered/returned when
        # several commit ids were posted
        if comment:
            c.co = comment
            rendered_comment = render(
                'rhodecode:templates/changeset/changeset_comment_block.mako',
                self._get_template_context(c), self.request)

            data.update(comment.get_dict())
            data.update({'rendered_text': rendered_comment})

        return data
397 397
398 398 @LoginRequired()
399 399 @NotAnonymous()
400 400 @HasRepoPermissionAnyDecorator(
401 401 'repository.read', 'repository.write', 'repository.admin')
402 402 @CSRFRequired()
403 403 @view_config(
404 404 route_name='repo_commit_comment_preview', request_method='POST',
405 405 renderer='string', xhr=True)
406 406 def repo_commit_comment_preview(self):
407 407 # Technically a CSRF token is not needed as no state changes with this
408 408 # call. However, as this is a POST is better to have it, so automated
409 409 # tools don't flag it as potential CSRF.
410 410 # Post is required because the payload could be bigger than the maximum
411 411 # allowed by GET.
412 412
413 413 text = self.request.POST.get('text')
414 414 renderer = self.request.POST.get('renderer') or 'rst'
415 415 if text:
416 416 return h.render(text, renderer=renderer, mentions=True)
417 417 return ''
418 418
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='repo_commit_comment_delete', request_method='POST',
        renderer='json_ext')
    def repo_commit_comment_delete(self):
        """Delete a commit comment.

        Allowed for super-admins, the comment author, and repo admins when
        the comment belongs to their repository; returns ``True`` on
        success, raises 404 otherwise.
        """
        commit_id = self.request.matchdict['commit_id']
        comment_id = self.request.matchdict['comment_id']

        comment = ChangesetComment.get_or_404(comment_id)
        # NOTE(review): get_or_404 presumably raises for a missing id, which
        # would make this branch unreachable -- confirm before relying on it
        if not comment:
            log.debug('Comment with id:%s not found, skipping', comment_id)
            # comment already deleted in another call probably
            return True

        is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
        super_admin = h.HasPermissionAny('hg.admin')()
        comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
        is_repo_comment = comment.repo.repo_name == self.db_repo_name
        comment_repo_admin = is_repo_admin and is_repo_comment

        if super_admin or comment_owner or comment_repo_admin:
            CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
            Session().commit()
            return True
        else:
            log.warning('No permissions for user %s to delete comment_id: %s',
                        self._rhodecode_db_user, comment_id)
            raise HTTPNotFound()
451 451
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_data', request_method='GET',
        renderer='json_ext', xhr=True)
    def repo_commit_data(self):
        """Return a single commit serialized as JSON.

        An ``EmptyCommit`` carrying the error message is returned when the
        commit does not exist, so the endpoint never 404s.
        """
        commit_id = self.request.matchdict['commit_id']
        self.load_default_context()

        try:
            return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
        except CommitDoesNotExistError as e:
            return EmptyCommit(message=str(e))
466 466
467 467 @LoginRequired()
468 468 @HasRepoPermissionAnyDecorator(
469 469 'repository.read', 'repository.write', 'repository.admin')
470 470 @view_config(
471 471 route_name='repo_commit_children', request_method='GET',
472 472 renderer='json_ext', xhr=True)
473 473 def repo_commit_children(self):
474 474 commit_id = self.request.matchdict['commit_id']
475 475 self.load_default_context()
476 476
477 477 try:
478 478 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
479 479 children = commit.children
480 480 except CommitDoesNotExistError:
481 481 children = []
482 482
483 483 result = {"results": children}
484 484 return result
485 485
486 486 @LoginRequired()
487 487 @HasRepoPermissionAnyDecorator(
488 488 'repository.read', 'repository.write', 'repository.admin')
489 489 @view_config(
490 490 route_name='repo_commit_parents', request_method='GET',
491 491 renderer='json_ext')
492 492 def repo_commit_parents(self):
493 493 commit_id = self.request.matchdict['commit_id']
494 494 self.load_default_context()
495 495
496 496 try:
497 497 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
498 498 parents = commit.parents
499 499 except CommitDoesNotExistError:
500 500 parents = []
501 501 result = {"results": parents}
502 502 return result
@@ -1,390 +1,391 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import string
23 23 import rhodecode
24 24
25 25 from pyramid.view import view_config
26 26
27 27 from rhodecode.lib.view_utils import get_format_ref_id
28 28 from rhodecode.apps._base import RepoAppView
29 29 from rhodecode.config.conf import (LANGUAGES_EXTENSIONS_MAP)
30 30 from rhodecode.lib import helpers as h, rc_cache
31 31 from rhodecode.lib.utils2 import safe_str, safe_int
32 32 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
33 33 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
34 34 from rhodecode.lib.ext_json import json
35 35 from rhodecode.lib.vcs.backends.base import EmptyCommit
36 36 from rhodecode.lib.vcs.exceptions import (
37 37 CommitError, EmptyRepositoryError, CommitDoesNotExistError)
38 38 from rhodecode.model.db import Statistics, CacheKey, User
39 39 from rhodecode.model.meta import Session
40 40 from rhodecode.model.repo import ReadmeFinder
41 41 from rhodecode.model.scm import ScmModel
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
46 46 class RepoSummaryView(RepoAppView):
47 47
48 48 def load_default_context(self):
49 49 c = self._get_local_tmpl_context(include_app_defaults=True)
50 50 c.rhodecode_repo = None
51 51 if not c.repository_requirements_missing:
52 52 c.rhodecode_repo = self.rhodecode_vcs_repo
53 53 return c
54 54
    def _get_readme_data(self, db_repo, renderer_type):
        """Find and render the repository README, with long-term caching.

        Returns a ``(readme_data, readme_filename)`` tuple; both are None
        when no README is found. Results are cached in the
        ``cache_repo_longterm`` region and recomputed when the repo's
        invalidation namespace signals a change.
        """

        log.debug('Looking for README file')

        cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
            db_repo.repo_id, CacheKey.CACHE_TYPE_README)
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.db_repo.repo_id)
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def generate_repo_readme(repo_id, _repo_name, _renderer_type):
            # cache key is built from the arguments; db_repo is closed over
            readme_data = None
            readme_node = None
            readme_filename = None
            commit = self._get_landing_commit_or_none(db_repo)
            if commit:
                log.debug("Searching for a README file.")
                readme_node = ReadmeFinder(_renderer_type).search(commit)
                if readme_node:
                    relative_urls = {
                        'raw': h.route_path(
                            'repo_file_raw', repo_name=_repo_name,
                            commit_id=commit.raw_id, f_path=readme_node.path),
                        'standard': h.route_path(
                            'repo_files', repo_name=_repo_name,
                            commit_id=commit.raw_id, f_path=readme_node.path),
                    }
                    readme_data = self._render_readme_or_none(
                        commit, readme_node, relative_urls)
                    readme_filename = readme_node.path
            return readme_data, readme_filename

        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
        with inv_context_manager as invalidation_context:
            args = (db_repo.repo_id, db_repo.repo_name, renderer_type,)
            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                instance = generate_repo_readme.refresh(*args)
            else:
                instance = generate_repo_readme(*args)

            log.debug(
                'Repo readme generated and computed in %.3fs',
                inv_context_manager.compute_time)
            return instance
102 102
103 103 def _get_landing_commit_or_none(self, db_repo):
104 104 log.debug("Getting the landing commit.")
105 105 try:
106 106 commit = db_repo.get_landing_commit()
107 107 if not isinstance(commit, EmptyCommit):
108 108 return commit
109 109 else:
110 110 log.debug("Repository is empty, no README to render.")
111 111 except CommitError:
112 112 log.exception(
113 113 "Problem getting commit when trying to render the README.")
114 114
115 115 def _render_readme_or_none(self, commit, readme_node, relative_urls):
116 116 log.debug(
117 117 'Found README file `%s` rendering...', readme_node.path)
118 118 renderer = MarkupRenderer()
119 119 try:
120 120 html_source = renderer.render(
121 121 readme_node.content, filename=readme_node.path)
122 122 if relative_urls:
123 123 return relative_links(html_source, relative_urls)
124 124 return html_source
125 125 except Exception:
126 126 log.exception(
127 127 "Exception while trying to render the README")
128 128
    def _load_commits_context(self, c):
        """Populate *c* with a paginated commit collection plus the
        comments and statuses for the commits on the current page.

        Page and page size come from the ``page``/``size`` GET params
        (defaults 1 and 10).
        """
        p = safe_int(self.request.GET.get('page'), 1)
        size = safe_int(self.request.GET.get('size'), 10)

        def url_generator(**kw):
            # pagination links must preserve the chosen page size
            query_params = {
                'size': size
            }
            query_params.update(kw)
            return h.route_path(
                'repo_summary_commits',
                repo_name=c.rhodecode_db_repo.repo_name, _query=query_params)

        pre_load = ['author', 'branch', 'date', 'message']
        try:
            # translate_tags=False skips tag translation for faster commit
            # fetching; tags are not shown in the summary commits listing
            collection = self.rhodecode_vcs_repo.get_commits(
                pre_load=pre_load, translate_tags=False)
        except EmptyRepositoryError:
            collection = self.rhodecode_vcs_repo

        c.repo_commits = h.RepoPage(
            collection, page=p, items_per_page=size, url=url_generator)
        page_ids = [x.raw_id for x in c.repo_commits]
        c.comments = self.db_repo.get_comments(page_ids)
        c.statuses = self.db_repo.statuses(page_ids)
153 154
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_summary_commits', request_method='GET',
        renderer='rhodecode:templates/summary/summary_commits.mako')
    def summary_commits(self):
        """Render the paginated commits fragment of the summary page."""
        c = self.load_default_context()
        self._load_commits_context(c)
        return self._get_template_context(c)
164 165
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_summary', request_method='GET',
        renderer='rhodecode:templates/summary/summary.mako')
    @view_config(
        route_name='repo_summary_slash', request_method='GET',
        renderer='rhodecode:templates/summary/summary.mako')
    @view_config(
        route_name='repo_summary_explicit', request_method='GET',
        renderer='rhodecode:templates/summary/summary.mako')
    def summary(self):
        """Render the repository summary page: clone URLs, language
        statistics, follower/fork counts, README and recent commits."""
        c = self.load_default_context()

        # Prepare the clone URL
        username = ''
        if self._rhodecode_user.username != User.DEFAULT_USER:
            username = safe_str(self._rhodecode_user.username)

        _def_clone_uri = _def_clone_uri_id = c.clone_uri_tmpl
        _def_clone_uri_ssh = c.clone_uri_ssh_tmpl

        # build an alternate template that addresses the repo by numeric id
        if '{repo}' in _def_clone_uri:
            _def_clone_uri_id = _def_clone_uri.replace(
                '{repo}', '_{repoid}')
        elif '{repoid}' in _def_clone_uri:
            _def_clone_uri_id = _def_clone_uri.replace(
                '_{repoid}', '{repo}')

        c.clone_repo_url = self.db_repo.clone_url(
            user=username, uri_tmpl=_def_clone_uri)
        c.clone_repo_url_id = self.db_repo.clone_url(
            user=username, uri_tmpl=_def_clone_uri_id)
        c.clone_repo_url_ssh = self.db_repo.clone_url(
            uri_tmpl=_def_clone_uri_ssh, ssh=True)

        # If enabled, get statistics data

        c.show_stats = bool(self.db_repo.enable_statistics)

        stats = Session().query(Statistics) \
            .filter(Statistics.repository == self.db_repo) \
            .scalar()

        c.stats_percentage = 0

        if stats and stats.languages:
            # NOTE(review): `False is x` is an identity test -- presumably
            # enable_statistics is always a bool here; confirm, else this
            # differs from `not x`
            c.no_data = False is self.db_repo.enable_statistics
            lang_stats_d = json.loads(stats.languages)

            # Sort first by decreasing count and second by the file extension,
            # so we have a consistent output.
            lang_stats_items = sorted(lang_stats_d.iteritems(),
                                      key=lambda k: (-k[1], k[0]))[:10]
            lang_stats = [(x, {"count": y,
                               "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
                          for x, y in lang_stats_items]

            c.trending_languages = json.dumps(lang_stats)
        else:
            c.no_data = True
            c.trending_languages = json.dumps({})

        scm_model = ScmModel()
        c.enable_downloads = self.db_repo.enable_downloads
        c.repository_followers = scm_model.get_followers(self.db_repo)
        c.repository_forks = scm_model.get_forks(self.db_repo)
        c.repository_is_user_following = scm_model.is_following_repo(
            self.db_repo_name, self._rhodecode_user.user_id)

        # first interaction with the VCS instance after here...
        if c.repository_requirements_missing:
            self.request.override_renderer = \
                'rhodecode:templates/summary/missing_requirements.mako'
            return self._get_template_context(c)

        c.readme_data, c.readme_file = \
            self._get_readme_data(self.db_repo, c.visual.default_renderer)

        # loads the summary commits template context
        self._load_commits_context(c)

        return self._get_template_context(c)
249 250
250 251 def get_request_commit_id(self):
251 252 return self.request.matchdict['commit_id']
252 253
253 254 @LoginRequired()
254 255 @HasRepoPermissionAnyDecorator(
255 256 'repository.read', 'repository.write', 'repository.admin')
256 257 @view_config(
257 258 route_name='repo_stats', request_method='GET',
258 259 renderer='json_ext')
259 260 def repo_stats(self):
260 261 commit_id = self.get_request_commit_id()
261 262 show_stats = bool(self.db_repo.enable_statistics)
262 263 repo_id = self.db_repo.repo_id
263 264
264 265 cache_seconds = safe_int(
265 266 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
266 267 cache_on = cache_seconds > 0
267 268 log.debug(
268 269 'Computing REPO TREE for repo_id %s commit_id `%s` '
269 270 'with caching: %s[TTL: %ss]' % (
270 271 repo_id, commit_id, cache_on, cache_seconds or 0))
271 272
272 273 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
273 274 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
274 275
275 276 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
276 277 condition=cache_on)
277 278 def compute_stats(repo_id, commit_id, show_stats):
278 279 code_stats = {}
279 280 size = 0
280 281 try:
281 282 scm_instance = self.db_repo.scm_instance()
282 283 commit = scm_instance.get_commit(commit_id)
283 284
284 285 for node in commit.get_filenodes_generator():
285 286 size += node.size
286 287 if not show_stats:
287 288 continue
288 289 ext = string.lower(node.extension)
289 290 ext_info = LANGUAGES_EXTENSIONS_MAP.get(ext)
290 291 if ext_info:
291 292 if ext in code_stats:
292 293 code_stats[ext]['count'] += 1
293 294 else:
294 295 code_stats[ext] = {"count": 1, "desc": ext_info}
295 296 except (EmptyRepositoryError, CommitDoesNotExistError):
296 297 pass
297 298 return {'size': h.format_byte_size_binary(size),
298 299 'code_stats': code_stats}
299 300
300 301 stats = compute_stats(self.db_repo.repo_id, commit_id, show_stats)
301 302 return stats
302 303
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_refs_data', request_method='GET',
        renderer='json_ext')
    def repo_refs_data(self):
        """Return branches, tags and bookmarks as grouped JSON for the
        refs selector widget."""
        _ = self.request.translate
        self.load_default_context()

        repo = self.rhodecode_vcs_repo
        refs_to_create = [
            (_("Branch"), repo.branches, 'branch'),
            (_("Tag"), repo.tags, 'tag'),
            (_("Bookmark"), repo.bookmarks, 'book'),
        ]
        res = self._create_reference_data(
            repo, self.db_repo_name, refs_to_create)
        data = {
            'more': False,
            'results': res
        }
        return data
326 327
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_refs_changelog_data', request_method='GET',
        renderer='json_ext')
    def repo_refs_changelog_data(self):
        """Return open and closed branches as grouped JSON for the
        changelog refs filter."""
        _ = self.request.translate
        self.load_default_context()

        repo = self.rhodecode_vcs_repo

        refs_to_create = [
            (_("Branches"), repo.branches, 'branch'),
            (_("Closed branches"), repo.branches_closed, 'branch_closed'),
            # TODO: enable when vcs can handle bookmarks filters
            # (_("Bookmarks"), repo.bookmarks, "book"),
        ]
        res = self._create_reference_data(
            repo, self.db_repo_name, refs_to_create)
        data = {
            'more': False,
            'results': res
        }
        return data
352 353
353 354 def _create_reference_data(self, repo, full_repo_name, refs_to_create):
354 355 format_ref_id = get_format_ref_id(repo)
355 356
356 357 result = []
357 358 for title, refs, ref_type in refs_to_create:
358 359 if refs:
359 360 result.append({
360 361 'text': title,
361 362 'children': self._create_reference_items(
362 363 repo, full_repo_name, refs, ref_type,
363 364 format_ref_id),
364 365 })
365 366 return result
366 367
367 368 def _create_reference_items(self, repo, full_repo_name, refs, ref_type,
368 369 format_ref_id):
369 370 result = []
370 371 is_svn = h.is_svn(repo)
371 372 for ref_name, raw_id in refs.iteritems():
372 373 files_url = self._create_files_url(
373 374 repo, full_repo_name, ref_name, raw_id, is_svn)
374 375 result.append({
375 376 'text': ref_name,
376 377 'id': format_ref_id(ref_name, raw_id),
377 378 'raw_id': raw_id,
378 379 'type': ref_type,
379 380 'files_url': files_url,
380 381 })
381 382 return result
382 383
383 384 def _create_files_url(self, repo, full_repo_name, ref_name, raw_id, is_svn):
384 385 use_commit_id = '/' in ref_name or is_svn
385 386 return h.route_path(
386 387 'repo_files',
387 388 repo_name=full_repo_name,
388 389 f_path=ref_name if is_svn else '',
389 390 commit_id=raw_id if use_commit_id else ref_name,
390 391 _query=dict(at=ref_name))
@@ -1,1842 +1,1846 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24 import os
25 25 import re
26 26 import time
27 27 import shutil
28 28 import datetime
29 29 import fnmatch
30 30 import itertools
31 31 import logging
32 32 import collections
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36 from pyramid import compat
37 37
38 38 from rhodecode.translation import lazy_ugettext
39 39 from rhodecode.lib.utils2 import safe_str, safe_unicode
40 40 from rhodecode.lib.vcs import connection
41 41 from rhodecode.lib.vcs.utils import author_name, author_email
42 42 from rhodecode.lib.vcs.conf import settings
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
45 45 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
46 46 NodeDoesNotExistError, NodeNotChangedError, VCSError,
47 47 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
48 48 RepositoryError)
49 49
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 FILEMODE_DEFAULT = 0o100644
55 55 FILEMODE_EXECUTABLE = 0o100755
56 56
57 57 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
58 58
59 59
60 60 class MergeFailureReason(object):
61 61 """
62 62 Enumeration with all the reasons why the server side merge could fail.
63 63
64 64 DO NOT change the number of the reasons, as they may be stored in the
65 65 database.
66 66
67 67 Changing the name of a reason is acceptable and encouraged to deprecate old
68 68 reasons.
69 69 """
70 70
71 71 # Everything went well.
72 72 NONE = 0
73 73
74 74 # An unexpected exception was raised. Check the logs for more details.
75 75 UNKNOWN = 1
76 76
77 77 # The merge was not successful, there are conflicts.
78 78 MERGE_FAILED = 2
79 79
80 80 # The merge succeeded but we could not push it to the target repository.
81 81 PUSH_FAILED = 3
82 82
83 83 # The specified target is not a head in the target repository.
84 84 TARGET_IS_NOT_HEAD = 4
85 85
86 86 # The source repository contains more branches than the target. Pushing
87 87 # the merge will create additional branches in the target.
88 88 HG_SOURCE_HAS_MORE_BRANCHES = 5
89 89
90 90 # The target reference has multiple heads. That does not allow to correctly
91 91 # identify the target location. This could only happen for mercurial
92 92 # branches.
93 93 HG_TARGET_HAS_MULTIPLE_HEADS = 6
94 94
95 95 # The target repository is locked
96 96 TARGET_IS_LOCKED = 7
97 97
98 98 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
99 99 # A involved commit could not be found.
100 100 _DEPRECATED_MISSING_COMMIT = 8
101 101
102 102 # The target repo reference is missing.
103 103 MISSING_TARGET_REF = 9
104 104
105 105 # The source repo reference is missing.
106 106 MISSING_SOURCE_REF = 10
107 107
108 108 # The merge was not successful, there are conflicts related to sub
109 109 # repositories.
110 110 SUBREPO_MERGE_FAILED = 11
111 111
112 112
113 113 class UpdateFailureReason(object):
114 114 """
115 115 Enumeration with all the reasons why the pull request update could fail.
116 116
117 117 DO NOT change the number of the reasons, as they may be stored in the
118 118 database.
119 119
120 120 Changing the name of a reason is acceptable and encouraged to deprecate old
121 121 reasons.
122 122 """
123 123
124 124 # Everything went well.
125 125 NONE = 0
126 126
127 127 # An unexpected exception was raised. Check the logs for more details.
128 128 UNKNOWN = 1
129 129
130 130 # The pull request is up to date.
131 131 NO_CHANGE = 2
132 132
133 133 # The pull request has a reference type that is not supported for update.
134 134 WRONG_REF_TYPE = 3
135 135
136 136 # Update failed because the target reference is missing.
137 137 MISSING_TARGET_REF = 4
138 138
139 139 # Update failed because the source reference is missing.
140 140 MISSING_SOURCE_REF = 5
141 141
142 142
143 143 class MergeResponse(object):
144 144
145 145 # uses .format(**metadata) for variables
146 146 MERGE_STATUS_MESSAGES = {
147 147 MergeFailureReason.NONE: lazy_ugettext(
148 148 u'This pull request can be automatically merged.'),
149 149 MergeFailureReason.UNKNOWN: lazy_ugettext(
150 150 u'This pull request cannot be merged because of an unhandled exception. '
151 151 u'{exception}'),
152 152 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
153 153 u'This pull request cannot be merged because of merge conflicts.'),
154 154 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
155 155 u'This pull request could not be merged because push to '
156 156 u'target:`{target}@{merge_commit}` failed.'),
157 157 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
158 158 u'This pull request cannot be merged because the target '
159 159 u'`{target_ref.name}` is not a head.'),
160 160 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
161 161 u'This pull request cannot be merged because the source contains '
162 162 u'more branches than the target.'),
163 163 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
164 164 u'This pull request cannot be merged because the target '
165 165 u'has multiple heads: `{heads}`.'),
166 166 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
167 167 u'This pull request cannot be merged because the target repository is '
168 168 u'locked by {locked_by}.'),
169 169
170 170 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
171 171 u'This pull request cannot be merged because the target '
172 172 u'reference `{target_ref.name}` is missing.'),
173 173 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
174 174 u'This pull request cannot be merged because the source '
175 175 u'reference `{source_ref.name}` is missing.'),
176 176 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
177 177 u'This pull request cannot be merged because of conflicts related '
178 178 u'to sub repositories.'),
179 179
180 180 # Deprecations
181 181 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
182 182 u'This pull request cannot be merged because the target or the '
183 183 u'source reference is missing.'),
184 184
185 185 }
186 186
187 187 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
188 188 self.possible = possible
189 189 self.executed = executed
190 190 self.merge_ref = merge_ref
191 191 self.failure_reason = failure_reason
192 192 self.metadata = metadata or {}
193 193
194 194 def __repr__(self):
195 195 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
196 196
197 197 def __eq__(self, other):
198 198 same_instance = isinstance(other, self.__class__)
199 199 return same_instance \
200 200 and self.possible == other.possible \
201 201 and self.executed == other.executed \
202 202 and self.failure_reason == other.failure_reason
203 203
204 204 @property
205 205 def label(self):
206 206 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
207 207 not k.startswith('_'))
208 208 return label_dict.get(self.failure_reason)
209 209
210 210 @property
211 211 def merge_status_message(self):
212 212 """
213 213 Return a human friendly error message for the given merge status code.
214 214 """
215 215 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
216 216 try:
217 217 return msg.format(**self.metadata)
218 218 except Exception:
219 219 log.exception('Failed to format %s message', self)
220 220 return msg
221 221
222 222 def asdict(self):
223 223 data = {}
224 224 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
225 225 'merge_status_message']:
226 226 data[k] = getattr(self, k)
227 227 return data
228 228
229 229
230 230 class BaseRepository(object):
231 231 """
232 232 Base Repository for final backends
233 233
234 234 .. attribute:: DEFAULT_BRANCH_NAME
235 235
236 236 name of default branch (e.g. "trunk" for svn, "master" for git, etc.)
237 237
238 238 .. attribute:: commit_ids
239 239
240 240 list of all available commit ids, in ascending order
241 241
242 242 .. attribute:: path
243 243
244 244 absolute path to the repository
245 245
246 246 .. attribute:: bookmarks
247 247
248 248 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
249 249 there are no bookmarks or the backend implementation does not support
250 250 bookmarks.
251 251
252 252 .. attribute:: tags
253 253
254 254 Mapping from name to :term:`Commit ID` of the tag.
255 255
256 256 """
257 257
258 258 DEFAULT_BRANCH_NAME = None
259 259 DEFAULT_CONTACT = u"Unknown"
260 260 DEFAULT_DESCRIPTION = u"unknown"
261 261 EMPTY_COMMIT_ID = '0' * 40
262 262
263 263 path = None
264 264
265 265 def __init__(self, repo_path, config=None, create=False, **kwargs):
266 266 """
267 267 Initializes repository. Raises RepositoryError if repository could
268 268 not be find at the given ``repo_path`` or directory at ``repo_path``
269 269 exists and ``create`` is set to True.
270 270
271 271 :param repo_path: local path of the repository
272 272 :param config: repository configuration
273 273 :param create=False: if set to True, would try to create repository.
274 274 :param src_url=None: if set, should be proper url from which repository
275 275 would be cloned; requires ``create`` parameter to be set to True -
276 276 raises RepositoryError if src_url is set and create evaluates to
277 277 False
278 278 """
279 279 raise NotImplementedError
280 280
281 281 def __repr__(self):
282 282 return '<%s at %s>' % (self.__class__.__name__, self.path)
283 283
284 284 def __len__(self):
285 285 return self.count()
286 286
287 287 def __eq__(self, other):
288 288 same_instance = isinstance(other, self.__class__)
289 289 return same_instance and other.path == self.path
290 290
291 291 def __ne__(self, other):
292 292 return not self.__eq__(other)
293 293
294 294 def get_create_shadow_cache_pr_path(self, db_repo):
295 295 path = db_repo.cached_diffs_dir
296 296 if not os.path.exists(path):
297 297 os.makedirs(path, 0o755)
298 298 return path
299 299
300 300 @classmethod
301 301 def get_default_config(cls, default=None):
302 302 config = Config()
303 303 if default and isinstance(default, list):
304 304 for section, key, val in default:
305 305 config.set(section, key, val)
306 306 return config
307 307
308 308 @LazyProperty
309 309 def _remote(self):
310 310 raise NotImplementedError
311 311
312 312 @LazyProperty
313 313 def EMPTY_COMMIT(self):
314 314 return EmptyCommit(self.EMPTY_COMMIT_ID)
315 315
316 316 @LazyProperty
317 317 def alias(self):
318 318 for k, v in settings.BACKENDS.items():
319 319 if v.split('.')[-1] == str(self.__class__.__name__):
320 320 return k
321 321
322 322 @LazyProperty
323 323 def name(self):
324 324 return safe_unicode(os.path.basename(self.path))
325 325
326 326 @LazyProperty
327 327 def description(self):
328 328 raise NotImplementedError
329 329
330 330 def refs(self):
331 331 """
332 332 returns a `dict` with branches, branches_closed, tags, and bookmarks
333 333 for this repository
334 334 """
335 335 return dict(
336 336 branches=self.branches,
337 337 branches_closed=self.branches_closed,
338 338 tags=self.tags,
339 339 bookmarks=self.bookmarks
340 340 )
341 341
342 342 @LazyProperty
343 343 def branches(self):
344 344 """
345 345 A `dict` which maps branch names to commit ids.
346 346 """
347 347 raise NotImplementedError
348 348
349 349 @LazyProperty
350 350 def branches_closed(self):
351 351 """
352 352 A `dict` which maps closed branch names to commit ids.
353 353 """
354 354 raise NotImplementedError
355 355
356 356 @LazyProperty
357 357 def bookmarks(self):
358 358 """
359 359 A `dict` which maps bookmark names to commit ids.
360 360 """
361 361 raise NotImplementedError
362 362
363 363 @LazyProperty
364 364 def tags(self):
365 365 """
366 366 A `dict` which maps tag names to commit ids.
367 367 """
368 368 raise NotImplementedError
369 369
370 370 @LazyProperty
371 371 def size(self):
372 372 """
373 373 Returns combined size in bytes for all repository files
374 374 """
375 375 tip = self.get_commit()
376 376 return tip.size
377 377
378 378 def size_at_commit(self, commit_id):
379 379 commit = self.get_commit(commit_id)
380 380 return commit.size
381 381
382 382 def is_empty(self):
383 383 return not bool(self.commit_ids)
384 384
385 385 @staticmethod
386 386 def check_url(url, config):
387 387 """
388 388 Function will check given url and try to verify if it's a valid
389 389 link.
390 390 """
391 391 raise NotImplementedError
392 392
393 393 @staticmethod
394 394 def is_valid_repository(path):
395 395 """
396 396 Check if given `path` contains a valid repository of this backend
397 397 """
398 398 raise NotImplementedError
399 399
400 400 # ==========================================================================
401 401 # COMMITS
402 402 # ==========================================================================
403 403
404 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
404 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
405 405 """
406 406 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
407 407 are both None, most recent commit is returned.
408 408
409 409 :param pre_load: Optional. List of commit attributes to load.
410 410
411 411 :raises ``EmptyRepositoryError``: if there are no commits
412 412 """
413 413 raise NotImplementedError
414 414
415 415 def __iter__(self):
416 416 for commit_id in self.commit_ids:
417 417 yield self.get_commit(commit_id=commit_id)
418 418
419 419 def get_commits(
420 420 self, start_id=None, end_id=None, start_date=None, end_date=None,
421 branch_name=None, show_hidden=False, pre_load=None):
421 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
422 422 """
423 423 Returns iterator of `BaseCommit` objects from start to end
424 424 not inclusive. This should behave just like a list, ie. end is not
425 425 inclusive.
426 426
427 427 :param start_id: None or str, must be a valid commit id
428 428 :param end_id: None or str, must be a valid commit id
429 429 :param start_date:
430 430 :param end_date:
431 431 :param branch_name:
432 432 :param show_hidden:
433 433 :param pre_load:
434 :param translate_tags: Optional. If False, tags are not translated into full commit objects (faster fetching of commits in big chunks).
434 435 """
435 436 raise NotImplementedError
436 437
437 438 def __getitem__(self, key):
438 439 """
439 440 Allows index based access to the commit objects of this repository.
440 441 """
441 442 pre_load = ["author", "branch", "date", "message", "parents"]
442 443 if isinstance(key, slice):
443 444 return self._get_range(key, pre_load)
444 445 return self.get_commit(commit_idx=key, pre_load=pre_load)
445 446
446 447 def _get_range(self, slice_obj, pre_load):
447 448 for commit_id in self.commit_ids.__getitem__(slice_obj):
448 449 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
449 450
450 451 def count(self):
451 452 return len(self.commit_ids)
452 453
453 454 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
454 455 """
455 456 Creates and returns a tag for the given ``commit_id``.
456 457
457 458 :param name: name for new tag
458 459 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
459 460 :param commit_id: commit id for which new tag would be created
460 461 :param message: message of the tag's commit
461 462 :param date: date of tag's commit
462 463
463 464 :raises TagAlreadyExistError: if tag with same name already exists
464 465 """
465 466 raise NotImplementedError
466 467
467 468 def remove_tag(self, name, user, message=None, date=None):
468 469 """
469 470 Removes tag with the given ``name``.
470 471
471 472 :param name: name of the tag to be removed
472 473 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
473 474 :param message: message of the tag's removal commit
474 475 :param date: date of tag's removal commit
475 476
477 478 :raises TagDoesNotExistError: if tag with given name does not exist
477 478 """
478 479 raise NotImplementedError
479 480
480 481 def get_diff(
481 482 self, commit1, commit2, path=None, ignore_whitespace=False,
482 483 context=3, path1=None):
483 484 """
484 485 Returns (git like) *diff*, as plain text. Shows changes introduced by
485 486 `commit2` since `commit1`.
486 487
487 488 :param commit1: Entry point from which diff is shown. Can be
488 489 ``self.EMPTY_COMMIT`` - in this case, patch showing all
489 490 the changes since empty state of the repository until `commit2`
490 491 :param commit2: Until which commit changes should be shown.
491 492 :param path: Can be set to a path of a file to create a diff of that
492 493 file. If `path1` is also set, this value is only associated to
493 494 `commit2`.
494 495 :param ignore_whitespace: If set to ``True``, would not show whitespace
495 496 changes. Defaults to ``False``.
496 497 :param context: How many lines before/after changed lines should be
497 498 shown. Defaults to ``3``.
498 499 :param path1: Can be set to a path to associate with `commit1`. This
499 500 parameter works only for backends which support diff generation for
500 501 different paths. Other backends will raise a `ValueError` if `path1`
501 502 is set and has a different value than `path`.
502 503 :param file_path: filter this diff by given path pattern
503 504 """
504 505 raise NotImplementedError
505 506
506 507 def strip(self, commit_id, branch=None):
507 508 """
508 509 Strip given commit_id from the repository
509 510 """
510 511 raise NotImplementedError
511 512
512 513 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
513 514 """
514 515 Return a latest common ancestor commit if one exists for this repo
515 516 `commit_id1` vs `commit_id2` from `repo2`.
516 517
517 518 :param commit_id1: Commit id from this repository to use as a
518 519 target for the comparison.
519 520 :param commit_id2: Source commit id to use for comparison.
520 521 :param repo2: Source repository to use for comparison.
521 522 """
522 523 raise NotImplementedError
523 524
524 525 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
525 526 """
526 527 Compare this repository's revision `commit_id1` with `commit_id2`.
527 528
528 529 Returns a tuple(commits, ancestor) that would be merged from
529 530 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
530 531 will be returned as ancestor.
531 532
532 533 :param commit_id1: Commit id from this repository to use as a
533 534 target for the comparison.
534 535 :param commit_id2: Source commit id to use for comparison.
535 536 :param repo2: Source repository to use for comparison.
536 537 :param merge: If set to ``True`` will do a merge compare which also
537 538 returns the common ancestor.
538 539 :param pre_load: Optional. List of commit attributes to load.
539 540 """
540 541 raise NotImplementedError
541 542
542 543 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
543 544 user_name='', user_email='', message='', dry_run=False,
544 545 use_rebase=False, close_branch=False):
545 546 """
546 547 Merge the revisions specified in `source_ref` from `source_repo`
547 548 onto the `target_ref` of this repository.
548 549
549 550 `source_ref` and `target_ref` are named tuples with the following
550 551 fields `type`, `name` and `commit_id`.
551 552
552 553 Returns a MergeResponse object with the following fields:
553 554 'possible', 'executed', 'merge_ref',
554 555 'failure_reason'.
555 556
556 557 :param repo_id: `repo_id` target repo id.
557 558 :param workspace_id: `workspace_id` unique identifier.
558 559 :param target_ref: `target_ref` points to the commit on top of which
559 560 the `source_ref` should be merged.
560 561 :param source_repo: The repository that contains the commits to be
561 562 merged.
562 563 :param source_ref: `source_ref` points to the topmost commit from
563 564 the `source_repo` which should be merged.
564 565 :param user_name: Merge commit `user_name`.
565 566 :param user_email: Merge commit `user_email`.
566 567 :param message: Merge commit `message`.
567 568 :param dry_run: If `True` the merge will not take place.
568 569 :param use_rebase: If `True` commits from the source will be rebased
569 570 on top of the target instead of being merged.
570 571 :param close_branch: If `True` branch will be close before merging it
571 572 """
572 573 if dry_run:
573 574 message = message or settings.MERGE_DRY_RUN_MESSAGE
574 575 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
575 576 user_name = user_name or settings.MERGE_DRY_RUN_USER
576 577 else:
577 578 if not user_name:
578 579 raise ValueError('user_name cannot be empty')
579 580 if not user_email:
580 581 raise ValueError('user_email cannot be empty')
581 582 if not message:
582 583 raise ValueError('message cannot be empty')
583 584
584 585 try:
585 586 return self._merge_repo(
586 587 repo_id, workspace_id, target_ref, source_repo,
587 588 source_ref, message, user_name, user_email, dry_run=dry_run,
588 589 use_rebase=use_rebase, close_branch=close_branch)
589 590 except RepositoryError as exc:
590 591 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
591 592 return MergeResponse(
592 593 False, False, None, MergeFailureReason.UNKNOWN,
593 594 metadata={'exception': str(exc)})
594 595
595 596 def _merge_repo(self, repo_id, workspace_id, target_ref,
596 597 source_repo, source_ref, merge_message,
597 598 merger_name, merger_email, dry_run=False,
598 599 use_rebase=False, close_branch=False):
599 600 """Internal implementation of merge."""
600 601 raise NotImplementedError
601 602
602 603 def _maybe_prepare_merge_workspace(
603 604 self, repo_id, workspace_id, target_ref, source_ref):
604 605 """
605 606 Create the merge workspace.
606 607
607 608 :param workspace_id: `workspace_id` unique identifier.
608 609 """
609 610 raise NotImplementedError
610 611
611 612 def _get_legacy_shadow_repository_path(self, workspace_id):
612 613 """
613 614 Legacy version that was used before. We still need it for
614 615 backward compat
615 616 """
616 617 return os.path.join(
617 618 os.path.dirname(self.path),
618 619 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
619 620
620 621 def _get_shadow_repository_path(self, repo_id, workspace_id):
621 622 # The name of the shadow repository must start with '.', so it is
622 623 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
623 624 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
624 625 if os.path.exists(legacy_repository_path):
625 626 return legacy_repository_path
626 627 else:
627 628 return os.path.join(
628 629 os.path.dirname(self.path),
629 630 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
630 631
631 632 def cleanup_merge_workspace(self, repo_id, workspace_id):
632 633 """
633 634 Remove merge workspace.
634 635
635 636 This function MUST not fail in case there is no workspace associated to
636 637 the given `workspace_id`.
637 638
638 639 :param workspace_id: `workspace_id` unique identifier.
639 640 """
640 641 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
641 642 shadow_repository_path_del = '{}.{}.delete'.format(
642 643 shadow_repository_path, time.time())
643 644
644 645 # move the shadow repo, so it never conflicts with the one used.
645 646 # we use this method because shutil.rmtree had some edge case problems
646 647 # removing symlinked repositories
647 648 if not os.path.isdir(shadow_repository_path):
648 649 return
649 650
650 651 shutil.move(shadow_repository_path, shadow_repository_path_del)
651 652 try:
652 653 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
653 654 except Exception:
654 655 log.exception('Failed to gracefully remove shadow repo under %s',
655 656 shadow_repository_path_del)
656 657 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
657 658
658 659 # ========== #
659 660 # COMMIT API #
660 661 # ========== #
661 662
662 663 @LazyProperty
663 664 def in_memory_commit(self):
664 665 """
665 666 Returns :class:`InMemoryCommit` object for this repository.
666 667 """
667 668 raise NotImplementedError
668 669
669 670 # ======================== #
670 671 # UTILITIES FOR SUBCLASSES #
671 672 # ======================== #
672 673
673 674 def _validate_diff_commits(self, commit1, commit2):
674 675 """
675 676 Validates that the given commits are related to this repository.
676 677
677 678 Intended as a utility for sub classes to have a consistent validation
678 679 of input parameters in methods like :meth:`get_diff`.
679 680 """
680 681 self._validate_commit(commit1)
681 682 self._validate_commit(commit2)
682 683 if (isinstance(commit1, EmptyCommit) and
683 684 isinstance(commit2, EmptyCommit)):
684 685 raise ValueError("Cannot compare two empty commits")
685 686
686 687 def _validate_commit(self, commit):
687 688 if not isinstance(commit, BaseCommit):
688 689 raise TypeError(
689 690 "%s is not of type BaseCommit" % repr(commit))
690 691 if commit.repository != self and not isinstance(commit, EmptyCommit):
691 692 raise ValueError(
692 693 "Commit %s must be a valid commit from this repository %s, "
693 694 "related to this repository instead %s." %
694 695 (commit, self, commit.repository))
695 696
696 697 def _validate_commit_id(self, commit_id):
697 698 if not isinstance(commit_id, compat.string_types):
698 699 raise TypeError("commit_id must be a string value")
699 700
700 701 def _validate_commit_idx(self, commit_idx):
701 702 if not isinstance(commit_idx, (int, long)):
702 703 raise TypeError("commit_idx must be a numeric value")
703 704
704 705 def _validate_branch_name(self, branch_name):
705 706 if branch_name and branch_name not in self.branches_all:
706 707 msg = ("Branch %s not found in %s" % (branch_name, self))
707 708 raise BranchDoesNotExistError(msg)
708 709
709 710 #
710 711 # Supporting deprecated API parts
711 712 # TODO: johbo: consider to move this into a mixin
712 713 #
713 714
714 715 @property
715 716 def EMPTY_CHANGESET(self):
716 717 warnings.warn(
717 718 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
718 719 return self.EMPTY_COMMIT_ID
719 720
720 721 @property
721 722 def revisions(self):
722 723 warnings.warn("Use commits attribute instead", DeprecationWarning)
723 724 return self.commit_ids
724 725
725 726 @revisions.setter
726 727 def revisions(self, value):
727 728 warnings.warn("Use commits attribute instead", DeprecationWarning)
728 729 self.commit_ids = value
729 730
730 731 def get_changeset(self, revision=None, pre_load=None):
731 732 warnings.warn("Use get_commit instead", DeprecationWarning)
732 733 commit_id = None
733 734 commit_idx = None
734 735 if isinstance(revision, compat.string_types):
735 736 commit_id = revision
736 737 else:
737 738 commit_idx = revision
738 739 return self.get_commit(
739 740 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
740 741
741 742 def get_changesets(
742 743 self, start=None, end=None, start_date=None, end_date=None,
743 744 branch_name=None, pre_load=None):
744 745 warnings.warn("Use get_commits instead", DeprecationWarning)
745 746 start_id = self._revision_to_commit(start)
746 747 end_id = self._revision_to_commit(end)
747 748 return self.get_commits(
748 749 start_id=start_id, end_id=end_id, start_date=start_date,
749 750 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
750 751
751 752 def _revision_to_commit(self, revision):
752 753 """
753 754 Translates a revision to a commit_id
754 755
755 756 Helps to support the old changeset based API which allows to use
756 757 commit ids and commit indices interchangeable.
757 758 """
758 759 if revision is None:
759 760 return revision
760 761
761 762 if isinstance(revision, compat.string_types):
762 763 commit_id = revision
763 764 else:
764 765 commit_id = self.commit_ids[revision]
765 766 return commit_id
766 767
767 768 @property
768 769 def in_memory_changeset(self):
769 770 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
770 771 return self.in_memory_commit
771 772
772 773 def get_path_permissions(self, username):
773 774 """
774 775 Returns a path permission checker or None if not supported
775 776
776 777 :param username: session user name
777 778 :return: an instance of BasePathPermissionChecker or None
778 779 """
779 780 return None
780 781
781 782 def install_hooks(self, force=False):
782 783 return self._remote.install_hooks(force)
783 784
784 785 def get_hooks_info(self):
785 786 return self._remote.get_hooks_info()
786 787
787 788
788 789 class BaseCommit(object):
789 790 """
790 791 Each backend should implement it's commit representation.
791 792
792 793 **Attributes**
793 794
794 795 ``repository``
795 796 repository object within which commit exists
796 797
797 798 ``id``
798 799 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
799 800 just ``tip``.
800 801
801 802 ``raw_id``
802 803 raw commit representation (i.e. full 40 length sha for git
803 804 backend)
804 805
805 806 ``short_id``
806 807 shortened (if apply) version of ``raw_id``; it would be simple
807 808 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
808 809 as ``raw_id`` for subversion
809 810
810 811 ``idx``
811 812 commit index
812 813
813 814 ``files``
814 815 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
815 816
816 817 ``dirs``
817 818 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
818 819
819 820 ``nodes``
820 821 combined list of ``Node`` objects
821 822
822 823 ``author``
823 824 author of the commit, as unicode
824 825
825 826 ``message``
826 827 message of the commit, as unicode
827 828
828 829 ``parents``
829 830 list of parent commits
830 831
831 832 """
832 833
833 834 branch = None
834 835 """
835 836 Depending on the backend this should be set to the branch name of the
836 837 commit. Backends not supporting branches on commits should leave this
837 838 value as ``None``.
838 839 """
839 840
840 841 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
841 842 """
842 843 This template is used to generate a default prefix for repository archives
843 844 if no prefix has been specified.
844 845 """
845 846
846 847 def __str__(self):
847 848 return '<%s at %s:%s>' % (
848 849 self.__class__.__name__, self.idx, self.short_id)
849 850
850 851 def __repr__(self):
851 852 return self.__str__()
852 853
853 854 def __unicode__(self):
854 855 return u'%s:%s' % (self.idx, self.short_id)
855 856
856 857 def __eq__(self, other):
857 858 same_instance = isinstance(other, self.__class__)
858 859 return same_instance and self.raw_id == other.raw_id
859 860
860 861 def __json__(self):
861 862 parents = []
862 863 try:
863 864 for parent in self.parents:
864 865 parents.append({'raw_id': parent.raw_id})
865 866 except NotImplementedError:
866 867 # empty commit doesn't have parents implemented
867 868 pass
868 869
869 870 return {
870 871 'short_id': self.short_id,
871 872 'raw_id': self.raw_id,
872 873 'revision': self.idx,
873 874 'message': self.message,
874 875 'date': self.date,
875 876 'author': self.author,
876 877 'parents': parents,
877 878 'branch': self.branch
878 879 }
879 880
880 881 def __getstate__(self):
881 882 d = self.__dict__.copy()
882 883 d.pop('_remote', None)
883 884 d.pop('repository', None)
884 885 return d
885 886
886 887 def _get_refs(self):
887 888 return {
888 889 'branches': [self.branch] if self.branch else [],
889 890 'bookmarks': getattr(self, 'bookmarks', []),
890 891 'tags': self.tags
891 892 }
892 893
    @LazyProperty
    def last(self):
        """
        ``True`` if this is last commit in repository, ``False``
        otherwise; trying to access this attribute while there is no
        commits would raise `EmptyRepositoryError`
        """
        if self.repository is None:
            raise CommitError("Cannot check if it's most recent commit")
        # the newest commit is the last entry of the repository's id list
        return self.raw_id == self.repository.commit_ids[-1]
903 904
904 905 @LazyProperty
905 906 def parents(self):
906 907 """
907 908 Returns list of parent commits.
908 909 """
909 910 raise NotImplementedError
910 911
    @LazyProperty
    def first_parent(self):
        """
        Returns the first parent commit, or an ``EmptyCommit`` placeholder
        when this commit has no parents.
        """
        return self.parents[0] if self.parents else EmptyCommit()
917 918
918 919 @property
919 920 def merge(self):
920 921 """
921 922 Returns boolean if commit is a merge.
922 923 """
923 924 return len(self.parents) > 1
924 925
925 926 @LazyProperty
926 927 def children(self):
927 928 """
928 929 Returns list of child commits.
929 930 """
930 931 raise NotImplementedError
931 932
932 933 @LazyProperty
933 934 def id(self):
934 935 """
935 936 Returns string identifying this commit.
936 937 """
937 938 raise NotImplementedError
938 939
939 940 @LazyProperty
940 941 def raw_id(self):
941 942 """
942 943 Returns raw string identifying this commit.
943 944 """
944 945 raise NotImplementedError
945 946
946 947 @LazyProperty
947 948 def short_id(self):
948 949 """
949 950 Returns shortened version of ``raw_id`` attribute, as string,
950 951 identifying this commit, useful for presentation to users.
951 952 """
952 953 raise NotImplementedError
953 954
954 955 @LazyProperty
955 956 def idx(self):
956 957 """
957 958 Returns integer identifying this commit.
958 959 """
959 960 raise NotImplementedError
960 961
961 962 @LazyProperty
962 963 def committer(self):
963 964 """
964 965 Returns committer for this commit
965 966 """
966 967 raise NotImplementedError
967 968
    @LazyProperty
    def committer_name(self):
        """
        Returns committer name for this commit
        """

        # delegates to the module-level ``author_name`` helper, which
        # presumably extracts the name part of a "Name <email>" string
        return author_name(self.committer)
975 976
    @LazyProperty
    def committer_email(self):
        """
        Returns committer email address for this commit
        """

        # delegates to the module-level ``author_email`` helper, which
        # presumably extracts the email part of a "Name <email>" string
        return author_email(self.committer)
983 984
984 985 @LazyProperty
985 986 def author(self):
986 987 """
987 988 Returns author for this commit
988 989 """
989 990
990 991 raise NotImplementedError
991 992
    @LazyProperty
    def author_name(self):
        """
        Returns author name for this commit
        """

        # delegates to the module-level ``author_name`` helper (same parsing
        # as ``committer_name``)
        return author_name(self.author)
999 1000
    @LazyProperty
    def author_email(self):
        """
        Returns author email address for this commit
        """

        # delegates to the module-level ``author_email`` helper (same parsing
        # as ``committer_email``)
        return author_email(self.author)
1007 1008
1008 1009 def get_file_mode(self, path):
1009 1010 """
1010 1011 Returns stat mode of the file at `path`.
1011 1012 """
1012 1013 raise NotImplementedError
1013 1014
1014 1015 def is_link(self, path):
1015 1016 """
1016 1017 Returns ``True`` if given `path` is a symlink
1017 1018 """
1018 1019 raise NotImplementedError
1019 1020
1020 1021 def get_file_content(self, path):
1021 1022 """
1022 1023 Returns content of the file at the given `path`.
1023 1024 """
1024 1025 raise NotImplementedError
1025 1026
1026 1027 def get_file_size(self, path):
1027 1028 """
1028 1029 Returns size of the file at the given `path`.
1029 1030 """
1030 1031 raise NotImplementedError
1031 1032
1032 1033 def get_path_commit(self, path, pre_load=None):
1033 1034 """
1034 1035 Returns last commit of the file at the given `path`.
1035 1036
1036 1037 :param pre_load: Optional. List of commit attributes to load.
1037 1038 """
1038 1039 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1039 1040 if not commits:
1040 1041 raise RepositoryError(
1041 1042 'Failed to fetch history for path {}. '
1042 1043 'Please check if such path exists in your repository'.format(
1043 1044 path))
1044 1045 return commits[0]
1045 1046
1046 1047 def get_path_history(self, path, limit=None, pre_load=None):
1047 1048 """
1048 1049 Returns history of file as reversed list of :class:`BaseCommit`
1049 1050 objects for which file at given `path` has been modified.
1050 1051
1051 1052 :param limit: Optional. Allows to limit the size of the returned
1052 1053 history. This is intended as a hint to the underlying backend, so
1053 1054 that it can apply optimizations depending on the limit.
1054 1055 :param pre_load: Optional. List of commit attributes to load.
1055 1056 """
1056 1057 raise NotImplementedError
1057 1058
1058 1059 def get_file_annotate(self, path, pre_load=None):
1059 1060 """
1060 1061 Returns a generator of four element tuples with
1061 1062 lineno, sha, commit lazy loader and line
1062 1063
1063 1064 :param pre_load: Optional. List of commit attributes to load.
1064 1065 """
1065 1066 raise NotImplementedError
1066 1067
1067 1068 def get_nodes(self, path):
1068 1069 """
1069 1070 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1070 1071 state of commit at the given ``path``.
1071 1072
1072 1073 :raises ``CommitError``: if node at the given ``path`` is not
1073 1074 instance of ``DirNode``
1074 1075 """
1075 1076 raise NotImplementedError
1076 1077
1077 1078 def get_node(self, path):
1078 1079 """
1079 1080 Returns ``Node`` object from the given ``path``.
1080 1081
1081 1082 :raises ``NodeDoesNotExistError``: if there is no node at the given
1082 1083 ``path``
1083 1084 """
1084 1085 raise NotImplementedError
1085 1086
1086 1087 def get_largefile_node(self, path):
1087 1088 """
1088 1089 Returns the path to largefile from Mercurial/Git-lfs storage.
1089 1090 or None if it's not a largefile node
1090 1091 """
1091 1092 return None
1092 1093
    def archive_repo(self, file_path, kind='tgz', subrepos=None,
                     prefix=None, write_metadata=False, mtime=None):
        """
        Creates an archive containing the contents of the repository.

        :param file_path: path to the file which to create the archive.
        :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
        :param subrepos: NOTE(review): accepted but never used in this
            implementation — confirm whether subrepo inclusion is intended.
        :param prefix: name of root directory in archive.
            Default is repository name and commit's short_id joined with dash:
            ``"{repo_name}-{short_id}"``.
        :param write_metadata: write a metadata file into archive.
        :param mtime: custom modification time for archive creation, defaults
            to time.time() if not given.

        :raise VCSError: If prefix has a problem.
        """
        allowed_kinds = settings.ARCHIVE_SPECS.keys()
        if kind not in allowed_kinds:
            raise ImproperArchiveTypeError(
                'Archive kind (%s) not supported use one of %s' %
                (kind, allowed_kinds))

        prefix = self._validate_archive_prefix(prefix)

        # default the archive timestamp to this commit's date
        mtime = mtime or time.mktime(self.date.timetuple())

        # collect (path-in-archive, mode, is_link, content) for every file
        # reachable from the repository root at this commit
        file_info = []
        cur_rev = self.repository.get_commit(commit_id=self.raw_id)
        for _r, _d, files in cur_rev.walk('/'):
            for f in files:
                f_path = os.path.join(prefix, f.path)
                file_info.append(
                    (f_path, f.mode, f.is_link(), f.raw_bytes))

        if write_metadata:
            # mercurial-style .archival.txt with "key:value" lines
            metadata = [
                ('repo_name', self.repository.name),
                ('rev', self.raw_id),
                ('create_time', mtime),
                ('branch', self.branch),
                ('tags', ','.join(self.tags)),
            ]
            meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
            file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))

        # NOTE(review): archive creation is always delegated to the Hg
        # remote endpoint, regardless of this repo's backend — confirm this
        # is the intended shared implementation
        connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1139 1140
1140 1141 def _validate_archive_prefix(self, prefix):
1141 1142 if prefix is None:
1142 1143 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1143 1144 repo_name=safe_str(self.repository.name),
1144 1145 short_id=self.short_id)
1145 1146 elif not isinstance(prefix, str):
1146 1147 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1147 1148 elif prefix.startswith('/'):
1148 1149 raise VCSError("Prefix cannot start with leading slash")
1149 1150 elif prefix.strip() == '':
1150 1151 raise VCSError("Prefix cannot be empty")
1151 1152 return prefix
1152 1153
    @LazyProperty
    def root(self):
        """
        Returns ``RootNode`` object for this commit.
        """
        # the empty path resolves to the repository root node
        return self.get_node('')
1159 1160
    def next(self, branch=None):
        """
        Returns next commit from current, if branch is given it will return
        next commit belonging to this branch

        :param branch: show commits within the given named branch
        :raises CommitDoesNotExistError: if there is no following commit
        """
        # scan forward from the commit after this one (xrange: Python 2)
        indexes = xrange(self.idx + 1, self.repository.count())
        return self._find_next(indexes, branch)
1169 1170
    def prev(self, branch=None):
        """
        Returns previous commit from current, if branch is given it will
        return previous commit belonging to this branch

        :param branch: show commit within the given named branch
        :raises CommitDoesNotExistError: if there is no preceding commit
        """
        # scan backward from the commit before this one down to index 0
        indexes = xrange(self.idx - 1, -1, -1)
        return self._find_next(indexes, branch)
1179 1180
1180 1181 def _find_next(self, indexes, branch=None):
1181 1182 if branch and self.branch != branch:
1182 1183 raise VCSError('Branch option used on commit not belonging '
1183 1184 'to that branch')
1184 1185
1185 1186 for next_idx in indexes:
1186 1187 commit = self.repository.get_commit(commit_idx=next_idx)
1187 1188 if branch and branch != commit.branch:
1188 1189 continue
1189 1190 return commit
1190 1191 raise CommitDoesNotExistError
1191 1192
1192 1193 def diff(self, ignore_whitespace=True, context=3):
1193 1194 """
1194 1195 Returns a `Diff` object representing the change made by this commit.
1195 1196 """
1196 1197 parent = self.first_parent
1197 1198 diff = self.repository.get_diff(
1198 1199 parent, self,
1199 1200 ignore_whitespace=ignore_whitespace,
1200 1201 context=context)
1201 1202 return diff
1202 1203
1203 1204 @LazyProperty
1204 1205 def added(self):
1205 1206 """
1206 1207 Returns list of added ``FileNode`` objects.
1207 1208 """
1208 1209 raise NotImplementedError
1209 1210
1210 1211 @LazyProperty
1211 1212 def changed(self):
1212 1213 """
1213 1214 Returns list of modified ``FileNode`` objects.
1214 1215 """
1215 1216 raise NotImplementedError
1216 1217
1217 1218 @LazyProperty
1218 1219 def removed(self):
1219 1220 """
1220 1221 Returns list of removed ``FileNode`` objects.
1221 1222 """
1222 1223 raise NotImplementedError
1223 1224
1224 1225 @LazyProperty
1225 1226 def size(self):
1226 1227 """
1227 1228 Returns total number of bytes from contents of all filenodes.
1228 1229 """
1229 1230 return sum((node.size for node in self.get_filenodes_generator()))
1230 1231
1231 1232 def walk(self, topurl=''):
1232 1233 """
1233 1234 Similar to os.walk method. Insted of filesystem it walks through
1234 1235 commit starting at given ``topurl``. Returns generator of tuples
1235 1236 (topnode, dirnodes, filenodes).
1236 1237 """
1237 1238 topnode = self.get_node(topurl)
1238 1239 if not topnode.is_dir():
1239 1240 return
1240 1241 yield (topnode, topnode.dirs, topnode.files)
1241 1242 for dirnode in topnode.dirs:
1242 1243 for tup in self.walk(dirnode.path):
1243 1244 yield tup
1244 1245
1245 1246 def get_filenodes_generator(self):
1246 1247 """
1247 1248 Returns generator that yields *all* file nodes.
1248 1249 """
1249 1250 for topnode, dirs, files in self.walk():
1250 1251 for node in files:
1251 1252 yield node
1252 1253
1253 1254 #
1254 1255 # Utilities for sub classes to support consistent behavior
1255 1256 #
1256 1257
    def no_node_at_path(self, path):
        # Factory for the standard "missing node" error; it is *returned*
        # (not raised) so callers decide when to raise it.
        return NodeDoesNotExistError(
            u"There is no file nor directory at the given path: "
            u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1261 1262
1262 1263 def _fix_path(self, path):
1263 1264 """
1264 1265 Paths are stored without trailing slash so we need to get rid off it if
1265 1266 needed.
1266 1267 """
1267 1268 return path.rstrip('/')
1268 1269
1269 1270 #
1270 1271 # Deprecated API based on changesets
1271 1272 #
1272 1273
    @property
    def revision(self):
        # Deprecated changeset-era alias for ``idx``
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx
1277 1278
    @revision.setter
    def revision(self, value):
        # Deprecated changeset-era alias for ``idx``
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1282 1283
    def get_file_changeset(self, path):
        # Deprecated changeset-era alias for ``get_path_commit``
        warnings.warn("Use get_path_commit instead", DeprecationWarning)
        return self.get_path_commit(path)
1286 1287
1287 1288
class BaseChangesetClass(type):
    # Metaclass that makes ``isinstance(obj, BaseChangeset)`` succeed for any
    # ``BaseCommit`` instance, keeping the deprecated changeset API working.

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1292 1293
1293 1294
class BaseChangeset(BaseCommit):
    # Deprecated alias of ``BaseCommit``; emits a DeprecationWarning when
    # instantiated.

    __metaclass__ = BaseChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1302 1303
1303 1304
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """

    def __init__(self, repository):
        self.repository = repository
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def add(self, *filenodes):
        """
        Marks given ``FileNode`` objects as *to be committed*.

        :raises ``NodeAlreadyExistsError``: if node with same path exists at
            latest commit
        :raises ``NodeAlreadyAddedError``: if node with same path is already
            marked as *added*
        """
        # Check if not already marked as *added* first
        for node in filenodes:
            if node.path in (n.path for n in self.added):
                raise NodeAlreadyAddedError(
                    "Such FileNode %s is already marked for addition"
                    % node.path)
        for node in filenodes:
            self.added.append(node)

    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyExistsError``: if node with same path is already
            marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
            marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
            commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
        try:
            # changing a file only makes sense when at least one commit exists
            self.repository.get_commit()
        except EmptyRepositoryError:
            raise EmptyRepositoryError(
                "Nothing to change - try to *add* new nodes rather than "
                "changing them")
        for node in filenodes:
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)

    def remove(self, *filenodes):
        """
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
        *removed* in next commit.

        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
            be *removed*
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
            be *changed*
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node is already marked to for removal at %s" % node.path)
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node is already marked to be changed at %s" % node.path)
            # We only mark node as *removed* - real removal is done by
            # commit method
            self.removed.append(node)

    def reset(self):
        """
        Resets this instance to initial state (cleans ``added``, ``changed``
        and ``removed`` lists).
        """
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def get_ipaths(self):
        """
        Returns generator of paths from nodes marked as added, changed or
        removed.
        """
        for node in itertools.chain(self.added, self.changed, self.removed):
            yield node.path

    def get_paths(self):
        """
        Returns list of paths from nodes marked as added, changed or removed.
        """
        return list(self.get_ipaths())

    def check_integrity(self, parents=None):
        """
        Checks in-memory commit's integrity. Also, sets parents if not
        already set.

        :raises CommitError: if any error occurs (i.e.
            ``NodeDoesNotExistError``).
        """
        if not self.parents:
            parents = parents or []
            if len(parents) == 0:
                try:
                    parents = [self.repository.get_commit(), None]
                except EmptyRepositoryError:
                    parents = [None, None]
            elif len(parents) == 1:
                parents += [None]
            self.parents = parents

        # Local parents, only if not None
        parents = [p for p in self.parents if p]

        # Check nodes marked as added: they must not exist in any parent
        for p in parents:
            for node in self.added:
                try:
                    p.get_node(node.path)
                except NodeDoesNotExistError:
                    pass
                else:
                    raise NodeAlreadyExistsError(
                        "Node `%s` already exists at %s" % (node.path, p))

        # Check nodes marked as changed: they must exist in some parent and
        # their content must actually differ
        missing = set(self.changed)
        not_changed = set(self.changed)
        if self.changed and not parents:
            raise NodeDoesNotExistError(str(self.changed[0].path))
        for p in parents:
            for node in self.changed:
                try:
                    old = p.get_node(node.path)
                    missing.remove(node)
                    # if content actually changed, remove node from not_changed
                    if old.content != node.content:
                        not_changed.remove(node)
                except NodeDoesNotExistError:
                    pass
        if self.changed and missing:
            # report a node that is actually missing (previously this used
            # the last *iterated* node, which may well exist in a parent)
            missing_node = missing.pop()
            raise NodeDoesNotExistError(
                "Node `%s` marked as modified but missing in parents: %s"
                % (missing_node.path, parents))

        if self.changed and not_changed:
            raise NodeNotChangedError(
                "Node `%s` wasn't actually changed (parents: %s)"
                % (not_changed.pop().path, parents))

        # Check nodes marked as removed: they must exist in some parent
        if self.removed and not parents:
            raise NodeDoesNotExistError(
                "Cannot remove node at %s as there "
                "were no parents specified" % self.removed[0].path)
        really_removed = set()
        for p in parents:
            for node in self.removed:
                try:
                    p.get_node(node.path)
                    really_removed.add(node)
                except CommitError:
                    pass
        not_removed = set(self.removed) - really_removed
        if not_removed:
            # TODO: johbo: This code branch does not seem to be covered
            raise NodeDoesNotExistError(
                "Cannot remove node at %s from "
                "following parents: %s" % (not_removed, parents))

    def commit(
            self, message, author, parents=None, branch=None, date=None,
            **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created :class:`BaseCommit`. Updates repository's
        attribute `commits`.

        .. note::

            While overriding this method each backend's should call
            ``self.check_integrity(parents)`` in the first place.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param date: ``datetime.datetime`` instance. Defaults to
            ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
            branch would be used.

        :raises ``CommitError``: if any error occurs while committing
        """
        raise NotImplementedError
1532 1533
1533 1534
class BaseInMemoryChangesetClass(type):
    # Metaclass that makes ``isinstance(obj, BaseInMemoryChangeset)`` succeed
    # for any ``BaseInMemoryCommit`` instance (deprecated changeset API).

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseInMemoryCommit)
1538 1539
1539 1540
class BaseInMemoryChangeset(BaseInMemoryCommit):
    # Deprecated alias of ``BaseInMemoryCommit``; warns on instantiation.

    __metaclass__ = BaseInMemoryChangesetClass

    def __new__(cls, *args, **kwargs):
        # fixed: warning previously named the wrong pair of classes
        # ("Use BaseCommit instead of BaseInMemoryCommit")
        warnings.warn(
            "Use BaseInMemoryCommit instead of BaseInMemoryChangeset",
            DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1548 1549
1549 1550
class EmptyCommit(BaseCommit):
    """
    A dummy empty commit. It's possible to pass hash when creating
    an EmptyCommit
    """

    def __init__(
            self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit, useful for web
        representation.
        """

        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # resolve the backend's default branch name when the alias is known;
        # implicitly None otherwise
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        return self.raw_id

    def get_path_commit(self, path, pre_load=None):
        # signature kept compatible with BaseCommit.get_path_commit (the
        # override previously lacked ``pre_load``); the hint is irrelevant
        # for an empty commit
        return self

    def get_file_content(self, path):
        # an empty commit contains no files
        return u''

    def get_file_size(self, path):
        return 0
1600 1601
1601 1602
class EmptyChangesetClass(type):
    # Metaclass that makes ``isinstance(obj, EmptyChangeset)`` succeed for
    # any ``EmptyCommit`` instance (deprecated changeset API).

    def __instancecheck__(self, instance):
        return isinstance(instance, EmptyCommit)
1606 1607
1607 1608
class EmptyChangeset(EmptyCommit):
    # Deprecated alias of ``EmptyCommit``; warns on instantiation and maps
    # the old ``revision`` attribute onto ``idx``.

    __metaclass__ = EmptyChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # fixed: previously called super(EmptyCommit, cls), inconsistent with
        # the other deprecated aliases (e.g. BaseChangeset)
        return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        # deprecated alias for ``idx``
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        # deprecated alias for ``idx``
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1636 1637
1637 1638
class EmptyRepository(BaseRepository):
    # Null-object repository: ignores all construction arguments and always
    # produces empty diffs.

    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        pass

    def get_diff(self, *args, **kwargs):
        # local import avoids a module-level dependency on the git backend
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        return GitDiff('')
1645 1646
1646 1647
class CollectionGenerator(object):
    """
    Lazy, sliceable collection of commits identified by ``commit_ids``.

    Commits are materialized one at a time via the repository's
    ``get_commit`` call, forwarding the ``pre_load`` and ``translate_tag``
    hints to allow cheaper bulk fetching.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None,
                 translate_tag=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # fixed: the ``collection_size`` argument was previously discarded
        # (the old TODO noted it "isn't currently hooked up"); when given it
        # overrides ``len(commit_ids)`` as the reported length
        self.collection_size = collection_size
        self.pre_load = pre_load
        self.translate_tag = translate_tag

    def __len__(self):
        if self.collection_size is not None:
            return self.collection_size
        return self.commit_ids.__len__()

    def __iter__(self):
        for commit_id in self.commit_ids:
            # TODO: johbo: Mercurial passes in commit indices or commit ids
            yield self._commit_factory(commit_id)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __getslice__(self, i, j):
        """
        Returns an iterator of sliced repository

        NOTE: ``__getslice__`` is a Python 2-only protocol.
        """
        commit_ids = self.commit_ids[i:j]
        return self.__class__(
            self.repo, commit_ids, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (self.__len__())
1683 1687
1684 1688
class Config(object):
    """
    Represents the configuration for a repository.

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        self._values = {}

    def copy(self):
        """Return a copy whose per-section dicts are independent of this one."""
        duplicate = Config()
        for section_name, section_values in self._values.items():
            duplicate._values[section_name] = section_values.copy()
        return duplicate

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        """Iterate ``(option, value)`` pairs of `section` (empty if unknown)."""
        return self._values.get(section, {}).iteritems()

    def get(self, section, option):
        """Return the value stored for `section`/`option`, or ``None``."""
        section_values = self._values.get(section, {})
        return section_values.get(option)

    def set(self, section, option, value):
        """Store `value` under `section`/`option`, creating the section."""
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        """Drop every option stored under `section`."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        serialized = []
        for section_name, section_values in self._values.items():
            for option, value in section_values.items():
                serialized.append(
                    (safe_str(section_name), safe_str(option),
                     safe_str(value)))
        return serialized
1730 1734
1731 1735
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    _meta_re = None
    _header_re = None

    def __init__(self, raw_diff):
        self.raw = raw_diff

    def chunks(self):
        """
        split the diff in chunks of separate --git a/file b/file chunks
        to make diffs consistent we must prepend with \n, and make sure
        we can detect last chunk as this was also has special rule

        Returns a generator of :class:`DiffChunk` objects.
        """
        # NOTE: a previous version also matched ``_meta_re`` against the
        # part before the first chunk and discarded the result; that dead
        # code has been removed.
        diff_parts = ('\n' + self.raw).split('\ndiff --git')

        chunks = diff_parts[1:]
        total_chunks = len(chunks)

        return (
            DiffChunk(chunk, self, cur_chunk == total_chunks)
            for cur_chunk, chunk in enumerate(chunks, start=1))
1764 1768
1765 1769
class DiffChunk(object):
    # A single "diff --git" section of a Diff. The header is parsed with the
    # parent Diff's backend-specific ``_header_re``.

    def __init__(self, chunk, diff, last_chunk):
        self._diff = diff

        # since we split by \ndiff --git that part is lost from original diff
        # we need to re-apply it at the end, EXCEPT ! if it's last chunk
        if not last_chunk:
            chunk += '\n'

        match = self._diff._header_re.match(chunk)
        self.header = match.groupdict()  # parsed header fields
        self.diff = chunk[match.end():]  # chunk body without the header
        self.raw = chunk
1780 1784
1781 1785
class BasePathPermissionChecker(object):
    """
    Base interface for path-level permission checks.

    Use :meth:`create_from_patterns` to build the cheapest concrete checker
    for a given include/exclude pattern set.
    """

    @staticmethod
    def create_from_patterns(includes, excludes):
        """Return the most specific checker for the given fnmatch patterns."""
        if includes and '*' in includes and not excludes:
            return AllPathPermissionChecker()
        elif excludes and '*' in excludes:
            return NonePathPermissionChecker()
        else:
            return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        # fixed: ``raise NotImplemented()`` raised a TypeError because
        # NotImplemented is a singleton value, not an exception type
        raise NotImplementedError()

    def has_access(self, path):
        """Return ``True`` when `path` is accessible (abstract)."""
        raise NotImplementedError()
1799 1803
1800 1804
class AllPathPermissionChecker(BasePathPermissionChecker):
    # Checker granting access to every path (used when includes == ['*']
    # and there are no excludes).

    @property
    def has_full_access(self):
        return True

    def has_access(self, path):
        return True
1809 1813
1810 1814
class NonePathPermissionChecker(BasePathPermissionChecker):
    # Checker denying access to every path (used when excludes contains '*').

    @property
    def has_full_access(self):
        return False

    def has_access(self, path):
        return False
1819 1823
1820 1824
class PatternPathPermissionChecker(BasePathPermissionChecker):
    """Grant access by fnmatch include/exclude patterns; excludes win."""

    def __init__(self, includes, excludes):
        self.includes = includes
        self.excludes = excludes
        # pre-compile patterns once; empty/None pattern lists compile to []
        self.includes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in includes] if includes else []
        self.excludes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in excludes] if excludes else []

    @property
    def has_full_access(self):
        return '*' in self.includes and not self.excludes

    def has_access(self, path):
        if any(regex.match(path) for regex in self.excludes_re):
            return False
        return any(regex.match(path) for regex in self.includes_re)
@@ -1,534 +1,535 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT commit module
23 23 """
24 24
25 25 import re
26 26 import stat
27 27 from itertools import chain
28 28 from StringIO import StringIO
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from rhodecode.lib.datelib import utcdate_fromtimestamp
33 33 from rhodecode.lib.utils import safe_unicode, safe_str
34 34 from rhodecode.lib.utils2 import safe_int
35 35 from rhodecode.lib.vcs.conf import settings
36 36 from rhodecode.lib.vcs.backends import base
37 37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
38 38 from rhodecode.lib.vcs.nodes import (
39 39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
40 40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
41 41 RemovedFileNodesGenerator, LargeFileNode)
42 42 from rhodecode.lib.vcs.compat import configparser
43 43
44 44
class GitCommit(base.BaseCommit):
    """
    Represents state of the repository at single commit id.
    """
    # names of the remote-side object attributes backing the lazy properties
    _author_property = 'author'
    _committer_property = 'committer'
    _date_property = 'commit_time'
    _date_tz_property = 'commit_timezone'
    _message_property = 'message'
    _parents_property = 'parents'

    # attributes that must never be fetched via the bulk remote request
    _filter_pre_load = [
        # done through a more complex tree walk on parents
        "affected_files",
        # based on repository cached property
        "branch",
        # done through subprocess not remote call
        "children",
        # done through a more complex tree walk on parents
        "status",
        # mercurial specific property not supported here
        "_file_paths",
        # mercurial specific property not supported here
        'obsolete',
        # mercurial specific property not supported here
        'phase',
        # mercurial specific property not supported here
        'hidden'
    ]

    def __init__(self, repository, raw_id, idx, pre_load=None):
        self.repository = repository
        self._remote = repository._remote
        # TODO: johbo: Tweak of raw_id should not be necessary
        self.raw_id = safe_str(raw_id)
        self.idx = idx

        self._set_bulk_properties(pre_load)

        # caches
        self._stat_modes = {}  # stat info for paths
        self._paths = {}  # path processed with parse_tree
        self.nodes = {}
        self._submodules = None

    def _set_bulk_properties(self, pre_load):
        """
        Pre-populate commit attributes in one bulk remote call, skipping
        entries from ``_filter_pre_load`` that cannot be bulk-fetched.
        """
        if not pre_load:
            return
        pre_load = [entry for entry in pre_load
                    if entry not in self._filter_pre_load]
        if not pre_load:
            return

        result = self._remote.bulk_request(self.raw_id, pre_load)
        for attr, value in result.items():
            if attr in ["author", "message"]:
                if value:
                    value = safe_unicode(value)
            elif attr == "date":
                value = utcdate_fromtimestamp(*value)
            elif attr == "parents":
                value = self._make_commits(value)
            # writing into __dict__ shadows the LazyProperty descriptors
            self.__dict__[attr] = value

    @LazyProperty
    def _commit(self):
        return self._remote[self.raw_id]

    @LazyProperty
    def _tree_id(self):
        return self._remote[self._commit['tree']]['id']

    @LazyProperty
    def id(self):
        return self.raw_id

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    @LazyProperty
    def message(self):
        return safe_unicode(
            self._remote.commit_attribute(self.id, self._message_property))

    @LazyProperty
    def committer(self):
        return safe_unicode(
            self._remote.commit_attribute(self.id, self._committer_property))

    @LazyProperty
    def author(self):
        return safe_unicode(
            self._remote.commit_attribute(self.id, self._author_property))

    @LazyProperty
    def date(self):
        unix_ts, tz = self._remote.get_object_attrs(
            self.raw_id, self._date_property, self._date_tz_property)
        return utcdate_fromtimestamp(unix_ts, tz)

    @LazyProperty
    def status(self):
        """
        Returns modified, added, removed files for current commit
        """
        return self.changed, self.added, self.removed

    @LazyProperty
    def tags(self):
        tags = [safe_unicode(name) for name,
                commit_id in self.repository.tags.iteritems()
                if commit_id == self.raw_id]
        return tags

    @LazyProperty
    def branch(self):
        for name, commit_id in self.repository.branches.iteritems():
            if commit_id == self.raw_id:
                return safe_unicode(name)
        return None

    def _get_id_for_path(self, path):
        """
        Return cached ``[object_id, type]`` for `path`, walking (and
        caching) the git trees on the way down.

        :raises CommitError: when a path component is missing or not a tree
        """
        path = safe_str(path)
        if path in self._paths:
            return self._paths[path]

        tree_id = self._tree_id

        path = path.strip('/')
        if path == '':
            data = [tree_id, "tree"]
            self._paths[''] = data
            return data

        parts = path.split('/')
        dirs, name = parts[:-1], parts[-1]
        cur_dir = ''

        # initially extract things from root dir
        tree_items = self._remote.tree_items(tree_id)
        self._process_tree_items(tree_items, cur_dir)

        for dir in dirs:
            if cur_dir:
                cur_dir = '/'.join((cur_dir, dir))
            else:
                cur_dir = dir
            dir_id = None
            for item, stat_, id_, type_ in tree_items:
                if item == dir:
                    dir_id = id_
                    break
            if dir_id:
                if type_ != "tree":
                    raise CommitError('%s is not a directory' % cur_dir)
                # update tree
                tree_items = self._remote.tree_items(dir_id)
            else:
                raise CommitError('%s have not been found' % cur_dir)

        # cache all items from the given traversed tree
        self._process_tree_items(tree_items, cur_dir)

        if path not in self._paths:
            raise self.no_node_at_path(path)

        return self._paths[path]

    def _process_tree_items(self, items, cur_dir):
        # cache object id/type and stat mode for every entry of a tree
        for item, stat_, id_, type_ in items:
            if cur_dir:
                name = '/'.join((cur_dir, item))
            else:
                name = item
            self._paths[name] = [id_, type_]
            self._stat_modes[name] = stat_

    def _get_kind(self, path):
        """Map the git object type at `path` onto a ``NodeKind``."""
        path_id, type_ = self._get_id_for_path(path)
        if type_ == 'blob':
            return NodeKind.FILE
        elif type_ == 'tree':
            return NodeKind.DIR
        elif type_ == 'link':
            # BUGFIX: previously compared the *builtin* ``type`` against
            # 'link' (always False), so submodules were never detected here.
            return NodeKind.SUBMODULE
        return None

    def _get_filectx(self, path):
        path = self._fix_path(path)
        if self._get_kind(path) != NodeKind.FILE:
            raise CommitError(
                "File does not exist for commit %s at '%s'" %
                (self.raw_id, path))
        return path

    def _get_file_nodes(self):
        # flatten file nodes of every directory reachable from the root
        return chain(*(t[2] for t in self.walk()))

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        parent_ids = self._remote.commit_attribute(
            self.id, self._parents_property)
        return self._make_commits(parent_ids)

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """
        rev_filter = settings.GIT_REV_FILTER
        output, __ = self.repository.run_git_command(
            ['rev-list', '--children'] + rev_filter)

        child_ids = []
        pat = re.compile(r'^%s' % self.raw_id)
        for l in output.splitlines():
            if pat.match(l):
                found_ids = l.split(' ')[1:]
                child_ids.extend(found_ids)
        return self._make_commits(child_ids)

    def _make_commits(self, commit_ids, pre_load=None):
        # translate_tag=False skips the remote call resolving annotated tag
        # objects - much faster when materializing commits in big chunks
        return [
            self.repository.get_commit(commit_id=commit_id, pre_load=pre_load,
                                       translate_tag=False)
            for commit_id in commit_ids]

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at the given `path`.
        """
        path = safe_str(path)
        # ensure path is traversed
        self._get_id_for_path(path)
        return self._stat_modes[path]

    def is_link(self, path):
        return stat.S_ISLNK(self.get_file_mode(path))

    def get_file_content(self, path):
        """
        Returns content of the file at given `path`.
        """
        id_, _ = self._get_id_for_path(path)
        return self._remote.blob_as_pretty_string(id_)

    def get_file_size(self, path):
        """
        Returns size of the file at given `path`.
        """
        id_, _ = self._get_id_for_path(path)
        return self._remote.blob_raw_length(id_)

    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of `GitCommit` objects for
        which file at given `path` has been modified.

        TODO: This function now uses an underlying 'git' command which works
        quickly but ideally we should replace with an algorithm.
        """
        self._get_filectx(path)
        f_path = safe_str(path)

        # optimize for n==1, rev-list is much faster for that use-case
        if limit == 1:
            cmd = ['rev-list', '-1', self.raw_id, '--', f_path]
        else:
            cmd = ['log']
            if limit:
                cmd.extend(['-n', str(safe_int(limit, 0))])
            cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])

        output, __ = self.repository.run_git_command(cmd)
        commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)

        return [
            self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in commit_ids]

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, commit_id, commit lazy loader and line

        TODO: This function now uses os underlying 'git' command which is
        generally not good. Should be replaced with algorithm iterating
        commits.
        """
        cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
        # -l ==> outputs long shas (and we need all 40 characters)
        # --root ==> doesn't put '^' character for bounderies
        # -r commit_id ==> blames for the given commit
        output, __ = self.repository.run_git_command(cmd)

        for i, blame_line in enumerate(output.split('\n')[:-1]):
            line_no = i + 1
            commit_id, line = re.split(r' ', blame_line, 1)
            yield (
                line_no, commit_id,
                # BUGFIX: bind commit_id as a default argument. The previous
                # late-binding closure resolved commit_id at call time, so
                # loaders invoked after the generator was exhausted all
                # pointed at the last blamed commit.
                lambda commit_id=commit_id: self.repository.get_commit(
                    commit_id=commit_id, pre_load=pre_load),
                line)

    def get_nodes(self, path):
        """Return sorted dir + file nodes of the tree at `path`."""
        if self._get_kind(path) != NodeKind.DIR:
            raise CommitError(
                "Directory does not exist for commit %s at "
                " '%s'" % (self.raw_id, path))
        path = self._fix_path(path)
        id_, _ = self._get_id_for_path(path)
        tree_id = self._remote[id_]['id']
        dirnodes = []
        filenodes = []
        alias = self.repository.alias
        for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
            if type_ == 'link':
                url = self._get_submodule_url('/'.join((path, name)))
                dirnodes.append(SubModuleNode(
                    name, url=url, commit=id_, alias=alias))
                continue

            if path != '':
                obj_path = '/'.join((path, name))
            else:
                obj_path = name
            if obj_path not in self._stat_modes:
                self._stat_modes[obj_path] = stat_

            if type_ == 'tree':
                dirnodes.append(DirNode(obj_path, commit=self))
            elif type_ == 'blob':
                filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
            else:
                raise CommitError(
                    "Requested object should be Tree or Blob, is %s", type_)

        nodes = dirnodes + filenodes
        for node in nodes:
            if node.path not in self.nodes:
                self.nodes[node.path] = node
        nodes.sort()
        return nodes

    def get_node(self, path, pre_load=None):
        """Return (and cache) the single node at `path`."""
        if isinstance(path, unicode):
            path = path.encode('utf-8')
        path = self._fix_path(path)
        if path not in self.nodes:
            try:
                id_, type_ = self._get_id_for_path(path)
            except CommitError:
                raise NodeDoesNotExistError(
                    "Cannot find one of parents' directories for a given "
                    "path: %s" % path)

            if type_ == 'link':
                url = self._get_submodule_url(path)
                node = SubModuleNode(path, url=url, commit=id_,
                                     alias=self.repository.alias)
            elif type_ == 'tree':
                if path == '':
                    node = RootNode(commit=self)
                else:
                    node = DirNode(path, commit=self)
            elif type_ == 'blob':
                node = FileNode(path, commit=self, pre_load=pre_load)
            else:
                raise self.no_node_at_path(path)

            # cache node
            self.nodes[path] = node
        return self.nodes[path]

    def get_largefile_node(self, path):
        # returns None when `path` is not an LFS pointer or the object is
        # not present in the largefiles store
        id_, _ = self._get_id_for_path(path)
        pointer_spec = self._remote.is_large_file(id_)

        if pointer_spec:
            # content of that file regular FileNode is the hash of largefile
            file_id = pointer_spec.get('oid_hash')
            if self._remote.in_largefiles_store(file_id):
                lf_path = self._remote.store_path(file_id)
                return LargeFileNode(lf_path, commit=self, org_path=path)

    @LazyProperty
    def affected_files(self):
        """
        Gets a fast accessible file changes for given commit
        """
        added, modified, deleted = self._changes_cache
        return list(added.union(modified).union(deleted))

    @LazyProperty
    def _changes_cache(self):
        """Compute (added, modified, deleted) path sets against all parents."""
        added = set()
        modified = set()
        deleted = set()
        _r = self._remote

        parents = self.parents
        if not self.parents:
            parents = [base.EmptyCommit()]
        for parent in parents:
            if isinstance(parent, base.EmptyCommit):
                oid = None
            else:
                oid = parent.raw_id
            changes = _r.tree_changes(oid, self.raw_id)
            for (oldpath, newpath), (_, _), (_, _) in changes:
                if newpath and oldpath:
                    modified.add(newpath)
                elif newpath and not oldpath:
                    added.add(newpath)
                elif not newpath and oldpath:
                    deleted.add(oldpath)
        return added, modified, deleted

    def _get_paths_for_status(self, status):
        """
        Returns sorted list of paths for given ``status``.

        :param status: one of: *added*, *modified* or *deleted*
        """
        added, modified, deleted = self._changes_cache
        return sorted({
            'added': list(added),
            'modified': list(modified),
            'deleted': list(deleted)}[status]
        )

    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        if not self.parents:
            return list(self._get_file_nodes())
        return AddedFileNodesGenerator(
            [n for n in self._get_paths_for_status('added')], self)

    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        if not self.parents:
            return []
        return ChangedFileNodesGenerator(
            [n for n in self._get_paths_for_status('modified')], self)

    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        if not self.parents:
            return []
        return RemovedFileNodesGenerator(
            [n for n in self._get_paths_for_status('deleted')], self)

    def _get_submodule_url(self, submodule_path):
        """Resolve a submodule path to its url via the .gitmodules file."""
        git_modules_path = '.gitmodules'

        if self._submodules is None:
            self._submodules = {}

            try:
                submodules_node = self.get_node(git_modules_path)
            except NodeDoesNotExistError:
                return None

            content = submodules_node.content

            # ConfigParser fails if there are whitespaces
            content = '\n'.join(l.strip() for l in content.split('\n'))

            parser = configparser.ConfigParser()
            parser.readfp(StringIO(content))

            for section in parser.sections():
                path = parser.get(section, 'path')
                url = parser.get(section, 'url')
                if path and url:
                    self._submodules[path.strip('/')] = url

        return self._submodules.get(submodule_path.strip('/'))
@@ -1,1010 +1,1012 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, path as vcspath
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference)
39 39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 CommitDoesNotExistError, EmptyRepositoryError,
44 44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45 45
46 46
# Accept an abbreviated (12 hex chars) or a full (40 hex chars) commit sha.
# BUGFIX: the previous pattern r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$' used
# square brackets where a group was intended, so '[' became part of a
# character class and ']' a literal - the first alternative then matched any
# string merely *starting* with 12 hex characters.
SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')

log = logging.getLogger(__name__)
50 50
51 51
52 52 class GitRepository(BaseRepository):
53 53 """
54 54 Git repository backend.
55 55 """
56 56 DEFAULT_BRANCH_NAME = 'master'
57 57
58 58 contact = BaseRepository.DEFAULT_CONTACT
59 59
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        :param repo_path: local filesystem path of the repository
        :param config: optional ``Config``; falls back to the default config
        :param create: create a new repository at ``repo_path``
        :param src_url: optional URL to clone/fetch from after creation
        :param do_workspace_checkout: check out a working copy after pulling
        :param with_wire: extra wire-protocol options passed to the remote
        :param bare: create/treat the repository as bare
        """
        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()
        self.with_wire = with_wire

        self._init_repo(create, src_url, do_workspace_checkout, bare)

        # caches
        self._commit_ids = {}  # commit_id -> index into `commit_ids`
71 71
    @LazyProperty
    def _remote(self):
        """Lazily created RPC proxy to the vcsserver Git backend."""
        return connection.Git(self.path, self.config, with_wire=self.with_wire)
75 75
    @LazyProperty
    def bare(self):
        """True when the repository has no working directory (bare repo)."""
        return self._remote.bare()
79 79
    @LazyProperty
    def head(self):
        """Commit id the repository HEAD currently points at."""
        return self._remote.head()
83 83
    @LazyProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order.  Being lazy
        attribute allows external tools to inject commit ids from cache.
        """
        commit_ids = self._get_all_commit_ids()
        # keep the id -> index mapping in sync with the id list
        self._rebuild_cache(commit_ids)
        return commit_ids
93 93
94 94 def _rebuild_cache(self, commit_ids):
95 95 self._commit_ids = dict((commit_id, index)
96 96 for index, commit_id in enumerate(commit_ids))
97 97
    def run_git_command(self, cmd, **opts):
        """
        Runs given ``cmd`` as git command and returns tuple
        (stdout, stderr).

        :param cmd: git command to be executed, as a list of arguments
        :param opts: env options to pass into Subprocess command; the
            special key `skip_stderr_log` suppresses stderr debug logging
        """
        if not isinstance(cmd, list):
            raise ValueError('cmd must be a list, got %s instead' % type(cmd))

        skip_stderr_log = opts.pop('skip_stderr_log', False)
        out, err = self._remote.run_git_command(cmd, **opts)
        if err and not skip_stderr_log:
            log.debug('Stderr output of git command "%s":\n%s', cmd, err)
        return out, err
114 114
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happen that git will issue a basic
        auth request that can cause the whole API to hang when used from
        python or other external calls.

        On failures it'll raise urllib2.HTTPError, exception is also thrown
        when the return code is non 200
        """
        # check first if it's not an url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # strip a scheme prefix such as 'git+' from e.g. 'git+http://...'
        if '+' in url.split('://', 1)[0]:
            url = url.split('+', 1)[1]

        # Request the _remote to verify the url
        return connection.Git.check_url(url, config.serialize())
135 135
136 136 @staticmethod
137 137 def is_valid_repository(path):
138 138 if os.path.isdir(os.path.join(path, '.git')):
139 139 return True
140 140 # check case of bare repository
141 141 try:
142 142 GitRepository(path)
143 143 return True
144 144 except VCSError:
145 145 pass
146 146 return False
147 147
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
                   bare=False):
        """
        Create and/or validate the repository on disk, optionally pulling or
        fetching initial content from `src_url`.

        :raises RepositoryError: when the target already exists, a checkout
            is requested for a bare repo, the path is not a git repository,
            or an OS level error occurs
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if bare and do_workspace_checkout:
            raise RepositoryError("Cannot update a bare repository")
        try:

            if src_url:
                # check URL before any actions
                GitRepository.check_url(src_url, self.config)

            if create:
                os.makedirs(self.path, mode=0o755)

                if bare:
                    self._remote.init_bare()
                else:
                    self._remote.init()

                if src_url and bare:
                    # bare repository only allows a fetch and checkout is not allowed
                    self.fetch(src_url, commit_ids=None)
                elif src_url:
                    self.pull(src_url, commit_ids=None,
                              update_after=do_workspace_checkout)

            else:
                # not creating: validate that an existing repo lives here
                if not self._remote.assert_correct_path():
                    raise RepositoryError(
                        'Path "%s" does not contain a Git repository' %
                        (self.path,))

        # TODO: johbo: check if we have to translate the OSError here
        except OSError as err:
            raise RepositoryError(err)
187 187
    def _get_all_commit_ids(self, filters=None):
        """
        Return all commit ids reachable from branches/tags, oldest first.

        :param filters: optional dict with `since`, `until` and `branch_name`
            keys used to narrow the underlying `git rev-list` call
        """
        # we must check if this repo is not empty, since later command
        # fails if it is. And it's cheaper to ask than throw the subprocess
        # errors
        head = self._remote.head(show_exc=False)
        if not head:
            return []

        rev_filter = ['--branches', '--tags']
        extra_filter = []

        if filters:
            if filters.get('since'):
                extra_filter.append('--since=%s' % (filters['since']))
            if filters.get('until'):
                extra_filter.append('--until=%s' % (filters['until']))
            if filters.get('branch_name'):
                rev_filter = ['--tags']
                extra_filter.append(filters['branch_name'])
        rev_filter.extend(extra_filter)

        # if filters.get('start') or filters.get('end'):
        #     # skip is offset, max-count is limit
        #     if filters.get('start'):
        #         extra_filter += ' --skip=%s' % filters['start']
        #     if filters.get('end'):
        #         extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))

        cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
        try:
            output, __ = self.run_git_command(cmd)
        except RepositoryError:
            # Can be raised for empty repositories
            return []
        return output.splitlines()
224 224
225 225 def _get_commit_id(self, commit_id_or_idx):
226 226 def is_null(value):
227 227 return len(value) == commit_id_or_idx.count('0')
228 228
229 229 if self.is_empty():
230 230 raise EmptyRepositoryError("There are no commits yet")
231 231
232 232 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
233 233 return self.commit_ids[-1]
234 234
235 235 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
236 236 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
237 237 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
238 238 try:
239 239 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
240 240 except Exception:
241 241 msg = "Commit %s does not exist for %s" % (
242 242 commit_id_or_idx, self)
243 243 raise CommitDoesNotExistError(msg)
244 244
245 245 elif is_bstr:
246 246 # check full path ref, eg. refs/heads/master
247 247 ref_id = self._refs.get(commit_id_or_idx)
248 248 if ref_id:
249 249 return ref_id
250 250
251 251 # check branch name
252 252 branch_ids = self.branches.values()
253 253 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
254 254 if ref_id:
255 255 return ref_id
256 256
257 257 # check tag name
258 258 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
259 259 if ref_id:
260 260 return ref_id
261 261
262 262 if (not SHA_PATTERN.match(commit_id_or_idx) or
263 263 commit_id_or_idx not in self.commit_ids):
264 264 msg = "Commit %s does not exist for %s" % (
265 265 commit_id_or_idx, self)
266 266 raise CommitDoesNotExistError(msg)
267 267
268 268 # Ensure we return full id
269 269 if not SHA_PATTERN.match(str(commit_id_or_idx)):
270 270 raise CommitDoesNotExistError(
271 271 "Given commit id %s not recognized" % commit_id_or_idx)
272 272 return commit_id_or_idx
273 273
274 274 def get_hook_location(self):
275 275 """
276 276 returns absolute path to location where hooks are stored
277 277 """
278 278 loc = os.path.join(self.path, 'hooks')
279 279 if not self.bare:
280 280 loc = os.path.join(self.path, '.git', 'hooks')
281 281 return loc
282 282
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # empty repository - fall back to filesystem modification time
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
294 294
295 295 def _get_fs_mtime(self):
296 296 idx_loc = '' if self.bare else '.git'
297 297 # fallback to filesystem
298 298 in_path = os.path.join(self.path, idx_loc, "index")
299 299 he_path = os.path.join(self.path, idx_loc, "HEAD")
300 300 if os.path.exists(in_path):
301 301 return os.stat(in_path).st_mtime
302 302 else:
303 303 return os.stat(he_path).st_mtime
304 304
    @LazyProperty
    def description(self):
        """Repository description text, falling back to the backend default."""
        description = self._remote.get_description()
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
309 309
310 310 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
311 311 if self.is_empty():
312 312 return OrderedDict()
313 313
314 314 result = []
315 315 for ref, sha in self._refs.iteritems():
316 316 if ref.startswith(prefix):
317 317 ref_name = ref
318 318 if strip_prefix:
319 319 ref_name = ref[len(prefix):]
320 320 result.append((safe_unicode(ref_name), sha))
321 321
322 322 def get_name(entry):
323 323 return entry[0]
324 324
325 325 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
326 326
    def _get_branches(self):
        # branch name -> sha, derived from refs/heads/*
        return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
329 329
    @LazyProperty
    def branches(self):
        """Mapping of branch name -> commit sha."""
        return self._get_branches()
333 333
    @LazyProperty
    def branches_closed(self):
        # git has no concept of closed branches (mercurial-only feature)
        return {}
337 337
    @LazyProperty
    def bookmarks(self):
        # git has no bookmarks (mercurial-only feature)
        return {}
341 341
342 342 @LazyProperty
343 343 def branches_all(self):
344 344 all_branches = {}
345 345 all_branches.update(self.branches)
346 346 all_branches.update(self.branches_closed)
347 347 return all_branches
348 348
    @LazyProperty
    def tags(self):
        """Mapping of tag name -> sha, sorted by name descending."""
        return self._get_tags()
352 352
    def _get_tags(self):
        # tag name -> sha, derived from refs/tags/*
        return self._get_refs_entries(
            prefix='refs/tags/', strip_prefix=True, reverse=True)
356 356
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        # TODO: fix this method to apply annotated tags correct with message
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        message = message or "Added tag %s for commit %s" % (
            name, commit.raw_id)
        # lightweight tag: just point refs/tags/<name> at the commit object
        self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])

        # refresh the cached ref/tag mappings after the write
        self._refs = self._get_refs()
        self.tags = self._get_tags()
        return commit
381 381
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)
        # tags are plain loose ref files - remove the file directly
        tagpath = vcspath.join(
            self._remote.get_refs_path(), 'refs', 'tags', name)
        try:
            os.remove(tagpath)
            # refresh the cached ref/tag mappings after the delete
            self._refs = self._get_refs()
            self.tags = self._get_tags()
        except OSError as e:
            raise RepositoryError(e.strerror)
403 403
404 404 def _get_refs(self):
405 405 return self._remote.get_refs()
406 406
@LazyProperty
def _refs(self):
    # Cached ref mapping; tag()/remove_tag() refresh it by assigning
    # over the lazy attribute after mutating refs.
    return self._get_refs()
410 410
411 411 @property
412 412 def _ref_tree(self):
413 413 node = tree = {}
414 414 for ref, sha in self._refs.iteritems():
415 415 path = ref.split('/')
416 416 for bit in path[:-1]:
417 417 node = node.setdefault(bit, {})
418 418 node[path[-1]] = sha
419 419 node = tree
420 420 return tree
421 421
def get_remote_ref(self, ref_name):
    """Return the sha of ``refs/remotes/origin/<ref_name>`` or None."""
    key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
    try:
        return self._refs[key]
    except Exception:
        # missing ref (or un-stringifiable name) simply yields None
        return None
428 428
def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
    """
    Returns `GitCommit` object representing commit from git repository
    at the given `commit_id` or head (most recent commit) if None given.

    :param translate_tag: when True, resolve the given id through the
        vcs server so that a tag ref is translated to the commit it
        points at; set False for faster bulk fetching when ids are
        already known to be commit shas.
    """
    if commit_id is not None:
        self._validate_commit_id(commit_id)
    elif commit_idx is not None:
        self._validate_commit_idx(commit_idx)
        commit_id = commit_idx
    commit_id = self._get_commit_id(commit_id)
    try:
        if translate_tag:
            # Need to call remote to translate id for tagging scenario
            commit_id = self._remote.get_object(commit_id)["commit_id"]
        idx = self._commit_ids[commit_id]
    except KeyError:
        raise RepositoryError("Cannot get object with id %s" % commit_id)

    return GitCommit(self, commit_id, idx, pre_load=pre_load)
448 449
def get_commits(
        self, start_id=None, end_id=None, start_date=None, end_date=None,
        branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
    """
    Returns generator of `GitCommit` objects from start to end (both
    are inclusive), in ascending date order.

    :param start_id: None, str(commit_id)
    :param end_id: None, str(commit_id)
    :param start_date: if specified, commits with commit date less than
        ``start_date`` would be filtered out from returned set
    :param end_date: if specified, commits with commit date greater than
        ``end_date`` would be filtered out from returned set
    :param branch_name: if specified, commits not reachable from given
        branch would be filtered out from returned set
    :param show_hidden: Show hidden commits such as obsolete or hidden from
        Mercurial evolve
    :param translate_tags: when False, skip the per-commit tag-to-commit
        translation on the vcs server; faster when fetching big chunks.
    :raise BranchDoesNotExistError: If given `branch_name` does not
        exist.
    :raise CommitDoesNotExistError: If commits for given `start` or
        `end` could not be found.

    """
    if self.is_empty():
        raise EmptyRepositoryError("There are no commits yet")
    self._validate_branch_name(branch_name)

    if start_id is not None:
        self._validate_commit_id(start_id)
    if end_id is not None:
        self._validate_commit_id(end_id)

    # translate user-supplied ids into positions in the full id list
    start_raw_id = self._get_commit_id(start_id)
    start_pos = self._commit_ids[start_raw_id] if start_id else None
    end_raw_id = self._get_commit_id(end_id)
    end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

    if None not in [start_id, end_id] and start_pos > end_pos:
        raise RepositoryError(
            "Start commit '%s' cannot be after end commit '%s'" %
            (start_id, end_id))

    if end_pos is not None:
        # slice end is exclusive, but the API promises inclusive ranges
        end_pos += 1

    filter_ = []
    if branch_name:
        filter_.append({'branch_name': branch_name})
    if start_date and not end_date:
        filter_.append({'since': start_date})
    if end_date and not start_date:
        filter_.append({'until': end_date})
    if start_date and end_date:
        filter_.append({'since': start_date})
        filter_.append({'until': end_date})

    # if start_pos or end_pos:
    #     filter_.append({'start': start_pos})
    #     filter_.append({'end': end_pos})

    if filter_:
        revfilters = {
            'branch_name': branch_name,
            'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
            'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
            'start': start_pos,
            'end': end_pos,
        }
        commit_ids = self._get_all_commit_ids(filters=revfilters)

        # pure python stuff, it's slow due to walker walking whole repo
        # def get_revs(walker):
        #     for walker_entry in walker:
        #         yield walker_entry.commit.id
        # revfilters = {}
        # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
    else:
        commit_ids = self.commit_ids

    if start_pos or end_pos:
        commit_ids = commit_ids[start_pos: end_pos]

    return CollectionGenerator(self, commit_ids, pre_load=pre_load,
                               translate_tag=translate_tags)
532 534
def get_diff(
        self, commit1, commit2, path='', ignore_whitespace=False,
        context=3, path1=None):
    """
    Returns (git like) *diff*, as plain text. Shows changes introduced by
    ``commit2`` since ``commit1``.

    :param commit1: Entry point from which diff is shown. Can be
        ``self.EMPTY_COMMIT`` - in this case, patch showing all
        the changes since empty state of the repository until ``commit2``
    :param commit2: Until which commits changes should be shown.
    :param ignore_whitespace: If set to ``True``, would not show whitespace
        changes. Defaults to ``False``.
    :param context: How many lines before/after changed lines should be
        shown. Defaults to ``3``.
    """
    self._validate_diff_commits(commit1, commit2)
    if path1 is not None and path1 != path:
        raise ValueError("Diff of two different paths not supported.")

    flags = [
        '-U%s' % context, '--full-index', '--binary', '-p',
        '-M', '--abbrev=40']
    if ignore_whitespace:
        flags.append('-w')

    diff_from_empty = commit1 == self.EMPTY_COMMIT
    if diff_from_empty:
        # 'diff' cannot take an empty base, so use 'show' on commit2
        cmd = ['show'] + flags + [commit2.raw_id]
    else:
        cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]

    if path:
        cmd.extend(['--', path])

    stdout, __ = self.run_git_command(cmd)

    if diff_from_empty:
        # 'show' prepends commit metadata; drop everything before the
        # first 'diff' line and re-append the trailing newline that
        # plain 'diff' output carries.
        lines = stdout.splitlines()
        skip = 0
        for line in lines:
            if line.startswith('diff'):
                break
            skip += 1
        stdout = '\n'.join(lines[skip:]) + '\n'

    return GitDiff(stdout)
580 582
def strip(self, commit_id, branch_name):
    """Drop ``commit_id`` by resetting ``branch_name`` to its parent."""
    commit = self.get_commit(commit_id=commit_id)
    if commit.merge:
        raise Exception('Cannot reset to merge commit')

    # the first parent becomes the new branch head
    new_head = commit.parents[0]
    self._remote.set_refs('refs/heads/%s' % branch_name, new_head.raw_id)

    # rebuild commit id list/index caches after history changed
    self.commit_ids = self._get_all_commit_ids()
    self._rebuild_cache(self.commit_ids)
592 594
def get_common_ancestor(self, commit_id1, commit_id2, repo2):
    """Return the id of the common ancestor of the two commits."""
    if commit_id1 == commit_id2:
        return commit_id1

    if self != repo2:
        # cross-repo: ask the remote which revs repo2 is missing
        missing = self._remote.get_missing_revs(
            commit_id1, commit_id2, repo2.path)
        if missing:
            first_missing = repo2.get_commit(missing[-1])
            if first_missing.parents:
                ancestor_id = first_missing.parents[0].raw_id
            else:
                ancestor_id = None
        else:
            # no commits from other repo, ancestor_id is the commit_id2
            ancestor_id = commit_id2
    else:
        # same repo: git can answer directly
        stdout, __ = self.run_git_command(
            ['merge-base', commit_id1, commit_id2])
        ancestor_id = re.findall(r'[0-9a-fA-F]{40}', stdout)[0]

    return ancestor_id
615 617
def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
    """Return commits reachable from commit_id2 but not from commit_id1."""
    if commit_id1 == commit_id2:
        return []

    if self != repo2:
        # cross-repo comparison goes through the remote
        missing_ids = self._remote.get_missing_revs(
            commit_id1, commit_id2, repo2.path)
        return [
            repo2.get_commit(commit_id=cid, pre_load=pre_load)
            for cid in reversed(missing_ids)]

    # same repo: list the range with git log, oldest first
    output, __ = self.run_git_command(
        ['log', '--reverse', '--pretty=format: %H', '-s',
         '%s..%s' % (commit_id1, commit_id2)])
    return [
        self.get_commit(commit_id=cid, pre_load=pre_load)
        for cid in re.findall(r'[0-9a-fA-F]{40}', output)]
637 639
@LazyProperty
def in_memory_commit(self):
    """
    Returns ``GitInMemoryCommit`` object for this repository.
    """
    # fresh in-memory staging area bound to this repo instance
    return GitInMemoryCommit(self)
644 646
def pull(self, url, commit_ids=None, update_after=False):
    """
    Pull changes from external location. Pull is different in GIT
    that fetch since it's doing a checkout

    :param commit_ids: Optional. Can be set to a list of commit ids
        which shall be pulled from the other repository.
    """
    refs = None
    if commit_ids is not None:
        # narrow the pull down to refs that point at the wanted commits
        remote_refs = self._remote.get_remote_refs(url)
        refs = [name for name, sha in remote_refs.items()
                if sha in commit_ids]
    self._remote.pull(url, refs=refs, update_after=update_after)
    self._remote.invalidate_vcs_cache()
659 661
def fetch(self, url, commit_ids=None):
    """Fetch all git objects from external location (no checkout)."""
    remote = self._remote
    remote.sync_fetch(url, refs=commit_ids)
    remote.invalidate_vcs_cache()
666 668
def push(self, url):
    """Push to ``url``; refs=None means all matching refs."""
    self._remote.sync_push(url, refs=None)
670 672
def set_refs(self, ref_name, commit_id):
    """Point the given ref (e.g. 'refs/heads/x') at ``commit_id``."""
    self._remote.set_refs(ref_name, commit_id)
673 675
def remove_ref(self, ref_name):
    """Delete the given ref from this repository."""
    self._remote.remove_ref(ref_name)
676 678
def _update_server_info(self):
    """
    runs gits update-server-info command in this repo instance
    """
    # refreshes auxiliary info files (e.g. info/refs) used by dumb
    # transports to serve this repository
    self._remote.update_server_info()
682 684
683 685 def _current_branch(self):
684 686 """
685 687 Return the name of the current branch.
686 688
687 689 It only works for non bare repositories (i.e. repositories with a
688 690 working copy)
689 691 """
690 692 if self.bare:
691 693 raise RepositoryError('Bare git repos do not have active branches')
692 694
693 695 if self.is_empty():
694 696 return None
695 697
696 698 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
697 699 return stdout.strip()
698 700
699 701 def _checkout(self, branch_name, create=False, force=False):
700 702 """
701 703 Checkout a branch in the working directory.
702 704
703 705 It tries to create the branch if create is True, failing if the branch
704 706 already exists.
705 707
706 708 It only works for non bare repositories (i.e. repositories with a
707 709 working copy)
708 710 """
709 711 if self.bare:
710 712 raise RepositoryError('Cannot checkout branches in a bare git repo')
711 713
712 714 cmd = ['checkout']
713 715 if force:
714 716 cmd.append('-f')
715 717 if create:
716 718 cmd.append('-b')
717 719 cmd.append(branch_name)
718 720 self.run_git_command(cmd, fail_on_stderr=False)
719 721
720 722 def _identify(self):
721 723 """
722 724 Return the current state of the working directory.
723 725 """
724 726 if self.bare:
725 727 raise RepositoryError('Bare git repos do not have active branches')
726 728
727 729 if self.is_empty():
728 730 return None
729 731
730 732 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
731 733 return stdout.strip()
732 734
def _local_clone(self, clone_path, branch_name, source_branch=None):
    """
    Create a local clone of the current repo.
    """
    # N.B.(skreft): the --branch option is required as otherwise the
    # shallow clone will only fetch the active branch.
    target = os.path.abspath(clone_path)
    self.run_git_command(
        ['clone', '--branch', branch_name, self.path, target],
        fail_on_stderr=False)

    # if we get the different source branch, make sure we also fetch it
    # for merge conditions
    if source_branch and source_branch != branch_name:
        # check if the ref exists.
        shadow_repo = GitRepository(target)
        if shadow_repo.get_remote_ref(source_branch):
            self.run_git_command(
                ['fetch', self.path, source_branch], fail_on_stderr=False)
752 754
753 755 def _local_fetch(self, repository_path, branch_name, use_origin=False):
754 756 """
755 757 Fetch a branch from a local repository.
756 758 """
757 759 repository_path = os.path.abspath(repository_path)
758 760 if repository_path == self.path:
759 761 raise ValueError('Cannot fetch from the same repository')
760 762
761 763 if use_origin:
762 764 branch_name = '+{branch}:refs/heads/{branch}'.format(
763 765 branch=branch_name)
764 766
765 767 cmd = ['fetch', '--no-tags', '--update-head-ok',
766 768 repository_path, branch_name]
767 769 self.run_git_command(cmd, fail_on_stderr=False)
768 770
769 771 def _local_reset(self, branch_name):
770 772 branch_name = '{}'.format(branch_name)
771 773 cmd = ['reset', '--hard', branch_name, '--']
772 774 self.run_git_command(cmd, fail_on_stderr=False)
773 775
774 776 def _last_fetch_heads(self):
775 777 """
776 778 Return the last fetched heads that need merging.
777 779
778 780 The algorithm is defined at
779 781 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
780 782 """
781 783 if not self.bare:
782 784 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
783 785 else:
784 786 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
785 787
786 788 heads = []
787 789 with open(fetch_heads_path) as f:
788 790 for line in f:
789 791 if ' not-for-merge ' in line:
790 792 continue
791 793 line = re.sub('\t.*', '', line, flags=re.DOTALL)
792 794 heads.append(line)
793 795
794 796 return heads
795 797
def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
    # NOTE(review): enable_hooks is accepted for interface symmetry with
    # other backends but is not used here — confirm that is intentional.
    return GitRepository(shadow_repository_path)
798 800
799 801 def _local_pull(self, repository_path, branch_name, ff_only=True):
800 802 """
801 803 Pull a branch from a local repository.
802 804 """
803 805 if self.bare:
804 806 raise RepositoryError('Cannot pull into a bare git repository')
805 807 # N.B.(skreft): The --ff-only option is to make sure this is a
806 808 # fast-forward (i.e., we are only pulling new changes and there are no
807 809 # conflicts with our current branch)
808 810 # Additionally, that option needs to go before --no-tags, otherwise git
809 811 # pull complains about it being an unknown flag.
810 812 cmd = ['pull']
811 813 if ff_only:
812 814 cmd.append('--ff-only')
813 815 cmd.extend(['--no-tags', repository_path, branch_name])
814 816 self.run_git_command(cmd, fail_on_stderr=False)
815 817
816 818 def _local_merge(self, merge_message, user_name, user_email, heads):
817 819 """
818 820 Merge the given head into the checked out branch.
819 821
820 822 It will force a merge commit.
821 823
822 824 Currently it raises an error if the repo is empty, as it is not possible
823 825 to create a merge commit in an empty repo.
824 826
825 827 :param merge_message: The message to use for the merge commit.
826 828 :param heads: the heads to merge.
827 829 """
828 830 if self.bare:
829 831 raise RepositoryError('Cannot merge into a bare git repository')
830 832
831 833 if not heads:
832 834 return
833 835
834 836 if self.is_empty():
835 837 # TODO(skreft): do somehting more robust in this case.
836 838 raise RepositoryError(
837 839 'Do not know how to merge into empty repositories yet')
838 840
839 841 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
840 842 # commit message. We also specify the user who is doing the merge.
841 843 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
842 844 '-c', 'user.email=%s' % safe_str(user_email),
843 845 'merge', '--no-ff', '-m', safe_str(merge_message)]
844 846 cmd.extend(heads)
845 847 try:
846 848 output = self.run_git_command(cmd, fail_on_stderr=False)
847 849 except RepositoryError:
848 850 # Cleanup any merge leftovers
849 851 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
850 852 raise
851 853
def _local_push(
        self, source_branch, repository_path, target_branch,
        enable_hooks=False, rc_scm_data=None):
    """
    Push the source_branch to the given repository and target_branch.

    Currently it if the target_branch is not master and the target repo is
    empty, the push will work, but then GitRepository won't be able to find
    the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
    pointing to master, which does not exist).

    It does not run the hooks in the target repo.
    """
    # TODO(skreft): deal with the case in which the target repo is empty,
    # and the target_branch is not master.
    target_repo = GitRepository(repository_path)
    target_checked_out = (
        not target_repo.bare and
        target_repo._current_branch() == target_branch)

    if target_checked_out:
        # Git prevents pushing to the checked out branch, so simulate it
        # by pulling into the target repository.
        target_repo._local_pull(self.path, source_branch)
        return

    gitenv = {}
    if rc_scm_data:
        gitenv['RC_SCM_DATA'] = rc_scm_data
    if not enable_hooks:
        gitenv['RC_SKIP_HOOKS'] = '1'

    self.run_git_command(
        ['push', os.path.abspath(repository_path),
         '%s:%s' % (source_branch, target_branch)],
        fail_on_stderr=False, extra_env=gitenv)
883 885
884 886 def _get_new_pr_branch(self, source_branch, target_branch):
885 887 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
886 888 pr_branches = []
887 889 for branch in self.branches:
888 890 if branch.startswith(prefix):
889 891 pr_branches.append(int(branch[len(prefix):]))
890 892
891 893 if not pr_branches:
892 894 branch_id = 0
893 895 else:
894 896 branch_id = max(pr_branches) + 1
895 897
896 898 return '%s%d' % (prefix, branch_id)
897 899
898 900 def _maybe_prepare_merge_workspace(
899 901 self, repo_id, workspace_id, target_ref, source_ref):
900 902 shadow_repository_path = self._get_shadow_repository_path(
901 903 repo_id, workspace_id)
902 904 if not os.path.exists(shadow_repository_path):
903 905 self._local_clone(
904 906 shadow_repository_path, target_ref.name, source_ref.name)
905 907 log.debug(
906 908 'Prepared shadow repository in %s', shadow_repository_path)
907 909
908 910 return shadow_repository_path
909 911
def _merge_repo(self, repo_id, workspace_id, target_ref,
                source_repo, source_ref, merge_message,
                merger_name, merger_email, dry_run=False,
                use_rebase=False, close_branch=False):
    """
    Test (and, unless dry_run, execute) a merge of source_ref into
    target_ref using a shadow clone of this repository, and return a
    MergeResponse describing the outcome.
    """
    log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
              'rebase' if use_rebase else 'merge', dry_run)
    # bail out early if the caller's view of the target head is stale
    if target_ref.commit_id != self.branches[target_ref.name]:
        log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
                    target_ref.commit_id, self.branches[target_ref.name])
        return MergeResponse(
            False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
            metadata={'target_ref': target_ref})

    shadow_repository_path = self._maybe_prepare_merge_workspace(
        repo_id, workspace_id, target_ref, source_ref)
    shadow_repo = self._get_shadow_instance(shadow_repository_path)

    # checkout source, if it's different. Otherwise we could not
    # fetch proper commits for merge testing
    if source_ref.name != target_ref.name:
        if shadow_repo.get_remote_ref(source_ref.name):
            shadow_repo._checkout(source_ref.name, force=True)

    # checkout target, and fetch changes
    shadow_repo._checkout(target_ref.name, force=True)

    # fetch/reset pull the target, in case it is changed
    # this handles even force changes
    shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
    shadow_repo._local_reset(target_ref.name)

    # Need to reload repo to invalidate the cache, or otherwise we cannot
    # retrieve the last target commit.
    shadow_repo = self._get_shadow_instance(shadow_repository_path)
    if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
        log.warning('Shadow Target ref %s commit mismatch %s vs %s',
                    target_ref, target_ref.commit_id,
                    shadow_repo.branches[target_ref.name])
        return MergeResponse(
            False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
            metadata={'target_ref': target_ref})

    # calculate new branch
    pr_branch = shadow_repo._get_new_pr_branch(
        source_ref.name, target_ref.name)
    log.debug('using pull-request merge branch: `%s`', pr_branch)
    # checkout to temp branch, and fetch changes
    shadow_repo._checkout(pr_branch, create=True)
    try:
        shadow_repo._local_fetch(source_repo.path, source_ref.name)
    except RepositoryError:
        log.exception('Failure when doing local fetch on '
                      'shadow repo: %s', shadow_repo)
        return MergeResponse(
            False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
            metadata={'source_ref': source_ref})

    merge_ref = None
    merge_failure_reason = MergeFailureReason.NONE
    metadata = {}
    try:
        shadow_repo._local_merge(merge_message, merger_name, merger_email,
                                 [source_ref.commit_id])
        merge_possible = True

        # Need to reload repo to invalidate the cache, or otherwise we
        # cannot retrieve the merge commit.
        shadow_repo = GitRepository(shadow_repository_path)
        merge_commit_id = shadow_repo.branches[pr_branch]

        # Set a reference pointing to the merge commit. This reference may
        # be used to easily identify the last successful merge commit in
        # the shadow repository.
        shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
        merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
    except RepositoryError:
        log.exception('Failure when doing local merge on git shadow repo')
        merge_possible = False
        merge_failure_reason = MergeFailureReason.MERGE_FAILED

    if merge_possible and not dry_run:
        try:
            # push the merged result back into the real target repository
            shadow_repo._local_push(
                pr_branch, self.path, target_ref.name, enable_hooks=True,
                rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
            merge_succeeded = True
        except RepositoryError:
            log.exception(
                'Failure when doing local push from the shadow '
                'repository to the target repository at %s.', self.path)
            merge_succeeded = False
            merge_failure_reason = MergeFailureReason.PUSH_FAILED
            metadata['target'] = 'git shadow repo'
            metadata['merge_commit'] = pr_branch
    else:
        merge_succeeded = False

    return MergeResponse(
        merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
        metadata=metadata)
@@ -1,932 +1,932 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, exceptions
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 45 from rhodecode.lib.vcs.compat import configparser
46 46
47 47 hexlify = binascii.hexlify
48 48 nullid = "\0" * 20
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class MercurialRepository(BaseRepository):
54 54 """
55 55 Mercurial repository backend
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'default'
58 58
def __init__(self, repo_path, config=None, create=False, src_url=None,
             do_workspace_checkout=False, with_wire=None, bare=False):
    """
    Raises RepositoryError if repository could not be find at the given
    ``repo_path``.

    :param repo_path: local path of the repository
    :param config: config object containing the repo configuration
    :param create=False: if set to True, would try to create repository if
        it does not exist rather than raising exception
    :param src_url=None: would try to clone repository from given location
    :param do_workspace_checkout=False: sets update of working copy after
        making a clone
    :param with_wire: extra wire-protocol options passed to the remote
    :param bare: not used, compatible with other VCS
    """

    self.path = safe_str(os.path.abspath(repo_path))
    # mercurial since 4.4.X requires certain configuration to be present
    # because sometimes we init the repos with config we need to meet
    # special requirements
    self.config = config if config else self.get_default_config(
        default=[('extensions', 'largefiles', '1')])
    self.with_wire = with_wire

    self._init_repo(create, src_url, do_workspace_checkout)

    # caches
    self._commit_ids = {}  # commit_id -> index; filled by _rebuild_cache
87 87
@LazyProperty
def _remote(self):
    # Lazily-created proxy talking to the vcs server for this repo path.
    return connection.Hg(self.path, self.config, with_wire=self.with_wire)
91 91
@LazyProperty
def commit_ids(self):
    """
    Returns list of commit ids, in ascending order. Being lazy
    attribute allows external tools to inject shas from cache.
    """
    all_ids = self._get_all_commit_ids()
    # keep the id -> index cache in sync with the list we hand out
    self._rebuild_cache(all_ids)
    return all_ids
101 101
102 102 def _rebuild_cache(self, commit_ids):
103 103 self._commit_ids = dict((commit_id, index)
104 104 for index, commit_id in enumerate(commit_ids))
105 105
@LazyProperty
def branches(self):
    # Active, not-closed branches only (see _get_branches defaults).
    return self._get_branches()
109 109
@LazyProperty
def branches_closed(self):
    # Closed branches only; active ones are excluded here.
    return self._get_branches(active=False, closed=True)
113 113
@LazyProperty
def branches_all(self):
    """Active and closed branches merged into a single mapping."""
    combined = {}
    combined.update(self.branches)
    combined.update(self.branches_closed)
    return combined
120 120
121 121 def _get_branches(self, active=True, closed=False):
122 122 """
123 123 Gets branches for this repository
124 124 Returns only not closed active branches by default
125 125
126 126 :param active: return also active branches
127 127 :param closed: return also closed branches
128 128
129 129 """
130 130 if self.is_empty():
131 131 return {}
132 132
133 133 def get_name(ctx):
134 134 return ctx[0]
135 135
136 136 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
137 137 self._remote.branches(active, closed).items()]
138 138
139 139 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
140 140
@LazyProperty
def tags(self):
    """
    Gets tags for this repository
    """
    # mapping of tag name -> commit id; rebuilt by tag()/remove_tag()
    return self._get_tags()
147 147
148 148 def _get_tags(self):
149 149 if self.is_empty():
150 150 return {}
151 151
152 152 def get_name(ctx):
153 153 return ctx[0]
154 154
155 155 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
156 156 self._remote.tags().items()]
157 157
158 158 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
159 159
def tag(self, name, user, commit_id=None, message=None, date=None,
        **kwargs):
    """
    Creates and returns a tag for the given ``commit_id``.

    :param name: name for new tag
    :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
    :param commit_id: commit id for which new tag would be created
    :param message: message of the tag's commit
    :param date: date of tag's commit

    :raises TagAlreadyExistError: if tag with same name already exists
    """
    if name in self.tags:
        raise TagAlreadyExistError("Tag %s already exists" % name)

    commit = self.get_commit(commit_id=commit_id)
    local = kwargs.setdefault('local', False)
    if message is None:
        message = "Added tag %s for commit %s" % (name, commit.short_id)

    date, tz = date_to_timestamp_plus_offset(date)
    self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
    self._remote.invalidate_vcs_cache()

    # Reinitialize tags
    self.tags = self._get_tags()
    return self.get_commit(commit_id=self.tags[name])
192 192
def remove_tag(self, name, user, message=None, date=None):
    """
    Removes tag with the given `name`.

    :param name: name of the tag to be removed
    :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
    :param message: message of the tag's removal commit
    :param date: date of tag's removal commit

    :raises TagDoesNotExistError: if tag with given name does not exists
    """
    if name not in self.tags:
        raise TagDoesNotExistError("Tag %s does not exist" % name)

    if message is None:
        message = "Removed tag %s" % name
    date, tz = date_to_timestamp_plus_offset(date)

    # in mercurial, tagging the null revision removes the tag
    self._remote.tag(name, nullid, message, False, user, date, tz)
    self._remote.invalidate_vcs_cache()
    self.tags = self._get_tags()
215 215
@LazyProperty
def bookmarks(self):
    """
    Gets bookmarks for this repository
    """
    # mapping of bookmark name -> commit id, sorted by name
    return self._get_bookmarks()
222 222
223 223 def _get_bookmarks(self):
224 224 if self.is_empty():
225 225 return {}
226 226
227 227 def get_name(ctx):
228 228 return ctx[0]
229 229
230 230 _bookmarks = [
231 231 (safe_unicode(n), hexlify(h)) for n, h in
232 232 self._remote.bookmarks().items()]
233 233
234 234 return OrderedDict(sorted(_bookmarks, key=get_name))
235 235
    def _get_all_commit_ids(self):
        # only 'visible' commits; obsolete/hidden changesets are excluded
        return self._remote.get_all_commit_ids('visible')
238 238
239 239 def get_diff(
240 240 self, commit1, commit2, path='', ignore_whitespace=False,
241 241 context=3, path1=None):
242 242 """
243 243 Returns (git like) *diff*, as plain text. Shows changes introduced by
244 244 `commit2` since `commit1`.
245 245
246 246 :param commit1: Entry point from which diff is shown. Can be
247 247 ``self.EMPTY_COMMIT`` - in this case, patch showing all
248 248 the changes since empty state of the repository until `commit2`
249 249 :param commit2: Until which commit changes should be shown.
250 250 :param ignore_whitespace: If set to ``True``, would not show whitespace
251 251 changes. Defaults to ``False``.
252 252 :param context: How many lines before/after changed lines should be
253 253 shown. Defaults to ``3``.
254 254 """
255 255 self._validate_diff_commits(commit1, commit2)
256 256 if path1 is not None and path1 != path:
257 257 raise ValueError("Diff of two different paths not supported.")
258 258
259 259 if path:
260 260 file_filter = [self.path, path]
261 261 else:
262 262 file_filter = None
263 263
264 264 diff = self._remote.diff(
265 265 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
266 266 opt_git=True, opt_ignorews=ignore_whitespace,
267 267 context=context)
268 268 return MercurialDiff(diff)
269 269
    def strip(self, commit_id, branch=None):
        """Strip `commit_id` (and descendants) from the repository; no backup is kept."""
        # `branch` is accepted for interface compatibility; it is not used here
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        # the commit index changed; rebuild local caches
        self.commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(self.commit_ids)
276 276
    def verify(self):
        """Run `hg verify` on the repository and return its output."""
        verify = self._remote.verify()

        self._remote.invalidate_vcs_cache()
        return verify
282 282
283 283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
284 284 if commit_id1 == commit_id2:
285 285 return commit_id1
286 286
287 287 ancestors = self._remote.revs_from_revspec(
288 288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
289 289 other_path=repo2.path)
290 290 return repo2[ancestors[0]].raw_id if ancestors else None
291 291
292 292 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
293 293 if commit_id1 == commit_id2:
294 294 commits = []
295 295 else:
296 296 if merge:
297 297 indexes = self._remote.revs_from_revspec(
298 298 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
299 299 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
300 300 else:
301 301 indexes = self._remote.revs_from_revspec(
302 302 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
303 303 commit_id1, other_path=repo2.path)
304 304
305 305 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
306 306 for idx in indexes]
307 307
308 308 return commits
309 309
310 310 @staticmethod
311 311 def check_url(url, config):
312 312 """
313 313 Function will check given url and try to verify if it's a valid
314 314 link. Sometimes it may happened that mercurial will issue basic
315 315 auth request that can cause whole API to hang when used from python
316 316 or other external calls.
317 317
318 318 On failures it'll raise urllib2.HTTPError, exception is also thrown
319 319 when the return code is non 200
320 320 """
321 321 # check first if it's not an local url
322 322 if os.path.isdir(url) or url.startswith('file:'):
323 323 return True
324 324
325 325 # Request the _remote to verify the url
326 326 return connection.Hg.check_url(url, config.serialize())
327 327
328 328 @staticmethod
329 329 def is_valid_repository(path):
330 330 return os.path.isdir(os.path.join(path, '.hg'))
331 331
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `do_workspace_checkout` flag.

        :param create: create a new repository if none exists at self.path
        :param src_url: optional source url to clone from
        :param do_workspace_checkout: update working copy after clone
        :raises RepositoryError: when `create` is set but path already exists
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            url = str(self._get_url(src_url))
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)

        # open (or create) the local repository handle on the vcsserver
        self._remote.localrepository(create)
361 361
    @LazyProperty
    def in_memory_commit(self):
        # lazily created helper used to build commits without a working copy
        return MercurialInMemoryCommit(self)
365 365
366 366 @LazyProperty
367 367 def description(self):
368 368 description = self._remote.get_config_value(
369 369 'web', 'description', untrusted=True)
370 370 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
371 371
372 372 @LazyProperty
373 373 def contact(self):
374 374 contact = (
375 375 self._remote.get_config_value("web", "contact") or
376 376 self._remote.get_config_value("ui", "username"))
377 377 return safe_unicode(contact or self.DEFAULT_CONTACT)
378 378
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            # date of the latest commit, when one exists
            return self.get_commit().date
        except RepositoryError:
            # empty repository: fall back to filesystem modification time
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
390 390
391 391 def _get_fs_mtime(self):
392 392 # fallback to filesystem
393 393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
394 394 st_path = os.path.join(self.path, '.hg', "store")
395 395 if os.path.exists(cl_path):
396 396 return os.stat(cl_path).st_mtime
397 397 else:
398 398 return os.stat(st_path).st_mtime
399 399
    def _get_url(self, url):
        """
        Returns normalized url. If schema is not given, would fall
        to filesystem
        (``file:///``) schema.
        """
        # NOTE: Python 2 semantics — str/bytes comparison and
        # urllib.pathname2url; would need changes for Python 3
        url = url.encode('utf8')
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.pathname2url(url)
        return url
410 410
411 411 def get_hook_location(self):
412 412 """
413 413 returns absolute path to location where hooks are stored
414 414 """
415 415 return os.path.join(self.path, '.hg', '.hgrc')
416 416
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        :param commit_id: commit hash or symbolic name; defaults to "tip"
            when neither id nor idx is given
        :param commit_idx: numeric index into the commit list
        :param pre_load: optional list of attributes to pre-load on the commit
        :param translate_tag: accepted for API compatibility with other
            backends; the value is never read by this implementation
        :raises EmptyRepositoryError: when the repository has no commits
        :raises CommitDoesNotExistError: when lookup fails
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # fast path: id already present in the cached index
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                # fall through to remote lookup below
                pass
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                id_ = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize negative index to its positive equivalent
                    commit_idx += len(self.commit_ids)
                return MercurialCommit(
                    self, id_, commit_idx, pre_load=pre_load)
            except IndexError:
                # let the remote lookup raise a proper error for this idx
                commit_id = commit_idx
        else:
            commit_id = "tip"

        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            # re-raise with a message that includes the repository
            msg = "Commit %s does not exist for %s" % (
                commit_id, self)
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
456 456
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :param translate_tags: accepted for API compatibility with other
          backends; the value is never read by this implementation
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
          ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        # NOTE: branch_ancestors is never reassigned below, so only the
        # plain branch() filter branch is ever taken
        branch_ancestors = False
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end position inclusive for slicing below
            end_pos += 1

        # build a mercurial revset expression from the given filters
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
539 539
    def pull(self, url, commit_ids=None):
        """
        Pull changes from external location.

        :param url: location to pull from; normalized via _get_url
        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        """
        url = self._get_url(url)
        self._remote.pull(url, commit_ids=commit_ids)
        self._remote.invalidate_vcs_cache()
550 550
    def fetch(self, url, commit_ids=None):
        """
        Backward compatibility with GIT fetch==pull
        """
        # mercurial has no separate fetch concept; delegate to pull
        return self.pull(url, commit_ids=commit_ids)
556 556
    def push(self, url):
        """Push all changes to the repository at `url`."""
        url = self._get_url(url)
        self._remote.sync_push(url)
560 560
    def _local_clone(self, clone_path):
        """
        Create a local clone of the current repo.
        """
        # hooks are disabled so the clone does not trigger repo callbacks
        self._remote.clone(self.path, clone_path, update_after_clone=True,
                           hooks=False)
567 567
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param clean: discard uncommitted changes when True
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)
574 574
575 575 def _identify(self):
576 576 """
577 577 Return the current state of the working directory.
578 578 """
579 579 return self._remote.identify().strip().rstrip('+')
580 580
581 581 def _heads(self, branch=None):
582 582 """
583 583 Return the commit ids of the repository heads.
584 584 """
585 585 return self._remote.heads(branch=branch).strip().split(' ')
586 586
    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.
        """
        return self._remote.ancestor(revision1, revision2)
592 592
    def _local_push(
            self, revision, repository_path, push_branches=False,
            enable_hooks=False):
        """
        Push the given revision to the specified repository.

        :param revision: revision to push
        :param repository_path: filesystem path of the target repository
        :param push_branches: allow to create branches in the target repo.
        :param enable_hooks: run repository hooks on the target when True
        """
        self._remote.push(
            [revision], repository_path, hooks=enable_hooks,
            push_branches=push_branches)
604 604
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.
        """
        # check out the merge target first; clean discards local changes
        self._update(target_ref.commit_id, clean=True)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref.commit_id, True

        if use_rebase:
            try:
                # temporary bookmark so we can check out the rebased result
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError:
                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                raise
660 660
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.
        """
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            # an empty commit with close_branch=True marks the branch closed
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
682 682
683 683 def _is_the_same_branch(self, target_ref, source_ref):
684 684 return (
685 685 self._get_branch_name(target_ref) ==
686 686 self._get_branch_name(source_ref))
687 687
688 688 def _get_branch_name(self, ref):
689 689 if ref.type == 'branch':
690 690 return ref.name
691 691 return self._remote.ctx_branch(ref.commit_id)
692 692
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
        """Ensure a shadow clone exists for this workspace; return its path."""
        shadow_repository_path = self._get_shadow_repository_path(
            repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            # first use of this workspace: create the shadow clone
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path
703 703
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge `source_ref` of `source_repo` into `target_ref` of this repo
        using a shadow repository, then (unless `dry_run`) push the result
        back to this repository. Returns a MergeResponse describing whether
        the merge is possible and whether it succeeded.
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # merging is only allowed into a current head of the target
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            # refuse when the target branch has multiple heads
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                heads = ','.join(self._heads(target_ref.name))
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata={'heads': heads})
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self._get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError:
                log.exception('Failure when doing local merge on hg shadow repo')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self._get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
846 846
    def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
        """Open the shadow repository, with hooks stripped unless requested."""
        config = self.config.copy()
        if not enable_hooks:
            # shadow operations must not trigger the origin repo's hooks
            config.clear_section('hooks')
        return MercurialRepository(shadow_repository_path, config)
852 852
853 853 def _validate_pull_reference(self, reference):
854 854 if not (reference.name in self.bookmarks or
855 855 reference.name in self.branches or
856 856 self.get_commit(reference.commit_id)):
857 857 raise CommitDoesNotExistError(
858 858 'Unknown branch, bookmark or commit id')
859 859
860 860 def _local_pull(self, repository_path, reference):
861 861 """
862 862 Fetch a branch, bookmark or commit from a local repository.
863 863 """
864 864 repository_path = os.path.abspath(repository_path)
865 865 if repository_path == self.path:
866 866 raise ValueError('Cannot pull from the same repository')
867 867
868 868 reference_type_to_option_name = {
869 869 'book': 'bookmark',
870 870 'branch': 'branch',
871 871 }
872 872 option_name = reference_type_to_option_name.get(
873 873 reference.type, 'revision')
874 874
875 875 if option_name == 'revision':
876 876 ref = reference.commit_id
877 877 else:
878 878 ref = reference.name
879 879
880 880 options = {option_name: [ref]}
881 881 self._remote.pull_cmd(repository_path, hooks=False, **options)
882 882 self._remote.invalidate_vcs_cache()
883 883
    def bookmark(self, bookmark, revision=None):
        """Set `bookmark` to `revision` (or the current revision when None)."""
        # py2: vcsserver expects byte strings
        if isinstance(bookmark, unicode):
            bookmark = safe_str(bookmark)
        self._remote.bookmark(bookmark, revision=revision)
        self._remote.invalidate_vcs_cache()
889 889
    def get_path_permissions(self, username):
        """
        Build a path permission checker for `username` from the repo-local
        `.hg/hgacl` file ([narrowhgacl] section). Returns None when the file
        does not exist; raises RepositoryRequirementError when it cannot be
        parsed.
        """
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            # NOTE: closure over `hgacl`, which is assigned below before
            # this helper is first called
            svalue = None
            try:
                svalue = hgacl.get('narrowhgacl', username + suffix)
            except configparser.NoOptionError:
                try:
                    # fall back to the 'default' user entry
                    svalue = hgacl.get('narrowhgacl', 'default' + suffix)
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                # literal paths also match everything below them
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None
926 926
927 927
928 928 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
929 929
930 930 def _commit_factory(self, commit_id):
931 931 return self.repo.get_commit(
932 932 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,343 +1,343 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SVN repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import date_astimestamp
33 33 from rhodecode.lib.utils import safe_str, safe_unicode
34 34 from rhodecode.lib.vcs import connection, path as vcspath
35 35 from rhodecode.lib.vcs.backends import base
36 36 from rhodecode.lib.vcs.backends.svn.commit import (
37 37 SubversionCommit, _date_from_svn_properties)
38 38 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
39 39 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
40 40 from rhodecode.lib.vcs.conf import settings
41 41 from rhodecode.lib.vcs.exceptions import (
42 42 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
43 43 VCSError, NodeDoesNotExistError)
44 44
45 45
46 46 log = logging.getLogger(__name__)
47 47
48 48
49 49 class SubversionRepository(base.BaseRepository):
50 50 """
51 51 Subversion backend implementation
52 52
53 53 .. important::
54 54
55 55 It is very important to distinguish the commit index and the commit id
56 56 which is assigned by Subversion. The first one is always handled as an
57 57 `int` by this implementation. The commit id assigned by Subversion on
58 58 the other side will always be a `str`.
59 59
60 60 There is a specific trap since the first commit will have the index
61 61 ``0`` but the svn id will be ``"1"``.
62 62
63 63 """
64 64
65 65 # Note: Subversion does not really have a default branch name.
66 66 DEFAULT_BRANCH_NAME = None
67 67
68 68 contact = base.BaseRepository.DEFAULT_CONTACT
69 69 description = base.BaseRepository.DEFAULT_DESCRIPTION
70 70
71 71 def __init__(self, repo_path, config=None, create=False, src_url=None, bare=False,
72 72 **kwargs):
73 73 self.path = safe_str(os.path.abspath(repo_path))
74 74 self.config = config if config else self.get_default_config()
75 75
76 76 self._init_repo(create, src_url)
77 77
78 78 @LazyProperty
79 79 def _remote(self):
80 80 return connection.Svn(self.path, self.config)
81 81
82 82 def _init_repo(self, create, src_url):
83 83 if create and os.path.exists(self.path):
84 84 raise RepositoryError(
85 85 "Cannot create repository at %s, location already exist"
86 86 % self.path)
87 87
88 88 if create:
89 89 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
90 90 if src_url:
91 91 src_url = _sanitize_url(src_url)
92 92 self._remote.import_remote_repository(src_url)
93 93 else:
94 94 self._check_path()
95 95
96 96 @LazyProperty
97 97 def commit_ids(self):
98 98 head = self._remote.lookup(None)
99 99 return [str(r) for r in xrange(1, head + 1)]
100 100
101 101 @LazyProperty
102 102 def branches(self):
103 103 return self._tags_or_branches('vcs_svn_branch')
104 104
105 105 @LazyProperty
106 106 def branches_closed(self):
107 107 return {}
108 108
109 109 @LazyProperty
110 110 def bookmarks(self):
111 111 return {}
112 112
113 113 @LazyProperty
114 114 def branches_all(self):
115 115 # TODO: johbo: Implement proper branch support
116 116 all_branches = {}
117 117 all_branches.update(self.branches)
118 118 all_branches.update(self.branches_closed)
119 119 return all_branches
120 120
121 121 @LazyProperty
122 122 def tags(self):
123 123 return self._tags_or_branches('vcs_svn_tag')
124 124
125 125 def _tags_or_branches(self, config_section):
126 126 found_items = {}
127 127
128 128 if self.is_empty():
129 129 return {}
130 130
131 131 for pattern in self._patterns_from_section(config_section):
132 132 pattern = vcspath.sanitize(pattern)
133 133 tip = self.get_commit()
134 134 try:
135 135 if pattern.endswith('*'):
136 136 basedir = tip.get_node(vcspath.dirname(pattern))
137 137 directories = basedir.dirs
138 138 else:
139 139 directories = (tip.get_node(pattern), )
140 140 except NodeDoesNotExistError:
141 141 continue
142 142 found_items.update(
143 143 (safe_unicode(n.path),
144 144 self.commit_ids[-1])
145 145 for n in directories)
146 146
147 147 def get_name(item):
148 148 return item[0]
149 149
150 150 return OrderedDict(sorted(found_items.items(), key=get_name))
151 151
152 152 def _patterns_from_section(self, section):
153 153 return (pattern for key, pattern in self.config.items(section))
154 154
155 155 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
156 156 if self != repo2:
157 157 raise ValueError(
158 158 "Subversion does not support getting common ancestor of"
159 159 " different repositories.")
160 160
161 161 if int(commit_id1) < int(commit_id2):
162 162 return commit_id1
163 163 return commit_id2
164 164
165 165 def verify(self):
166 166 verify = self._remote.verify()
167 167
168 168 self._remote.invalidate_vcs_cache()
169 169 return verify
170 170
171 171 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
172 172 # TODO: johbo: Implement better comparison, this is a very naive
173 173 # version which does not allow to compare branches, tags or folders
174 174 # at all.
175 175 if repo2 != self:
176 176 raise ValueError(
177 177 "Subversion does not support comparison of of different "
178 178 "repositories.")
179 179
180 180 if commit_id1 == commit_id2:
181 181 return []
182 182
183 183 commit_idx1 = self._get_commit_idx(commit_id1)
184 184 commit_idx2 = self._get_commit_idx(commit_id2)
185 185
186 186 commits = [
187 187 self.get_commit(commit_idx=idx)
188 188 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
189 189
190 190 return commits
191 191
192 192 def _get_commit_idx(self, commit_id):
193 193 try:
194 194 svn_rev = int(commit_id)
195 195 except:
196 196 # TODO: johbo: this might be only one case, HEAD, check this
197 197 svn_rev = self._remote.lookup(commit_id)
198 198 commit_idx = svn_rev - 1
199 199 if commit_idx >= len(self.commit_ids):
200 200 raise CommitDoesNotExistError(
201 201 "Commit at index %s does not exist." % (commit_idx, ))
202 202 return commit_idx
203 203
204 204 @staticmethod
205 205 def check_url(url, config):
206 206 """
207 207 Check if `url` is a valid source to import a Subversion repository.
208 208 """
209 209 # convert to URL if it's a local directory
210 210 if os.path.isdir(url):
211 211 url = 'file://' + urllib.pathname2url(url)
212 212 return connection.Svn.check_url(url, config.serialize())
213 213
214 214 @staticmethod
215 215 def is_valid_repository(path):
216 216 try:
217 217 SubversionRepository(path)
218 218 return True
219 219 except VCSError:
220 220 pass
221 221 return False
222 222
223 223 def _check_path(self):
224 224 if not os.path.exists(self.path):
225 225 raise VCSError('Path "%s" does not exist!' % (self.path, ))
226 226 if not self._remote.is_path_valid_repository(self.path):
227 227 raise VCSError(
228 228 'Path "%s" does not contain a Subversion repository' %
229 229 (self.path, ))
230 230
231 231 @LazyProperty
232 232 def last_change(self):
233 233 """
234 234 Returns last change made on this repository as
235 235 `datetime.datetime` object.
236 236 """
237 237 # Subversion always has a first commit which has id "0" and contains
238 238 # what we are looking for.
239 239 last_id = len(self.commit_ids)
240 240 properties = self._remote.revision_properties(last_id)
241 241 return _date_from_svn_properties(properties)
242 242
243 243 @LazyProperty
244 244 def in_memory_commit(self):
245 245 return SubversionInMemoryCommit(self)
246 246
247 247 def get_hook_location(self):
248 248 """
249 249 returns absolute path to location where hooks are stored
250 250 """
251 251 return os.path.join(self.path, 'hooks')
252 252
253 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
253 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
254 254 if self.is_empty():
255 255 raise EmptyRepositoryError("There are no commits yet")
256 256 if commit_id is not None:
257 257 self._validate_commit_id(commit_id)
258 258 elif commit_idx is not None:
259 259 self._validate_commit_idx(commit_idx)
260 260 try:
261 261 commit_id = self.commit_ids[commit_idx]
262 262 except IndexError:
263 263 raise CommitDoesNotExistError
264 264
265 265 commit_id = self._sanitize_commit_id(commit_id)
266 266 commit = SubversionCommit(repository=self, commit_id=commit_id)
267 267 return commit
268 268
269 269 def get_commits(
270 270 self, start_id=None, end_id=None, start_date=None, end_date=None,
271 branch_name=None, show_hidden=False, pre_load=None):
271 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
272 272 if self.is_empty():
273 273 raise EmptyRepositoryError("There are no commit_ids yet")
274 274 self._validate_branch_name(branch_name)
275 275
276 276 if start_id is not None:
277 277 self._validate_commit_id(start_id)
278 278 if end_id is not None:
279 279 self._validate_commit_id(end_id)
280 280
281 281 start_raw_id = self._sanitize_commit_id(start_id)
282 282 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
283 283 end_raw_id = self._sanitize_commit_id(end_id)
284 284 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
285 285
286 286 if None not in [start_id, end_id] and start_pos > end_pos:
287 287 raise RepositoryError(
288 288 "Start commit '%s' cannot be after end commit '%s'" %
289 289 (start_id, end_id))
290 290 if end_pos is not None:
291 291 end_pos += 1
292 292
293 293 # Date based filtering
294 294 if start_date or end_date:
295 295 start_raw_id, end_raw_id = self._remote.lookup_interval(
296 296 date_astimestamp(start_date) if start_date else None,
297 297 date_astimestamp(end_date) if end_date else None)
298 298 start_pos = start_raw_id - 1
299 299 end_pos = end_raw_id
300 300
301 301 commit_ids = self.commit_ids
302 302
303 303 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
304 304 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
305 305 svn_rev = long(self.commit_ids[-1])
306 306 commit_ids = self._remote.node_history(
307 307 path=branch_name, revision=svn_rev, limit=None)
308 308 commit_ids = [str(i) for i in reversed(commit_ids)]
309 309
310 310 if start_pos or end_pos:
311 311 commit_ids = commit_ids[start_pos:end_pos]
312 312 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
313 313
314 314 def _sanitize_commit_id(self, commit_id):
315 315 if commit_id and commit_id.isdigit():
316 316 if int(commit_id) <= len(self.commit_ids):
317 317 return commit_id
318 318 else:
319 319 raise CommitDoesNotExistError(
320 320 "Commit %s does not exist." % (commit_id, ))
321 321 if commit_id not in [
322 322 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
323 323 raise CommitDoesNotExistError(
324 324 "Commit id %s not understood." % (commit_id, ))
325 325 svn_rev = self._remote.lookup('HEAD')
326 326 return str(svn_rev)
327 327
328 328 def get_diff(
329 329 self, commit1, commit2, path=None, ignore_whitespace=False,
330 330 context=3, path1=None):
331 331 self._validate_diff_commits(commit1, commit2)
332 332 svn_rev1 = long(commit1.raw_id)
333 333 svn_rev2 = long(commit2.raw_id)
334 334 diff = self._remote.diff(
335 335 svn_rev1, svn_rev2, path1=path1, path2=path,
336 336 ignore_whitespace=ignore_whitespace, context=context)
337 337 return SubversionDiff(diff)
338 338
339 339
340 340 def _sanitize_url(url):
341 341 if '://' not in url:
342 342 url = 'file://' + urllib.pathname2url(url)
343 343 return url
General Comments 0
You need to be logged in to leave comments. Login now