##// END OF EJS Templates

Compare Commits

Target:

Source:

Time Author Commit Description
No commits in this compare
@@ -1,1549 +1,1545 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24
25 25 import collections
26 26 import datetime
27 27 import itertools
28 28 import logging
29 29 import os
30 30 import time
31 31 import warnings
32 32
33 33 from zope.cachedescriptors.property import Lazy as LazyProperty
34 34
35 35 from rhodecode.lib.utils2 import safe_str, safe_unicode
36 36 from rhodecode.lib.vcs import connection
37 37 from rhodecode.lib.vcs.utils import author_name, author_email
38 38 from rhodecode.lib.vcs.conf import settings
39 39 from rhodecode.lib.vcs.exceptions import (
40 40 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
41 41 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
42 42 NodeDoesNotExistError, NodeNotChangedError, VCSError,
43 43 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
44 44 RepositoryError)
45 45
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
# Stat modes of regular and executable files as stored in VCS trees.
# Written with the 0o prefix (PEP 3127): identical values to the legacy
# 0100644/0100755 literals, but valid on Python 2.6+ AND Python 3, where
# the old leading-zero form is a syntax error.
FILEMODE_DEFAULT = 0o100644
FILEMODE_EXECUTABLE = 0o100755

# A named VCS reference; `type` is the reference kind (e.g. branch/tag),
# `name` its label and `commit_id` the commit it resolves to.
Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))

# Outcome of a server-side merge attempt; `failure_reason` holds one of
# the MergeFailureReason values defined in this module.
MergeResponse = collections.namedtuple(
    'MergeResponse',
    ('possible', 'executed', 'merge_ref', 'failure_reason'))
57 57
58 58
class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads. That does not allow to correctly
    # identify the target location. This could only happen for mercurial
    # branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked.
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # An involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11
110
111 107
class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    WRONG_REF_TYPE = 3

    # Deprecated misspelled alias of WRONG_REF_TYPE. Kept so existing callers
    # (and values stored in the database under this name) keep working; do
    # not use in new code.
    WRONG_REF_TPYE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5
140 136
141 137
class BaseRepository(object):
    """
    Base Repository for final backends

    .. attribute:: DEFAULT_BRANCH_NAME

       name of default branch (i.e. "trunk" for svn, "master" for git etc.

    .. attribute:: commit_ids

       list of all available commit ids, in ascending order

    .. attribute:: path

       absolute path to the repository

    .. attribute:: bookmarks

       Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
       there are no bookmarks or the backend implementation does not support
       bookmarks.

    .. attribute:: tags

       Mapping from name to :term:`Commit ID` of the tag.

    """

    DEFAULT_BRANCH_NAME = None
    DEFAULT_CONTACT = u"Unknown"
    DEFAULT_DESCRIPTION = u"unknown"
    # Pseudo commit id of the empty repository state: 40 zeros, the length
    # of a sha1 hex digest.
    EMPTY_COMMIT_ID = '0' * 40

    # Absolute filesystem path; set by concrete backends.
    path = None

    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be find at the given ``repo_path`` or directory at ``repo_path``
        exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
          would be cloned; requires ``create`` parameter to be set to True -
          raises RepositoryError if src_url is set and create evaluates to
          False
        """
        raise NotImplementedError

    def __repr__(self):
        return '<%s at %s>' % (self.__class__.__name__, self.path)

    def __len__(self):
        return self.count()

    def __eq__(self, other):
        # Two repository objects are equal when they are the same backend
        # class and point at the same filesystem path.
        same_instance = isinstance(other, self.__class__)
        return same_instance and other.path == self.path

    def __ne__(self, other):
        return not self.__eq__(other)

    @LazyProperty
    def EMPTY_COMMIT(self):
        # Sentinel commit representing the empty repository state.
        return EmptyCommit(self.EMPTY_COMMIT_ID)

    @LazyProperty
    def alias(self):
        # Reverse lookup in settings.BACKENDS: find the backend key (e.g.
        # the scheme alias) whose dotted class path ends with this class'
        # name. Returns None implicitly when no backend matches.
        for k, v in settings.BACKENDS.items():
            if v.split('.')[-1] == str(self.__class__.__name__):
                return k

    @LazyProperty
    def name(self):
        # Repository name derived from the last path segment, as unicode.
        return safe_unicode(os.path.basename(self.path))

    @LazyProperty
    def description(self):
        raise NotImplementedError

    def refs(self):
        """
        returns a `dict` with branches, bookmarks, tags, and closed_branches
        for this repository
        """
        raise NotImplementedError

    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def size(self):
        """
        Returns combined size in bytes for all repository files
        """
        # Delegates to the size of the most recent commit.
        tip = self.get_commit()
        return tip.size

    def size_at_commit(self, commit_id):
        # Combined size in bytes of all files at the given commit.
        commit = self.get_commit(commit_id)
        return commit.size

    def is_empty(self):
        # True when the repository has no commits at all.
        return not bool(self.commit_ids)

    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link.
        """
        raise NotImplementedError

    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend
        """
        raise NotImplementedError

    # ==========================================================================
    # COMMITS
    # ==========================================================================

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError

    def __iter__(self):
        # Iterates all commits in ascending order (order of commit_ids).
        for commit_id in self.commit_ids:
            yield self.get_commit(commit_id=commit_id)

    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, pre_load=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date:
        :param end_date:
        :param branch_name:
        :param pre_load:
        """
        raise NotImplementedError

    def __getitem__(self, key):
        """
        Allows index based access to the commit objects of this repository.
        """
        # Commonly used attributes are pre-loaded to avoid one backend
        # round-trip per attribute later on.
        pre_load = ["author", "branch", "date", "message", "parents"]
        if isinstance(key, slice):
            return self._get_range(key, pre_load)
        return self.get_commit(commit_idx=key, pre_load=pre_load)

    def _get_range(self, slice_obj, pre_load):
        # Generator backing slice access in __getitem__.
        for commit_id in self.commit_ids.__getitem__(slice_obj):
            yield self.get_commit(commit_id=commit_id, pre_load=pre_load)

    def count(self):
        return len(self.commit_ids)

    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        raise NotImplementedError

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        raise NotImplementedError

    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
          file. If `path1` is also set, this value is only associated to
          `commit2`.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
          parameter works only for backends which support diff generation for
          different paths. Other backends will raise a `ValueError` if `path1`
          is set and has a different value than `path`.
        """
        raise NotImplementedError

    def strip(self, commit_id, branch=None):
        """
        Strip given commit_id from the repository
        """
        raise NotImplementedError

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return a latest common ancestor commit if one exists for this repo
        `commit_id1` vs `commit_id2` from `repo2`.

        :param commit_id1: Commit it from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        """
        raise NotImplementedError

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. Doing a normal compare (``merge=False``), ``None``
        will be returned as ancestor.

        :param commit_id1: Commit it from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
          returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def merge(self, target_ref, source_repo, source_ref, workspace_id,
              user_name='', user_email='', message='', dry_run=False,
              use_rebase=False):
        """
        Merge the revisions specified in `source_ref` from `source_repo`
        onto the `target_ref` of this repository.

        `source_ref` and `target_ref` are named tupls with the following
        fields `type`, `name` and `commit_id`.

        Returns a MergeResponse named tuple with the following fields
        'possible', 'executed', 'source_commit', 'target_commit',
        'merge_commit'.

        :param target_ref: `target_ref` points to the commit on top of which
          the `source_ref` should be merged.
        :param source_repo: The repository that contains the commits to be
          merged.
        :param source_ref: `source_ref` points to the topmost commit from
          the `source_repo` which should be merged.
        :param workspace_id: `workspace_id` unique identifier.
        :param user_name: Merge commit `user_name`.
        :param user_email: Merge commit `user_email`.
        :param message: Merge commit `message`.
        :param dry_run: If `True` the merge will not take place.
        :param use_rebase: If `True` commits from the source will be rebased
          on top of the target instead of being merged.
        """
        if dry_run:
            # Placeholder identity/message: the backend still needs non-empty
            # values even though nothing will be committed.
            message = message or 'dry_run_merge_message'
            user_email = user_email or 'dry-run-merge@rhodecode.com'
            user_name = user_name or 'Dry-Run User'
        else:
            # For a real merge all three identity fields are mandatory.
            if not user_name:
                raise ValueError('user_name cannot be empty')
            if not user_email:
                raise ValueError('user_email cannot be empty')
            if not message:
                raise ValueError('message cannot be empty')

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            workspace_id, target_ref)

        try:
            return self._merge_repo(
                shadow_repository_path, target_ref, source_repo,
                source_ref, message, user_name, user_email, dry_run=dry_run,
                use_rebase=use_rebase)
        except RepositoryError:
            # Any backend failure is reported as a non-executed merge with
            # reason UNKNOWN; details go to the log only.
            log.exception(
                'Unexpected failure when running merge, dry-run=%s',
                dry_run)
            return MergeResponse(
                False, False, None, MergeFailureReason.UNKNOWN)

    def _merge_repo(self, shadow_repository_path, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False, use_rebase=False):
        """Internal implementation of merge."""
        raise NotImplementedError

    def _maybe_prepare_merge_workspace(self, workspace_id, target_ref):
        """
        Create the merge workspace.

        :param workspace_id: `workspace_id` unique identifier.
        """
        raise NotImplementedError

    def cleanup_merge_workspace(self, workspace_id):
        """
        Remove merge workspace.

        This function MUST not fail in case there is no workspace associated to
        the given `workspace_id`.

        :param workspace_id: `workspace_id` unique identifier.
        """
        raise NotImplementedError

    # ========== #
    # COMMIT API #
    # ========== #

    @LazyProperty
    def in_memory_commit(self):
        """
        Returns :class:`InMemoryCommit` object for this repository.
        """
        raise NotImplementedError

    # ======================== #
    # UTILITIES FOR SUBCLASSES #
    # ======================== #

    def _validate_diff_commits(self, commit1, commit2):
        """
        Validates that the given commits are related to this repository.

        Intended as a utility for sub classes to have a consistent validation
        of input parameters in methods like :meth:`get_diff`.
        """
        self._validate_commit(commit1)
        self._validate_commit(commit2)
        # A diff of two empty commits has no meaningful content.
        if (isinstance(commit1, EmptyCommit) and
                isinstance(commit2, EmptyCommit)):
            raise ValueError("Cannot compare two empty commits")

    def _validate_commit(self, commit):
        if not isinstance(commit, BaseCommit):
            raise TypeError(
                "%s is not of type BaseCommit" % repr(commit))
        # EmptyCommit instances are accepted regardless of their repository.
        if commit.repository != self and not isinstance(commit, EmptyCommit):
            raise ValueError(
                "Commit %s must be a valid commit from this repository %s, "
                "related to this repository instead %s." %
                (commit, self, commit.repository))

    def _validate_commit_id(self, commit_id):
        # NOTE: `basestring` makes this module Python 2 only.
        if not isinstance(commit_id, basestring):
            raise TypeError("commit_id must be a string value")

    def _validate_commit_idx(self, commit_idx):
        # NOTE: `long` makes this module Python 2 only.
        if not isinstance(commit_idx, (int, long)):
            raise TypeError("commit_idx must be a numeric value")

    def _validate_branch_name(self, branch_name):
        if branch_name and branch_name not in self.branches_all:
            msg = ("Branch %s not found in %s" % (branch_name, self))
            raise BranchDoesNotExistError(msg)

    #
    # Supporting deprecated API parts
    # TODO: johbo: consider to move this into a mixin
    #

    @property
    def EMPTY_CHANGESET(self):
        warnings.warn(
            "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
        return self.EMPTY_COMMIT_ID

    @property
    def revisions(self):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        return self.commit_ids

    @revisions.setter
    def revisions(self, value):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        self.commit_ids = value

    def get_changeset(self, revision=None, pre_load=None):
        # Deprecated shim: a string `revision` is treated as a commit id,
        # anything else as a numeric commit index.
        warnings.warn("Use get_commit instead", DeprecationWarning)
        commit_id = None
        commit_idx = None
        if isinstance(revision, basestring):
            commit_id = revision
        else:
            commit_idx = revision
        return self.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)

    def get_changesets(
            self, start=None, end=None, start_date=None, end_date=None,
            branch_name=None, pre_load=None):
        # Deprecated shim over get_commits; translates index-or-id style
        # `start`/`end` values to plain commit ids first.
        warnings.warn("Use get_commits instead", DeprecationWarning)
        start_id = self._revision_to_commit(start)
        end_id = self._revision_to_commit(end)
        return self.get_commits(
            start_id=start_id, end_id=end_id, start_date=start_date,
            end_date=end_date, branch_name=branch_name, pre_load=pre_load)

    def _revision_to_commit(self, revision):
        """
        Translates a revision to a commit_id

        Helps to support the old changeset based API which allows to use
        commit ids and commit indices interchangeable.
        """
        if revision is None:
            return revision

        if isinstance(revision, basestring):
            commit_id = revision
        else:
            # Numeric index into the ascending commit_ids list.
            commit_id = self.commit_ids[revision]
        return commit_id

    @property
    def in_memory_changeset(self):
        warnings.warn("Use in_memory_commit instead", DeprecationWarning)
        return self.in_memory_commit
601 597
602 598
603 599 class BaseCommit(object):
604 600 """
605 601 Each backend should implement it's commit representation.
606 602
607 603 **Attributes**
608 604
609 605 ``repository``
610 606 repository object within which commit exists
611 607
612 608 ``id``
613 609 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
614 610 just ``tip``.
615 611
616 612 ``raw_id``
617 613 raw commit representation (i.e. full 40 length sha for git
618 614 backend)
619 615
620 616 ``short_id``
621 617 shortened (if apply) version of ``raw_id``; it would be simple
622 618 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
623 619 as ``raw_id`` for subversion
624 620
625 621 ``idx``
626 622 commit index
627 623
628 624 ``files``
629 625 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
630 626
631 627 ``dirs``
632 628 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
633 629
634 630 ``nodes``
635 631 combined list of ``Node`` objects
636 632
637 633 ``author``
638 634 author of the commit, as unicode
639 635
640 636 ``message``
641 637 message of the commit, as unicode
642 638
643 639 ``parents``
644 640 list of parent commits
645 641
646 642 """
647 643
648 644 branch = None
649 645 """
650 646 Depending on the backend this should be set to the branch name of the
651 647 commit. Backends not supporting branches on commits should leave this
652 648 value as ``None``.
653 649 """
654 650
655 651 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
656 652 """
657 653 This template is used to generate a default prefix for repository archives
658 654 if no prefix has been specified.
659 655 """
660 656
661 657 def __str__(self):
662 658 return '<%s at %s:%s>' % (
663 659 self.__class__.__name__, self.idx, self.short_id)
664 660
665 661 def __repr__(self):
666 662 return self.__str__()
667 663
668 664 def __unicode__(self):
669 665 return u'%s:%s' % (self.idx, self.short_id)
670 666
671 667 def __eq__(self, other):
672 668 same_instance = isinstance(other, self.__class__)
673 669 return same_instance and self.raw_id == other.raw_id
674 670
675 671 def __json__(self):
676 672 parents = []
677 673 try:
678 674 for parent in self.parents:
679 675 parents.append({'raw_id': parent.raw_id})
680 676 except NotImplementedError:
681 677 # empty commit doesn't have parents implemented
682 678 pass
683 679
684 680 return {
685 681 'short_id': self.short_id,
686 682 'raw_id': self.raw_id,
687 683 'revision': self.idx,
688 684 'message': self.message,
689 685 'date': self.date,
690 686 'author': self.author,
691 687 'parents': parents,
692 688 'branch': self.branch
693 689 }
694 690
695 691 @LazyProperty
696 692 def last(self):
697 693 """
698 694 ``True`` if this is last commit in repository, ``False``
699 695 otherwise; trying to access this attribute while there is no
700 696 commits would raise `EmptyRepositoryError`
701 697 """
702 698 if self.repository is None:
703 699 raise CommitError("Cannot check if it's most recent commit")
704 700 return self.raw_id == self.repository.commit_ids[-1]
705 701
706 702 @LazyProperty
707 703 def parents(self):
708 704 """
709 705 Returns list of parent commits.
710 706 """
711 707 raise NotImplementedError
712 708
713 709 @property
714 710 def merge(self):
715 711 """
716 712 Returns boolean if commit is a merge.
717 713 """
718 714 return len(self.parents) > 1
719 715
720 716 @LazyProperty
721 717 def children(self):
722 718 """
723 719 Returns list of child commits.
724 720 """
725 721 raise NotImplementedError
726 722
727 723 @LazyProperty
728 724 def id(self):
729 725 """
730 726 Returns string identifying this commit.
731 727 """
732 728 raise NotImplementedError
733 729
734 730 @LazyProperty
735 731 def raw_id(self):
736 732 """
737 733 Returns raw string identifying this commit.
738 734 """
739 735 raise NotImplementedError
740 736
741 737 @LazyProperty
742 738 def short_id(self):
743 739 """
744 740 Returns shortened version of ``raw_id`` attribute, as string,
745 741 identifying this commit, useful for presentation to users.
746 742 """
747 743 raise NotImplementedError
748 744
749 745 @LazyProperty
750 746 def idx(self):
751 747 """
752 748 Returns integer identifying this commit.
753 749 """
754 750 raise NotImplementedError
755 751
756 752 @LazyProperty
757 753 def committer(self):
758 754 """
759 755 Returns committer for this commit
760 756 """
761 757 raise NotImplementedError
762 758
763 759 @LazyProperty
764 760 def committer_name(self):
765 761 """
766 762 Returns committer name for this commit
767 763 """
768 764
769 765 return author_name(self.committer)
770 766
771 767 @LazyProperty
772 768 def committer_email(self):
773 769 """
774 770 Returns committer email address for this commit
775 771 """
776 772
777 773 return author_email(self.committer)
778 774
779 775 @LazyProperty
780 776 def author(self):
781 777 """
782 778 Returns author for this commit
783 779 """
784 780
785 781 raise NotImplementedError
786 782
787 783 @LazyProperty
788 784 def author_name(self):
789 785 """
790 786 Returns author name for this commit
791 787 """
792 788
793 789 return author_name(self.author)
794 790
795 791 @LazyProperty
796 792 def author_email(self):
797 793 """
798 794 Returns author email address for this commit
799 795 """
800 796
801 797 return author_email(self.author)
802 798
803 799 def get_file_mode(self, path):
804 800 """
805 801 Returns stat mode of the file at `path`.
806 802 """
807 803 raise NotImplementedError
808 804
809 805 def is_link(self, path):
810 806 """
811 807 Returns ``True`` if given `path` is a symlink
812 808 """
813 809 raise NotImplementedError
814 810
815 811 def get_file_content(self, path):
816 812 """
817 813 Returns content of the file at the given `path`.
818 814 """
819 815 raise NotImplementedError
820 816
821 817 def get_file_size(self, path):
822 818 """
823 819 Returns size of the file at the given `path`.
824 820 """
825 821 raise NotImplementedError
826 822
827 823 def get_file_commit(self, path, pre_load=None):
828 824 """
829 825 Returns last commit of the file at the given `path`.
830 826
831 827 :param pre_load: Optional. List of commit attributes to load.
832 828 """
833 829 return self.get_file_history(path, limit=1, pre_load=pre_load)[0]
834 830
835 831 def get_file_history(self, path, limit=None, pre_load=None):
836 832 """
837 833 Returns history of file as reversed list of :class:`BaseCommit`
838 834 objects for which file at given `path` has been modified.
839 835
840 836 :param limit: Optional. Allows to limit the size of the returned
841 837 history. This is intended as a hint to the underlying backend, so
842 838 that it can apply optimizations depending on the limit.
843 839 :param pre_load: Optional. List of commit attributes to load.
844 840 """
845 841 raise NotImplementedError
846 842
847 843 def get_file_annotate(self, path, pre_load=None):
848 844 """
849 845 Returns a generator of four element tuples with
850 846 lineno, sha, commit lazy loader and line
851 847
852 848 :param pre_load: Optional. List of commit attributes to load.
853 849 """
854 850 raise NotImplementedError
855 851
856 852 def get_nodes(self, path):
857 853 """
858 854 Returns combined ``DirNode`` and ``FileNode`` objects list representing
859 855 state of commit at the given ``path``.
860 856
861 857 :raises ``CommitError``: if node at the given ``path`` is not
862 858 instance of ``DirNode``
863 859 """
864 860 raise NotImplementedError
865 861
866 862 def get_node(self, path):
867 863 """
868 864 Returns ``Node`` object from the given ``path``.
869 865
870 866 :raises ``NodeDoesNotExistError``: if there is no node at the given
871 867 ``path``
872 868 """
873 869 raise NotImplementedError
874 870
875 871 def get_largefile_node(self, path):
876 872 """
877 873 Returns the path to largefile from Mercurial storage.
878 874 """
879 875 raise NotImplementedError
880 876
881 877 def archive_repo(self, file_path, kind='tgz', subrepos=None,
882 878 prefix=None, write_metadata=False, mtime=None):
883 879 """
884 880 Creates an archive containing the contents of the repository.
885 881
886 882 :param file_path: path to the file which to create the archive.
887 883 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
888 884 :param prefix: name of root directory in archive.
889 885 Default is repository name and commit's short_id joined with dash:
890 886 ``"{repo_name}-{short_id}"``.
891 887 :param write_metadata: write a metadata file into archive.
892 888 :param mtime: custom modification time for archive creation, defaults
893 889 to time.time() if not given.
894 890
895 891 :raise VCSError: If prefix has a problem.
896 892 """
897 893 allowed_kinds = settings.ARCHIVE_SPECS.keys()
898 894 if kind not in allowed_kinds:
899 895 raise ImproperArchiveTypeError(
900 896 'Archive kind (%s) not supported use one of %s' %
901 897 (kind, allowed_kinds))
902 898
903 899 prefix = self._validate_archive_prefix(prefix)
904 900
905 901 mtime = mtime or time.mktime(self.date.timetuple())
906 902
907 903 file_info = []
908 904 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
909 905 for _r, _d, files in cur_rev.walk('/'):
910 906 for f in files:
911 907 f_path = os.path.join(prefix, f.path)
912 908 file_info.append(
913 909 (f_path, f.mode, f.is_link(), f.raw_bytes))
914 910
915 911 if write_metadata:
916 912 metadata = [
917 913 ('repo_name', self.repository.name),
918 914 ('rev', self.raw_id),
919 915 ('create_time', mtime),
920 916 ('branch', self.branch),
921 917 ('tags', ','.join(self.tags)),
922 918 ]
923 919 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
924 920 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
925 921
926 922 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
927 923
928 924 def _validate_archive_prefix(self, prefix):
929 925 if prefix is None:
930 926 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
931 927 repo_name=safe_str(self.repository.name),
932 928 short_id=self.short_id)
933 929 elif not isinstance(prefix, str):
934 930 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
935 931 elif prefix.startswith('/'):
936 932 raise VCSError("Prefix cannot start with leading slash")
937 933 elif prefix.strip() == '':
938 934 raise VCSError("Prefix cannot be empty")
939 935 return prefix
940 936
    @LazyProperty
    def root(self):
        """
        Returns ``RootNode`` object for this commit.
        """
        # '' denotes the repository root path
        return self.get_node('')
947 943
948 944 def next(self, branch=None):
949 945 """
950 946 Returns next commit from current, if branch is gives it will return
951 947 next commit belonging to this branch
952 948
953 949 :param branch: show commits within the given named branch
954 950 """
955 951 indexes = xrange(self.idx + 1, self.repository.count())
956 952 return self._find_next(indexes, branch)
957 953
958 954 def prev(self, branch=None):
959 955 """
960 956 Returns previous commit from current, if branch is gives it will
961 957 return previous commit belonging to this branch
962 958
963 959 :param branch: show commit within the given named branch
964 960 """
965 961 indexes = xrange(self.idx - 1, -1, -1)
966 962 return self._find_next(indexes, branch)
967 963
968 964 def _find_next(self, indexes, branch=None):
969 965 if branch and self.branch != branch:
970 966 raise VCSError('Branch option used on commit not belonging '
971 967 'to that branch')
972 968
973 969 for next_idx in indexes:
974 970 commit = self.repository.get_commit(commit_idx=next_idx)
975 971 if branch and branch != commit.branch:
976 972 continue
977 973 return commit
978 974 raise CommitDoesNotExistError
979 975
980 976 def diff(self, ignore_whitespace=True, context=3):
981 977 """
982 978 Returns a `Diff` object representing the change made by this commit.
983 979 """
984 980 parent = (
985 981 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
986 982 diff = self.repository.get_diff(
987 983 parent, self,
988 984 ignore_whitespace=ignore_whitespace,
989 985 context=context)
990 986 return diff
991 987
    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects. Abstract — implemented
        by each backend.
        """
        raise NotImplementedError

    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects. Abstract — implemented
        by each backend.
        """
        raise NotImplementedError

    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects. Abstract — implemented
        by each backend.
        """
        raise NotImplementedError
1012 1008
1013 1009 @LazyProperty
1014 1010 def size(self):
1015 1011 """
1016 1012 Returns total number of bytes from contents of all filenodes.
1017 1013 """
1018 1014 return sum((node.size for node in self.get_filenodes_generator()))
1019 1015
1020 1016 def walk(self, topurl=''):
1021 1017 """
1022 1018 Similar to os.walk method. Insted of filesystem it walks through
1023 1019 commit starting at given ``topurl``. Returns generator of tuples
1024 1020 (topnode, dirnodes, filenodes).
1025 1021 """
1026 1022 topnode = self.get_node(topurl)
1027 1023 if not topnode.is_dir():
1028 1024 return
1029 1025 yield (topnode, topnode.dirs, topnode.files)
1030 1026 for dirnode in topnode.dirs:
1031 1027 for tup in self.walk(dirnode.path):
1032 1028 yield tup
1033 1029
1034 1030 def get_filenodes_generator(self):
1035 1031 """
1036 1032 Returns generator that yields *all* file nodes.
1037 1033 """
1038 1034 for topnode, dirs, files in self.walk():
1039 1035 for node in files:
1040 1036 yield node
1041 1037
1042 1038 #
1043 1039 # Utilities for sub classes to support consistent behavior
1044 1040 #
1045 1041
1046 1042 def no_node_at_path(self, path):
1047 1043 return NodeDoesNotExistError(
1048 1044 "There is no file nor directory at the given path: "
1049 1045 "'%s' at commit %s" % (path, self.short_id))
1050 1046
1051 1047 def _fix_path(self, path):
1052 1048 """
1053 1049 Paths are stored without trailing slash so we need to get rid off it if
1054 1050 needed.
1055 1051 """
1056 1052 return path.rstrip('/')
1057 1053
1058 1054 #
1059 1055 # Deprecated API based on changesets
1060 1056 #
1061 1057
    @property
    def revision(self):
        # Deprecated alias for ``idx``; kept for the old changeset API.
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        # Deprecated setter alias for ``idx``.
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value

    def get_file_changeset(self, path):
        # Deprecated alias for ``get_file_commit``.
        warnings.warn("Use get_file_commit instead", DeprecationWarning)
        return self.get_file_commit(path)
1075 1071
1076 1072
class BaseChangesetClass(type):
    # Metaclass making ``isinstance(x, BaseChangeset)`` succeed for any
    # ``BaseCommit`` instance, easing the changeset -> commit migration.

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1081 1077
1082 1078
class BaseChangeset(BaseCommit):
    # Deprecated alias of ``BaseCommit``, kept for backward compatibility.

    __metaclass__ = BaseChangesetClass  # Python 2 metaclass hook

    def __new__(cls, *args, **kwargs):
        # warn on every instantiation of the legacy class
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1091 1087
1092 1088
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """

    def __init__(self, repository):
        self.repository = repository
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def add(self, *filenodes):
        """
        Marks given ``FileNode`` objects as *to be committed*.

        Existence at the latest commit is NOT checked here; that happens in
        :meth:`check_integrity`.

        :raises ``NodeAlreadyExistsError``: if node with same path exists at
          latest commit (raised later by :meth:`check_integrity`)
        :raises ``NodeAlreadyAddedError``: if node with same path is already
          marked as *added*
        """
        # Check if not already marked as *added* first
        for node in filenodes:
            if node.path in (n.path for n in self.added):
                raise NodeAlreadyAddedError(
                    "Such FileNode %s is already marked for addition"
                    % node.path)
        for node in filenodes:
            self.added.append(node)

    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyExistsError``: if node with same path is already
          marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
          marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
          commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        # a node cannot be both removed and changed in the same commit
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
        try:
            self.repository.get_commit()
        except EmptyRepositoryError:
            raise EmptyRepositoryError(
                "Nothing to change - try to *add* new nodes rather than "
                "changing them")
        for node in filenodes:
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)

    def remove(self, *filenodes):
        """
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
        *removed* in next commit.

        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
          be *removed*
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
          be *changed*
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node is already marked to for removal at %s" % node.path)
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node is already marked to be changed at %s" % node.path)
            # We only mark node as *removed* - real removal is done by
            # commit method
            self.removed.append(node)

    def reset(self):
        """
        Resets this instance to initial state (cleans ``added``, ``changed``
        and ``removed`` lists).
        """
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def get_ipaths(self):
        """
        Returns generator of paths from nodes marked as added, changed or
        removed.
        """
        for node in itertools.chain(self.added, self.changed, self.removed):
            yield node.path

    def get_paths(self):
        """
        Returns list of paths from nodes marked as added, changed or removed.
        """
        return list(self.get_ipaths())

    def check_integrity(self, parents=None):
        """
        Checks in-memory commit's integrity. Also, sets parents if not
        already set.

        :raises CommitError: if any error occurs (i.e.
          ``NodeDoesNotExistError``).
        """
        if not self.parents:
            parents = parents or []
            if len(parents) == 0:
                try:
                    parents = [self.repository.get_commit(), None]
                except EmptyRepositoryError:
                    parents = [None, None]
            elif len(parents) == 1:
                parents += [None]
            self.parents = parents

        # Local parents, only if not None
        parents = [p for p in self.parents if p]

        # Check nodes marked as added
        for p in parents:
            for node in self.added:
                try:
                    p.get_node(node.path)
                except NodeDoesNotExistError:
                    pass
                else:
                    raise NodeAlreadyExistsError(
                        "Node `%s` already exists at %s" % (node.path, p))

        # Check nodes marked as changed
        missing = set(self.changed)
        not_changed = set(self.changed)
        if self.changed and not parents:
            raise NodeDoesNotExistError(str(self.changed[0].path))
        for p in parents:
            for node in self.changed:
                try:
                    old = p.get_node(node.path)
                    missing.remove(node)
                    # if content actually changed, remove node from not_changed
                    if old.content != node.content:
                        not_changed.remove(node)
                except NodeDoesNotExistError:
                    pass
        if self.changed and missing:
            # report a node that is actually missing (previously this used
            # the stale loop variable, naming an arbitrary changed node)
            raise NodeDoesNotExistError(
                "Node `%s` marked as modified but missing in parents: %s"
                % (missing.pop().path, parents))

        if self.changed and not_changed:
            raise NodeNotChangedError(
                "Node `%s` wasn't actually changed (parents: %s)"
                % (not_changed.pop().path, parents))

        # Check nodes marked as removed
        if self.removed and not parents:
            raise NodeDoesNotExistError(
                "Cannot remove node at %s as there "
                "were no parents specified" % self.removed[0].path)
        really_removed = set()
        for p in parents:
            for node in self.removed:
                try:
                    p.get_node(node.path)
                    really_removed.add(node)
                except CommitError:
                    pass
        not_removed = set(self.removed) - really_removed
        if not_removed:
            # TODO: johbo: This code branch does not seem to be covered
            raise NodeDoesNotExistError(
                "Cannot remove node at %s from "
                "following parents: %s" % (not_removed, parents))

    def commit(
            self, message, author, parents=None, branch=None, date=None,
            **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created :class:`BaseCommit`. Updates repository's
        attribute `commits`.

        .. note::

            While overriding this method each backend's should call
            ``self.check_integrity(parents)`` in the first place.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param date: ``datetime.datetime`` instance. Defaults to
            ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
            branch would be used.

        :raises ``CommitError``: if any error occurs while committing
        """
        raise NotImplementedError
1321 1317
1322 1318
class BaseInMemoryChangesetClass(type):
    # Metaclass making ``isinstance(x, BaseInMemoryChangeset)`` succeed for
    # any ``BaseInMemoryCommit`` instance (changeset -> commit migration).

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseInMemoryCommit)
1327 1323
1328 1324
class BaseInMemoryChangeset(BaseInMemoryCommit):
    """
    Deprecated alias of :class:`BaseInMemoryCommit`, kept for backward
    compatibility with the old changeset-based API.
    """

    __metaclass__ = BaseInMemoryChangesetClass  # Python 2 metaclass hook

    def __new__(cls, *args, **kwargs):
        # the message previously advised "Use BaseCommit instead of
        # BaseInMemoryCommit" (copy-paste error); name the right classes
        warnings.warn(
            "Use BaseInMemoryCommit instead of BaseInMemoryChangeset",
            DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1337 1333
1338 1334
class EmptyCommit(BaseCommit):
    """
    A dummy commit representing the empty state of a repository. A custom
    commit hash can be passed on creation.
    """

    def __init__(
            self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        self.date = date if date else datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit, useful for web
        representation.
        """
        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        """Default branch name of the configured backend, if an alias is set."""
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            backend = get_backend(self.alias)
            return backend.DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        """First 12 characters of the raw commit id."""
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        """Alias of :attr:`raw_id`."""
        return self.raw_id

    def get_file_commit(self, path):
        # the empty commit is its own last-change commit for any path
        return self

    def get_file_content(self, path):
        # no files exist, content is always empty
        return u''

    def get_file_size(self, path):
        # no files exist, size is always zero
        return 0
1389 1385
1390 1386
class EmptyChangesetClass(type):
    # Metaclass making ``isinstance(x, EmptyChangeset)`` succeed for any
    # ``EmptyCommit`` instance (changeset -> commit migration).

    def __instancecheck__(self, instance):
        return isinstance(instance, EmptyCommit)
1395 1391
1396 1392
class EmptyChangeset(EmptyCommit):
    """
    Deprecated alias of :class:`EmptyCommit`, kept for backward
    compatibility with the old changeset-based API.
    """

    __metaclass__ = EmptyChangesetClass  # Python 2 metaclass hook

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # start the cooperative super() chain at EmptyChangeset itself;
        # the previous super(EmptyCommit, cls) skipped EmptyCommit in the
        # MRO and only worked because EmptyCommit defines no __new__
        return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        # Deprecated alias for ``idx``.
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        # Deprecated setter alias for ``idx``.
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1425 1421
1426 1422
class CollectionGenerator(object):
    """
    Lazily materializes commits from a list of commit ids, delegating the
    actual lookups to the owning repository.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # TODO: (oliver) this isn't currently hooked up
        self.collection_size = None
        self.pre_load = pre_load

    def __len__(self):
        if self.collection_size is None:
            return len(self.commit_ids)
        return self.collection_size

    def __iter__(self):
        for commit_id in self.commit_ids:
            # TODO: johbo: Mercurial passes in commit indices or commit ids
            yield self._commit_factory(commit_id)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load)

    def __getslice__(self, start, stop):
        """
        Returns an iterator of sliced repository (Python 2 slicing hook).
        """
        sliced_ids = self.commit_ids[start:stop]
        return self.__class__(self.repo, sliced_ids, pre_load=self.pre_load)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (len(self),)
1463 1459
1464 1460
class Config(object):
    """
    Represents the configuration for a repository.

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        self._values = {}

    def copy(self):
        """Return an independent copy (section dicts are duplicated)."""
        duplicate = Config()
        for section_name, section_values in self._values.items():
            duplicate._values[section_name] = section_values.copy()
        return duplicate

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        """Iterate (option, value) pairs of *section* (empty if unknown)."""
        return self._values.get(section, {}).iteritems()

    def get(self, section, option):
        """Return the value of *option* in *section*, or None."""
        section_values = self._values.get(section, {})
        return section_values.get(option)

    def set(self, section, option, value):
        """Set *option* to *value*, creating *section* on demand."""
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        """Drop every option stored under *section*."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        serialized = []
        for section in self._values:
            for option, value in self._values[section].items():
                serialized.append(
                    (safe_str(section), safe_str(option), safe_str(value)))
        return serialized
1510 1506
1511 1507
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for :attr:`_header_re`.
    """

    _header_re = None

    def __init__(self, raw_diff):
        self.raw = raw_diff

    def chunks(self):
        """
        split the diff in chunks of separate --git a/file b/file chunks
        to make diffs consistent we must prepend with \n, and make sure
        we can detect last chunk as this was also has special rule
        """
        parts = ('\n' + self.raw).split('\ndiff --git')[1:]
        last_index = len(parts)
        return (
            DiffChunk(part, self, position == last_index)
            for position, part in enumerate(parts, start=1))
1534 1530
1535 1531
class DiffChunk(object):
    """
    One per-file chunk of a :class:`Diff`, split out of the raw text and
    parsed against the backend's ``_header_re``.
    """

    def __init__(self, chunk, diff, last_chunk):
        self._diff = diff

        # splitting on '\ndiff --git' consumed the trailing newline of every
        # chunk except the last one - restore it here
        if not last_chunk:
            chunk = chunk + '\n'

        header_match = self._diff._header_re.match(chunk)
        self.header = header_match.groupdict()
        self.diff = chunk[header_match.end():]
        self.raw = chunk
@@ -1,808 +1,803 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24
25 25 import logging
26 26 import binascii
27 27 import os
28 28 import shutil
29 29 import urllib
30 30
31 31 from zope.cachedescriptors.property import Lazy as LazyProperty
32 32
33 33 from rhodecode.lib.compat import OrderedDict
34 34 from rhodecode.lib.datelib import (
35 35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate,
36 36 date_astimestamp)
37 37 from rhodecode.lib.utils import safe_unicode, safe_str
38 38 from rhodecode.lib.vcs import connection
39 39 from rhodecode.lib.vcs.backends.base import (
40 40 BaseRepository, CollectionGenerator, Config, MergeResponse,
41 41 MergeFailureReason, Reference)
42 42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
43 43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
44 44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
47 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
47 TagDoesNotExistError, CommitDoesNotExistError)
48 48
49 49 hexlify = binascii.hexlify
50 50 nullid = "\0" * 20
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
55 55 class MercurialRepository(BaseRepository):
56 56 """
57 57 Mercurial repository backend
58 58 """
59 59 DEFAULT_BRANCH_NAME = 'default'
60 60
61 61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 62 update_after_clone=False, with_wire=None):
63 63 """
64 64 Raises RepositoryError if repository could not be find at the given
65 65 ``repo_path``.
66 66
67 67 :param repo_path: local path of the repository
68 68 :param config: config object containing the repo configuration
69 69 :param create=False: if set to True, would try to create repository if
70 70 it does not exist rather than raising exception
71 71 :param src_url=None: would try to clone repository from given location
72 72 :param update_after_clone=False: sets update of working copy after
73 73 making a clone
74 74 """
75 75 self.path = safe_str(os.path.abspath(repo_path))
76 76 self.config = config if config else Config()
77 77 self._remote = connection.Hg(
78 78 self.path, self.config, with_wire=with_wire)
79 79
80 80 self._init_repo(create, src_url, update_after_clone)
81 81
82 82 # caches
83 83 self._commit_ids = {}
84 84
85 85 @LazyProperty
86 86 def commit_ids(self):
87 87 """
88 88 Returns list of commit ids, in ascending order. Being lazy
89 89 attribute allows external tools to inject shas from cache.
90 90 """
91 91 commit_ids = self._get_all_commit_ids()
92 92 self._rebuild_cache(commit_ids)
93 93 return commit_ids
94 94
95 95 def _rebuild_cache(self, commit_ids):
96 96 self._commit_ids = dict((commit_id, index)
97 97 for index, commit_id in enumerate(commit_ids))
98 98
    @LazyProperty
    def branches(self):
        # active, non-closed branches only (default _get_branches filter)
        return self._get_branches()

    @LazyProperty
    def branches_closed(self):
        # closed branches only
        return self._get_branches(active=False, closed=True)

    @LazyProperty
    def branches_all(self):
        # union of active and closed branches
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches
113 113
114 114 def _get_branches(self, active=True, closed=False):
115 115 """
116 116 Gets branches for this repository
117 117 Returns only not closed active branches by default
118 118
119 119 :param active: return also active branches
120 120 :param closed: return also closed branches
121 121
122 122 """
123 123 if self.is_empty():
124 124 return {}
125 125
126 126 def get_name(ctx):
127 127 return ctx[0]
128 128
129 129 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
130 130 self._remote.branches(active, closed).items()]
131 131
132 132 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
133 133
    @LazyProperty
    def tags(self):
        """
        Gets tags for this repository
        """
        return self._get_tags()

    def _get_tags(self):
        # Build an ordered mapping of tag name -> commit hash.
        if self.is_empty():
            return {}

        def get_name(ctx):
            # sort key: the tag name
            return ctx[0]

        _tags = [(safe_unicode(n), hexlify(h),) for n, h in
                 self._remote.tags().items()]

        # reverse-alphabetical order by tag name
        return OrderedDict(sorted(_tags, key=get_name, reverse=True))
152 152
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        # 'local' may be passed through kwargs; defaults to a global tag
        local = kwargs.setdefault('local', False)

        if message is None:
            message = "Added tag %s for commit %s" % (name, commit.short_id)

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(
            name, commit.raw_id, message, local, user, date, tz)
        # drop the vcsserver-side cache so the new tag becomes visible
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self.tags = self._get_tags()
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)
185 185
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)
        if message is None:
            message = "Removed tag %s" % name
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        # re-tagging to nullid marks the tag as removed (hg convention -
        # confirm against mercurial docs)
        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        # Reinitialize tags
        self.tags = self._get_tags()
208 208
    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository
        """
        return self._get_bookmarks()

    def _get_bookmarks(self):
        # Build an ordered mapping of bookmark name -> commit hash.
        if self.is_empty():
            return {}

        def get_name(ctx):
            # sort key: the bookmark name
            return ctx[0]

        _bookmarks = [
            (safe_unicode(n), hexlify(h)) for n, h in
            self._remote.bookmarks().items()]

        # alphabetical order by bookmark name
        return OrderedDict(sorted(_bookmarks, key=get_name))
228 228
    def _get_all_commit_ids(self):
        # 'visible' is the repoview filter passed to the vcsserver —
        # presumably excludes hidden/obsolete changesets; confirm against
        # the mercurial repoview documentation
        return self._remote.get_all_commit_ids('visible')
231 231
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: if given, restrict the diff to this path.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: diffing two different paths is not supported; when
          given, must equal ``path``.

        :raises ValueError: if ``path1`` is given and differs from ``path``.
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            # the remote expects the repo root alongside the filtered path
            file_filter = [self.path, path]
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_git=True, opt_ignorews=ignore_whitespace,
            context=context)
        return MercurialDiff(diff)
262 262
    def strip(self, commit_id, branch=None):
        """
        Strip ``commit_id`` from the repository (no working-copy update,
        no backup bundle).

        :param branch: accepted for interface compatibility; unused here.
        """
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        # history was rewritten - rebuild the commit id caches
        self.commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(self.commit_ids)
269 269
270 270 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
271 271 if commit_id1 == commit_id2:
272 272 return commit_id1
273 273
274 274 ancestors = self._remote.revs_from_revspec(
275 275 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
276 276 other_path=repo2.path)
277 277 return repo2[ancestors[0]].raw_id if ancestors else None
278 278
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return the list of commits (resolved in ``repo2``) that
        ``commit_id2`` introduces relative to ``commit_id1``.

        :param merge: when True use an ancestry-based revspec (everything
            reachable from ``commit_id2`` but not from ``commit_id1``);
            otherwise use a linear range revspec.
        """
        if commit_id1 == commit_id2:
            commits = []
        else:
            if merge:
                # ancestors(c2) - ancestors(c1) - c1
                indexes = self._remote.revs_from_revspec(
                    "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
                    commit_id2, commit_id1, commit_id1, other_path=repo2.path)
            else:
                # linear range c1..c2, excluding c1 itself
                indexes = self._remote.revs_from_revspec(
                    "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
                    commit_id1, other_path=repo2.path)

            commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
                       for idx in indexes]

        return commits
296 296
297 297 @staticmethod
298 298 def check_url(url, config):
299 299 """
300 300 Function will check given url and try to verify if it's a valid
301 301 link. Sometimes it may happened that mercurial will issue basic
302 302 auth request that can cause whole API to hang when used from python
303 303 or other external calls.
304 304
305 305 On failures it'll raise urllib2.HTTPError, exception is also thrown
306 306 when the return code is non 200
307 307 """
308 308 # check first if it's not an local url
309 309 if os.path.isdir(url) or url.startswith('file:'):
310 310 return True
311 311
312 312 # Request the _remote to verify the url
313 313 return connection.Hg.check_url(url, config.serialize())
314 314
315 315 @staticmethod
316 316 def is_valid_repository(path):
317 317 return os.path.isdir(os.path.join(path, '.hg'))
318 318
319 319 def _init_repo(self, create, src_url=None, update_after_clone=False):
320 320 """
321 321 Function will check for mercurial repository in given path. If there
322 322 is no repository in that path it will raise an exception unless
323 323 `create` parameter is set to True - in that case repository would
324 324 be created.
325 325
326 326 If `src_url` is given, would try to clone repository from the
327 327 location at given clone_point. Additionally it'll make update to
328 328 working copy accordingly to `update_after_clone` flag.
329 329 """
330 330 if create and os.path.exists(self.path):
331 331 raise RepositoryError(
332 332 "Cannot create repository at %s, location already exist"
333 333 % self.path)
334 334
335 335 if src_url:
336 336 url = str(self._get_url(src_url))
337 337 MercurialRepository.check_url(url, self.config)
338 338
339 339 self._remote.clone(url, self.path, update_after_clone)
340 340
341 341 # Don't try to create if we've already cloned repo
342 342 create = False
343 343
344 344 if create:
345 345 os.makedirs(self.path, mode=0755)
346 346
347 347 self._remote.localrepository(create)
348 348
349 349 @LazyProperty
350 350 def in_memory_commit(self):
351 351 return MercurialInMemoryCommit(self)
352 352
353 353 @LazyProperty
354 354 def description(self):
355 355 description = self._remote.get_config_value(
356 356 'web', 'description', untrusted=True)
357 357 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
358 358
359 359 @LazyProperty
360 360 def contact(self):
361 361 contact = (
362 362 self._remote.get_config_value("web", "contact") or
363 363 self._remote.get_config_value("ui", "username"))
364 364 return safe_unicode(contact or self.DEFAULT_CONTACT)
365 365
366 366 @LazyProperty
367 367 def last_change(self):
368 368 """
369 369 Returns last change made on this repository as
370 370 `datetime.datetime` object
371 371 """
372 372 return utcdate_fromtimestamp(self._get_mtime(), makedate()[1])
373 373
374 374 def _get_mtime(self):
375 375 try:
376 376 return date_astimestamp(self.get_commit().date)
377 377 except RepositoryError:
378 378 # fallback to filesystem
379 379 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
380 380 st_path = os.path.join(self.path, '.hg', "store")
381 381 if os.path.exists(cl_path):
382 382 return os.stat(cl_path).st_mtime
383 383 else:
384 384 return os.stat(st_path).st_mtime
385 385
386 386 def _sanitize_commit_idx(self, idx):
387 387 # Note: Mercurial has ``int(-1)`` reserved as not existing id_or_idx
388 388 # number. A `long` is treated in the correct way though. So we convert
389 389 # `int` to `long` here to make sure it is handled correctly.
390 390 if isinstance(idx, int):
391 391 return long(idx)
392 392 return idx
393 393
394 394 def _get_url(self, url):
395 395 """
396 396 Returns normalized url. If schema is not given, would fall
397 397 to filesystem
398 398 (``file:///``) schema.
399 399 """
400 400 url = url.encode('utf8')
401 401 if url != 'default' and '://' not in url:
402 402 url = "file:" + urllib.pathname2url(url)
403 403 return url
404 404
405 405 def get_hook_location(self):
406 406 """
407 407 returns absolute path to location where hooks are stored
408 408 """
409 409 return os.path.join(self.path, '.hg', '.hgrc')
410 410
411 411 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
412 412 """
413 413 Returns ``MercurialCommit`` object representing repository's
414 414 commit at the given `commit_id` or `commit_idx`.
415 415 """
416 416 if self.is_empty():
417 417 raise EmptyRepositoryError("There are no commits yet")
418 418
419 419 if commit_id is not None:
420 420 self._validate_commit_id(commit_id)
421 421 try:
422 422 idx = self._commit_ids[commit_id]
423 423 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
424 424 except KeyError:
425 425 pass
426 426 elif commit_idx is not None:
427 427 self._validate_commit_idx(commit_idx)
428 428 commit_idx = self._sanitize_commit_idx(commit_idx)
429 429 try:
430 430 id_ = self.commit_ids[commit_idx]
431 431 if commit_idx < 0:
432 432 commit_idx += len(self.commit_ids)
433 433 return MercurialCommit(
434 434 self, id_, commit_idx, pre_load=pre_load)
435 435 except IndexError:
436 436 commit_id = commit_idx
437 437 else:
438 438 commit_id = "tip"
439 439
440 440 # TODO Paris: Ugly hack to "serialize" long for msgpack
441 441 if isinstance(commit_id, long):
442 442 commit_id = float(commit_id)
443 443
444 444 if isinstance(commit_id, unicode):
445 445 commit_id = safe_str(commit_id)
446 446
447 447 raw_id, idx = self._remote.lookup(commit_id, both=True)
448 448
449 449 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
450 450
451 451 def get_commits(
452 452 self, start_id=None, end_id=None, start_date=None, end_date=None,
453 453 branch_name=None, pre_load=None):
454 454 """
455 455 Returns generator of ``MercurialCommit`` objects from start to end
456 456 (both are inclusive)
457 457
458 458 :param start_id: None, str(commit_id)
459 459 :param end_id: None, str(commit_id)
460 460 :param start_date: if specified, commits with commit date less than
461 461 ``start_date`` would be filtered out from returned set
462 462 :param end_date: if specified, commits with commit date greater than
463 463 ``end_date`` would be filtered out from returned set
464 464 :param branch_name: if specified, commits not reachable from given
465 465 branch would be filtered out from returned set
466 466
467 467 :raise BranchDoesNotExistError: If given ``branch_name`` does not
468 468 exist.
469 469 :raise CommitDoesNotExistError: If commit for given ``start`` or
470 470 ``end`` could not be found.
471 471 """
472 472 # actually we should check now if it's not an empty repo
473 473 branch_ancestors = False
474 474 if self.is_empty():
475 475 raise EmptyRepositoryError("There are no commits yet")
476 476 self._validate_branch_name(branch_name)
477 477
478 478 if start_id is not None:
479 479 self._validate_commit_id(start_id)
480 480 c_start = self.get_commit(commit_id=start_id)
481 481 start_pos = self._commit_ids[c_start.raw_id]
482 482 else:
483 483 start_pos = None
484 484
485 485 if end_id is not None:
486 486 self._validate_commit_id(end_id)
487 487 c_end = self.get_commit(commit_id=end_id)
488 488 end_pos = max(0, self._commit_ids[c_end.raw_id])
489 489 else:
490 490 end_pos = None
491 491
492 492 if None not in [start_id, end_id] and start_pos > end_pos:
493 493 raise RepositoryError(
494 494 "Start commit '%s' cannot be after end commit '%s'" %
495 495 (start_id, end_id))
496 496
497 497 if end_pos is not None:
498 498 end_pos += 1
499 499
500 500 commit_filter = []
501 501 if branch_name and not branch_ancestors:
502 502 commit_filter.append('branch("%s")' % branch_name)
503 503 elif branch_name and branch_ancestors:
504 504 commit_filter.append('ancestors(branch("%s"))' % branch_name)
505 505 if start_date and not end_date:
506 506 commit_filter.append('date(">%s")' % start_date)
507 507 if end_date and not start_date:
508 508 commit_filter.append('date("<%s")' % end_date)
509 509 if start_date and end_date:
510 510 commit_filter.append(
511 511 'date(">%s") and date("<%s")' % (start_date, end_date))
512 512
513 513 # TODO: johbo: Figure out a simpler way for this solution
514 514 collection_generator = CollectionGenerator
515 515 if commit_filter:
516 516 commit_filter = map(safe_str, commit_filter)
517 517 revisions = self._remote.rev_range(commit_filter)
518 518 collection_generator = MercurialIndexBasedCollectionGenerator
519 519 else:
520 520 revisions = self.commit_ids
521 521
522 522 if start_pos or end_pos:
523 523 revisions = revisions[start_pos:end_pos]
524 524
525 525 return collection_generator(self, revisions, pre_load=pre_load)
526 526
527 527 def pull(self, url, commit_ids=None):
528 528 """
529 529 Tries to pull changes from external location.
530 530
531 531 :param commit_ids: Optional. Can be set to a list of commit ids
532 532 which shall be pulled from the other repository.
533 533 """
534 534 url = self._get_url(url)
535 535 self._remote.pull(url, commit_ids=commit_ids)
536 536 self._remote.invalidate_vcs_cache()
537 537
538 538 def _local_clone(self, clone_path):
539 539 """
540 540 Create a local clone of the current repo.
541 541 """
542 542 self._remote.clone(self.path, clone_path, update_after_clone=True,
543 543 hooks=False)
544 544
545 545 def _update(self, revision, clean=False):
546 546 """
547 547 Update the working copty to the specified revision.
548 548 """
549 549 self._remote.update(revision, clean=clean)
550 550
551 551 def _identify(self):
552 552 """
553 553 Return the current state of the working directory.
554 554 """
555 555 return self._remote.identify().strip().rstrip('+')
556 556
557 557 def _heads(self, branch=None):
558 558 """
559 559 Return the commit ids of the repository heads.
560 560 """
561 561 return self._remote.heads(branch=branch).strip().split(' ')
562 562
563 563 def _ancestor(self, revision1, revision2):
564 564 """
565 565 Return the common ancestor of the two revisions.
566 566 """
567 567 return self._remote.ancestor(
568 568 revision1, revision2).strip().split(':')[-1]
569 569
570 570 def _local_push(
571 571 self, revision, repository_path, push_branches=False,
572 572 enable_hooks=False):
573 573 """
574 574 Push the given revision to the specified repository.
575 575
576 576 :param push_branches: allow to create branches in the target repo.
577 577 """
578 578 self._remote.push(
579 579 [revision], repository_path, hooks=enable_hooks,
580 580 push_branches=push_branches)
581 581
582 582 def _local_merge(self, target_ref, merge_message, user_name, user_email,
583 583 source_ref, use_rebase=False):
584 584 """
585 585 Merge the given source_revision into the checked out revision.
586 586
587 587 Returns the commit id of the merge and a boolean indicating if the
588 588 commit needs to be pushed.
589 589 """
590 590 self._update(target_ref.commit_id)
591 591
592 592 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
593 593 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
594 594
595 595 if ancestor == source_ref.commit_id:
596 596 # Nothing to do, the changes were already integrated
597 597 return target_ref.commit_id, False
598 598
599 599 elif ancestor == target_ref.commit_id and is_the_same_branch:
600 600 # In this case we should force a commit message
601 601 return source_ref.commit_id, True
602 602
603 603 if use_rebase:
604 604 try:
605 605 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
606 606 target_ref.commit_id)
607 607 self.bookmark(bookmark_name, revision=source_ref.commit_id)
608 608 self._remote.rebase(
609 609 source=source_ref.commit_id, dest=target_ref.commit_id)
610 610 self._remote.invalidate_vcs_cache()
611 611 self._update(bookmark_name)
612 612 return self._identify(), True
613 613 except RepositoryError:
614 614 # The rebase-abort may raise another exception which 'hides'
615 615 # the original one, therefore we log it here.
616 616 log.exception('Error while rebasing shadow repo during merge.')
617 617
618 618 # Cleanup any rebase leftovers
619 619 self._remote.invalidate_vcs_cache()
620 620 self._remote.rebase(abort=True)
621 621 self._remote.invalidate_vcs_cache()
622 622 self._remote.update(clean=True)
623 623 raise
624 624 else:
625 625 try:
626 626 self._remote.merge(source_ref.commit_id)
627 627 self._remote.invalidate_vcs_cache()
628 628 self._remote.commit(
629 629 message=safe_str(merge_message),
630 630 username=safe_str('%s <%s>' % (user_name, user_email)))
631 631 self._remote.invalidate_vcs_cache()
632 632 return self._identify(), True
633 633 except RepositoryError:
634 634 # Cleanup any merge leftovers
635 635 self._remote.update(clean=True)
636 636 raise
637 637
638 638 def _is_the_same_branch(self, target_ref, source_ref):
639 639 return (
640 640 self._get_branch_name(target_ref) ==
641 641 self._get_branch_name(source_ref))
642 642
643 643 def _get_branch_name(self, ref):
644 644 if ref.type == 'branch':
645 645 return ref.name
646 646 return self._remote.ctx_branch(ref.commit_id)
647 647
648 648 def _get_shadow_repository_path(self, workspace_id):
649 649 # The name of the shadow repository must start with '.', so it is
650 650 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
651 651 return os.path.join(
652 652 os.path.dirname(self.path),
653 653 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
654 654
655 655 def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref):
656 656 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
657 657 if not os.path.exists(shadow_repository_path):
658 658 self._local_clone(shadow_repository_path)
659 659 log.debug(
660 660 'Prepared shadow repository in %s', shadow_repository_path)
661 661
662 662 return shadow_repository_path
663 663
664 664 def cleanup_merge_workspace(self, workspace_id):
665 665 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
666 666 shutil.rmtree(shadow_repository_path, ignore_errors=True)
667 667
668 668 def _merge_repo(self, shadow_repository_path, target_ref,
669 669 source_repo, source_ref, merge_message,
670 670 merger_name, merger_email, dry_run=False,
671 671 use_rebase=False):
672 672 if target_ref.commit_id not in self._heads():
673 673 return MergeResponse(
674 674 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
675 675
676 676 try:
677 677 if (target_ref.type == 'branch' and
678 678 len(self._heads(target_ref.name)) != 1):
679 679 return MergeResponse(
680 680 False, False, None,
681 681 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
682 682 except CommitDoesNotExistError as e:
683 683 log.exception('Failure when looking up branch heads on hg target')
684 684 return MergeResponse(
685 685 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
686 686
687 687 shadow_repo = self._get_shadow_instance(shadow_repository_path)
688 688
689 689 log.debug('Pulling in target reference %s', target_ref)
690 690 self._validate_pull_reference(target_ref)
691 691 shadow_repo._local_pull(self.path, target_ref)
692 692 try:
693 693 log.debug('Pulling in source reference %s', source_ref)
694 694 source_repo._validate_pull_reference(source_ref)
695 695 shadow_repo._local_pull(source_repo.path, source_ref)
696 696 except CommitDoesNotExistError:
697 697 log.exception('Failure when doing local pull on hg shadow repo')
698 698 return MergeResponse(
699 699 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
700 700
701 701 merge_ref = None
702 702 merge_failure_reason = MergeFailureReason.NONE
703 703
704 704 try:
705 705 merge_commit_id, needs_push = shadow_repo._local_merge(
706 706 target_ref, merge_message, merger_name, merger_email,
707 707 source_ref, use_rebase=use_rebase)
708 708 merge_possible = True
709 709
710 710 # Set a bookmark pointing to the merge commit. This bookmark may be
711 711 # used to easily identify the last successful merge commit in the
712 712 # shadow repository.
713 713 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
714 714 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
715 except SubrepoMergeError:
716 log.exception(
717 'Subrepo merge error during local merge on hg shadow repo.')
718 merge_possible = False
719 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
720 715 except RepositoryError:
721 716 log.exception('Failure when doing local merge on hg shadow repo')
722 717 merge_possible = False
723 718 merge_failure_reason = MergeFailureReason.MERGE_FAILED
724 719
725 720 if merge_possible and not dry_run:
726 721 if needs_push:
727 722 # In case the target is a bookmark, update it, so after pushing
728 723 # the bookmarks is also updated in the target.
729 724 if target_ref.type == 'book':
730 725 shadow_repo.bookmark(
731 726 target_ref.name, revision=merge_commit_id)
732 727
733 728 try:
734 729 shadow_repo_with_hooks = self._get_shadow_instance(
735 730 shadow_repository_path,
736 731 enable_hooks=True)
737 732 # Note: the push_branches option will push any new branch
738 733 # defined in the source repository to the target. This may
739 734 # be dangerous as branches are permanent in Mercurial.
740 735 # This feature was requested in issue #441.
741 736 shadow_repo_with_hooks._local_push(
742 737 merge_commit_id, self.path, push_branches=True,
743 738 enable_hooks=True)
744 739 merge_succeeded = True
745 740 except RepositoryError:
746 741 log.exception(
747 742 'Failure when doing local push from the shadow '
748 743 'repository to the target repository.')
749 744 merge_succeeded = False
750 745 merge_failure_reason = MergeFailureReason.PUSH_FAILED
751 746 else:
752 747 merge_succeeded = True
753 748 else:
754 749 merge_succeeded = False
755 750
756 751 return MergeResponse(
757 752 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
758 753
759 754 def _get_shadow_instance(
760 755 self, shadow_repository_path, enable_hooks=False):
761 756 config = self.config.copy()
762 757 if not enable_hooks:
763 758 config.clear_section('hooks')
764 759 return MercurialRepository(shadow_repository_path, config)
765 760
766 761 def _validate_pull_reference(self, reference):
767 762 if not (reference.name in self.bookmarks or
768 763 reference.name in self.branches or
769 764 self.get_commit(reference.commit_id)):
770 765 raise CommitDoesNotExistError(
771 766 'Unknown branch, bookmark or commit id')
772 767
773 768 def _local_pull(self, repository_path, reference):
774 769 """
775 770 Fetch a branch, bookmark or commit from a local repository.
776 771 """
777 772 repository_path = os.path.abspath(repository_path)
778 773 if repository_path == self.path:
779 774 raise ValueError('Cannot pull from the same repository')
780 775
781 776 reference_type_to_option_name = {
782 777 'book': 'bookmark',
783 778 'branch': 'branch',
784 779 }
785 780 option_name = reference_type_to_option_name.get(
786 781 reference.type, 'revision')
787 782
788 783 if option_name == 'revision':
789 784 ref = reference.commit_id
790 785 else:
791 786 ref = reference.name
792 787
793 788 options = {option_name: [ref]}
794 789 self._remote.pull_cmd(repository_path, hooks=False, **options)
795 790 self._remote.invalidate_vcs_cache()
796 791
797 792 def bookmark(self, bookmark, revision=None):
798 793 if isinstance(bookmark, unicode):
799 794 bookmark = safe_str(bookmark)
800 795 self._remote.bookmark(bookmark, revision=revision)
801 796 self._remote.invalidate_vcs_cache()
802 797
803 798
804 799 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
805 800
806 801 def _commit_factory(self, commit_id):
807 802 return self.repo.get_commit(
808 803 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,205 +1,196 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Custom vcs exceptions module.
23 23 """
24 24
25 25 import functools
26 26 import urllib2
27 27
28 28
29 29 class VCSCommunicationError(Exception):
30 30 pass
31 31
32 32
33 33 class PyroVCSCommunicationError(VCSCommunicationError):
34 34 pass
35 35
36 36
37 37 class HttpVCSCommunicationError(VCSCommunicationError):
38 38 pass
39 39
40 40
41 41 class VCSError(Exception):
42 42 pass
43 43
44 44
45 45 class RepositoryError(VCSError):
46 46 pass
47 47
48 48
49 49 class RepositoryRequirementError(RepositoryError):
50 50 pass
51 51
52 52
53 53 class VCSBackendNotSupportedError(VCSError):
54 54 """
55 55 Exception raised when VCSServer does not support requested backend
56 56 """
57 57
58 58
59 59 class EmptyRepositoryError(RepositoryError):
60 60 pass
61 61
62 62
63 63 class TagAlreadyExistError(RepositoryError):
64 64 pass
65 65
66 66
67 67 class TagDoesNotExistError(RepositoryError):
68 68 pass
69 69
70 70
71 71 class BranchAlreadyExistError(RepositoryError):
72 72 pass
73 73
74 74
75 75 class BranchDoesNotExistError(RepositoryError):
76 76 pass
77 77
78 78
79 79 class CommitError(RepositoryError):
80 80 """
81 81 Exceptions related to an existing commit
82 82 """
83 83
84 84
85 85 class CommitDoesNotExistError(CommitError):
86 86 pass
87 87
88 88
89 89 class CommittingError(RepositoryError):
90 90 """
91 91 Exceptions happening while creating a new commit
92 92 """
93 93
94 94
95 95 class NothingChangedError(CommittingError):
96 96 pass
97 97
98 98
99 99 class NodeError(VCSError):
100 100 pass
101 101
102 102
103 103 class RemovedFileNodeError(NodeError):
104 104 pass
105 105
106 106
107 107 class NodeAlreadyExistsError(CommittingError):
108 108 pass
109 109
110 110
111 111 class NodeAlreadyChangedError(CommittingError):
112 112 pass
113 113
114 114
115 115 class NodeDoesNotExistError(CommittingError):
116 116 pass
117 117
118 118
119 119 class NodeNotChangedError(CommittingError):
120 120 pass
121 121
122 122
123 123 class NodeAlreadyAddedError(CommittingError):
124 124 pass
125 125
126 126
127 127 class NodeAlreadyRemovedError(CommittingError):
128 128 pass
129 129
130 130
131 class SubrepoMergeError(RepositoryError):
132 """
133 This happens if we try to merge a repository which contains subrepos and
134 the subrepos cannot be merged. The subrepos are not merged itself but
135 their references in the root repo are merged.
136 """
137
138
139 131 class ImproperArchiveTypeError(VCSError):
140 132 pass
141 133
142 134
143 135 class CommandError(VCSError):
144 136 pass
145 137
146 138
147 139 class UnhandledException(VCSError):
148 140 """
149 141 Signals that something unexpected went wrong.
150 142
151 143 This usually means we have a programming error on the side of the VCSServer
152 144 and should inspect the logfile of the VCSServer to find more details.
153 145 """
154 146
155 147
156 148 _EXCEPTION_MAP = {
157 149 'abort': RepositoryError,
158 150 'archive': ImproperArchiveTypeError,
159 151 'error': RepositoryError,
160 152 'lookup': CommitDoesNotExistError,
161 153 'repo_locked': RepositoryError,
162 154 'requirement': RepositoryRequirementError,
163 155 'unhandled': UnhandledException,
164 156 # TODO: johbo: Define our own exception for this and stop abusing
165 157 # urllib's exception class.
166 158 'url_error': urllib2.URLError,
167 'subrepo_merge_error': SubrepoMergeError,
168 159 }
169 160
170 161
171 162 def map_vcs_exceptions(func):
172 163 """
173 164 Utility to decorate functions so that plain exceptions are translated.
174 165
175 166 The translation is based on `exc_map` which maps a `str` indicating
176 167 the error type into an exception class representing this error inside
177 168 of the vcs layer.
178 169 """
179 170
180 171 @functools.wraps(func)
181 172 def wrapper(*args, **kwargs):
182 173 try:
183 174 return func(*args, **kwargs)
184 175 except Exception as e:
185 176 # The error middleware adds information if it finds
186 177 # __traceback_info__ in a frame object. This way the remote
187 178 # traceback information is made available in error reports.
188 179 remote_tb = getattr(e, '_pyroTraceback', None)
189 180 if remote_tb:
190 181 __traceback_info__ = (
191 182 'Found Pyro4 remote traceback information:\n\n' +
192 183 '\n'.join(remote_tb))
193 184
194 185 # Avoid that remote_tb also appears in the frame
195 186 del remote_tb
196 187
197 188 # Special vcs errors had an attribute "_vcs_kind" which is used
198 189 # to translate them to the proper exception class in the vcs
199 190 # client layer.
200 191 kind = getattr(e, '_vcs_kind', None)
201 192 if kind:
202 193 raise _EXCEPTION_MAP[kind](*e.args)
203 194 else:
204 195 raise
205 196 return wrapper
@@ -1,1317 +1,1314 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31
32 32 from pylons.i18n.translation import _
33 33 from pylons.i18n.translation import lazy_ugettext
34 34 from sqlalchemy import or_
35 35
36 36 from rhodecode.lib import helpers as h, hooks_utils, diffs
37 37 from rhodecode.lib.compat import OrderedDict
38 38 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
39 39 from rhodecode.lib.markup_renderer import (
40 40 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
41 41 from rhodecode.lib.utils import action_logger
42 42 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
43 43 from rhodecode.lib.vcs.backends.base import (
44 44 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
45 45 from rhodecode.lib.vcs.conf import settings as vcs_settings
46 46 from rhodecode.lib.vcs.exceptions import (
47 47 CommitDoesNotExistError, EmptyRepositoryError)
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.changeset_status import ChangesetStatusModel
50 50 from rhodecode.model.comment import ChangesetCommentsModel
51 51 from rhodecode.model.db import (
52 52 PullRequest, PullRequestReviewers, ChangesetStatus,
53 53 PullRequestVersion, ChangesetComment)
54 54 from rhodecode.model.meta import Session
55 55 from rhodecode.model.notification import NotificationModel, \
56 56 EmailNotificationModel
57 57 from rhodecode.model.scm import ScmModel
58 58 from rhodecode.model.settings import VcsSettingsModel
59 59
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63
# Data structure to hold the response data when updating commits during a pull
# request update.
# ``executed``: whether the update actually happened; ``reason``: an
# ``UpdateFailureReason`` member; ``new``/``old``: new version / original PR;
# ``changes``: commit-id change tuple (or None when not executed).
UpdateResponse = namedtuple(
    'UpdateResponse', 'executed, reason, new, old, changes')
68 68
69 69
70 70 class PullRequestModel(BaseModel):
71 71
    # Model class handled by BaseModel helpers such as ``_get_instance``.
    cls = PullRequest

    # Number of diff context lines used when generating update diffs.
    DIFF_CONTEXT = 3
75 75
    # Human readable, lazily translated messages for each merge-check
    # outcome; keys are ``MergeFailureReason`` members.
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            'This pull request cannot be merged because of an unhandled'
            ' exception.'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of conflicts.'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            'This pull request could not be merged because push to target'
            ' failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            'This pull request cannot be merged because the target is not a'
            ' head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            'This pull request cannot be merged because the source contains'
            ' more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            'This pull request cannot be merged because the target has'
            ' multiple heads.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            'This pull request cannot be merged because the target repository'
            ' is locked.'),
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            'This pull request cannot be merged because the target or the '
            'source reference is missing.'),
        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be merged because the target '
            'reference is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be merged because the source '
            'reference is missing.'),
    }
112 109
    # Human readable, lazily translated messages for each update outcome;
    # keys are ``UpdateFailureReason`` members.
    # NOTE(review): ``WRONG_REF_TPYE`` mirrors a misspelled member name on
    # ``UpdateFailureReason`` (defined elsewhere); rename it there first.
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source reference is already '
            'up to date.'),
        UpdateFailureReason.WRONG_REF_TPYE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
131 128
132 129 def __get_pull_request(self, pull_request):
133 130 return self._get_instance(PullRequest, pull_request)
134 131
135 132 def _check_perms(self, perms, pull_request, user, api=False):
136 133 if not api:
137 134 return h.HasRepoPermissionAny(*perms)(
138 135 user=user, repo_name=pull_request.target_repo.repo_name)
139 136 else:
140 137 return h.HasRepoPermissionAnyApi(*perms)(
141 138 user=user, repo_name=pull_request.target_repo.repo_name)
142 139
143 140 def check_user_read(self, pull_request, user, api=False):
144 141 _perms = ('repository.admin', 'repository.write', 'repository.read',)
145 142 return self._check_perms(_perms, pull_request, user, api)
146 143
147 144 def check_user_merge(self, pull_request, user, api=False):
148 145 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
149 146 return self._check_perms(_perms, pull_request, user, api)
150 147
151 148 def check_user_update(self, pull_request, user, api=False):
152 149 owner = user.user_id == pull_request.user_id
153 150 return self.check_user_merge(pull_request, user, api) or owner
154 151
155 152 def check_user_delete(self, pull_request, user):
156 153 owner = user.user_id == pull_request.user_id
157 154 _perms = ('repository.admin')
158 155 return self._check_perms(_perms, pull_request, user) or owner
159 156
160 157 def check_user_change_status(self, pull_request, user, api=False):
161 158 reviewer = user.user_id in [x.user_id for x in
162 159 pull_request.reviewers]
163 160 return self.check_user_update(pull_request, user, api) or reviewer
164 161
165 162 def get(self, pull_request):
166 163 return self.__get_pull_request(pull_request)
167 164
168 165 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
169 166 opened_by=None, order_by=None,
170 167 order_dir='desc'):
171 168 repo = None
172 169 if repo_name:
173 170 repo = self._get_repo(repo_name)
174 171
175 172 q = PullRequest.query()
176 173
177 174 # source or target
178 175 if repo and source:
179 176 q = q.filter(PullRequest.source_repo == repo)
180 177 elif repo:
181 178 q = q.filter(PullRequest.target_repo == repo)
182 179
183 180 # closed,opened
184 181 if statuses:
185 182 q = q.filter(PullRequest.status.in_(statuses))
186 183
187 184 # opened by filter
188 185 if opened_by:
189 186 q = q.filter(PullRequest.user_id.in_(opened_by))
190 187
191 188 if order_by:
192 189 order_map = {
193 190 'name_raw': PullRequest.pull_request_id,
194 191 'title': PullRequest.title,
195 192 'updated_on_raw': PullRequest.updated_on,
196 193 'target_repo': PullRequest.target_repo_id
197 194 }
198 195 if order_dir == 'asc':
199 196 q = q.order_by(order_map[order_by].asc())
200 197 else:
201 198 q = q.order_by(order_map[order_by].desc())
202 199
203 200 return q
204 201
205 202 def count_all(self, repo_name, source=False, statuses=None,
206 203 opened_by=None):
207 204 """
208 205 Count the number of pull requests for a specific repository.
209 206
210 207 :param repo_name: target or source repo
211 208 :param source: boolean flag to specify if repo_name refers to source
212 209 :param statuses: list of pull request statuses
213 210 :param opened_by: author user of the pull request
214 211 :returns: int number of pull requests
215 212 """
216 213 q = self._prepare_get_all_query(
217 214 repo_name, source=source, statuses=statuses, opened_by=opened_by)
218 215
219 216 return q.count()
220 217
221 218 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
222 219 offset=0, length=None, order_by=None, order_dir='desc'):
223 220 """
224 221 Get all pull requests for a specific repository.
225 222
226 223 :param repo_name: target or source repo
227 224 :param source: boolean flag to specify if repo_name refers to source
228 225 :param statuses: list of pull request statuses
229 226 :param opened_by: author user of the pull request
230 227 :param offset: pagination offset
231 228 :param length: length of returned list
232 229 :param order_by: order of the returned list
233 230 :param order_dir: 'asc' or 'desc' ordering direction
234 231 :returns: list of pull requests
235 232 """
236 233 q = self._prepare_get_all_query(
237 234 repo_name, source=source, statuses=statuses, opened_by=opened_by,
238 235 order_by=order_by, order_dir=order_dir)
239 236
240 237 if length:
241 238 pull_requests = q.limit(length).offset(offset).all()
242 239 else:
243 240 pull_requests = q.all()
244 241
245 242 return pull_requests
246 243
247 244 def count_awaiting_review(self, repo_name, source=False, statuses=None,
248 245 opened_by=None):
249 246 """
250 247 Count the number of pull requests for a specific repository that are
251 248 awaiting review.
252 249
253 250 :param repo_name: target or source repo
254 251 :param source: boolean flag to specify if repo_name refers to source
255 252 :param statuses: list of pull request statuses
256 253 :param opened_by: author user of the pull request
257 254 :returns: int number of pull requests
258 255 """
259 256 pull_requests = self.get_awaiting_review(
260 257 repo_name, source=source, statuses=statuses, opened_by=opened_by)
261 258
262 259 return len(pull_requests)
263 260
264 261 def get_awaiting_review(self, repo_name, source=False, statuses=None,
265 262 opened_by=None, offset=0, length=None,
266 263 order_by=None, order_dir='desc'):
267 264 """
268 265 Get all pull requests for a specific repository that are awaiting
269 266 review.
270 267
271 268 :param repo_name: target or source repo
272 269 :param source: boolean flag to specify if repo_name refers to source
273 270 :param statuses: list of pull request statuses
274 271 :param opened_by: author user of the pull request
275 272 :param offset: pagination offset
276 273 :param length: length of returned list
277 274 :param order_by: order of the returned list
278 275 :param order_dir: 'asc' or 'desc' ordering direction
279 276 :returns: list of pull requests
280 277 """
281 278 pull_requests = self.get_all(
282 279 repo_name, source=source, statuses=statuses, opened_by=opened_by,
283 280 order_by=order_by, order_dir=order_dir)
284 281
285 282 _filtered_pull_requests = []
286 283 for pr in pull_requests:
287 284 status = pr.calculated_review_status()
288 285 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
289 286 ChangesetStatus.STATUS_UNDER_REVIEW]:
290 287 _filtered_pull_requests.append(pr)
291 288 if length:
292 289 return _filtered_pull_requests[offset:offset+length]
293 290 else:
294 291 return _filtered_pull_requests
295 292
296 293 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
297 294 opened_by=None, user_id=None):
298 295 """
299 296 Count the number of pull requests for a specific repository that are
300 297 awaiting review from a specific user.
301 298
302 299 :param repo_name: target or source repo
303 300 :param source: boolean flag to specify if repo_name refers to source
304 301 :param statuses: list of pull request statuses
305 302 :param opened_by: author user of the pull request
306 303 :param user_id: reviewer user of the pull request
307 304 :returns: int number of pull requests
308 305 """
309 306 pull_requests = self.get_awaiting_my_review(
310 307 repo_name, source=source, statuses=statuses, opened_by=opened_by,
311 308 user_id=user_id)
312 309
313 310 return len(pull_requests)
314 311
315 312 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
316 313 opened_by=None, user_id=None, offset=0,
317 314 length=None, order_by=None, order_dir='desc'):
318 315 """
319 316 Get all pull requests for a specific repository that are awaiting
320 317 review from a specific user.
321 318
322 319 :param repo_name: target or source repo
323 320 :param source: boolean flag to specify if repo_name refers to source
324 321 :param statuses: list of pull request statuses
325 322 :param opened_by: author user of the pull request
326 323 :param user_id: reviewer user of the pull request
327 324 :param offset: pagination offset
328 325 :param length: length of returned list
329 326 :param order_by: order of the returned list
330 327 :param order_dir: 'asc' or 'desc' ordering direction
331 328 :returns: list of pull requests
332 329 """
333 330 pull_requests = self.get_all(
334 331 repo_name, source=source, statuses=statuses, opened_by=opened_by,
335 332 order_by=order_by, order_dir=order_dir)
336 333
337 334 _my = PullRequestModel().get_not_reviewed(user_id)
338 335 my_participation = []
339 336 for pr in pull_requests:
340 337 if pr in _my:
341 338 my_participation.append(pr)
342 339 _filtered_pull_requests = my_participation
343 340 if length:
344 341 return _filtered_pull_requests[offset:offset+length]
345 342 else:
346 343 return _filtered_pull_requests
347 344
348 345 def get_not_reviewed(self, user_id):
349 346 return [
350 347 x.pull_request for x in PullRequestReviewers.query().filter(
351 348 PullRequestReviewers.user_id == user_id).all()
352 349 ]
353 350
354 351 def _prepare_participating_query(self, user_id=None, statuses=None,
355 352 order_by=None, order_dir='desc'):
356 353 q = PullRequest.query()
357 354 if user_id:
358 355 reviewers_subquery = Session().query(
359 356 PullRequestReviewers.pull_request_id).filter(
360 357 PullRequestReviewers.user_id == user_id).subquery()
361 358 user_filter= or_(
362 359 PullRequest.user_id == user_id,
363 360 PullRequest.pull_request_id.in_(reviewers_subquery)
364 361 )
365 362 q = PullRequest.query().filter(user_filter)
366 363
367 364 # closed,opened
368 365 if statuses:
369 366 q = q.filter(PullRequest.status.in_(statuses))
370 367
371 368 if order_by:
372 369 order_map = {
373 370 'name_raw': PullRequest.pull_request_id,
374 371 'title': PullRequest.title,
375 372 'updated_on_raw': PullRequest.updated_on,
376 373 'target_repo': PullRequest.target_repo_id
377 374 }
378 375 if order_dir == 'asc':
379 376 q = q.order_by(order_map[order_by].asc())
380 377 else:
381 378 q = q.order_by(order_map[order_by].desc())
382 379
383 380 return q
384 381
385 382 def count_im_participating_in(self, user_id=None, statuses=None):
386 383 q = self._prepare_participating_query(user_id, statuses=statuses)
387 384 return q.count()
388 385
389 386 def get_im_participating_in(
390 387 self, user_id=None, statuses=None, offset=0,
391 388 length=None, order_by=None, order_dir='desc'):
392 389 """
393 390 Get all Pull requests that i'm participating in, or i have opened
394 391 """
395 392
396 393 q = self._prepare_participating_query(
397 394 user_id, statuses=statuses, order_by=order_by,
398 395 order_dir=order_dir)
399 396
400 397 if length:
401 398 pull_requests = q.limit(length).offset(offset).all()
402 399 else:
403 400 pull_requests = q.all()
404 401
405 402 return pull_requests
406 403
    def get_versions(self, pull_request):
        """
        Return the versions of a pull request ordered by version id
        ascending (oldest version first).

        NOTE(review): the previous docstring claimed "descending", but the
        query sorts by ``pull_request_version_id.asc()``.
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
415 412
416 413 def create(self, created_by, source_repo, source_ref, target_repo,
417 414 target_ref, revisions, reviewers, title, description=None):
418 415 created_by_user = self._get_user(created_by)
419 416 source_repo = self._get_repo(source_repo)
420 417 target_repo = self._get_repo(target_repo)
421 418
422 419 pull_request = PullRequest()
423 420 pull_request.source_repo = source_repo
424 421 pull_request.source_ref = source_ref
425 422 pull_request.target_repo = target_repo
426 423 pull_request.target_ref = target_ref
427 424 pull_request.revisions = revisions
428 425 pull_request.title = title
429 426 pull_request.description = description
430 427 pull_request.author = created_by_user
431 428
432 429 Session().add(pull_request)
433 430 Session().flush()
434 431
435 432 reviewer_ids = set()
436 433 # members / reviewers
437 434 for reviewer_object in reviewers:
438 435 if isinstance(reviewer_object, tuple):
439 436 user_id, reasons = reviewer_object
440 437 else:
441 438 user_id, reasons = reviewer_object, []
442 439
443 440 user = self._get_user(user_id)
444 441 reviewer_ids.add(user.user_id)
445 442
446 443 reviewer = PullRequestReviewers(user, pull_request, reasons)
447 444 Session().add(reviewer)
448 445
449 446 # Set approval status to "Under Review" for all commits which are
450 447 # part of this pull request.
451 448 ChangesetStatusModel().set_status(
452 449 repo=target_repo,
453 450 status=ChangesetStatus.STATUS_UNDER_REVIEW,
454 451 user=created_by_user,
455 452 pull_request=pull_request
456 453 )
457 454
458 455 self.notify_reviewers(pull_request, reviewer_ids)
459 456 self._trigger_pull_request_hook(
460 457 pull_request, created_by_user, 'create')
461 458
462 459 return pull_request
463 460
464 461 def _trigger_pull_request_hook(self, pull_request, user, action):
465 462 pull_request = self.__get_pull_request(pull_request)
466 463 target_scm = pull_request.target_repo.scm_instance()
467 464 if action == 'create':
468 465 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
469 466 elif action == 'merge':
470 467 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
471 468 elif action == 'close':
472 469 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
473 470 elif action == 'review_status_change':
474 471 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
475 472 elif action == 'update':
476 473 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
477 474 else:
478 475 return
479 476
480 477 trigger_hook(
481 478 username=user.username,
482 479 repo_name=pull_request.target_repo.repo_name,
483 480 repo_alias=target_scm.alias,
484 481 pull_request=pull_request)
485 482
486 483 def _get_commit_ids(self, pull_request):
487 484 """
488 485 Return the commit ids of the merged pull request.
489 486
490 487 This method is not dealing correctly yet with the lack of autoupdates
491 488 nor with the implicit target updates.
492 489 For example: if a commit in the source repo is already in the target it
493 490 will be reported anyways.
494 491 """
495 492 merge_rev = pull_request.merge_rev
496 493 if merge_rev is None:
497 494 raise ValueError('This pull request was not merged yet')
498 495
499 496 commit_ids = list(pull_request.revisions)
500 497 if merge_rev not in commit_ids:
501 498 commit_ids.append(merge_rev)
502 499
503 500 return commit_ids
504 501
505 502 def merge(self, pull_request, user, extras):
506 503 log.debug("Merging pull request %s", pull_request.pull_request_id)
507 504 merge_state = self._merge_pull_request(pull_request, user, extras)
508 505 if merge_state.executed:
509 506 log.debug(
510 507 "Merge was successful, updating the pull request comments.")
511 508 self._comment_and_close_pr(pull_request, user, merge_state)
512 509 self._log_action('user_merged_pull_request', user, pull_request)
513 510 else:
514 511 log.warn("Merge failed, not updating the pull request.")
515 512 return merge_state
516 513
    def _merge_pull_request(self, pull_request, user, extras):
        """
        Perform the actual VCS merge of *pull_request* on behalf of *user*.

        Refreshes the target reference, composes the merge commit message,
        and runs the merge inside a callback daemon so server-side hooks
        can report back. Returns the backend's merge state object.
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()
        # make sure we merge against the current state of the target ref
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        message = _(
            'Merge pull request #%(pr_id)s from '
            '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
            'pr_id': pull_request.pull_request_id,
            'source_repo': source_vcs.name,
            'source_ref_name': pull_request.source_ref_parts.name,
            'pr_title': pull_request.title
        }

        workspace_id = self._workspace_id(pull_request)
        use_rebase = self._use_rebase_for_merging(pull_request)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
            merge_state = target_vcs.merge(
                target_ref, source_vcs, pull_request.source_ref_parts,
                workspace_id, user_name=user.username,
                user_email=user.email, message=message, use_rebase=use_rebase)
        return merge_state
549 546
550 547 def _comment_and_close_pr(self, pull_request, user, merge_state):
551 548 pull_request.merge_rev = merge_state.merge_ref.commit_id
552 549 pull_request.updated_on = datetime.datetime.now()
553 550
554 551 ChangesetCommentsModel().create(
555 552 text=unicode(_('Pull request merged and closed')),
556 553 repo=pull_request.target_repo.repo_id,
557 554 user=user.user_id,
558 555 pull_request=pull_request.pull_request_id,
559 556 f_path=None,
560 557 line_no=None,
561 558 closing_pr=True
562 559 )
563 560
564 561 Session().add(pull_request)
565 562 Session().flush()
566 563 # TODO: paris: replace invalidation with less radical solution
567 564 ScmModel().mark_for_invalidation(
568 565 pull_request.target_repo.repo_name)
569 566 self._trigger_pull_request_hook(pull_request, user, 'merge')
570 567
571 568 def has_valid_update_type(self, pull_request):
572 569 source_ref_type = pull_request.source_ref_parts.type
573 570 return source_ref_type in ['book', 'branch', 'tag']
574 571
    def update_commits(self, pull_request):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action
        """
        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        # only named refs (book/branch/tag) can move and thus be updated
        if not self.has_valid_update_type(pull_request):
            log.debug(
                "Skipping update of pull request %s due to ref type: %s",
                pull_request, source_ref_type)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.WRONG_REF_TPYE,
                old=pull_request, new=None, changes=None)

        source_repo = pull_request.source_repo.scm_instance()
        try:
            source_commit = source_repo.get_commit(commit_id=source_ref_name)
        except CommitDoesNotExistError:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_SOURCE_REF,
                old=pull_request, new=None, changes=None)

        # the named ref still points at the recorded commit: nothing to do
        if source_ref_id == source_commit.raw_id:
            log.debug("Nothing changed in pull request %s", pull_request)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.NO_CHANGE,
                old=pull_request, new=None, changes=None)

        # Finally there is a need for an update: snapshot the current state
        # as a new version and attach existing comments to that version.
        pull_request_version = self._create_version_from_snapshot(pull_request)
        self._link_comments_to_version(pull_request_version)

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id
        target_repo = pull_request.target_repo.scm_instance()

        try:
            # named target refs are resolved by name, others by commit id
            if target_ref_type in ('tag', 'branch', 'book'):
                target_commit = target_repo.get_commit(target_ref_name)
            else:
                target_commit = target_repo.get_commit(target_ref_id)
        except CommitDoesNotExistError:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, changes=None)

        # re-compute commit ids
        old_commit_ids = set(pull_request.revisions)
        pre_load = ["author", "branch", "date", "message"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo,
            merge=True,
            pre_load=pre_load)

        ancestor = target_repo.get_common_ancestor(
            target_commit.raw_id, source_commit.raw_id, source_repo)

        # re-point the stored refs at the freshly resolved commits
        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor)
        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = set(pull_request.revisions)

        changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # mark inline comments whose diff context disappeared as outdated
        ChangesetCommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # Add an automatic comment to the pull request
        update_comment = ChangesetCommentsModel().create(
            text=self._render_update_message(changes, file_changes),
            repo=pull_request.target_repo,
            user=pull_request.author,
            pull_request=pull_request,
            send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

        # Update status to "Under Review" for added commits
        for commit_id in changes.added:
            ChangesetStatusModel().set_status(
                repo=pull_request.source_repo,
                status=ChangesetStatus.STATUS_UNDER_REVIEW,
                comment=update_comment,
                user=pull_request.author,
                pull_request=pull_request,
                revision=commit_id)

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            changes.added, changes.common, changes.removed)
        log.debug('Updated pull request with the following file changes: %s',
                  file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)
        Session().commit()
        self._trigger_pull_request_hook(pull_request, pull_request.author,
                                        'update')

        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version, changes=changes)
702 699
703 700 def _create_version_from_snapshot(self, pull_request):
704 701 version = PullRequestVersion()
705 702 version.title = pull_request.title
706 703 version.description = pull_request.description
707 704 version.status = pull_request.status
708 705 version.created_on = pull_request.created_on
709 706 version.updated_on = pull_request.updated_on
710 707 version.user_id = pull_request.user_id
711 708 version.source_repo = pull_request.source_repo
712 709 version.source_ref = pull_request.source_ref
713 710 version.target_repo = pull_request.target_repo
714 711 version.target_ref = pull_request.target_ref
715 712
716 713 version._last_merge_source_rev = pull_request._last_merge_source_rev
717 714 version._last_merge_target_rev = pull_request._last_merge_target_rev
718 715 version._last_merge_status = pull_request._last_merge_status
719 716 version.shadow_merge_ref = pull_request.shadow_merge_ref
720 717 version.merge_rev = pull_request.merge_rev
721 718
722 719 version.revisions = pull_request.revisions
723 720 version.pull_request = pull_request
724 721 Session().add(version)
725 722 Session().flush()
726 723
727 724 return version
728 725
729 726 def _generate_update_diffs(self, pull_request, pull_request_version):
730 727 diff_context = (
731 728 self.DIFF_CONTEXT +
732 729 ChangesetCommentsModel.needed_extra_diff_context())
733 730 old_diff = self._get_diff_from_pr_or_version(
734 731 pull_request_version, context=diff_context)
735 732 new_diff = self._get_diff_from_pr_or_version(
736 733 pull_request, context=diff_context)
737 734
738 735 old_diff_data = diffs.DiffProcessor(old_diff)
739 736 old_diff_data.prepare()
740 737 new_diff_data = diffs.DiffProcessor(new_diff)
741 738 new_diff_data.prepare()
742 739
743 740 return old_diff_data, new_diff_data
744 741
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        comments = ChangesetComment.query().filter(
            # TODO: johbo: Should we query for the repo at all here?
            # Pending decision on how comments of PRs are to be related
            # to either the source repo, the target repo or no repo at all.
            ChangesetComment.repo_id == pull_request.target_repo.repo_id,
            ChangesetComment.pull_request == pull_request,
            # only comments not yet attached to any version
            ChangesetComment.pull_request_version == None)

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
768 765
769 766 def _calculate_commit_id_changes(self, old_ids, new_ids):
770 767 added = new_ids.difference(old_ids)
771 768 common = old_ids.intersection(new_ids)
772 769 removed = old_ids.difference(new_ids)
773 770 return ChangeTuple(added, common, removed)
774 771
775 772 def _calculate_file_changes(self, old_diff_data, new_diff_data):
776 773
777 774 old_files = OrderedDict()
778 775 for diff_data in old_diff_data.parsed_diff:
779 776 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
780 777
781 778 added_files = []
782 779 modified_files = []
783 780 removed_files = []
784 781 for diff_data in new_diff_data.parsed_diff:
785 782 new_filename = diff_data['filename']
786 783 new_hash = md5_safe(diff_data['raw_diff'])
787 784
788 785 old_hash = old_files.get(new_filename)
789 786 if not old_hash:
790 787 # file is not present in old diff, means it's added
791 788 added_files.append(new_filename)
792 789 else:
793 790 if new_hash != old_hash:
794 791 modified_files.append(new_filename)
795 792 # now remove a file from old, since we have seen it already
796 793 del old_files[new_filename]
797 794
798 795 # removed files is when there are present in old, but not in NEW,
799 796 # since we remove old files that are present in new diff, left-overs
800 797 # if any should be the removed files
801 798 removed_files.extend(old_files.keys())
802 799
803 800 return FileChangeTuple(added_files, modified_files, removed_files)
804 801
805 802 def _render_update_message(self, changes, file_changes):
806 803 """
807 804 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
808 805 so it's always looking the same disregarding on which default
809 806 renderer system is using.
810 807
811 808 :param changes: changes named tuple
812 809 :param file_changes: file changes named tuple
813 810
814 811 """
815 812 new_status = ChangesetStatus.get_status_lbl(
816 813 ChangesetStatus.STATUS_UNDER_REVIEW)
817 814
818 815 changed_files = (
819 816 file_changes.added + file_changes.modified + file_changes.removed)
820 817
821 818 params = {
822 819 'under_review_label': new_status,
823 820 'added_commits': changes.added,
824 821 'removed_commits': changes.removed,
825 822 'changed_files': changed_files,
826 823 'added_files': file_changes.added,
827 824 'modified_files': file_changes.modified,
828 825 'removed_files': file_changes.removed,
829 826 }
830 827 renderer = RstTemplateRenderer()
831 828 return renderer.render('pull_request_update.mako', **params)
832 829
833 830 def edit(self, pull_request, title, description):
834 831 pull_request = self.__get_pull_request(pull_request)
835 832 if pull_request.is_closed():
836 833 raise ValueError('This pull request is closed')
837 834 if title:
838 835 pull_request.title = title
839 836 pull_request.description = description
840 837 pull_request.updated_on = datetime.datetime.now()
841 838 Session().add(pull_request)
842 839
843 840 def update_reviewers(self, pull_request, reviewer_data):
844 841 """
845 842 Update the reviewers in the pull request
846 843
847 844 :param pull_request: the pr to update
848 845 :param reviewer_data: list of tuples [(user, ['reason1', 'reason2'])]
849 846 """
850 847
851 848 reviewers_reasons = {}
852 849 for user_id, reasons in reviewer_data:
853 850 if isinstance(user_id, (int, basestring)):
854 851 user_id = self._get_user(user_id).user_id
855 852 reviewers_reasons[user_id] = reasons
856 853
857 854 reviewers_ids = set(reviewers_reasons.keys())
858 855 pull_request = self.__get_pull_request(pull_request)
859 856 current_reviewers = PullRequestReviewers.query()\
860 857 .filter(PullRequestReviewers.pull_request ==
861 858 pull_request).all()
862 859 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
863 860
864 861 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
865 862 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
866 863
867 864 log.debug("Adding %s reviewers", ids_to_add)
868 865 log.debug("Removing %s reviewers", ids_to_remove)
869 866 changed = False
870 867 for uid in ids_to_add:
871 868 changed = True
872 869 _usr = self._get_user(uid)
873 870 reasons = reviewers_reasons[uid]
874 871 reviewer = PullRequestReviewers(_usr, pull_request, reasons)
875 872 Session().add(reviewer)
876 873
877 874 self.notify_reviewers(pull_request, ids_to_add)
878 875
879 876 for uid in ids_to_remove:
880 877 changed = True
881 878 reviewer = PullRequestReviewers.query()\
882 879 .filter(PullRequestReviewers.user_id == uid,
883 880 PullRequestReviewers.pull_request == pull_request)\
884 881 .scalar()
885 882 if reviewer:
886 883 Session().delete(reviewer)
887 884 if changed:
888 885 pull_request.updated_on = datetime.datetime.now()
889 886 Session().add(pull_request)
890 887
891 888 return ids_to_add, ids_to_remove
892 889
893 890 def get_url(self, pull_request):
894 891 return h.url('pullrequest_show',
895 892 repo_name=safe_str(pull_request.target_repo.repo_name),
896 893 pull_request_id=pull_request.pull_request_id,
897 894 qualified=True)
898 895
899 896 def get_shadow_clone_url(self, pull_request):
900 897 """
901 898 Returns qualified url pointing to the shadow repository. If this pull
902 899 request is closed there is no shadow repository and ``None`` will be
903 900 returned.
904 901 """
905 902 if pull_request.is_closed():
906 903 return None
907 904 else:
908 905 pr_url = urllib.unquote(self.get_url(pull_request))
909 906 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
910 907
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create in-app notifications and emails informing the given reviewers
        about the pull request. A no-op when ``reviewers_ids`` is empty.

        :param pull_request: the pull request the notification is about
        :param reviewers_ids: iterable of user ids to notify
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        # qualified (absolute) urls so the links work from inside an email
        pr_url = h.url(
            'pullrequest_show',
            repo_name=pr_target_repo.repo_name,
            pull_request_id=pull_request_obj.pull_request_id,
            qualified=True,)

        # set some variables for email notification
        pr_target_repo_url = h.url(
            'summary_home',
            repo_name=pr_target_repo.repo_name,
            qualified=True)

        pr_source_repo_url = h.url(
            'summary_home',
            repo_name=pr_source_repo.repo_name,
            qualified=True)

        # pull request specifics: (commit id, message) pairs of all revisions
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
975 972
976 973 def delete(self, pull_request):
977 974 pull_request = self.__get_pull_request(pull_request)
978 975 self._cleanup_merge_workspace(pull_request)
979 976 Session().delete(pull_request)
980 977
981 978 def close_pull_request(self, pull_request, user):
982 979 pull_request = self.__get_pull_request(pull_request)
983 980 self._cleanup_merge_workspace(pull_request)
984 981 pull_request.status = PullRequest.STATUS_CLOSED
985 982 pull_request.updated_on = datetime.datetime.now()
986 983 Session().add(pull_request)
987 984 self._trigger_pull_request_hook(
988 985 pull_request, pull_request.author, 'close')
989 986 self._log_action('user_closed_pull_request', user, pull_request)
990 987
    def close_pull_request_with_comment(self, pull_request, user, repo,
                                        message=None):
        """
        Close the pull request as rejected, leaving a status-change comment.

        :param pull_request: pull request to close
        :param user: user performing the close
        :param repo: repository the comment and status are attached to
        :param message: optional comment text; defaults to a rendered
            status-change message
        """
        status = ChangesetStatus.STATUS_REJECTED

        if not message:
            message = (
                _('Status change %(transition_icon)s %(status)s') % {
                    'transition_icon': '>',
                    'status': ChangesetStatus.get_status_lbl(status)})

        internal_message = _('Closing with') + ' ' + message

        # create the status-change comment marked as closing the PR
        comm = ChangesetCommentsModel().create(
            text=internal_message,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            status_change=ChangesetStatus.get_status_lbl(status),
            status_change_type=status,
            closing_pr=True
        )

        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comm,
            pull_request=pull_request.pull_request_id
        )
        # make sure comment and status are persisted before closing the PR
        Session().flush()

        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)
1026 1023
1027 1024 def merge_status(self, pull_request):
1028 1025 if not self._is_merge_enabled(pull_request):
1029 1026 return False, _('Server-side pull request merging is disabled.')
1030 1027 if pull_request.is_closed():
1031 1028 return False, _('This pull request is closed.')
1032 1029 merge_possible, msg = self._check_repo_requirements(
1033 1030 target=pull_request.target_repo, source=pull_request.source_repo)
1034 1031 if not merge_possible:
1035 1032 return merge_possible, msg
1036 1033
1037 1034 try:
1038 1035 resp = self._try_merge(pull_request)
1039 1036 log.debug("Merge response: %s", resp)
1040 1037 status = resp.possible, self.merge_status_message(
1041 1038 resp.failure_reason)
1042 1039 except NotImplementedError:
1043 1040 status = False, _('Pull request merging is not supported.')
1044 1041
1045 1042 return status
1046 1043
1047 1044 def _check_repo_requirements(self, target, source):
1048 1045 """
1049 1046 Check if `target` and `source` have compatible requirements.
1050 1047
1051 1048 Currently this is just checking for largefiles.
1052 1049 """
1053 1050 target_has_largefiles = self._has_largefiles(target)
1054 1051 source_has_largefiles = self._has_largefiles(source)
1055 1052 merge_possible = True
1056 1053 message = u''
1057 1054
1058 1055 if target_has_largefiles != source_has_largefiles:
1059 1056 merge_possible = False
1060 1057 if source_has_largefiles:
1061 1058 message = _(
1062 1059 'Target repository large files support is disabled.')
1063 1060 else:
1064 1061 message = _(
1065 1062 'Source repository large files support is disabled.')
1066 1063
1067 1064 return merge_possible, message
1068 1065
1069 1066 def _has_largefiles(self, repo):
1070 1067 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1071 1068 'extensions', 'largefiles')
1072 1069 return largefiles_ui and largefiles_ui[0].active
1073 1070
    def _try_merge(self, pull_request):
        """
        Try to merge the pull request and return the merge status.

        Runs a dry-run merge, or reuses the result cached on the pull
        request when neither source nor target tip changed. Returns a
        ``MergeResponse``.
        """
        log.debug(
            "Trying out if the pull request %s can be merged.",
            pull_request.pull_request_id)
        target_vcs = pull_request.target_repo.scm_instance()

        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF)
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            log.debug("The target repository is locked.")
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
        elif self._needs_merge_state_refresh(pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # cached state is still valid: rebuild the response from the
            # values stored on the pull request
            possible = pull_request.\
                _last_merge_status == MergeFailureReason.NONE
            merge_state = MergeResponse(
                possible, False, None, pull_request._last_merge_status)

        return merge_state
1108 1105
1109 1106 def _refresh_reference(self, reference, vcs_repository):
1110 1107 if reference.type in ('branch', 'book'):
1111 1108 name_or_id = reference.name
1112 1109 else:
1113 1110 name_or_id = reference.commit_id
1114 1111 refreshed_commit = vcs_repository.get_commit(name_or_id)
1115 1112 refreshed_reference = Reference(
1116 1113 reference.type, reference.name, refreshed_commit.raw_id)
1117 1114 return refreshed_reference
1118 1115
1119 1116 def _needs_merge_state_refresh(self, pull_request, target_reference):
1120 1117 return not(
1121 1118 pull_request.revisions and
1122 1119 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1123 1120 target_reference.commit_id == pull_request._last_merge_target_rev)
1124 1121
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge and cache its outcome on the pull request.

        :param pull_request: pull request whose merge state is refreshed
        :param target_vcs: vcs instance of the target repository
        :param target_reference: refreshed target ``Reference``
        :return: the ``MergeResponse`` of the dry-run merge
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        use_rebase = self._use_rebase_for_merging(pull_request)
        merge_state = target_vcs.merge(
            target_reference, source_vcs, pull_request.source_ref_parts,
            workspace_id, dry_run=True, use_rebase=use_rebase)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request._last_merge_status = merge_state.failure_reason
            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1144 1141
1145 1142 def _workspace_id(self, pull_request):
1146 1143 workspace_id = 'pr-%s' % pull_request.pull_request_id
1147 1144 return workspace_id
1148 1145
1149 1146 def merge_status_message(self, status_code):
1150 1147 """
1151 1148 Return a human friendly error message for the given merge status code.
1152 1149 """
1153 1150 return self.MERGE_STATUS_MESSAGES[status_code]
1154 1151
1155 1152 def generate_repo_data(self, repo, commit_id=None, branch=None,
1156 1153 bookmark=None):
1157 1154 all_refs, selected_ref = \
1158 1155 self._get_repo_pullrequest_sources(
1159 1156 repo.scm_instance(), commit_id=commit_id,
1160 1157 branch=branch, bookmark=bookmark)
1161 1158
1162 1159 refs_select2 = []
1163 1160 for element in all_refs:
1164 1161 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1165 1162 refs_select2.append({'text': element[1], 'children': children})
1166 1163
1167 1164 return {
1168 1165 'user': {
1169 1166 'user_id': repo.user.user_id,
1170 1167 'username': repo.user.username,
1171 1168 'firstname': repo.user.firstname,
1172 1169 'lastname': repo.user.lastname,
1173 1170 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1174 1171 },
1175 1172 'description': h.chop_at_smart(repo.description, '\n'),
1176 1173 'refs': {
1177 1174 'all_refs': all_refs,
1178 1175 'selected_ref': selected_ref,
1179 1176 'select2_refs': refs_select2
1180 1177 }
1181 1178 }
1182 1179
1183 1180 def generate_pullrequest_title(self, source, source_ref, target):
1184 1181 return u'{source}#{at_ref} to {target}'.format(
1185 1182 source=source,
1186 1183 at_ref=source_ref,
1187 1184 target=target,
1188 1185 )
1189 1186
1190 1187 def _cleanup_merge_workspace(self, pull_request):
1191 1188 # Merging related cleanup
1192 1189 target_scm = pull_request.target_repo.scm_instance()
1193 1190 workspace_id = 'pr-%s' % pull_request.pull_request_id
1194 1191
1195 1192 try:
1196 1193 target_scm.cleanup_merge_workspace(workspace_id)
1197 1194 except NotImplementedError:
1198 1195 pass
1199 1196
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :return: tuple of (grouped refs, selected ref key)
        :raises CommitDoesNotExistError: if an explicitly requested ref is
            not found in the repository
        :raises EmptyRepositoryError: if no ref was requested and the repo
            has no commits at all
        """
        # normalize lookups to str so they compare equal to vcs ref names
        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_str(branch) if branch else None
        bookmark = safe_str(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []
        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # pick the first ref matching either the requested commit
                    # id or this group's match value (branch/bookmark name)
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            # nothing matched: an explicitly requested ref that is missing is
            # an error; otherwise fall back to default branch / latest commit
            ref = commit_id or branch or bookmark
            if ref:
                raise CommitDoesNotExistError(
                    'No commit refs could be found matching: %s' % ref)
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                selected = 'branch:%s:%s' % (
                    repo.DEFAULT_BRANCH_NAME,
                    repo.branches[repo.DEFAULT_BRANCH_NAME]
                )
            elif repo.commit_ids:
                rev = repo.commit_ids[0]
                selected = 'rev:%s:%s' % (rev, rev)
            else:
                raise EmptyRepositoryError()
        return groups, selected
1262 1259
1263 1260 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1264 1261 pull_request = self.__get_pull_request(pull_request)
1265 1262 return self._get_diff_from_pr_or_version(pull_request, context=context)
1266 1263
    def _get_diff_from_pr_or_version(self, pr_or_version, context):
        """
        Compute the diff of a pull request or a pull request version.

        :param pr_or_version: a ``PullRequest`` or ``PullRequestVersion``;
            both expose source/target ref parts and a source repo
        :param context: number of context lines for the diff
        :return: the raw vcs diff of target..source
        """
        source_repo = pr_or_version.source_repo

        # we swap org/other ref since we run a simple diff on one repo
        target_ref_id = pr_or_version.target_ref_parts.commit_id
        source_ref_id = pr_or_version.source_ref_parts.commit_id
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
        vcs_repo = source_repo.scm_instance()

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit, context=context)
        return vcs_diff
1294 1291
1295 1292 def _is_merge_enabled(self, pull_request):
1296 1293 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1297 1294 settings = settings_model.get_general_settings()
1298 1295 return settings.get('rhodecode_pr_merge_enabled', False)
1299 1296
1300 1297 def _use_rebase_for_merging(self, pull_request):
1301 1298 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1302 1299 settings = settings_model.get_general_settings()
1303 1300 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1304 1301
1305 1302 def _log_action(self, action, user, pull_request):
1306 1303 action_logger(
1307 1304 user,
1308 1305 '{action}:{pr_id}'.format(
1309 1306 action=action, pr_id=pull_request.pull_request_id),
1310 1307 pull_request.target_repo)
1311 1308
1312 1309
# Result of _calculate_commit_id_changes: commit ids that were added,
# kept in common, or removed between two versions of a pull request.
ChangeTuple = namedtuple('ChangeTuple',
                         ['added', 'common', 'removed'])

# Result of _calculate_file_changes: filenames added, modified or removed
# between two diffs.
FileChangeTuple = namedtuple('FileChangeTuple',
                             ['added', 'modified', 'removed'])