##// END OF EJS Templates

Compare Commits r1106:490ebeeb75af...r1108:ebe0247cd154

Target:

Source:

Time Author Commit Description
Martin Bornhold
r1106:490ebeeb75af
subrepo: Add merge failure reason code and message for subrepo merge conflicts.
Martin Bornhold
r1107:6bc055e1504d
subrepo: Add exception for subrepo merge errors.
Martin Bornhold
r1108:ebe0247cd154
subrepo: Handle subrepo merge errors.
@@ -1,1545 +1,1549 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24
25 25 import collections
26 26 import datetime
27 27 import itertools
28 28 import logging
29 29 import os
30 30 import time
31 31 import warnings
32 32
33 33 from zope.cachedescriptors.property import Lazy as LazyProperty
34 34
35 35 from rhodecode.lib.utils2 import safe_str, safe_unicode
36 36 from rhodecode.lib.vcs import connection
37 37 from rhodecode.lib.vcs.utils import author_name, author_email
38 38 from rhodecode.lib.vcs.conf import settings
39 39 from rhodecode.lib.vcs.exceptions import (
40 40 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
41 41 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
42 42 NodeDoesNotExistError, NodeNotChangedError, VCSError,
43 43 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
44 44 RepositoryError)
45 45
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
# POSIX file modes used for tree entries (Python 2 octal literals):
# regular non-executable file and executable file respectively.
FILEMODE_DEFAULT = 0100644
FILEMODE_EXECUTABLE = 0100755

# Lightweight pointer to a commit: `type` is the reference kind (presumably
# 'branch'/'tag'/'book' — confirm against backends), `name` its label and
# `commit_id` the commit it resolves to.
Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
# Outcome of a server-side merge attempt; `failure_reason` holds a
# MergeFailureReason value, `merge_ref` the resulting Reference (or None).
MergeResponse = collections.namedtuple(
    'MergeResponse',
    ('possible', 'executed', 'merge_ref', 'failure_reason'))
58 58
class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads. That does not allow to correctly
    # identify the target location. This could only happen for mercurial
    # branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # An involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11
107 111
class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    WRONG_REF_TYPE = 3

    # Deprecated misspelled alias of WRONG_REF_TYPE, kept so existing callers
    # keep working; the stored numeric value is unchanged. Prefer
    # WRONG_REF_TYPE in new code.
    WRONG_REF_TPYE = WRONG_REF_TYPE

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5
137 141
class BaseRepository(object):
    """
    Base Repository for final backends

    .. attribute:: DEFAULT_BRANCH_NAME

       name of default branch (i.e. "trunk" for svn, "master" for git etc.

    .. attribute:: commit_ids

       list of all available commit ids, in ascending order

    .. attribute:: path

       absolute path to the repository

    .. attribute:: bookmarks

       Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
       there are no bookmarks or the backend implementation does not support
       bookmarks.

    .. attribute:: tags

       Mapping from name to :term:`Commit ID` of the tag.

    """

    DEFAULT_BRANCH_NAME = None
    DEFAULT_CONTACT = u"Unknown"
    DEFAULT_DESCRIPTION = u"unknown"
    # 40 zeros: the null revision id shared by git/hg style backends.
    EMPTY_COMMIT_ID = '0' * 40

    path = None

    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be found at the given ``repo_path`` or directory at ``repo_path``
        exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
          would be cloned; requires ``create`` parameter to be set to True -
          raises RepositoryError if src_url is set and create evaluates to
          False
        """
        raise NotImplementedError

    def __repr__(self):
        return '<%s at %s>' % (self.__class__.__name__, self.path)

    def __len__(self):
        # Number of commits in the repository.
        return self.count()

    def __eq__(self, other):
        # Repositories compare equal when same backend class and same path.
        same_instance = isinstance(other, self.__class__)
        return same_instance and other.path == self.path

    def __ne__(self, other):
        return not self.__eq__(other)

    @LazyProperty
    def EMPTY_COMMIT(self):
        """Sentinel :class:`EmptyCommit` bound to this repository's null id."""
        return EmptyCommit(self.EMPTY_COMMIT_ID)

    @LazyProperty
    def alias(self):
        """
        Backend alias (e.g. the key in ``settings.BACKENDS``) whose configured
        class name matches this class.
        """
        for k, v in settings.BACKENDS.items():
            # BACKENDS maps alias -> dotted path; compare the trailing
            # class name against ours.
            if v.split('.')[-1] == str(self.__class__.__name__):
                return k

    @LazyProperty
    def name(self):
        """Repository name, derived from the last path component."""
        return safe_unicode(os.path.basename(self.path))

    @LazyProperty
    def description(self):
        """Repository description; backend specific."""
        raise NotImplementedError

    def refs(self):
        """
        returns a `dict` with branches, bookmarks, tags, and closed_branches
        for this repository
        """
        raise NotImplementedError

    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def size(self):
        """
        Returns combined size in bytes for all repository files
        """
        # Delegates to the tip commit's size.
        tip = self.get_commit()
        return tip.size

    def size_at_commit(self, commit_id):
        """Combined size in bytes of all files at the given commit."""
        commit = self.get_commit(commit_id)
        return commit.size

    def is_empty(self):
        """``True`` when the repository has no commits at all."""
        return not bool(self.commit_ids)

    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link.
        """
        raise NotImplementedError

    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend
        """
        raise NotImplementedError

    # ==========================================================================
    # COMMITS
    # ==========================================================================

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError

    def __iter__(self):
        # Iterate commits in ascending commit_ids order.
        for commit_id in self.commit_ids:
            yield self.get_commit(commit_id=commit_id)

    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, pre_load=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date:
        :param end_date:
        :param branch_name:
        :param pre_load:
        """
        raise NotImplementedError

    def __getitem__(self, key):
        """
        Allows index based access to the commit objects of this repository.
        """
        # Commonly needed attributes are pre-loaded in one go for speed.
        pre_load = ["author", "branch", "date", "message", "parents"]
        if isinstance(key, slice):
            return self._get_range(key, pre_load)
        return self.get_commit(commit_idx=key, pre_load=pre_load)

    def _get_range(self, slice_obj, pre_load):
        # Generator over the commits selected by `slice_obj`.
        for commit_id in self.commit_ids.__getitem__(slice_obj):
            yield self.get_commit(commit_id=commit_id, pre_load=pre_load)

    def count(self):
        """Total number of commits in the repository."""
        return len(self.commit_ids)

    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        raise NotImplementedError

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        raise NotImplementedError

    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
          file. If `path1` is also set, this value is only associated to
          `commit2`.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
          parameter works only for backends which support diff generation for
          different paths. Other backends will raise a `ValueError` if `path1`
          is set and has a different value than `path`.
        """
        raise NotImplementedError

    def strip(self, commit_id, branch=None):
        """
        Strip given commit_id from the repository
        """
        raise NotImplementedError

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return a latest common ancestor commit if one exists for this repo
        `commit_id1` vs `commit_id2` from `repo2`.

        :param commit_id1: Commit id from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        """
        raise NotImplementedError

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. Doing a normal compare (``merge=False``), ``None``
        will be returned as ancestor.

        :param commit_id1: Commit id from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
          returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def merge(self, target_ref, source_repo, source_ref, workspace_id,
              user_name='', user_email='', message='', dry_run=False,
              use_rebase=False):
        """
        Merge the revisions specified in `source_ref` from `source_repo`
        onto the `target_ref` of this repository.

        `source_ref` and `target_ref` are named tuples with the following
        fields `type`, `name` and `commit_id`.

        Returns a MergeResponse named tuple with the following fields
        'possible', 'executed', 'source_commit', 'target_commit',
        'merge_commit'.

        :param target_ref: `target_ref` points to the commit on top of which
          the `source_ref` should be merged.
        :param source_repo: The repository that contains the commits to be
          merged.
        :param source_ref: `source_ref` points to the topmost commit from
          the `source_repo` which should be merged.
        :param workspace_id: `workspace_id` unique identifier.
        :param user_name: Merge commit `user_name`.
        :param user_email: Merge commit `user_email`.
        :param message: Merge commit `message`.
        :param dry_run: If `True` the merge will not take place.
        :param use_rebase: If `True` commits from the source will be rebased
          on top of the target instead of being merged.
        """
        if dry_run:
            # Dry runs still need commit identity/message for the backend,
            # so placeholders are substituted when missing.
            message = message or 'dry_run_merge_message'
            user_email = user_email or 'dry-run-merge@rhodecode.com'
            user_name = user_name or 'Dry-Run User'
        else:
            if not user_name:
                raise ValueError('user_name cannot be empty')
            if not user_email:
                raise ValueError('user_email cannot be empty')
            if not message:
                raise ValueError('message cannot be empty')

        # The merge is performed in a separate "shadow" repository so the
        # real repository is never touched by a failed attempt.
        shadow_repository_path = self._maybe_prepare_merge_workspace(
            workspace_id, target_ref)

        try:
            return self._merge_repo(
                shadow_repository_path, target_ref, source_repo,
                source_ref, message, user_name, user_email, dry_run=dry_run,
                use_rebase=use_rebase)
        except RepositoryError:
            log.exception(
                'Unexpected failure when running merge, dry-run=%s',
                dry_run)
            return MergeResponse(
                False, False, None, MergeFailureReason.UNKNOWN)

    def _merge_repo(self, shadow_repository_path, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False, use_rebase=False):
        """Internal implementation of merge."""
        raise NotImplementedError

    def _maybe_prepare_merge_workspace(self, workspace_id, target_ref):
        """
        Create the merge workspace.

        :param workspace_id: `workspace_id` unique identifier.
        """
        raise NotImplementedError

    def cleanup_merge_workspace(self, workspace_id):
        """
        Remove merge workspace.

        This function MUST not fail in case there is no workspace associated to
        the given `workspace_id`.

        :param workspace_id: `workspace_id` unique identifier.
        """
        raise NotImplementedError

    # ========== #
    # COMMIT API #
    # ========== #

    @LazyProperty
    def in_memory_commit(self):
        """
        Returns :class:`InMemoryCommit` object for this repository.
        """
        raise NotImplementedError

    # ======================== #
    # UTILITIES FOR SUBCLASSES #
    # ======================== #

    def _validate_diff_commits(self, commit1, commit2):
        """
        Validates that the given commits are related to this repository.

        Intended as a utility for sub classes to have a consistent validation
        of input parameters in methods like :meth:`get_diff`.
        """
        self._validate_commit(commit1)
        self._validate_commit(commit2)
        if (isinstance(commit1, EmptyCommit) and
                isinstance(commit2, EmptyCommit)):
            raise ValueError("Cannot compare two empty commits")

    def _validate_commit(self, commit):
        """Ensure `commit` is a BaseCommit belonging to this repository."""
        if not isinstance(commit, BaseCommit):
            raise TypeError(
                "%s is not of type BaseCommit" % repr(commit))
        # EmptyCommit is repository-agnostic, so it is always accepted.
        if commit.repository != self and not isinstance(commit, EmptyCommit):
            raise ValueError(
                "Commit %s must be a valid commit from this repository %s, "
                "related to this repository instead %s." %
                (commit, self, commit.repository))

    def _validate_commit_id(self, commit_id):
        # Python 2: `basestring` covers both str and unicode ids.
        if not isinstance(commit_id, basestring):
            raise TypeError("commit_id must be a string value")

    def _validate_commit_idx(self, commit_idx):
        if not isinstance(commit_idx, (int, long)):
            raise TypeError("commit_idx must be a numeric value")

    def _validate_branch_name(self, branch_name):
        """Raise BranchDoesNotExistError unless `branch_name` is known (or falsy)."""
        if branch_name and branch_name not in self.branches_all:
            msg = ("Branch %s not found in %s" % (branch_name, self))
            raise BranchDoesNotExistError(msg)

    #
    # Supporting deprecated API parts
    # TODO: johbo: consider to move this into a mixin
    #

    @property
    def EMPTY_CHANGESET(self):
        warnings.warn(
            "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
        return self.EMPTY_COMMIT_ID

    @property
    def revisions(self):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        return self.commit_ids

    @revisions.setter
    def revisions(self, value):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        self.commit_ids = value

    def get_changeset(self, revision=None, pre_load=None):
        """Deprecated wrapper around :meth:`get_commit`."""
        warnings.warn("Use get_commit instead", DeprecationWarning)
        commit_id = None
        commit_idx = None
        # The old API accepted either an id string or a numeric index.
        if isinstance(revision, basestring):
            commit_id = revision
        else:
            commit_idx = revision
        return self.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)

    def get_changesets(
            self, start=None, end=None, start_date=None, end_date=None,
            branch_name=None, pre_load=None):
        """Deprecated wrapper around :meth:`get_commits`."""
        warnings.warn("Use get_commits instead", DeprecationWarning)
        start_id = self._revision_to_commit(start)
        end_id = self._revision_to_commit(end)
        return self.get_commits(
            start_id=start_id, end_id=end_id, start_date=start_date,
            end_date=end_date, branch_name=branch_name, pre_load=pre_load)

    def _revision_to_commit(self, revision):
        """
        Translates a revision to a commit_id

        Helps to support the old changeset based API which allows to use
        commit ids and commit indices interchangeable.
        """
        if revision is None:
            return revision

        if isinstance(revision, basestring):
            commit_id = revision
        else:
            commit_id = self.commit_ids[revision]
        return commit_id

    @property
    def in_memory_changeset(self):
        warnings.warn("Use in_memory_commit instead", DeprecationWarning)
        return self.in_memory_commit
598 602
599 603 class BaseCommit(object):
600 604 """
601 605 Each backend should implement it's commit representation.
602 606
603 607 **Attributes**
604 608
605 609 ``repository``
606 610 repository object within which commit exists
607 611
608 612 ``id``
609 613 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
610 614 just ``tip``.
611 615
612 616 ``raw_id``
613 617 raw commit representation (i.e. full 40 length sha for git
614 618 backend)
615 619
616 620 ``short_id``
617 621 shortened (if apply) version of ``raw_id``; it would be simple
618 622 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
619 623 as ``raw_id`` for subversion
620 624
621 625 ``idx``
622 626 commit index
623 627
624 628 ``files``
625 629 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
626 630
627 631 ``dirs``
628 632 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
629 633
630 634 ``nodes``
631 635 combined list of ``Node`` objects
632 636
633 637 ``author``
634 638 author of the commit, as unicode
635 639
636 640 ``message``
637 641 message of the commit, as unicode
638 642
639 643 ``parents``
640 644 list of parent commits
641 645
642 646 """
643 647
644 648 branch = None
645 649 """
646 650 Depending on the backend this should be set to the branch name of the
647 651 commit. Backends not supporting branches on commits should leave this
648 652 value as ``None``.
649 653 """
650 654
651 655 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
652 656 """
653 657 This template is used to generate a default prefix for repository archives
654 658 if no prefix has been specified.
655 659 """
656 660
657 661 def __str__(self):
658 662 return '<%s at %s:%s>' % (
659 663 self.__class__.__name__, self.idx, self.short_id)
660 664
661 665 def __repr__(self):
662 666 return self.__str__()
663 667
664 668 def __unicode__(self):
665 669 return u'%s:%s' % (self.idx, self.short_id)
666 670
667 671 def __eq__(self, other):
668 672 same_instance = isinstance(other, self.__class__)
669 673 return same_instance and self.raw_id == other.raw_id
670 674
671 675 def __json__(self):
672 676 parents = []
673 677 try:
674 678 for parent in self.parents:
675 679 parents.append({'raw_id': parent.raw_id})
676 680 except NotImplementedError:
677 681 # empty commit doesn't have parents implemented
678 682 pass
679 683
680 684 return {
681 685 'short_id': self.short_id,
682 686 'raw_id': self.raw_id,
683 687 'revision': self.idx,
684 688 'message': self.message,
685 689 'date': self.date,
686 690 'author': self.author,
687 691 'parents': parents,
688 692 'branch': self.branch
689 693 }
690 694
    @LazyProperty
    def last(self):
        """
        ``True`` if this is last commit in repository, ``False``
        otherwise; trying to access this attribute while there is no
        commits would raise `EmptyRepositoryError`
        """
        # NOTE(review): the code here raises CommitError when no repository is
        # attached; the EmptyRepositoryError mentioned above would have to come
        # from `commit_ids` access — confirm against backends.
        if self.repository is None:
            raise CommitError("Cannot check if it's most recent commit")
        return self.raw_id == self.repository.commit_ids[-1]
701 705
    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        raise NotImplementedError

    @property
    def merge(self):
        """
        Returns boolean if commit is a merge.
        """
        # A commit with more than one parent is, by definition, a merge.
        return len(self.parents) > 1
715 719
    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """
        raise NotImplementedError

    @LazyProperty
    def id(self):
        """
        Returns string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def short_id(self):
        """
        Returns shortened version of ``raw_id`` attribute, as string,
        identifying this commit, useful for presentation to users.
        """
        raise NotImplementedError

    @LazyProperty
    def idx(self):
        """
        Returns integer identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def committer(self):
        """
        Returns committer for this commit
        """
        raise NotImplementedError
758 762
    @LazyProperty
    def committer_name(self):
        """
        Returns committer name for this commit, parsed out of the full
        ``committer`` string by the shared ``author_name`` helper.
        """

        return author_name(self.committer)

    @LazyProperty
    def committer_email(self):
        """
        Returns committer email address for this commit, parsed out of the
        full ``committer`` string by the shared ``author_email`` helper.
        """

        return author_email(self.committer)

    @LazyProperty
    def author(self):
        """
        Returns author for this commit
        """

        raise NotImplementedError

    @LazyProperty
    def author_name(self):
        """
        Returns author name for this commit, parsed from ``author``.
        """

        return author_name(self.author)

    @LazyProperty
    def author_email(self):
        """
        Returns author email address for this commit, parsed from ``author``.
        """

        return author_email(self.author)
798 802
    def get_file_mode(self, path):
        """
        Returns stat mode of the file at `path`.
        """
        raise NotImplementedError

    def is_link(self, path):
        """
        Returns ``True`` if given `path` is a symlink
        """
        raise NotImplementedError

    def get_file_content(self, path):
        """
        Returns content of the file at the given `path`.
        """
        raise NotImplementedError

    def get_file_size(self, path):
        """
        Returns size of the file at the given `path`.
        """
        raise NotImplementedError

    def get_file_commit(self, path, pre_load=None):
        """
        Returns last commit of the file at the given `path`.

        :param pre_load: Optional. List of commit attributes to load.
        """
        # The history is returned newest-first, so element 0 is the last
        # commit that touched the file.
        return self.get_file_history(path, limit=1, pre_load=pre_load)[0]
830 834
    def get_file_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of :class:`BaseCommit`
        objects for which file at given `path` has been modified.

        :param limit: Optional. Allows to limit the size of the returned
           history. This is intended as a hint to the underlying backend, so
           that it can apply optimizations depending on the limit.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, sha, commit lazy loader and line

        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``.

        :raises ``CommitError``: if node at the given ``path`` is not
           instance of ``DirNode``
        """
        raise NotImplementedError

    def get_node(self, path):
        """
        Returns ``Node`` object from the given ``path``.

        :raises ``NodeDoesNotExistError``: if there is no node at the given
           ``path``
        """
        raise NotImplementedError

    def get_largefile_node(self, path):
        """
        Returns the path to largefile from Mercurial storage.
        """
        raise NotImplementedError
876 880
    def archive_repo(self, file_path, kind='tgz', subrepos=None,
                     prefix=None, write_metadata=False, mtime=None):
        """
        Creates an archive containing the contents of the repository.

        :param file_path: path to the file which to create the archive.
        :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
        :param subrepos: NOTE(review) accepted but never used in this body —
            confirm whether subrepo inclusion was intended here.
        :param prefix: name of root directory in archive.
            Default is repository name and commit's short_id joined with dash:
            ``"{repo_name}-{short_id}"``.
        :param write_metadata: write a metadata file into archive.
        :param mtime: custom modification time for archive creation, defaults
            to time.time() if not given.

        :raise VCSError: If prefix has a problem.
        """
        allowed_kinds = settings.ARCHIVE_SPECS.keys()
        if kind not in allowed_kinds:
            raise ImproperArchiveTypeError(
                'Archive kind (%s) not supported use one of %s' %
                (kind, allowed_kinds))

        prefix = self._validate_archive_prefix(prefix)

        # Default archive timestamp is this commit's date.
        mtime = mtime or time.mktime(self.date.timetuple())

        # Collect (path, mode, is_link, raw content) for every file reachable
        # from the repository root at this commit.
        file_info = []
        cur_rev = self.repository.get_commit(commit_id=self.raw_id)
        for _r, _d, files in cur_rev.walk('/'):
            for f in files:
                f_path = os.path.join(prefix, f.path)
                file_info.append(
                    (f_path, f.mode, f.is_link(), f.raw_bytes))

        if write_metadata:
            metadata = [
                ('repo_name', self.repository.name),
                ('rev', self.raw_id),
                ('create_time', mtime),
                ('branch', self.branch),
                ('tags', ','.join(self.tags)),
            ]
            meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
            # 0644 is a Python 2 octal literal: regular world-readable file.
            file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))

        # The actual archive writing is delegated to the vcsserver connection.
        connection.Hg.archive_repo(file_path, mtime, file_info, kind)
923 927
924 928 def _validate_archive_prefix(self, prefix):
925 929 if prefix is None:
926 930 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
927 931 repo_name=safe_str(self.repository.name),
928 932 short_id=self.short_id)
929 933 elif not isinstance(prefix, str):
930 934 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
931 935 elif prefix.startswith('/'):
932 936 raise VCSError("Prefix cannot start with leading slash")
933 937 elif prefix.strip() == '':
934 938 raise VCSError("Prefix cannot be empty")
935 939 return prefix
936 940
    @LazyProperty
    def root(self):
        """
        Returns ``RootNode`` object for this commit.

        Equivalent to ``self.get_node('')``.
        """
        return self.get_node('')
943 947
944 948 def next(self, branch=None):
945 949 """
946 950 Returns next commit from current, if branch is gives it will return
947 951 next commit belonging to this branch
948 952
949 953 :param branch: show commits within the given named branch
950 954 """
951 955 indexes = xrange(self.idx + 1, self.repository.count())
952 956 return self._find_next(indexes, branch)
953 957
954 958 def prev(self, branch=None):
955 959 """
956 960 Returns previous commit from current, if branch is gives it will
957 961 return previous commit belonging to this branch
958 962
959 963 :param branch: show commit within the given named branch
960 964 """
961 965 indexes = xrange(self.idx - 1, -1, -1)
962 966 return self._find_next(indexes, branch)
963 967
    def _find_next(self, indexes, branch=None):
        """
        Return the first commit whose index is in ``indexes`` and which
        belongs to ``branch`` (any branch when ``branch`` is None).

        :param indexes: iterable of candidate commit indices, in scan order.
        :raises VCSError: if ``branch`` is given but this commit is not on it.
        :raises CommitDoesNotExistError: when no candidate matches.
        """
        if branch and self.branch != branch:
            raise VCSError('Branch option used on commit not belonging '
                           'to that branch')

        for next_idx in indexes:
            commit = self.repository.get_commit(commit_idx=next_idx)
            if branch and branch != commit.branch:
                continue
            return commit
        raise CommitDoesNotExistError
975 979
976 980 def diff(self, ignore_whitespace=True, context=3):
977 981 """
978 982 Returns a `Diff` object representing the change made by this commit.
979 983 """
980 984 parent = (
981 985 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
982 986 diff = self.repository.get_diff(
983 987 parent, self,
984 988 ignore_whitespace=ignore_whitespace,
985 989 context=context)
986 990 return diff
987 991
    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.

        Backend subclasses must override this.
        """
        raise NotImplementedError
994 998
    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.

        Backend subclasses must override this.
        """
        raise NotImplementedError
1001 1005
    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.

        Backend subclasses must override this.
        """
        raise NotImplementedError
1008 1012
1009 1013 @LazyProperty
1010 1014 def size(self):
1011 1015 """
1012 1016 Returns total number of bytes from contents of all filenodes.
1013 1017 """
1014 1018 return sum((node.size for node in self.get_filenodes_generator()))
1015 1019
1016 1020 def walk(self, topurl=''):
1017 1021 """
1018 1022 Similar to os.walk method. Insted of filesystem it walks through
1019 1023 commit starting at given ``topurl``. Returns generator of tuples
1020 1024 (topnode, dirnodes, filenodes).
1021 1025 """
1022 1026 topnode = self.get_node(topurl)
1023 1027 if not topnode.is_dir():
1024 1028 return
1025 1029 yield (topnode, topnode.dirs, topnode.files)
1026 1030 for dirnode in topnode.dirs:
1027 1031 for tup in self.walk(dirnode.path):
1028 1032 yield tup
1029 1033
1030 1034 def get_filenodes_generator(self):
1031 1035 """
1032 1036 Returns generator that yields *all* file nodes.
1033 1037 """
1034 1038 for topnode, dirs, files in self.walk():
1035 1039 for node in files:
1036 1040 yield node
1037 1041
1038 1042 #
1039 1043 # Utilities for sub classes to support consistent behavior
1040 1044 #
1041 1045
1042 1046 def no_node_at_path(self, path):
1043 1047 return NodeDoesNotExistError(
1044 1048 "There is no file nor directory at the given path: "
1045 1049 "'%s' at commit %s" % (path, self.short_id))
1046 1050
1047 1051 def _fix_path(self, path):
1048 1052 """
1049 1053 Paths are stored without trailing slash so we need to get rid off it if
1050 1054 needed.
1051 1055 """
1052 1056 return path.rstrip('/')
1053 1057
1054 1058 #
1055 1059 # Deprecated API based on changesets
1056 1060 #
1057 1061
    @property
    def revision(self):
        """Deprecated alias for :attr:`idx`, kept for the old changeset API."""
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        # Deprecated setter counterpart; forwards to ``idx``.
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1067 1071
    def get_file_changeset(self, path):
        """Deprecated alias for :meth:`get_file_commit`."""
        warnings.warn("Use get_file_commit instead", DeprecationWarning)
        return self.get_file_commit(path)
1071 1075
1072 1076
class BaseChangesetClass(type):
    """
    Metaclass compatibility shim: makes ``isinstance(x, BaseChangeset)``
    succeed for any :class:`BaseCommit` instance.
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1077 1081
1078 1082
class BaseChangeset(BaseCommit):
    """
    Deprecated alias for :class:`BaseCommit`; instantiation emits a
    DeprecationWarning.
    """

    # Python 2 metaclass assignment; see BaseChangesetClass above.
    __metaclass__ = BaseChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1087 1091
1088 1092
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """

    def __init__(self, repository):
        self.repository = repository
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def add(self, *filenodes):
        """
        Marks given ``FileNode`` objects as *to be committed*.

        :raises ``NodeAlreadyExistsError``: if node with same path exists at
            latest commit
        :raises ``NodeAlreadyAddedError``: if node with same path is already
            marked as *added*
        """
        # Check if not already marked as *added* first
        for node in filenodes:
            if node.path in (n.path for n in self.added):
                raise NodeAlreadyAddedError(
                    "Such FileNode %s is already marked for addition"
                    % node.path)
        for node in filenodes:
            self.added.append(node)

    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyExistsError``: if node with same path is already
            marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
            marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
            commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
        # A change only makes sense against an existing head commit.
        try:
            self.repository.get_commit()
        except EmptyRepositoryError:
            raise EmptyRepositoryError(
                "Nothing to change - try to *add* new nodes rather than "
                "changing them")
        for node in filenodes:
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)

    def remove(self, *filenodes):
        """
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
        *removed* in next commit.

        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
            be *removed*
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
            be *changed*
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node is already marked to for removal at %s" % node.path)
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node is already marked to be changed at %s" % node.path)
            # We only mark node as *removed* - real removal is done by
            # commit method
            self.removed.append(node)

    def reset(self):
        """
        Resets this instance to initial state (cleans ``added``, ``changed``
        and ``removed`` lists).
        """
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def get_ipaths(self):
        """
        Returns generator of paths from nodes marked as added, changed or
        removed.
        """
        for node in itertools.chain(self.added, self.changed, self.removed):
            yield node.path

    def get_paths(self):
        """
        Returns list of paths from nodes marked as added, changed or removed.
        """
        return list(self.get_ipaths())

    def check_integrity(self, parents=None):
        """
        Checks in-memory commit's integrity. Also, sets parents if not
        already set.

        :raises CommitError: if any error occurs (i.e.
            ``NodeDoesNotExistError``).
        """
        if not self.parents:
            parents = parents or []
            if len(parents) == 0:
                try:
                    parents = [self.repository.get_commit(), None]
                except EmptyRepositoryError:
                    parents = [None, None]
            elif len(parents) == 1:
                parents += [None]
            self.parents = parents

        # Local parents, only if not None
        parents = [p for p in self.parents if p]

        # Check nodes marked as added
        for p in parents:
            for node in self.added:
                try:
                    p.get_node(node.path)
                except NodeDoesNotExistError:
                    pass
                else:
                    raise NodeAlreadyExistsError(
                        "Node `%s` already exists at %s" % (node.path, p))

        # Check nodes marked as changed
        missing = set(self.changed)
        not_changed = set(self.changed)
        if self.changed and not parents:
            raise NodeDoesNotExistError(str(self.changed[0].path))
        for p in parents:
            for node in self.changed:
                try:
                    old = p.get_node(node.path)
                    missing.remove(node)
                    # if content actually changed, remove node from not_changed
                    if old.content != node.content:
                        not_changed.remove(node)
                except NodeDoesNotExistError:
                    pass
        if self.changed and missing:
            # Bug fix: report a node that is actually missing. The previous
            # code interpolated the stale loop variable ``node`` (always the
            # last element of ``self.changed``), which could name a node that
            # is present in the parents.
            raise NodeDoesNotExistError(
                "Node `%s` marked as modified but missing in parents: %s"
                % (missing.pop().path, parents))

        if self.changed and not_changed:
            raise NodeNotChangedError(
                "Node `%s` wasn't actually changed (parents: %s)"
                % (not_changed.pop().path, parents))

        # Check nodes marked as removed
        if self.removed and not parents:
            raise NodeDoesNotExistError(
                "Cannot remove node at %s as there "
                "were no parents specified" % self.removed[0].path)
        really_removed = set()
        for p in parents:
            for node in self.removed:
                try:
                    p.get_node(node.path)
                    really_removed.add(node)
                except CommitError:
                    pass
        not_removed = set(self.removed) - really_removed
        if not_removed:
            # TODO: johbo: This code branch does not seem to be covered
            raise NodeDoesNotExistError(
                "Cannot remove node at %s from "
                "following parents: %s" % (not_removed, parents))

    def commit(
            self, message, author, parents=None, branch=None, date=None,
            **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created :class:`BaseCommit`. Updates repository's
        attribute `commits`.

        .. note::

            While overriding this method each backend's should call
            ``self.check_integrity(parents)`` in the first place.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param date: ``datetime.datetime`` instance. Defaults to
            ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
            branch would be used.

        :raises ``CommitError``: if any error occurs while committing
        """
        raise NotImplementedError
1317 1321
1318 1322
class BaseInMemoryChangesetClass(type):
    """
    Metaclass compatibility shim: makes
    ``isinstance(x, BaseInMemoryChangeset)`` succeed for any
    :class:`BaseInMemoryCommit` instance.
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseInMemoryCommit)
1323 1327
1324 1328
class BaseInMemoryChangeset(BaseInMemoryCommit):
    """
    Deprecated alias for :class:`BaseInMemoryCommit`; instantiation emits a
    DeprecationWarning.
    """

    # Python 2 metaclass assignment; see BaseInMemoryChangesetClass above.
    __metaclass__ = BaseInMemoryChangesetClass

    def __new__(cls, *args, **kwargs):
        # Bug fix: the message was copy-pasted from BaseChangeset and named
        # the wrong classes ("Use BaseCommit instead of BaseInMemoryCommit").
        warnings.warn(
            "Use BaseInMemoryCommit instead of BaseInMemoryChangeset",
            DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1333 1337
1334 1338
class EmptyCommit(BaseCommit):
    """
    A dummy, empty commit. A custom hash may be supplied via ``commit_id``
    when creating an EmptyCommit.
    """

    def __init__(
            self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit, useful for web
        representation.
        """
        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        """Default branch name of the backend given by ``alias``, if any."""
        if not self.alias:
            return None
        from rhodecode.lib.vcs.backends import get_backend
        return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        """First 12 characters of :attr:`raw_id`."""
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        """Alias of :attr:`raw_id`."""
        return self.raw_id

    def get_file_commit(self, path):
        # The empty commit is its own "last commit" for any path.
        return self

    def get_file_content(self, path):
        # No files exist, so every path has empty content.
        return u''

    def get_file_size(self, path):
        # No files exist, so every path has size zero.
        return 0
1385 1389
1386 1390
class EmptyChangesetClass(type):
    """
    Metaclass compatibility shim: makes ``isinstance(x, EmptyChangeset)``
    succeed for any :class:`EmptyCommit` instance.
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, EmptyCommit)
1391 1395
1392 1396
class EmptyChangeset(EmptyCommit):
    """
    Deprecated alias for :class:`EmptyCommit`; instantiation emits a
    DeprecationWarning.
    """

    # Python 2 metaclass assignment; see EmptyChangesetClass above.
    __metaclass__ = EmptyChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # Bug fix: super() was anchored at EmptyCommit, which skips
        # EmptyCommit itself in the MRO. Anchor at this class, consistent
        # with the BaseChangeset/BaseInMemoryChangeset shims.
        return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        """
        :param cs: commit id (hash) to use, maps to ``commit_id``.
        :param requested_revision: deprecated and not supported anymore.
        :param revision: maps to ``idx`` of the new API.
        """
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        """Deprecated alias for :attr:`idx`."""
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1421 1425
1422 1426
class CollectionGenerator(object):
    """
    Lazy collection of commits, created from a list of commit ids.

    Commits are instantiated on iteration via :meth:`_commit_factory`.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # TODO: (oliver) this isn't currently hooked up
        self.collection_size = None
        self.pre_load = pre_load

    def __len__(self):
        # An explicitly injected size wins over the length of the id list.
        size = self.collection_size
        if size is None:
            size = len(self.commit_ids)
        return size

    def __iter__(self):
        # TODO: johbo: Mercurial passes in commit indices or commit ids
        return (self._commit_factory(commit_id)
                for commit_id in self.commit_ids)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load)

    def __getslice__(self, i, j):
        """
        Returns an iterator of sliced repository (Python 2 slice protocol).
        """
        return self.__class__(
            self.repo, self.commit_ids[i:j], pre_load=self.pre_load)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % len(self)
1459 1463
1460 1464
class Config(object):
    """
    Represents the configuration for a repository.

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        # section name -> {option: value}
        self._values = {}

    def copy(self):
        """Return a copy with independently mutable sections."""
        clone = Config()
        clone._values = dict(
            (section, values.copy())
            for section, values in self._values.items())
        return clone

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        """Iterate (option, value) pairs of ``section`` (empty if absent)."""
        return self._values.get(section, {}).iteritems()

    def get(self, section, option):
        """Return the value for ``option`` or None when not set."""
        section_values = self._values.get(section, {})
        return section_values.get(option)

    def set(self, section, option, value):
        """Set ``option`` in ``section``, creating the section if needed."""
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        """Drop all options of ``section``."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        return [
            (safe_str(section), safe_str(option), safe_str(value))
            for section in self._values
            for option, value in self._values[section].items()]
1506 1510
1507 1511
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for :attr:`_header_re`.
    """

    _header_re = None

    def __init__(self, raw_diff):
        self.raw = raw_diff

    def chunks(self):
        """
        Split the diff into separate ``diff --git a/file b/file`` chunks.

        To keep the chunks consistent we prepend a newline before splitting;
        the last chunk is flagged because it follows a special rule.
        """
        pieces = ('\n' + self.raw).split('\ndiff --git')[1:]
        last_index = len(pieces)
        return (
            DiffChunk(piece, self, position == last_index)
            for position, piece in enumerate(pieces, start=1))
1530 1534
1531 1535
class DiffChunk(object):
    """
    One per-file chunk of a :class:`Diff`, parsed with the parent diff's
    backend-specific ``_header_re``.

    Attributes: ``header`` (dict of header groups), ``diff`` (body after the
    header) and ``raw`` (the full chunk text).
    """

    def __init__(self, chunk, diff, last_chunk):
        self._diff = diff

        # since we split by \ndiff --git that part is lost from original diff
        # we need to re-apply it at the end, EXCEPT ! if it's last chunk
        if not last_chunk:
            chunk += '\n'

        match = self._diff._header_re.match(chunk)
        self.header = match.groupdict()
        self.diff = chunk[match.end():]
        self.raw = chunk
@@ -1,803 +1,808 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24
25 25 import logging
26 26 import binascii
27 27 import os
28 28 import shutil
29 29 import urllib
30 30
31 31 from zope.cachedescriptors.property import Lazy as LazyProperty
32 32
33 33 from rhodecode.lib.compat import OrderedDict
34 34 from rhodecode.lib.datelib import (
35 35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate,
36 36 date_astimestamp)
37 37 from rhodecode.lib.utils import safe_unicode, safe_str
38 38 from rhodecode.lib.vcs import connection
39 39 from rhodecode.lib.vcs.backends.base import (
40 40 BaseRepository, CollectionGenerator, Config, MergeResponse,
41 41 MergeFailureReason, Reference)
42 42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
43 43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
44 44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
47 TagDoesNotExistError, CommitDoesNotExistError)
47 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
48 48
49 49 hexlify = binascii.hexlify
50 50 nullid = "\0" * 20
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
55 55 class MercurialRepository(BaseRepository):
56 56 """
57 57 Mercurial repository backend
58 58 """
59 59 DEFAULT_BRANCH_NAME = 'default'
60 60
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 update_after_clone=False, with_wire=None):
        """
        Raises RepositoryError if repository could not be find at the given
        ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
           it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param update_after_clone=False: sets update of working copy after
           making a clone
        :param with_wire: passed through to the remote connection layer —
           presumably wire protocol options, TODO confirm.
        """
        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else Config()
        # Proxy object for the remote VCS server; all actual Mercurial
        # operations go through this connection.
        self._remote = connection.Hg(
            self.path, self.config, with_wire=with_wire)

        self._init_repo(create, src_url, update_after_clone)

        # caches
        self._commit_ids = {}
84 84
    @LazyProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being lazy
        attribute allows external tools to inject shas from cache.
        """
        commit_ids = self._get_all_commit_ids()
        # Keep the id -> index lookup in sync with the id list.
        self._rebuild_cache(commit_ids)
        return commit_ids
94 94
95 95 def _rebuild_cache(self, commit_ids):
96 96 self._commit_ids = dict((commit_id, index)
97 97 for index, commit_id in enumerate(commit_ids))
98 98
    @LazyProperty
    def branches(self):
        """Active, not-closed branches; see :meth:`_get_branches`."""
        return self._get_branches()
102 102
    @LazyProperty
    def branches_closed(self):
        """Closed branches only; see :meth:`_get_branches`."""
        return self._get_branches(active=False, closed=True)
106 106
107 107 @LazyProperty
108 108 def branches_all(self):
109 109 all_branches = {}
110 110 all_branches.update(self.branches)
111 111 all_branches.update(self.branches_closed)
112 112 return all_branches
113 113
114 114 def _get_branches(self, active=True, closed=False):
115 115 """
116 116 Gets branches for this repository
117 117 Returns only not closed active branches by default
118 118
119 119 :param active: return also active branches
120 120 :param closed: return also closed branches
121 121
122 122 """
123 123 if self.is_empty():
124 124 return {}
125 125
126 126 def get_name(ctx):
127 127 return ctx[0]
128 128
129 129 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
130 130 self._remote.branches(active, closed).items()]
131 131
132 132 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
133 133
    @LazyProperty
    def tags(self):
        """
        Gets tags for this repository
        """
        return self._get_tags()
140 140
141 141 def _get_tags(self):
142 142 if self.is_empty():
143 143 return {}
144 144
145 145 def get_name(ctx):
146 146 return ctx[0]
147 147
148 148 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
149 149 self._remote.tags().items()]
150 150
151 151 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
152 152
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit
        :param local: (via kwargs) create a local tag; defaults to False.

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        local = kwargs.setdefault('local', False)

        if message is None:
            message = "Added tag %s for commit %s" % (name, commit.short_id)

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(
            name, commit.raw_id, message, local, user, date, tz)
        # The remote caches repository state; drop it so the tag is visible.
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self.tags = self._get_tags()
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)
185 185
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)
        if message is None:
            message = "Removed tag %s" % name
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        # Tagging the null revision is how Mercurial records tag removal.
        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        # Refresh cached tags so the removal is reflected immediately.
        self.tags = self._get_tags()
208 208
    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository
        """
        return self._get_bookmarks()
215 215
216 216 def _get_bookmarks(self):
217 217 if self.is_empty():
218 218 return {}
219 219
220 220 def get_name(ctx):
221 221 return ctx[0]
222 222
223 223 _bookmarks = [
224 224 (safe_unicode(n), hexlify(h)) for n, h in
225 225 self._remote.bookmarks().items()]
226 226
227 227 return OrderedDict(sorted(_bookmarks, key=get_name))
228 228
    def _get_all_commit_ids(self):
        # 'visible' asks the remote for non-hidden changesets only.
        return self._remote.get_all_commit_ids('visible')
231 231
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
            ``self.EMPTY_COMMIT`` - in this case, patch showing all
            the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: narrow the diff to this path when given.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
            changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
            shown. Defaults to ``3``.
        :param path1: must equal ``path`` when given; diffing two different
            paths is not supported.
        :raises ValueError: if ``path1`` is given and differs from ``path``.
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = [self.path, path]
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_git=True, opt_ignorews=ignore_whitespace,
            context=context)
        return MercurialDiff(diff)
262 262
    def strip(self, commit_id, branch=None):
        """
        Strip ``commit_id`` from the repository (no working copy update,
        no backup bundle).

        NOTE(review): ``branch`` is accepted for interface compatibility but
        unused here — TODO confirm against the other backends.
        """
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        # Re-read commit ids and rebuild the id -> index cache.
        self.commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(self.commit_ids)
269 269
270 270 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
271 271 if commit_id1 == commit_id2:
272 272 return commit_id1
273 273
274 274 ancestors = self._remote.revs_from_revspec(
275 275 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
276 276 other_path=repo2.path)
277 277 return repo2[ancestors[0]].raw_id if ancestors else None
278 278
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return the commits of ``repo2`` introduced by ``commit_id2`` on top
        of ``commit_id1`` (empty list when the ids are equal).

        :param merge: when True, select all ancestors of ``commit_id2`` that
            are not ancestors of ``commit_id1``; otherwise use the linear
            range ``commit_id1..commit_id2``.
        :param pre_load: optional attribute list forwarded to ``get_commit``.
        """
        if commit_id1 == commit_id2:
            commits = []
        else:
            if merge:
                indexes = self._remote.revs_from_revspec(
                    "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
                    commit_id2, commit_id1, commit_id1, other_path=repo2.path)
            else:
                indexes = self._remote.revs_from_revspec(
                    "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
                    commit_id1, other_path=repo2.path)

            commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
                       for idx in indexes]

        return commits
296 296
    @staticmethod
    def check_url(url, config):
        """
        Check the given url and try to verify that it points at a valid
        repository. Sometimes it may happen that mercurial will issue a
        basic auth request, which can cause the whole API to hang when
        used from python or other external calls.

        On failures it'll raise urllib2.HTTPError; an exception is also
        raised when the return code is non 200.
        """
        # check first if it's not an local url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # Request the _remote to verify the url
        return connection.Hg.check_url(url, config.serialize())
314 314
315 315 @staticmethod
316 316 def is_valid_repository(path):
317 317 return os.path.isdir(os.path.join(path, '.hg'))
318 318
    def _init_repo(self, create, src_url=None, update_after_clone=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `update_after_clone` flag.
        """
        if create and os.path.exists(self.path):
            # Refuse to clobber an existing filesystem location.
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            url = str(self._get_url(src_url))
            # Verify reachability before cloning (see check_url above).
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, update_after_clone)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            # NOTE: 0755 is Python 2 octal syntax.
            os.makedirs(self.path, mode=0755)

        self._remote.localrepository(create)
348 348
    @LazyProperty
    def in_memory_commit(self):
        # Lazily created in-memory commit helper bound to this repository.
        return MercurialInMemoryCommit(self)
352 352
    @LazyProperty
    def description(self):
        """Repository description from hgrc ``[web] description``,
        falling back to ``DEFAULT_DESCRIPTION``."""
        description = self._remote.get_config_value(
            'web', 'description', untrusted=True)
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
358 358
359 359 @LazyProperty
360 360 def contact(self):
361 361 contact = (
362 362 self._remote.get_config_value("web", "contact") or
363 363 self._remote.get_config_value("ui", "username"))
364 364 return safe_unicode(contact or self.DEFAULT_CONTACT)
365 365
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object
        """
        return utcdate_fromtimestamp(self._get_mtime(), makedate()[1])
373 373
    def _get_mtime(self):
        """Timestamp of the latest commit, falling back to filesystem
        mtimes of the '.hg' metadata when no commit can be read."""
        try:
            return date_astimestamp(self.get_commit().date)
        except RepositoryError:
            # fallback to filesystem
            cl_path = os.path.join(self.path, '.hg', "00changelog.i")
            st_path = os.path.join(self.path, '.hg', "store")
            if os.path.exists(cl_path):
                return os.stat(cl_path).st_mtime
            else:
                return os.stat(st_path).st_mtime
385 385
    def _sanitize_commit_idx(self, idx):
        # Note: Mercurial has ``int(-1)`` reserved as not existing id_or_idx
        # number. A `long` is treated in the correct way though. So we convert
        # `int` to `long` here to make sure it is handled correctly.
        # (Python 2 only: `long` does not exist on Python 3.)
        if isinstance(idx, int):
            return long(idx)
        return idx
393 393
    def _get_url(self, url):
        """
        Returns normalized url. If schema is not given, would fall
        to filesystem
        (``file:///``) schema.
        """
        # Python 2: encode to a byte string before the scheme check.
        url = url.encode('utf8')
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.pathname2url(url)
        return url
404 404
    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        """
        # Hooks live in the repository's hgrc file.
        return os.path.join(self.path, '.hg', '.hgrc')
410 410
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        :param commit_id: commit hash to look up (takes precedence).
        :param commit_idx: numeric index into ``commit_ids``; negative
            indices are normalized to positive ones.
        :param pre_load: optional attributes to pre-load on the commit.
        :raises EmptyRepositoryError: when the repository has no commits.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                # Not in the local cache; fall through to the remote
                # lookup at the bottom.
                pass
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            commit_idx = self._sanitize_commit_idx(commit_idx)
            try:
                id_ = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    commit_idx += len(self.commit_ids)
                return MercurialCommit(
                    self, id_, commit_idx, pre_load=pre_load)
            except IndexError:
                # Let the remote resolve the index instead.
                commit_id = commit_idx
        else:
            # No identifier given: default to the tip commit.
            commit_id = "tip"

        # TODO Paris: Ugly hack to "serialize" long for msgpack
        if isinstance(commit_id, long):
            commit_id = float(commit_id)

        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        raw_id, idx = self._remote.lookup(commit_id, both=True)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
450 450
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, pre_load=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set

        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
            ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        branch_ancestors = False
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        # Resolve the boundary commit ids into positional indexes.
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # Make the end boundary inclusive for the slice below.
            end_pos += 1

        # Build a Mercurial revset from the branch/date filters.
        commit_filter = []
        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % branch_name)
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % branch_name)
        if start_date and not end_date:
            commit_filter.append('date(">%s")' % start_date)
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % end_date)
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            commit_filter = map(safe_str, commit_filter)
            revisions = self._remote.rev_range(commit_filter)
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
526 526
    def pull(self, url, commit_ids=None):
        """
        Tries to pull changes from external location.

        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        """
        url = self._get_url(url)
        self._remote.pull(url, commit_ids=commit_ids)
        # Remote state changed; drop cached vcs data.
        self._remote.invalidate_vcs_cache()
537 537
    def _local_clone(self, clone_path):
        """
        Create a local clone of the current repo.

        Hooks are disabled for the clone; the working copy is updated.
        """
        self._remote.clone(self.path, clone_path, update_after_clone=True,
                           hooks=False)
544 544
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param clean: when ``True``, discard uncommitted changes.
        """
        self._remote.update(revision, clean=clean)
550 550
    def _identify(self):
        """
        Return the current state of the working directory.

        Strips the trailing '+' that marks local modifications.
        """
        return self._remote.identify().strip().rstrip('+')
556 556
    def _heads(self, branch=None):
        """
        Return the commit ids of the repository heads.

        :param branch: optionally restrict heads to the given branch.
        """
        # The remote returns a space separated string of ids.
        return self._remote.heads(branch=branch).strip().split(' ')
562 562
563 563 def _ancestor(self, revision1, revision2):
564 564 """
565 565 Return the common ancestor of the two revisions.
566 566 """
567 567 return self._remote.ancestor(
568 568 revision1, revision2).strip().split(':')[-1]
569 569
    def _local_push(
            self, revision, repository_path, push_branches=False,
            enable_hooks=False):
        """
        Push the given revision to the specified repository.

        :param push_branches: allow to create branches in the target repo.
        :param enable_hooks: when ``True``, run hooks in the target repo.
        """
        self._remote.push(
            [revision], repository_path, hooks=enable_hooks,
            push_branches=push_branches)
581 581
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param use_rebase: rebase the source onto the target instead of
            doing a merge commit.
        """
        self._update(target_ref.commit_id)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref.commit_id, True

        if use_rebase:
            try:
                # Bookmark the source so we can update to the rebased head
                # afterwards.
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name)
                return self._identify(), True
            except RepositoryError:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError:
                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                raise
637 637
638 638 def _is_the_same_branch(self, target_ref, source_ref):
639 639 return (
640 640 self._get_branch_name(target_ref) ==
641 641 self._get_branch_name(source_ref))
642 642
643 643 def _get_branch_name(self, ref):
644 644 if ref.type == 'branch':
645 645 return ref.name
646 646 return self._remote.ctx_branch(ref.commit_id)
647 647
648 648 def _get_shadow_repository_path(self, workspace_id):
649 649 # The name of the shadow repository must start with '.', so it is
650 650 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
651 651 return os.path.join(
652 652 os.path.dirname(self.path),
653 653 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
654 654
    def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref):
        """Create the shadow repository for `workspace_id` if it does not
        exist yet, and return its path."""
        shadow_repository_path = self._get_shadow_repository_path(workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path
663 663
    def cleanup_merge_workspace(self, workspace_id):
        """Remove the shadow repository of `workspace_id` from disk,
        ignoring errors if it does not exist."""
        shadow_repository_path = self._get_shadow_repository_path(workspace_id)
        shutil.rmtree(shadow_repository_path, ignore_errors=True)
667 667
668 668 def _merge_repo(self, shadow_repository_path, target_ref,
669 669 source_repo, source_ref, merge_message,
670 670 merger_name, merger_email, dry_run=False,
671 671 use_rebase=False):
672 672 if target_ref.commit_id not in self._heads():
673 673 return MergeResponse(
674 674 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
675 675
676 676 try:
677 677 if (target_ref.type == 'branch' and
678 678 len(self._heads(target_ref.name)) != 1):
679 679 return MergeResponse(
680 680 False, False, None,
681 681 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
682 682 except CommitDoesNotExistError as e:
683 683 log.exception('Failure when looking up branch heads on hg target')
684 684 return MergeResponse(
685 685 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
686 686
687 687 shadow_repo = self._get_shadow_instance(shadow_repository_path)
688 688
689 689 log.debug('Pulling in target reference %s', target_ref)
690 690 self._validate_pull_reference(target_ref)
691 691 shadow_repo._local_pull(self.path, target_ref)
692 692 try:
693 693 log.debug('Pulling in source reference %s', source_ref)
694 694 source_repo._validate_pull_reference(source_ref)
695 695 shadow_repo._local_pull(source_repo.path, source_ref)
696 696 except CommitDoesNotExistError:
697 697 log.exception('Failure when doing local pull on hg shadow repo')
698 698 return MergeResponse(
699 699 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
700 700
701 701 merge_ref = None
702 702 merge_failure_reason = MergeFailureReason.NONE
703 703
704 704 try:
705 705 merge_commit_id, needs_push = shadow_repo._local_merge(
706 706 target_ref, merge_message, merger_name, merger_email,
707 707 source_ref, use_rebase=use_rebase)
708 708 merge_possible = True
709 709
710 710 # Set a bookmark pointing to the merge commit. This bookmark may be
711 711 # used to easily identify the last successful merge commit in the
712 712 # shadow repository.
713 713 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
714 714 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
715 except SubrepoMergeError:
716 log.exception(
717 'Subrepo merge error during local merge on hg shadow repo.')
718 merge_possible = False
719 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
715 720 except RepositoryError:
716 721 log.exception('Failure when doing local merge on hg shadow repo')
717 722 merge_possible = False
718 723 merge_failure_reason = MergeFailureReason.MERGE_FAILED
719 724
720 725 if merge_possible and not dry_run:
721 726 if needs_push:
722 727 # In case the target is a bookmark, update it, so after pushing
723 728 # the bookmarks is also updated in the target.
724 729 if target_ref.type == 'book':
725 730 shadow_repo.bookmark(
726 731 target_ref.name, revision=merge_commit_id)
727 732
728 733 try:
729 734 shadow_repo_with_hooks = self._get_shadow_instance(
730 735 shadow_repository_path,
731 736 enable_hooks=True)
732 737 # Note: the push_branches option will push any new branch
733 738 # defined in the source repository to the target. This may
734 739 # be dangerous as branches are permanent in Mercurial.
735 740 # This feature was requested in issue #441.
736 741 shadow_repo_with_hooks._local_push(
737 742 merge_commit_id, self.path, push_branches=True,
738 743 enable_hooks=True)
739 744 merge_succeeded = True
740 745 except RepositoryError:
741 746 log.exception(
742 747 'Failure when doing local push from the shadow '
743 748 'repository to the target repository.')
744 749 merge_succeeded = False
745 750 merge_failure_reason = MergeFailureReason.PUSH_FAILED
746 751 else:
747 752 merge_succeeded = True
748 753 else:
749 754 merge_succeeded = False
750 755
751 756 return MergeResponse(
752 757 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
753 758
    def _get_shadow_instance(
            self, shadow_repository_path, enable_hooks=False):
        """Return a `MercurialRepository` for the shadow repo; hooks are
        stripped from the config unless `enable_hooks` is set."""
        config = self.config.copy()
        if not enable_hooks:
            config.clear_section('hooks')
        return MercurialRepository(shadow_repository_path, config)
760 765
    def _validate_pull_reference(self, reference):
        """Raise `CommitDoesNotExistError` unless `reference` matches a
        known bookmark, branch or commit id of this repository."""
        if not (reference.name in self.bookmarks or
                reference.name in self.branches or
                self.get_commit(reference.commit_id)):
            raise CommitDoesNotExistError(
                'Unknown branch, bookmark or commit id')
767 772
768 773 def _local_pull(self, repository_path, reference):
769 774 """
770 775 Fetch a branch, bookmark or commit from a local repository.
771 776 """
772 777 repository_path = os.path.abspath(repository_path)
773 778 if repository_path == self.path:
774 779 raise ValueError('Cannot pull from the same repository')
775 780
776 781 reference_type_to_option_name = {
777 782 'book': 'bookmark',
778 783 'branch': 'branch',
779 784 }
780 785 option_name = reference_type_to_option_name.get(
781 786 reference.type, 'revision')
782 787
783 788 if option_name == 'revision':
784 789 ref = reference.commit_id
785 790 else:
786 791 ref = reference.name
787 792
788 793 options = {option_name: [ref]}
789 794 self._remote.pull_cmd(repository_path, hooks=False, **options)
790 795 self._remote.invalidate_vcs_cache()
791 796
    def bookmark(self, bookmark, revision=None):
        """Create or move `bookmark` to `revision` (or the current
        working copy parent when `revision` is None)."""
        # Python 2: the remote expects byte strings.
        if isinstance(bookmark, unicode):
            bookmark = safe_str(bookmark)
        self._remote.bookmark(bookmark, revision=revision)
        self._remote.invalidate_vcs_cache()
797 802
798 803
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """Collection generator that resolves commits by numeric index
    instead of commit id (used for revset-filtered results)."""

    def _commit_factory(self, commit_id):
        # `commit_id` here is actually a numeric revision index.
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,196 +1,205 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Custom vcs exceptions module.
23 23 """
24 24
25 25 import functools
26 26 import urllib2
27 27
28 28
class VCSCommunicationError(Exception):
    """Base error for communication problems with the VCS backend."""
    pass
31 31
32 32
class PyroVCSCommunicationError(VCSCommunicationError):
    """Communication error over the Pyro transport."""
    pass
35 35
36 36
class HttpVCSCommunicationError(VCSCommunicationError):
    """Communication error over the HTTP transport."""
    pass
39 39
40 40
class VCSError(Exception):
    """Root of the vcs layer exception hierarchy."""
    pass
43 43
44 44
class RepositoryError(VCSError):
    """Base class for repository level errors."""
    pass
47 47
48 48
class RepositoryRequirementError(RepositoryError):
    """Signals an unsupported repository requirement."""
    pass
51 51
52 52
class VCSBackendNotSupportedError(VCSError):
    """
    Exception raised when VCSServer does not support the requested backend.
    """
57 57
58 58
class EmptyRepositoryError(RepositoryError):
    """Raised when an operation requires at least one commit."""
    pass
61 61
62 62
class TagAlreadyExistError(RepositoryError):
    """Signals that a tag with the given name already exists."""
    pass
65 65
66 66
class TagDoesNotExistError(RepositoryError):
    """Signals that the requested tag does not exist."""
    pass
69 69
70 70
class BranchAlreadyExistError(RepositoryError):
    """Signals that a branch with the given name already exists."""
    pass
73 73
74 74
class BranchDoesNotExistError(RepositoryError):
    """Signals that the requested branch does not exist."""
    pass
77 77
78 78
class CommitError(RepositoryError):
    """
    Exceptions related to an existing commit
    """
83 83
84 84
class CommitDoesNotExistError(CommitError):
    """Signals that the requested commit does not exist."""
    pass
87 87
88 88
class CommittingError(RepositoryError):
    """
    Exceptions happening while creating a new commit
    """
93 93
94 94
class NothingChangedError(CommittingError):
    """Raised when committing without any changes."""
    pass
97 97
98 98
class NodeError(VCSError):
    """Base class for node (file/directory) related errors."""
    pass
101 101
102 102
class RemovedFileNodeError(NodeError):
    """Raised when accessing a file node that has been removed."""
    pass
105 105
106 106
class NodeAlreadyExistsError(CommittingError):
    """Commit-time error: the node already exists."""
    pass
109 109
110 110
class NodeAlreadyChangedError(CommittingError):
    """Commit-time error: the node was already changed."""
    pass
113 113
114 114
class NodeDoesNotExistError(CommittingError):
    """Commit-time error: the node does not exist."""
    pass
117 117
118 118
class NodeNotChangedError(CommittingError):
    """Commit-time error: the node was not changed."""
    pass
121 121
122 122
class NodeAlreadyAddedError(CommittingError):
    """Commit-time error: the node was already added."""
    pass
125 125
126 126
class NodeAlreadyRemovedError(CommittingError):
    """Commit-time error: the node was already removed."""
    pass
129 129
130 130
class SubrepoMergeError(RepositoryError):
    """
    This happens if we try to merge a repository which contains subrepos and
    the subrepos cannot be merged. The subrepos are not merged themselves,
    only their references in the root repo are merged.
    """
138
class ImproperArchiveTypeError(VCSError):
    """Raised for unsupported archive types."""
    pass
133 141
134 142
class CommandError(VCSError):
    """Raised when a vcs command fails."""
    pass
137 145
138 146
class UnhandledException(VCSError):
    """
    Signals that something unexpected went wrong.

    This usually means we have a programming error on the side of the VCSServer
    and should inspect the logfile of the VCSServer to find more details.
    """
147 155
# Maps the `_vcs_kind` marker carried by errors coming from the VCSServer
# to the local exception class raised in their place (see
# `map_vcs_exceptions` below).
_EXCEPTION_MAP = {
    'abort': RepositoryError,
    'archive': ImproperArchiveTypeError,
    'error': RepositoryError,
    'lookup': CommitDoesNotExistError,
    'repo_locked': RepositoryError,
    'requirement': RepositoryRequirementError,
    'unhandled': UnhandledException,
    # TODO: johbo: Define our own exception for this and stop abusing
    # urllib's exception class.
    'url_error': urllib2.URLError,
    'subrepo_merge_error': SubrepoMergeError,
}
160 169
161 170
def map_vcs_exceptions(func):
    """
    Utility to decorate functions so that plain exceptions are translated.

    The translation is based on `_EXCEPTION_MAP`, which maps a `str`
    indicating the error type onto an exception class representing this
    error inside of the vcs layer.
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as error:
            # The error middleware adds information if it finds
            # __traceback_info__ in a frame object. This way the remote
            # traceback information is made available in error reports.
            pyro_traceback = getattr(error, '_pyroTraceback', None)
            if pyro_traceback:
                __traceback_info__ = (
                    'Found Pyro4 remote traceback information:\n\n' +
                    '\n'.join(pyro_traceback))

            # Avoid that the traceback also appears in this frame.
            del pyro_traceback

            # Special vcs errors carry a "_vcs_kind" attribute which is
            # used to translate them to the proper exception class in the
            # vcs client layer.
            error_kind = getattr(error, '_vcs_kind', None)
            if not error_kind:
                raise
            raise _EXCEPTION_MAP[error_kind](*error.args)

    return wrapper
@@ -1,1314 +1,1317 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31
32 32 from pylons.i18n.translation import _
33 33 from pylons.i18n.translation import lazy_ugettext
34 34 from sqlalchemy import or_
35 35
36 36 from rhodecode.lib import helpers as h, hooks_utils, diffs
37 37 from rhodecode.lib.compat import OrderedDict
38 38 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
39 39 from rhodecode.lib.markup_renderer import (
40 40 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
41 41 from rhodecode.lib.utils import action_logger
42 42 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
43 43 from rhodecode.lib.vcs.backends.base import (
44 44 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
45 45 from rhodecode.lib.vcs.conf import settings as vcs_settings
46 46 from rhodecode.lib.vcs.exceptions import (
47 47 CommitDoesNotExistError, EmptyRepositoryError)
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.changeset_status import ChangesetStatusModel
50 50 from rhodecode.model.comment import ChangesetCommentsModel
51 51 from rhodecode.model.db import (
52 52 PullRequest, PullRequestReviewers, ChangesetStatus,
53 53 PullRequestVersion, ChangesetComment)
54 54 from rhodecode.model.meta import Session
55 55 from rhodecode.model.notification import NotificationModel, \
56 56 EmailNotificationModel
57 57 from rhodecode.model.scm import ScmModel
58 58 from rhodecode.model.settings import VcsSettingsModel
59 59
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63
64 64 # Data structure to hold the response data when updating commits during a pull
65 65 # request update.
# `executed` tells whether the update ran; `reason` carries the
# UpdateFailureReason, `old`/`new` the pull request versions and `changes`
# the commit id changes.
UpdateResponse = namedtuple(
    'UpdateResponse', ['executed', 'reason', 'new', 'old', 'changes'])
68 68
69 69
class PullRequestModel(BaseModel):
    """
    Business logic around pull requests: querying, creation, updating,
    merging and the related permission checks.
    """

    cls = PullRequest

    # Number of context lines used when generating pull request diffs.
    DIFF_CONTEXT = 3

    # User-facing explanation for each possible merge check outcome
    # (MergeFailureReason member).
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            'This pull request cannot be merged because of an unhandled'
            ' exception.'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of conflicts.'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            'This pull request could not be merged because push to target'
            ' failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            'This pull request cannot be merged because the target is not a'
            ' head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            'This pull request cannot be merged because the source contains'
            ' more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            'This pull request cannot be merged because the target has'
            ' multiple heads.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            'This pull request cannot be merged because the target repository'
            ' is locked.'),
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            'This pull request cannot be merged because the target or the '
            'source reference is missing.'),
        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be merged because the target '
            'reference is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be merged because the source '
            'reference is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of conflicts related '
            'to sub repositories.'),
    }

    # User-facing explanation for each possible outcome of updating the
    # commits of a pull request (UpdateFailureReason member).
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source reference is already '
            'up to date.'),
        # NOTE(review): "TPYE" mirrors the spelling of the enum member on
        # UpdateFailureReason; if that member is ever renamed, rename both.
        UpdateFailureReason.WRONG_REF_TPYE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
128 131
    def __get_pull_request(self, pull_request):
        """Resolve the given value to a `PullRequest` via `_get_instance`."""
        return self._get_instance(PullRequest, pull_request)
131 134
132 135 def _check_perms(self, perms, pull_request, user, api=False):
133 136 if not api:
134 137 return h.HasRepoPermissionAny(*perms)(
135 138 user=user, repo_name=pull_request.target_repo.repo_name)
136 139 else:
137 140 return h.HasRepoPermissionAnyApi(*perms)(
138 141 user=user, repo_name=pull_request.target_repo.repo_name)
139 142
140 143 def check_user_read(self, pull_request, user, api=False):
141 144 _perms = ('repository.admin', 'repository.write', 'repository.read',)
142 145 return self._check_perms(_perms, pull_request, user, api)
143 146
144 147 def check_user_merge(self, pull_request, user, api=False):
145 148 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
146 149 return self._check_perms(_perms, pull_request, user, api)
147 150
148 151 def check_user_update(self, pull_request, user, api=False):
149 152 owner = user.user_id == pull_request.user_id
150 153 return self.check_user_merge(pull_request, user, api) or owner
151 154
152 155 def check_user_delete(self, pull_request, user):
153 156 owner = user.user_id == pull_request.user_id
154 157 _perms = ('repository.admin')
155 158 return self._check_perms(_perms, pull_request, user) or owner
156 159
157 160 def check_user_change_status(self, pull_request, user, api=False):
158 161 reviewer = user.user_id in [x.user_id for x in
159 162 pull_request.reviewers]
160 163 return self.check_user_update(pull_request, user, api) or reviewer
161 164
    def get(self, pull_request):
        """Public accessor for the `PullRequest` matching the given value."""
        return self.__get_pull_request(pull_request)
164 167
165 168 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
166 169 opened_by=None, order_by=None,
167 170 order_dir='desc'):
168 171 repo = None
169 172 if repo_name:
170 173 repo = self._get_repo(repo_name)
171 174
172 175 q = PullRequest.query()
173 176
174 177 # source or target
175 178 if repo and source:
176 179 q = q.filter(PullRequest.source_repo == repo)
177 180 elif repo:
178 181 q = q.filter(PullRequest.target_repo == repo)
179 182
180 183 # closed,opened
181 184 if statuses:
182 185 q = q.filter(PullRequest.status.in_(statuses))
183 186
184 187 # opened by filter
185 188 if opened_by:
186 189 q = q.filter(PullRequest.user_id.in_(opened_by))
187 190
188 191 if order_by:
189 192 order_map = {
190 193 'name_raw': PullRequest.pull_request_id,
191 194 'title': PullRequest.title,
192 195 'updated_on_raw': PullRequest.updated_on,
193 196 'target_repo': PullRequest.target_repo_id
194 197 }
195 198 if order_dir == 'asc':
196 199 q = q.order_by(order_map[order_by].asc())
197 200 else:
198 201 q = q.order_by(order_map[order_by].desc())
199 202
200 203 return q
201 204
202 205 def count_all(self, repo_name, source=False, statuses=None,
203 206 opened_by=None):
204 207 """
205 208 Count the number of pull requests for a specific repository.
206 209
207 210 :param repo_name: target or source repo
208 211 :param source: boolean flag to specify if repo_name refers to source
209 212 :param statuses: list of pull request statuses
210 213 :param opened_by: author user of the pull request
211 214 :returns: int number of pull requests
212 215 """
213 216 q = self._prepare_get_all_query(
214 217 repo_name, source=source, statuses=statuses, opened_by=opened_by)
215 218
216 219 return q.count()
217 220
218 221 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
219 222 offset=0, length=None, order_by=None, order_dir='desc'):
220 223 """
221 224 Get all pull requests for a specific repository.
222 225
223 226 :param repo_name: target or source repo
224 227 :param source: boolean flag to specify if repo_name refers to source
225 228 :param statuses: list of pull request statuses
226 229 :param opened_by: author user of the pull request
227 230 :param offset: pagination offset
228 231 :param length: length of returned list
229 232 :param order_by: order of the returned list
230 233 :param order_dir: 'asc' or 'desc' ordering direction
231 234 :returns: list of pull requests
232 235 """
233 236 q = self._prepare_get_all_query(
234 237 repo_name, source=source, statuses=statuses, opened_by=opened_by,
235 238 order_by=order_by, order_dir=order_dir)
236 239
237 240 if length:
238 241 pull_requests = q.limit(length).offset(offset).all()
239 242 else:
240 243 pull_requests = q.all()
241 244
242 245 return pull_requests
243 246
244 247 def count_awaiting_review(self, repo_name, source=False, statuses=None,
245 248 opened_by=None):
246 249 """
247 250 Count the number of pull requests for a specific repository that are
248 251 awaiting review.
249 252
250 253 :param repo_name: target or source repo
251 254 :param source: boolean flag to specify if repo_name refers to source
252 255 :param statuses: list of pull request statuses
253 256 :param opened_by: author user of the pull request
254 257 :returns: int number of pull requests
255 258 """
256 259 pull_requests = self.get_awaiting_review(
257 260 repo_name, source=source, statuses=statuses, opened_by=opened_by)
258 261
259 262 return len(pull_requests)
260 263
261 264 def get_awaiting_review(self, repo_name, source=False, statuses=None,
262 265 opened_by=None, offset=0, length=None,
263 266 order_by=None, order_dir='desc'):
264 267 """
265 268 Get all pull requests for a specific repository that are awaiting
266 269 review.
267 270
268 271 :param repo_name: target or source repo
269 272 :param source: boolean flag to specify if repo_name refers to source
270 273 :param statuses: list of pull request statuses
271 274 :param opened_by: author user of the pull request
272 275 :param offset: pagination offset
273 276 :param length: length of returned list
274 277 :param order_by: order of the returned list
275 278 :param order_dir: 'asc' or 'desc' ordering direction
276 279 :returns: list of pull requests
277 280 """
278 281 pull_requests = self.get_all(
279 282 repo_name, source=source, statuses=statuses, opened_by=opened_by,
280 283 order_by=order_by, order_dir=order_dir)
281 284
282 285 _filtered_pull_requests = []
283 286 for pr in pull_requests:
284 287 status = pr.calculated_review_status()
285 288 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
286 289 ChangesetStatus.STATUS_UNDER_REVIEW]:
287 290 _filtered_pull_requests.append(pr)
288 291 if length:
289 292 return _filtered_pull_requests[offset:offset+length]
290 293 else:
291 294 return _filtered_pull_requests
292 295
293 296 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
294 297 opened_by=None, user_id=None):
295 298 """
296 299 Count the number of pull requests for a specific repository that are
297 300 awaiting review from a specific user.
298 301
299 302 :param repo_name: target or source repo
300 303 :param source: boolean flag to specify if repo_name refers to source
301 304 :param statuses: list of pull request statuses
302 305 :param opened_by: author user of the pull request
303 306 :param user_id: reviewer user of the pull request
304 307 :returns: int number of pull requests
305 308 """
306 309 pull_requests = self.get_awaiting_my_review(
307 310 repo_name, source=source, statuses=statuses, opened_by=opened_by,
308 311 user_id=user_id)
309 312
310 313 return len(pull_requests)
311 314
312 315 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
313 316 opened_by=None, user_id=None, offset=0,
314 317 length=None, order_by=None, order_dir='desc'):
315 318 """
316 319 Get all pull requests for a specific repository that are awaiting
317 320 review from a specific user.
318 321
319 322 :param repo_name: target or source repo
320 323 :param source: boolean flag to specify if repo_name refers to source
321 324 :param statuses: list of pull request statuses
322 325 :param opened_by: author user of the pull request
323 326 :param user_id: reviewer user of the pull request
324 327 :param offset: pagination offset
325 328 :param length: length of returned list
326 329 :param order_by: order of the returned list
327 330 :param order_dir: 'asc' or 'desc' ordering direction
328 331 :returns: list of pull requests
329 332 """
330 333 pull_requests = self.get_all(
331 334 repo_name, source=source, statuses=statuses, opened_by=opened_by,
332 335 order_by=order_by, order_dir=order_dir)
333 336
334 337 _my = PullRequestModel().get_not_reviewed(user_id)
335 338 my_participation = []
336 339 for pr in pull_requests:
337 340 if pr in _my:
338 341 my_participation.append(pr)
339 342 _filtered_pull_requests = my_participation
340 343 if length:
341 344 return _filtered_pull_requests[offset:offset+length]
342 345 else:
343 346 return _filtered_pull_requests
344 347
345 348 def get_not_reviewed(self, user_id):
346 349 return [
347 350 x.pull_request for x in PullRequestReviewers.query().filter(
348 351 PullRequestReviewers.user_id == user_id).all()
349 352 ]
350 353
351 354 def _prepare_participating_query(self, user_id=None, statuses=None,
352 355 order_by=None, order_dir='desc'):
353 356 q = PullRequest.query()
354 357 if user_id:
355 358 reviewers_subquery = Session().query(
356 359 PullRequestReviewers.pull_request_id).filter(
357 360 PullRequestReviewers.user_id == user_id).subquery()
358 361 user_filter= or_(
359 362 PullRequest.user_id == user_id,
360 363 PullRequest.pull_request_id.in_(reviewers_subquery)
361 364 )
362 365 q = PullRequest.query().filter(user_filter)
363 366
364 367 # closed,opened
365 368 if statuses:
366 369 q = q.filter(PullRequest.status.in_(statuses))
367 370
368 371 if order_by:
369 372 order_map = {
370 373 'name_raw': PullRequest.pull_request_id,
371 374 'title': PullRequest.title,
372 375 'updated_on_raw': PullRequest.updated_on,
373 376 'target_repo': PullRequest.target_repo_id
374 377 }
375 378 if order_dir == 'asc':
376 379 q = q.order_by(order_map[order_by].asc())
377 380 else:
378 381 q = q.order_by(order_map[order_by].desc())
379 382
380 383 return q
381 384
382 385 def count_im_participating_in(self, user_id=None, statuses=None):
383 386 q = self._prepare_participating_query(user_id, statuses=statuses)
384 387 return q.count()
385 388
386 389 def get_im_participating_in(
387 390 self, user_id=None, statuses=None, offset=0,
388 391 length=None, order_by=None, order_dir='desc'):
389 392 """
390 393 Get all Pull requests that i'm participating in, or i have opened
391 394 """
392 395
393 396 q = self._prepare_participating_query(
394 397 user_id, statuses=statuses, order_by=order_by,
395 398 order_dir=order_dir)
396 399
397 400 if length:
398 401 pull_requests = q.limit(length).offset(offset).all()
399 402 else:
400 403 pull_requests = q.all()
401 404
402 405 return pull_requests
403 406
    def get_versions(self, pull_request):
        """
        Return all versions of the pull request, oldest first (sorted by
        version id ascending — note the ``.asc()`` in the query below).
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
412 415
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None):
        """
        Create a new pull request together with its reviewer entries.

        The new `PullRequest` is added and flushed to the session, all of
        its commits are set to "Under Review", the reviewers are notified
        and the 'create' pull request hook is triggered. The session is
        not committed here.

        :param created_by: author of the pull request (id or instance)
        :param source_repo: repository the changes come from
        :param source_ref: reference string on the source side
        :param target_repo: repository the changes should land in
        :param target_ref: reference string on the target side
        :param revisions: list of commit ids contained in the pull request
        :param reviewers: iterable of user ids, or of (user_id, reasons)
            tuples
        :param title: pull request title
        :param description: optional description text
        :returns: the created `PullRequest`
        """
        created_by_user = self._get_user(created_by)
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.author = created_by_user

        # Flush so the pull request gets its id assigned before the
        # reviewer rows referencing it are created.
        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            if isinstance(reviewer_object, tuple):
                user_id, reasons = reviewer_object
            else:
                user_id, reasons = reviewer_object, []

            user = self._get_user(user_id)
            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers(user, pull_request, reasons)
            Session().add(reviewer)

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )

        self.notify_reviewers(pull_request, reviewer_ids)
        self._trigger_pull_request_hook(
            pull_request, created_by_user, 'create')

        return pull_request
460 463
461 464 def _trigger_pull_request_hook(self, pull_request, user, action):
462 465 pull_request = self.__get_pull_request(pull_request)
463 466 target_scm = pull_request.target_repo.scm_instance()
464 467 if action == 'create':
465 468 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
466 469 elif action == 'merge':
467 470 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
468 471 elif action == 'close':
469 472 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
470 473 elif action == 'review_status_change':
471 474 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
472 475 elif action == 'update':
473 476 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
474 477 else:
475 478 return
476 479
477 480 trigger_hook(
478 481 username=user.username,
479 482 repo_name=pull_request.target_repo.repo_name,
480 483 repo_alias=target_scm.alias,
481 484 pull_request=pull_request)
482 485
483 486 def _get_commit_ids(self, pull_request):
484 487 """
485 488 Return the commit ids of the merged pull request.
486 489
487 490 This method is not dealing correctly yet with the lack of autoupdates
488 491 nor with the implicit target updates.
489 492 For example: if a commit in the source repo is already in the target it
490 493 will be reported anyways.
491 494 """
492 495 merge_rev = pull_request.merge_rev
493 496 if merge_rev is None:
494 497 raise ValueError('This pull request was not merged yet')
495 498
496 499 commit_ids = list(pull_request.revisions)
497 500 if merge_rev not in commit_ids:
498 501 commit_ids.append(merge_rev)
499 502
500 503 return commit_ids
501 504
502 505 def merge(self, pull_request, user, extras):
503 506 log.debug("Merging pull request %s", pull_request.pull_request_id)
504 507 merge_state = self._merge_pull_request(pull_request, user, extras)
505 508 if merge_state.executed:
506 509 log.debug(
507 510 "Merge was successful, updating the pull request comments.")
508 511 self._comment_and_close_pr(pull_request, user, merge_state)
509 512 self._log_action('user_merged_pull_request', user, pull_request)
510 513 else:
511 514 log.warn("Merge failed, not updating the pull request.")
512 515 return merge_state
513 516
    def _merge_pull_request(self, pull_request, user, extras):
        """
        Perform the actual merge through the target repository's vcs
        backend.

        Refreshes the target reference first, prepares the hooks callback
        daemon and injects the RhodeCode extras into the vcs config before
        delegating to ``target_vcs.merge``.

        :returns: the merge state object returned by the vcs backend
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        message = _(
            'Merge pull request #%(pr_id)s from '
            '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
            'pr_id': pull_request.pull_request_id,
            'source_repo': source_vcs.name,
            'source_ref_name': pull_request.source_ref_parts.name,
            'pr_title': pull_request.title
        }

        workspace_id = self._workspace_id(pull_request)
        use_rebase = self._use_rebase_for_merging(pull_request)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
            merge_state = target_vcs.merge(
                target_ref, source_vcs, pull_request.source_ref_parts,
                workspace_id, user_name=user.username,
                user_email=user.email, message=message, use_rebase=use_rebase)
        return merge_state
546 549
    def _comment_and_close_pr(self, pull_request, user, merge_state):
        """
        After a successful merge: record the merge revision on the pull
        request, add the closing comment, invalidate the target repo
        caches and trigger the 'merge' hook.
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()

        # closing_pr=True makes this comment also close the pull request.
        ChangesetCommentsModel().create(
            text=unicode(_('Pull request merged and closed')),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self._trigger_pull_request_hook(pull_request, user, 'merge')
567 570
568 571 def has_valid_update_type(self, pull_request):
569 572 source_ref_type = pull_request.source_ref_parts.type
570 573 return source_ref_type in ['book', 'branch', 'tag']
571 574
    def update_commits(self, pull_request):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action

        :returns: an `UpdateResponse`; ``executed`` is False when the
            source ref type is not updatable, a reference is missing or
            the source did not change. Otherwise a `PullRequestVersion`
            snapshot is created, comments are linked to it and outdated
            against the new diff, an automatic update comment is added,
            newly added commits are set to "Under Review", the session is
            committed and the 'update' hook is triggered.
        """
        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        if not self.has_valid_update_type(pull_request):
            log.debug(
                "Skipping update of pull request %s due to ref type: %s",
                pull_request, source_ref_type)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.WRONG_REF_TPYE,
                old=pull_request, new=None, changes=None)

        source_repo = pull_request.source_repo.scm_instance()
        try:
            # Resolve the symbolic source ref to its current commit.
            source_commit = source_repo.get_commit(commit_id=source_ref_name)
        except CommitDoesNotExistError:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_SOURCE_REF,
                old=pull_request, new=None, changes=None)

        if source_ref_id == source_commit.raw_id:
            log.debug("Nothing changed in pull request %s", pull_request)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.NO_CHANGE,
                old=pull_request, new=None, changes=None)

        # Finally there is a need for an update
        pull_request_version = self._create_version_from_snapshot(pull_request)
        self._link_comments_to_version(pull_request_version)

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id
        target_repo = pull_request.target_repo.scm_instance()

        try:
            # Symbolic target refs are looked up by name, otherwise by the
            # recorded commit id.
            if target_ref_type in ('tag', 'branch', 'book'):
                target_commit = target_repo.get_commit(target_ref_name)
            else:
                target_commit = target_repo.get_commit(target_ref_id)
        except CommitDoesNotExistError:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, changes=None)

        # re-compute commit ids
        old_commit_ids = set(pull_request.revisions)
        pre_load = ["author", "branch", "date", "message"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        ancestor = target_repo.get_common_ancestor(
            target_commit.raw_id, source_commit.raw_id, source_repo)

        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor)
        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = set(pull_request.revisions)

        changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        ChangesetCommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # Add an automatic comment to the pull request
        update_comment = ChangesetCommentsModel().create(
            text=self._render_update_message(changes, file_changes),
            repo=pull_request.target_repo,
            user=pull_request.author,
            pull_request=pull_request,
            send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

        # Update status to "Under Review" for added commits
        for commit_id in changes.added:
            ChangesetStatusModel().set_status(
                repo=pull_request.source_repo,
                status=ChangesetStatus.STATUS_UNDER_REVIEW,
                comment=update_comment,
                user=pull_request.author,
                pull_request=pull_request,
                revision=commit_id)

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            changes.added, changes.common, changes.removed)
        log.debug('Updated pull request with the following file changes: %s',
                  file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)
        Session().commit()
        self._trigger_pull_request_hook(pull_request, pull_request.author,
                                        'update')

        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version, changes=changes)
699 702
700 703 def _create_version_from_snapshot(self, pull_request):
701 704 version = PullRequestVersion()
702 705 version.title = pull_request.title
703 706 version.description = pull_request.description
704 707 version.status = pull_request.status
705 708 version.created_on = pull_request.created_on
706 709 version.updated_on = pull_request.updated_on
707 710 version.user_id = pull_request.user_id
708 711 version.source_repo = pull_request.source_repo
709 712 version.source_ref = pull_request.source_ref
710 713 version.target_repo = pull_request.target_repo
711 714 version.target_ref = pull_request.target_ref
712 715
713 716 version._last_merge_source_rev = pull_request._last_merge_source_rev
714 717 version._last_merge_target_rev = pull_request._last_merge_target_rev
715 718 version._last_merge_status = pull_request._last_merge_status
716 719 version.shadow_merge_ref = pull_request.shadow_merge_ref
717 720 version.merge_rev = pull_request.merge_rev
718 721
719 722 version.revisions = pull_request.revisions
720 723 version.pull_request = pull_request
721 724 Session().add(version)
722 725 Session().flush()
723 726
724 727 return version
725 728
726 729 def _generate_update_diffs(self, pull_request, pull_request_version):
727 730 diff_context = (
728 731 self.DIFF_CONTEXT +
729 732 ChangesetCommentsModel.needed_extra_diff_context())
730 733 old_diff = self._get_diff_from_pr_or_version(
731 734 pull_request_version, context=diff_context)
732 735 new_diff = self._get_diff_from_pr_or_version(
733 736 pull_request, context=diff_context)
734 737
735 738 old_diff_data = diffs.DiffProcessor(old_diff)
736 739 old_diff_data.prepare()
737 740 new_diff_data = diffs.DiffProcessor(new_diff)
738 741 new_diff_data.prepare()
739 742
740 743 return old_diff_data, new_diff_data
741 744
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        # NOTE: "== None" below is a SQLAlchemy filter expression and must
        # not be changed to "is None".
        comments = ChangesetComment.query().filter(
            # TODO: johbo: Should we query for the repo at all here?
            # Pending decision on how comments of PRs are to be related
            # to either the source repo, the target repo or no repo at all.
            ChangesetComment.repo_id == pull_request.target_repo.repo_id,
            ChangesetComment.pull_request == pull_request,
            ChangesetComment.pull_request_version == None)

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
765 768
766 769 def _calculate_commit_id_changes(self, old_ids, new_ids):
767 770 added = new_ids.difference(old_ids)
768 771 common = old_ids.intersection(new_ids)
769 772 removed = old_ids.difference(new_ids)
770 773 return ChangeTuple(added, common, removed)
771 774
772 775 def _calculate_file_changes(self, old_diff_data, new_diff_data):
773 776
774 777 old_files = OrderedDict()
775 778 for diff_data in old_diff_data.parsed_diff:
776 779 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
777 780
778 781 added_files = []
779 782 modified_files = []
780 783 removed_files = []
781 784 for diff_data in new_diff_data.parsed_diff:
782 785 new_filename = diff_data['filename']
783 786 new_hash = md5_safe(diff_data['raw_diff'])
784 787
785 788 old_hash = old_files.get(new_filename)
786 789 if not old_hash:
787 790 # file is not present in old diff, means it's added
788 791 added_files.append(new_filename)
789 792 else:
790 793 if new_hash != old_hash:
791 794 modified_files.append(new_filename)
792 795 # now remove a file from old, since we have seen it already
793 796 del old_files[new_filename]
794 797
795 798 # removed files is when there are present in old, but not in NEW,
796 799 # since we remove old files that are present in new diff, left-overs
797 800 # if any should be the removed files
798 801 removed_files.extend(old_files.keys())
799 802
800 803 return FileChangeTuple(added_files, modified_files, removed_files)
801 804
802 805 def _render_update_message(self, changes, file_changes):
803 806 """
804 807 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
805 808 so it's always looking the same disregarding on which default
806 809 renderer system is using.
807 810
808 811 :param changes: changes named tuple
809 812 :param file_changes: file changes named tuple
810 813
811 814 """
812 815 new_status = ChangesetStatus.get_status_lbl(
813 816 ChangesetStatus.STATUS_UNDER_REVIEW)
814 817
815 818 changed_files = (
816 819 file_changes.added + file_changes.modified + file_changes.removed)
817 820
818 821 params = {
819 822 'under_review_label': new_status,
820 823 'added_commits': changes.added,
821 824 'removed_commits': changes.removed,
822 825 'changed_files': changed_files,
823 826 'added_files': file_changes.added,
824 827 'modified_files': file_changes.modified,
825 828 'removed_files': file_changes.removed,
826 829 }
827 830 renderer = RstTemplateRenderer()
828 831 return renderer.render('pull_request_update.mako', **params)
829 832
830 833 def edit(self, pull_request, title, description):
831 834 pull_request = self.__get_pull_request(pull_request)
832 835 if pull_request.is_closed():
833 836 raise ValueError('This pull request is closed')
834 837 if title:
835 838 pull_request.title = title
836 839 pull_request.description = description
837 840 pull_request.updated_on = datetime.datetime.now()
838 841 Session().add(pull_request)
839 842
    def update_reviewers(self, pull_request, reviewer_data):
        """
        Update the reviewers in the pull request

        :param pull_request: the pr to update
        :param reviewer_data: list of tuples [(user, ['reason1', 'reason2'])]
        :returns: tuple of (added reviewer ids, removed reviewer ids)
        """

        # Normalize incoming user identifiers (id, username, ...) to user_ids.
        reviewers_reasons = {}
        for user_id, reasons in reviewer_data:
            if isinstance(user_id, (int, basestring)):
                user_id = self._get_user(user_id).user_id
            reviewers_reasons[user_id] = reasons

        reviewers_ids = set(reviewers_reasons.keys())
        pull_request = self.__get_pull_request(pull_request)
        current_reviewers = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request ==
                    pull_request).all()
        current_reviewers_ids = set([x.user.user_id for x in current_reviewers])

        # Diff requested set against the persisted one.
        ids_to_add = reviewers_ids.difference(current_reviewers_ids)
        ids_to_remove = current_reviewers_ids.difference(reviewers_ids)

        log.debug("Adding %s reviewers", ids_to_add)
        log.debug("Removing %s reviewers", ids_to_remove)
        changed = False
        for uid in ids_to_add:
            changed = True
            _usr = self._get_user(uid)
            reasons = reviewers_reasons[uid]
            reviewer = PullRequestReviewers(_usr, pull_request, reasons)
            Session().add(reviewer)

        # Only the newly added reviewers are notified.
        self.notify_reviewers(pull_request, ids_to_add)

        for uid in ids_to_remove:
            changed = True
            reviewer = PullRequestReviewers.query()\
                .filter(PullRequestReviewers.user_id == uid,
                        PullRequestReviewers.pull_request == pull_request)\
                .scalar()
            if reviewer:
                Session().delete(reviewer)
        # Touch updated_on only when the reviewer set actually changed.
        if changed:
            pull_request.updated_on = datetime.datetime.now()
            Session().add(pull_request)

        return ids_to_add, ids_to_remove
889 892
890 893 def get_url(self, pull_request):
891 894 return h.url('pullrequest_show',
892 895 repo_name=safe_str(pull_request.target_repo.repo_name),
893 896 pull_request_id=pull_request.pull_request_id,
894 897 qualified=True)
895 898
896 899 def get_shadow_clone_url(self, pull_request):
897 900 """
898 901 Returns qualified url pointing to the shadow repository. If this pull
899 902 request is closed there is no shadow repository and ``None`` will be
900 903 returned.
901 904 """
902 905 if pull_request.is_closed():
903 906 return None
904 907 else:
905 908 pr_url = urllib.unquote(self.get_url(pull_request))
906 909 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
907 910
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create in-app notifications and emails for the given reviewers.

        Does nothing when `reviewers_ids` is empty.

        :param pull_request: pull request the reviewers were assigned to
        :param reviewers_ids: iterable of user ids to notify
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        # qualified link to the pull request itself
        pr_url = h.url(
            'pullrequest_show',
            repo_name=pr_target_repo.repo_name,
            pull_request_id=pull_request_obj.pull_request_id,
            qualified=True,)

        # set some variables for email notification
        pr_target_repo_url = h.url(
            'summary_home',
            repo_name=pr_target_repo.repo_name,
            qualified=True)

        pr_source_repo_url = h.url(
            'summary_home',
            repo_name=pr_source_repo.repo_name,
            qualified=True)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        # NOTE: these keys are consumed by the notification/email templates;
        # keep them stable.
        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
972 975
973 976 def delete(self, pull_request):
974 977 pull_request = self.__get_pull_request(pull_request)
975 978 self._cleanup_merge_workspace(pull_request)
976 979 Session().delete(pull_request)
977 980
978 981 def close_pull_request(self, pull_request, user):
979 982 pull_request = self.__get_pull_request(pull_request)
980 983 self._cleanup_merge_workspace(pull_request)
981 984 pull_request.status = PullRequest.STATUS_CLOSED
982 985 pull_request.updated_on = datetime.datetime.now()
983 986 Session().add(pull_request)
984 987 self._trigger_pull_request_hook(
985 988 pull_request, pull_request.author, 'close')
986 989 self._log_action('user_closed_pull_request', user, pull_request)
987 990
    def close_pull_request_with_comment(self, pull_request, user, repo,
                                        message=None):
        """
        Close the pull request, leaving a 'rejected' status comment.

        Creates a closing comment (using `message` or a default
        status-change text), sets the changeset status to rejected and
        finally closes the pull request itself.
        """
        status = ChangesetStatus.STATUS_REJECTED

        if not message:
            message = (
                _('Status change %(transition_icon)s %(status)s') % {
                    'transition_icon': '>',
                    'status': ChangesetStatus.get_status_lbl(status)})

        internal_message = _('Closing with') + ' ' + message

        comm = ChangesetCommentsModel().create(
            text=internal_message,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            status_change=ChangesetStatus.get_status_lbl(status),
            status_change_type=status,
            closing_pr=True
        )

        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comm,
            pull_request=pull_request.pull_request_id
        )
        # Persist comment + status before the close below runs.
        Session().flush()

        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)
1023 1026
1024 1027 def merge_status(self, pull_request):
1025 1028 if not self._is_merge_enabled(pull_request):
1026 1029 return False, _('Server-side pull request merging is disabled.')
1027 1030 if pull_request.is_closed():
1028 1031 return False, _('This pull request is closed.')
1029 1032 merge_possible, msg = self._check_repo_requirements(
1030 1033 target=pull_request.target_repo, source=pull_request.source_repo)
1031 1034 if not merge_possible:
1032 1035 return merge_possible, msg
1033 1036
1034 1037 try:
1035 1038 resp = self._try_merge(pull_request)
1036 1039 log.debug("Merge response: %s", resp)
1037 1040 status = resp.possible, self.merge_status_message(
1038 1041 resp.failure_reason)
1039 1042 except NotImplementedError:
1040 1043 status = False, _('Pull request merging is not supported.')
1041 1044
1042 1045 return status
1043 1046
1044 1047 def _check_repo_requirements(self, target, source):
1045 1048 """
1046 1049 Check if `target` and `source` have compatible requirements.
1047 1050
1048 1051 Currently this is just checking for largefiles.
1049 1052 """
1050 1053 target_has_largefiles = self._has_largefiles(target)
1051 1054 source_has_largefiles = self._has_largefiles(source)
1052 1055 merge_possible = True
1053 1056 message = u''
1054 1057
1055 1058 if target_has_largefiles != source_has_largefiles:
1056 1059 merge_possible = False
1057 1060 if source_has_largefiles:
1058 1061 message = _(
1059 1062 'Target repository large files support is disabled.')
1060 1063 else:
1061 1064 message = _(
1062 1065 'Source repository large files support is disabled.')
1063 1066
1064 1067 return merge_possible, message
1065 1068
1066 1069 def _has_largefiles(self, repo):
1067 1070 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1068 1071 'extensions', 'largefiles')
1069 1072 return largefiles_ui and largefiles_ui[0].active
1070 1073
    def _try_merge(self, pull_request):
        """
        Try to merge the pull request and return the merge status.

        Returns a MergeResponse; a dry-run merge is only performed when the
        cached merge state is stale, otherwise the stored status is reused.
        """
        log.debug(
            "Trying out if the pull request %s can be merged.",
            pull_request.pull_request_id)
        target_vcs = pull_request.target_repo.scm_instance()

        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            # Target ref vanished (e.g. deleted branch) — merge impossible.
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF)
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            log.debug("The target repository is locked.")
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
        elif self._needs_merge_state_refresh(pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # Cached state is still valid; rebuild the response from the
            # last stored merge status.
            possible = pull_request.\
                _last_merge_status == MergeFailureReason.NONE
            merge_state = MergeResponse(
                possible, False, None, pull_request._last_merge_status)

        return merge_state
1105 1108
1106 1109 def _refresh_reference(self, reference, vcs_repository):
1107 1110 if reference.type in ('branch', 'book'):
1108 1111 name_or_id = reference.name
1109 1112 else:
1110 1113 name_or_id = reference.commit_id
1111 1114 refreshed_commit = vcs_repository.get_commit(name_or_id)
1112 1115 refreshed_reference = Reference(
1113 1116 reference.type, reference.name, refreshed_commit.raw_id)
1114 1117 return refreshed_reference
1115 1118
1116 1119 def _needs_merge_state_refresh(self, pull_request, target_reference):
1117 1120 return not(
1118 1121 pull_request.revisions and
1119 1122 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1120 1123 target_reference.commit_id == pull_request._last_merge_target_rev)
1121 1124
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge and cache its outcome on the pull request.
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        use_rebase = self._use_rebase_for_merging(pull_request)
        merge_state = target_vcs.merge(
            target_reference, source_vcs, pull_request.source_ref_parts,
            workspace_id, dry_run=True, use_rebase=use_rebase)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request._last_merge_status = merge_state.failure_reason
            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1141 1144
1142 1145 def _workspace_id(self, pull_request):
1143 1146 workspace_id = 'pr-%s' % pull_request.pull_request_id
1144 1147 return workspace_id
1145 1148
1146 1149 def merge_status_message(self, status_code):
1147 1150 """
1148 1151 Return a human friendly error message for the given merge status code.
1149 1152 """
1150 1153 return self.MERGE_STATUS_MESSAGES[status_code]
1151 1154
1152 1155 def generate_repo_data(self, repo, commit_id=None, branch=None,
1153 1156 bookmark=None):
1154 1157 all_refs, selected_ref = \
1155 1158 self._get_repo_pullrequest_sources(
1156 1159 repo.scm_instance(), commit_id=commit_id,
1157 1160 branch=branch, bookmark=bookmark)
1158 1161
1159 1162 refs_select2 = []
1160 1163 for element in all_refs:
1161 1164 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1162 1165 refs_select2.append({'text': element[1], 'children': children})
1163 1166
1164 1167 return {
1165 1168 'user': {
1166 1169 'user_id': repo.user.user_id,
1167 1170 'username': repo.user.username,
1168 1171 'firstname': repo.user.firstname,
1169 1172 'lastname': repo.user.lastname,
1170 1173 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1171 1174 },
1172 1175 'description': h.chop_at_smart(repo.description, '\n'),
1173 1176 'refs': {
1174 1177 'all_refs': all_refs,
1175 1178 'selected_ref': selected_ref,
1176 1179 'select2_refs': refs_select2
1177 1180 }
1178 1181 }
1179 1182
1180 1183 def generate_pullrequest_title(self, source, source_ref, target):
1181 1184 return u'{source}#{at_ref} to {target}'.format(
1182 1185 source=source,
1183 1186 at_ref=source_ref,
1184 1187 target=target,
1185 1188 )
1186 1189
1187 1190 def _cleanup_merge_workspace(self, pull_request):
1188 1191 # Merging related cleanup
1189 1192 target_scm = pull_request.target_repo.scm_instance()
1190 1193 workspace_id = 'pr-%s' % pull_request.pull_request_id
1191 1194
1192 1195 try:
1193 1196 target_scm.cleanup_merge_workspace(workspace_id)
1194 1197 except NotImplementedError:
1195 1198 pass
1196 1199
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :returns: tuple of (grouped refs, selected ref key)
        :raises CommitDoesNotExistError: when a requested ref is not found
        :raises EmptyRepositoryError: when the repo has no commits at all
        """

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_str(branch) if branch else None
        bookmark = safe_str(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []
        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                # First ref matching the requested commit_id / name wins.
                if not selected:
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            # Nothing matched: either the caller asked for a ref that does
            # not exist, or nothing was requested and we fall back to the
            # default branch, then to the first commit.
            ref = commit_id or branch or bookmark
            if ref:
                raise CommitDoesNotExistError(
                    'No commit refs could be found matching: %s' % ref)
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                selected = 'branch:%s:%s' % (
                    repo.DEFAULT_BRANCH_NAME,
                    repo.branches[repo.DEFAULT_BRANCH_NAME]
                )
            elif repo.commit_ids:
                rev = repo.commit_ids[0]
                selected = 'rev:%s:%s' % (rev, rev)
            else:
                raise EmptyRepositoryError()
        return groups, selected
1259 1262
1260 1263 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1261 1264 pull_request = self.__get_pull_request(pull_request)
1262 1265 return self._get_diff_from_pr_or_version(pull_request, context=context)
1263 1266
    def _get_diff_from_pr_or_version(self, pr_or_version, context):
        """
        Calculate the diff between the source and target refs, using only
        the source repository.

        :param pr_or_version: pull request or pull request version object;
            must expose source/target ref parts and a source repo.
        :param context: number of context lines for the diff
        """
        source_repo = pr_or_version.source_repo

        # we swap org/other ref since we run a simple diff on one repo
        target_ref_id = pr_or_version.target_ref_parts.commit_id
        source_ref_id = pr_or_version.source_ref_parts.commit_id
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
        vcs_repo = source_repo.scm_instance()

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit, context=context)
        return vcs_diff
1291 1294
1292 1295 def _is_merge_enabled(self, pull_request):
1293 1296 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1294 1297 settings = settings_model.get_general_settings()
1295 1298 return settings.get('rhodecode_pr_merge_enabled', False)
1296 1299
1297 1300 def _use_rebase_for_merging(self, pull_request):
1298 1301 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1299 1302 settings = settings_model.get_general_settings()
1300 1303 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1301 1304
1302 1305 def _log_action(self, action, user, pull_request):
1303 1306 action_logger(
1304 1307 user,
1305 1308 '{action}:{pr_id}'.format(
1306 1309 action=action, pr_id=pull_request.pull_request_id),
1307 1310 pull_request.target_repo)
1308 1311
1309 1312
# Result containers for the commit-id and file change calculations above.
ChangeTuple = namedtuple('ChangeTuple', 'added common removed')

FileChangeTuple = namedtuple('FileChangeTuple', 'added modified removed')