subrepo: Add merge failure reason code and message for subrepo merge conflicts.
Martin Bornhold -
r1106:490ebeeb default
@@ -1,1545 +1,1549 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24
25 25 import collections
26 26 import datetime
27 27 import itertools
28 28 import logging
29 29 import os
30 30 import time
31 31 import warnings
32 32
33 33 from zope.cachedescriptors.property import Lazy as LazyProperty
34 34
35 35 from rhodecode.lib.utils2 import safe_str, safe_unicode
36 36 from rhodecode.lib.vcs import connection
37 37 from rhodecode.lib.vcs.utils import author_name, author_email
38 38 from rhodecode.lib.vcs.conf import settings
39 39 from rhodecode.lib.vcs.exceptions import (
40 40 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
41 41 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
42 42 NodeDoesNotExistError, NodeNotChangedError, VCSError,
43 43 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
44 44 RepositoryError)
45 45
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 FILEMODE_DEFAULT = 0100644
51 51 FILEMODE_EXECUTABLE = 0100755
52 52
53 53 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
54 54 MergeResponse = collections.namedtuple(
55 55 'MergeResponse',
56 56 ('possible', 'executed', 'merge_ref', 'failure_reason'))
57 57
58 58
59 59 class MergeFailureReason(object):
60 60 """
61 61 Enumeration with all the reasons why the server side merge could fail.
62 62
63 63 DO NOT change the numeric values of the reasons, as they may be stored in the
64 64 database.
65 65
66 66 Changing the name of a reason is acceptable and encouraged to deprecate old
67 67 reasons.
68 68 """
69 69
70 70 # Everything went well.
71 71 NONE = 0
72 72
73 73 # An unexpected exception was raised. Check the logs for more details.
74 74 UNKNOWN = 1
75 75
76 76 # The merge was not successful, there are conflicts.
77 77 MERGE_FAILED = 2
78 78
79 79 # The merge succeeded but we could not push it to the target repository.
80 80 PUSH_FAILED = 3
81 81
82 82 # The specified target is not a head in the target repository.
83 83 TARGET_IS_NOT_HEAD = 4
84 84
85 85 # The source repository contains more branches than the target. Pushing
86 86 # the merge will create additional branches in the target.
87 87 HG_SOURCE_HAS_MORE_BRANCHES = 5
88 88
89 89 # The target reference has multiple heads, so the target location cannot
90 90 # be identified unambiguously. This can only happen for mercurial
91 91 # branches.
92 92 HG_TARGET_HAS_MULTIPLE_HEADS = 6
93 93
94 94 # The target repository is locked
95 95 TARGET_IS_LOCKED = 7
96 96
97 97 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
98 98 # An involved commit could not be found.
99 99 _DEPRECATED_MISSING_COMMIT = 8
100 100
101 101 # The target repo reference is missing.
102 102 MISSING_TARGET_REF = 9
103 103
104 104 # The source repo reference is missing.
105 105 MISSING_SOURCE_REF = 10
106 106
107 # The merge was not successful, there are conflicts related to sub
108 # repositories.
109 SUBREPO_MERGE_FAILED = 11
110
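
A quick illustration of how the new reason travels in a MergeResponse (a hedged sketch; the backend logic that actually detects subrepo conflicts is not part of this file, and whether ``possible`` is True in this situation depends on the backend):

    from rhodecode.lib.vcs.backends.base import (
        MergeFailureReason, MergeResponse)

    # A merge attempt that hit conflicts in a subrepository; no merge
    # commit exists, so merge_ref is None.
    response = MergeResponse(
        True, False, None, MergeFailureReason.SUBREPO_MERGE_FAILED)

    assert not response.executed
    assert response.failure_reason == MergeFailureReason.SUBREPO_MERGE_FAILED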
107 111
108 112 class UpdateFailureReason(object):
109 113 """
110 114 Enumeration with all the reasons why the pull request update could fail.
111 115
112 116 DO NOT change the numeric values of the reasons, as they may be stored in the
113 117 database.
114 118
115 119 Changing the name of a reason is acceptable and encouraged to deprecate old
116 120 reasons.
117 121 """
118 122
119 123 # Everything went well.
120 124 NONE = 0
121 125
122 126 # An unexpected exception was raised. Check the logs for more details.
123 127 UNKNOWN = 1
124 128
125 129 # The pull request is up to date.
126 130 NO_CHANGE = 2
127 131
128 132 # The pull request has a reference type that is not supported for update.
129 133 WRONG_REF_TPYE = 3
130 134
131 135 # Update failed because the target reference is missing.
132 136 MISSING_TARGET_REF = 4
133 137
134 138 # Update failed because the source reference is missing.
135 139 MISSING_SOURCE_REF = 5
136 140
137 141
138 142 class BaseRepository(object):
139 143 """
140 144 Base Repository for final backends
141 145
142 146 .. attribute:: DEFAULT_BRANCH_NAME
143 147
144 148 name of the default branch (i.e. "trunk" for svn, "master" for git, etc.)
145 149
146 150 .. attribute:: commit_ids
147 151
148 152 list of all available commit ids, in ascending order
149 153
150 154 .. attribute:: path
151 155
152 156 absolute path to the repository
153 157
154 158 .. attribute:: bookmarks
155 159
156 160 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
157 161 there are no bookmarks or the backend implementation does not support
158 162 bookmarks.
159 163
160 164 .. attribute:: tags
161 165
162 166 Mapping from name to :term:`Commit ID` of the tag.
163 167
164 168 """
165 169
166 170 DEFAULT_BRANCH_NAME = None
167 171 DEFAULT_CONTACT = u"Unknown"
168 172 DEFAULT_DESCRIPTION = u"unknown"
169 173 EMPTY_COMMIT_ID = '0' * 40
170 174
171 175 path = None
172 176
173 177 def __init__(self, repo_path, config=None, create=False, **kwargs):
174 178 """
175 179 Initializes the repository. Raises RepositoryError if the repository could
176 180 not be found at the given ``repo_path``, or if a directory at ``repo_path``
177 181 exists and ``create`` is set to True.
178 182
179 183 :param repo_path: local path of the repository
180 184 :param config: repository configuration
181 185 :param create=False: if set to True, would try to create repository.
182 186 :param src_url=None: if set, should be a proper url from which the repository
183 187 would be cloned; requires ``create`` parameter to be set to True -
184 188 raises RepositoryError if src_url is set and create evaluates to
185 189 False
186 190 """
187 191 raise NotImplementedError
188 192
189 193 def __repr__(self):
190 194 return '<%s at %s>' % (self.__class__.__name__, self.path)
191 195
192 196 def __len__(self):
193 197 return self.count()
194 198
195 199 def __eq__(self, other):
196 200 same_instance = isinstance(other, self.__class__)
197 201 return same_instance and other.path == self.path
198 202
199 203 def __ne__(self, other):
200 204 return not self.__eq__(other)
201 205
202 206 @LazyProperty
203 207 def EMPTY_COMMIT(self):
204 208 return EmptyCommit(self.EMPTY_COMMIT_ID)
205 209
206 210 @LazyProperty
207 211 def alias(self):
208 212 for k, v in settings.BACKENDS.items():
209 213 if v.split('.')[-1] == str(self.__class__.__name__):
210 214 return k
211 215
212 216 @LazyProperty
213 217 def name(self):
214 218 return safe_unicode(os.path.basename(self.path))
215 219
216 220 @LazyProperty
217 221 def description(self):
218 222 raise NotImplementedError
219 223
220 224 def refs(self):
221 225 """
222 226 returns a `dict` with branches, bookmarks, tags, and closed_branches
223 227 for this repository
224 228 """
225 229 raise NotImplementedError
226 230
227 231 @LazyProperty
228 232 def branches(self):
229 233 """
230 234 A `dict` which maps branch names to commit ids.
231 235 """
232 236 raise NotImplementedError
233 237
234 238 @LazyProperty
235 239 def size(self):
236 240 """
237 241 Returns combined size in bytes for all repository files
238 242 """
239 243 tip = self.get_commit()
240 244 return tip.size
241 245
242 246 def size_at_commit(self, commit_id):
243 247 commit = self.get_commit(commit_id)
244 248 return commit.size
245 249
246 250 def is_empty(self):
247 251 return not bool(self.commit_ids)
248 252
249 253 @staticmethod
250 254 def check_url(url, config):
251 255 """
252 256 Checks the given url and tries to verify that it points to a valid
253 257 link.
254 258 """
255 259 raise NotImplementedError
256 260
257 261 @staticmethod
258 262 def is_valid_repository(path):
259 263 """
260 264 Check if given `path` contains a valid repository of this backend
261 265 """
262 266 raise NotImplementedError
263 267
264 268 # ==========================================================================
265 269 # COMMITS
266 270 # ==========================================================================
267 271
268 272 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
269 273 """
270 274 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
271 275 are both None, most recent commit is returned.
272 276
273 277 :param pre_load: Optional. List of commit attributes to load.
274 278
275 279 :raises ``EmptyRepositoryError``: if there are no commits
276 280 """
277 281 raise NotImplementedError
278 282
279 283 def __iter__(self):
280 284 for commit_id in self.commit_ids:
281 285 yield self.get_commit(commit_id=commit_id)
282 286
283 287 def get_commits(
284 288 self, start_id=None, end_id=None, start_date=None, end_date=None,
285 289 branch_name=None, pre_load=None):
286 290 """
287 291 Returns an iterator of `BaseCommit` objects from start to end.
288 292 This should behave just like a list, i.e. the end is not
289 293 inclusive.
290 294
291 295 :param start_id: None or str, must be a valid commit id
292 296 :param end_id: None or str, must be a valid commit id
293 297 :param start_date:
294 298 :param end_date:
295 299 :param branch_name:
296 300 :param pre_load:
297 301 """
298 302 raise NotImplementedError
299 303
300 304 def __getitem__(self, key):
301 305 """
302 306 Allows index based access to the commit objects of this repository.
303 307 """
304 308 pre_load = ["author", "branch", "date", "message", "parents"]
305 309 if isinstance(key, slice):
306 310 return self._get_range(key, pre_load)
307 311 return self.get_commit(commit_idx=key, pre_load=pre_load)
308 312
309 313 def _get_range(self, slice_obj, pre_load):
310 314 for commit_id in self.commit_ids.__getitem__(slice_obj):
311 315 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
312 316
313 317 def count(self):
314 318 return len(self.commit_ids)
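
For orientation, a small usage sketch of the commit access API defined above (assuming ``repo`` is a concrete backend instance):

    tip = repo.get_commit()            # most recent commit
    first = repo[0]                    # index access, pre-loads common attributes
    last_five = list(repo[-5:])        # slicing yields a generator of commits
    on_default = repo.get_commits(branch_name=repo.DEFAULT_BRANCH_NAME)
    number_of_commits = len(repo)      # same as repo.count()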
315 319
316 320 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
317 321 """
318 322 Creates and returns a tag for the given ``commit_id``.
319 323
320 324 :param name: name for new tag
321 325 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
322 326 :param commit_id: commit id for which new tag would be created
323 327 :param message: message of the tag's commit
324 328 :param date: date of tag's commit
325 329
326 330 :raises TagAlreadyExistError: if tag with same name already exists
327 331 """
328 332 raise NotImplementedError
329 333
330 334 def remove_tag(self, name, user, message=None, date=None):
331 335 """
332 336 Removes tag with the given ``name``.
333 337
334 338 :param name: name of the tag to be removed
335 339 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
336 340 :param message: message of the tag's removal commit
337 341 :param date: date of tag's removal commit
338 342
339 343 :raises TagDoesNotExistError: if tag with given name does not exist
340 344 """
341 345 raise NotImplementedError
342 346
343 347 def get_diff(
344 348 self, commit1, commit2, path=None, ignore_whitespace=False,
345 349 context=3, path1=None):
346 350 """
347 351 Returns (git like) *diff*, as plain text. Shows changes introduced by
348 352 `commit2` since `commit1`.
349 353
350 354 :param commit1: Entry point from which diff is shown. Can be
351 355 ``self.EMPTY_COMMIT`` - in this case, patch showing all
352 356 the changes since empty state of the repository until `commit2`
353 357 :param commit2: Until which commit changes should be shown.
354 358 :param path: Can be set to a path of a file to create a diff of that
355 359 file. If `path1` is also set, this value is only associated to
356 360 `commit2`.
357 361 :param ignore_whitespace: If set to ``True``, would not show whitespace
358 362 changes. Defaults to ``False``.
359 363 :param context: How many lines before/after changed lines should be
360 364 shown. Defaults to ``3``.
361 365 :param path1: Can be set to a path to associate with `commit1`. This
362 366 parameter works only for backends which support diff generation for
363 367 different paths. Other backends will raise a `ValueError` if `path1`
364 368 is set and has a different value than `path`.
365 369 """
366 370 raise NotImplementedError
367 371
368 372 def strip(self, commit_id, branch=None):
369 373 """
370 374 Strip given commit_id from the repository
371 375 """
372 376 raise NotImplementedError
373 377
374 378 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
375 379 """
376 380 Returns the latest common ancestor commit, if one exists, between this
377 381 repo's `commit_id1` and `commit_id2` from `repo2`.
378 382
379 383 :param commit_id1: Commit id from this repository to use as a
380 384 target for the comparison.
381 385 :param commit_id2: Source commit id to use for comparison.
382 386 :param repo2: Source repository to use for comparison.
383 387 """
384 388 raise NotImplementedError
385 389
386 390 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
387 391 """
388 392 Compare this repository's revision `commit_id1` with `commit_id2`.
389 393
390 394 Returns a tuple (commits, ancestor) of the commits that would be merged
391 395 from `commit_id2`. For a normal compare (``merge=False``), ``None``
392 396 is returned as the ancestor.
393 397
394 398 :param commit_id1: Commit id from this repository to use as a
395 399 target for the comparison.
396 400 :param commit_id2: Source commit id to use for comparison.
397 401 :param repo2: Source repository to use for comparison.
398 402 :param merge: If set to ``True`` will do a merge compare which also
399 403 returns the common ancestor.
400 404 :param pre_load: Optional. List of commit attributes to load.
401 405 """
402 406 raise NotImplementedError
403 407
404 408 def merge(self, target_ref, source_repo, source_ref, workspace_id,
405 409 user_name='', user_email='', message='', dry_run=False,
406 410 use_rebase=False):
407 411 """
408 412 Merge the revisions specified in `source_ref` from `source_repo`
409 413 onto the `target_ref` of this repository.
410 414
411 415 `source_ref` and `target_ref` are named tuples with the following
412 416 fields `type`, `name` and `commit_id`.
413 417
414 418 Returns a MergeResponse named tuple with the following fields:
415 419 'possible', 'executed', 'merge_ref' and 'failure_reason', matching the
416 420 MergeResponse named tuple defined at the top of this module.
417 421
418 422 :param target_ref: `target_ref` points to the commit on top of which
419 423 the `source_ref` should be merged.
420 424 :param source_repo: The repository that contains the commits to be
421 425 merged.
422 426 :param source_ref: `source_ref` points to the topmost commit from
423 427 the `source_repo` which should be merged.
424 428 :param workspace_id: `workspace_id` unique identifier.
425 429 :param user_name: Merge commit `user_name`.
426 430 :param user_email: Merge commit `user_email`.
427 431 :param message: Merge commit `message`.
428 432 :param dry_run: If `True` the merge will not take place.
429 433 :param use_rebase: If `True` commits from the source will be rebased
430 434 on top of the target instead of being merged.
431 435 """
432 436 if dry_run:
433 437 message = message or 'dry_run_merge_message'
434 438 user_email = user_email or 'dry-run-merge@rhodecode.com'
435 439 user_name = user_name or 'Dry-Run User'
436 440 else:
437 441 if not user_name:
438 442 raise ValueError('user_name cannot be empty')
439 443 if not user_email:
440 444 raise ValueError('user_email cannot be empty')
441 445 if not message:
442 446 raise ValueError('message cannot be empty')
443 447
444 448 shadow_repository_path = self._maybe_prepare_merge_workspace(
445 449 workspace_id, target_ref)
446 450
447 451 try:
448 452 return self._merge_repo(
449 453 shadow_repository_path, target_ref, source_repo,
450 454 source_ref, message, user_name, user_email, dry_run=dry_run,
451 455 use_rebase=use_rebase)
452 456 except RepositoryError:
453 457 log.exception(
454 458 'Unexpected failure when running merge, dry-run=%s',
455 459 dry_run)
456 460 return MergeResponse(
457 461 False, False, None, MergeFailureReason.UNKNOWN)
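
A hedged usage sketch of the merge API above; ``target_repo`` and ``source_repo`` are assumed backend instances and the commit ids are placeholders:

    target_ref = Reference('branch', 'default', 'f' * 40)   # placeholder commit ids
    source_ref = Reference('branch', 'feature', 'e' * 40)

    response = target_repo.merge(
        target_ref, source_repo, source_ref, workspace_id='pr-1',
        user_name='Joe Doe', user_email='joe.doe@example.com',
        message='Merge feature into default', dry_run=True)

    if not response.executed:
        log.debug('merge not executed, reason: %s', response.failure_reason)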
458 462
459 463 def _merge_repo(self, shadow_repository_path, target_ref,
460 464 source_repo, source_ref, merge_message,
461 465 merger_name, merger_email, dry_run=False, use_rebase=False):
462 466 """Internal implementation of merge."""
463 467 raise NotImplementedError
464 468
465 469 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref):
466 470 """
467 471 Create the merge workspace.
468 472
469 473 :param workspace_id: `workspace_id` unique identifier.
470 474 """
471 475 raise NotImplementedError
472 476
473 477 def cleanup_merge_workspace(self, workspace_id):
474 478 """
475 479 Remove merge workspace.
476 480
477 481 This function MUST not fail in case there is no workspace associated with
478 482 the given `workspace_id`.
479 483
480 484 :param workspace_id: `workspace_id` unique identifier.
481 485 """
482 486 raise NotImplementedError
483 487
484 488 # ========== #
485 489 # COMMIT API #
486 490 # ========== #
487 491
488 492 @LazyProperty
489 493 def in_memory_commit(self):
490 494 """
491 495 Returns :class:`InMemoryCommit` object for this repository.
492 496 """
493 497 raise NotImplementedError
494 498
495 499 # ======================== #
496 500 # UTILITIES FOR SUBCLASSES #
497 501 # ======================== #
498 502
499 503 def _validate_diff_commits(self, commit1, commit2):
500 504 """
501 505 Validates that the given commits are related to this repository.
502 506
503 507 Intended as a utility for subclasses to have a consistent validation
504 508 of input parameters in methods like :meth:`get_diff`.
505 509 """
506 510 self._validate_commit(commit1)
507 511 self._validate_commit(commit2)
508 512 if (isinstance(commit1, EmptyCommit) and
509 513 isinstance(commit2, EmptyCommit)):
510 514 raise ValueError("Cannot compare two empty commits")
511 515
512 516 def _validate_commit(self, commit):
513 517 if not isinstance(commit, BaseCommit):
514 518 raise TypeError(
515 519 "%s is not of type BaseCommit" % repr(commit))
516 520 if commit.repository != self and not isinstance(commit, EmptyCommit):
517 521 raise ValueError(
518 522 "Commit %s must be a valid commit from this repository %s, "
519 523 "related to this repository instead %s." %
520 524 (commit, self, commit.repository))
521 525
522 526 def _validate_commit_id(self, commit_id):
523 527 if not isinstance(commit_id, basestring):
524 528 raise TypeError("commit_id must be a string value")
525 529
526 530 def _validate_commit_idx(self, commit_idx):
527 531 if not isinstance(commit_idx, (int, long)):
528 532 raise TypeError("commit_idx must be a numeric value")
529 533
530 534 def _validate_branch_name(self, branch_name):
531 535 if branch_name and branch_name not in self.branches_all:
532 536 msg = ("Branch %s not found in %s" % (branch_name, self))
533 537 raise BranchDoesNotExistError(msg)
534 538
535 539 #
536 540 # Supporting deprecated API parts
537 541 # TODO: johbo: consider to move this into a mixin
538 542 #
539 543
540 544 @property
541 545 def EMPTY_CHANGESET(self):
542 546 warnings.warn(
543 547 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
544 548 return self.EMPTY_COMMIT_ID
545 549
546 550 @property
547 551 def revisions(self):
548 552 warnings.warn("Use commits attribute instead", DeprecationWarning)
549 553 return self.commit_ids
550 554
551 555 @revisions.setter
552 556 def revisions(self, value):
553 557 warnings.warn("Use commits attribute instead", DeprecationWarning)
554 558 self.commit_ids = value
555 559
556 560 def get_changeset(self, revision=None, pre_load=None):
557 561 warnings.warn("Use get_commit instead", DeprecationWarning)
558 562 commit_id = None
559 563 commit_idx = None
560 564 if isinstance(revision, basestring):
561 565 commit_id = revision
562 566 else:
563 567 commit_idx = revision
564 568 return self.get_commit(
565 569 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
566 570
567 571 def get_changesets(
568 572 self, start=None, end=None, start_date=None, end_date=None,
569 573 branch_name=None, pre_load=None):
570 574 warnings.warn("Use get_commits instead", DeprecationWarning)
571 575 start_id = self._revision_to_commit(start)
572 576 end_id = self._revision_to_commit(end)
573 577 return self.get_commits(
574 578 start_id=start_id, end_id=end_id, start_date=start_date,
575 579 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
576 580
577 581 def _revision_to_commit(self, revision):
578 582 """
579 583 Translates a revision to a commit_id
580 584
581 585 Helps to support the old changeset based API which allows to use
582 586 commit ids and commit indices interchangeably.
583 587 """
584 588 if revision is None:
585 589 return revision
586 590
587 591 if isinstance(revision, basestring):
588 592 commit_id = revision
589 593 else:
590 594 commit_id = self.commit_ids[revision]
591 595 return commit_id
592 596
593 597 @property
594 598 def in_memory_changeset(self):
595 599 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
596 600 return self.in_memory_commit
597 601
598 602
599 603 class BaseCommit(object):
600 604 """
601 605 Each backend should implement its commit representation.
602 606
603 607 **Attributes**
604 608
605 609 ``repository``
606 610 repository object within which commit exists
607 611
608 612 ``id``
609 613 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
610 614 just ``tip``.
611 615
612 616 ``raw_id``
613 617 raw commit representation (i.e. full 40 length sha for git
614 618 backend)
615 619
616 620 ``short_id``
617 621 shortened (if applicable) version of ``raw_id``; it is a simple
618 622 shortcut for ``raw_id[:12]`` for git/mercurial backends, or the same
619 623 as ``raw_id`` for subversion
620 624
621 625 ``idx``
622 626 commit index
623 627
624 628 ``files``
625 629 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
626 630
627 631 ``dirs``
628 632 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
629 633
630 634 ``nodes``
631 635 combined list of ``Node`` objects
632 636
633 637 ``author``
634 638 author of the commit, as unicode
635 639
636 640 ``message``
637 641 message of the commit, as unicode
638 642
639 643 ``parents``
640 644 list of parent commits
641 645
642 646 """
643 647
644 648 branch = None
645 649 """
646 650 Depending on the backend this should be set to the branch name of the
647 651 commit. Backends not supporting branches on commits should leave this
648 652 value as ``None``.
649 653 """
650 654
651 655 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
652 656 """
653 657 This template is used to generate a default prefix for repository archives
654 658 if no prefix has been specified.
655 659 """
656 660
657 661 def __str__(self):
658 662 return '<%s at %s:%s>' % (
659 663 self.__class__.__name__, self.idx, self.short_id)
660 664
661 665 def __repr__(self):
662 666 return self.__str__()
663 667
664 668 def __unicode__(self):
665 669 return u'%s:%s' % (self.idx, self.short_id)
666 670
667 671 def __eq__(self, other):
668 672 same_instance = isinstance(other, self.__class__)
669 673 return same_instance and self.raw_id == other.raw_id
670 674
671 675 def __json__(self):
672 676 parents = []
673 677 try:
674 678 for parent in self.parents:
675 679 parents.append({'raw_id': parent.raw_id})
676 680 except NotImplementedError:
677 681 # empty commit doesn't have parents implemented
678 682 pass
679 683
680 684 return {
681 685 'short_id': self.short_id,
682 686 'raw_id': self.raw_id,
683 687 'revision': self.idx,
684 688 'message': self.message,
685 689 'date': self.date,
686 690 'author': self.author,
687 691 'parents': parents,
688 692 'branch': self.branch
689 693 }
690 694
691 695 @LazyProperty
692 696 def last(self):
693 697 """
694 698 ``True`` if this is the last commit in the repository, ``False``
695 699 otherwise; trying to access this attribute while there are no
696 700 commits would raise `EmptyRepositoryError`
697 701 """
698 702 if self.repository is None:
699 703 raise CommitError("Cannot check if it's most recent commit")
700 704 return self.raw_id == self.repository.commit_ids[-1]
701 705
702 706 @LazyProperty
703 707 def parents(self):
704 708 """
705 709 Returns list of parent commits.
706 710 """
707 711 raise NotImplementedError
708 712
709 713 @property
710 714 def merge(self):
711 715 """
712 716 Returns ``True`` if this commit is a merge commit, ``False`` otherwise.
713 717 """
714 718 return len(self.parents) > 1
715 719
716 720 @LazyProperty
717 721 def children(self):
718 722 """
719 723 Returns list of child commits.
720 724 """
721 725 raise NotImplementedError
722 726
723 727 @LazyProperty
724 728 def id(self):
725 729 """
726 730 Returns string identifying this commit.
727 731 """
728 732 raise NotImplementedError
729 733
730 734 @LazyProperty
731 735 def raw_id(self):
732 736 """
733 737 Returns raw string identifying this commit.
734 738 """
735 739 raise NotImplementedError
736 740
737 741 @LazyProperty
738 742 def short_id(self):
739 743 """
740 744 Returns shortened version of ``raw_id`` attribute, as string,
741 745 identifying this commit, useful for presentation to users.
742 746 """
743 747 raise NotImplementedError
744 748
745 749 @LazyProperty
746 750 def idx(self):
747 751 """
748 752 Returns integer identifying this commit.
749 753 """
750 754 raise NotImplementedError
751 755
752 756 @LazyProperty
753 757 def committer(self):
754 758 """
755 759 Returns committer for this commit
756 760 """
757 761 raise NotImplementedError
758 762
759 763 @LazyProperty
760 764 def committer_name(self):
761 765 """
762 766 Returns committer name for this commit
763 767 """
764 768
765 769 return author_name(self.committer)
766 770
767 771 @LazyProperty
768 772 def committer_email(self):
769 773 """
770 774 Returns committer email address for this commit
771 775 """
772 776
773 777 return author_email(self.committer)
774 778
775 779 @LazyProperty
776 780 def author(self):
777 781 """
778 782 Returns author for this commit
779 783 """
780 784
781 785 raise NotImplementedError
782 786
783 787 @LazyProperty
784 788 def author_name(self):
785 789 """
786 790 Returns author name for this commit
787 791 """
788 792
789 793 return author_name(self.author)
790 794
791 795 @LazyProperty
792 796 def author_email(self):
793 797 """
794 798 Returns author email address for this commit
795 799 """
796 800
797 801 return author_email(self.author)
798 802
799 803 def get_file_mode(self, path):
800 804 """
801 805 Returns stat mode of the file at `path`.
802 806 """
803 807 raise NotImplementedError
804 808
805 809 def is_link(self, path):
806 810 """
807 811 Returns ``True`` if given `path` is a symlink
808 812 """
809 813 raise NotImplementedError
810 814
811 815 def get_file_content(self, path):
812 816 """
813 817 Returns content of the file at the given `path`.
814 818 """
815 819 raise NotImplementedError
816 820
817 821 def get_file_size(self, path):
818 822 """
819 823 Returns size of the file at the given `path`.
820 824 """
821 825 raise NotImplementedError
822 826
823 827 def get_file_commit(self, path, pre_load=None):
824 828 """
825 829 Returns last commit of the file at the given `path`.
826 830
827 831 :param pre_load: Optional. List of commit attributes to load.
828 832 """
829 833 return self.get_file_history(path, limit=1, pre_load=pre_load)[0]
830 834
831 835 def get_file_history(self, path, limit=None, pre_load=None):
832 836 """
833 837 Returns the history of a file as a reversed list of :class:`BaseCommit`
834 838 objects in which the file at the given `path` has been modified.
835 839
836 840 :param limit: Optional. Allows to limit the size of the returned
837 841 history. This is intended as a hint to the underlying backend, so
838 842 that it can apply optimizations depending on the limit.
839 843 :param pre_load: Optional. List of commit attributes to load.
840 844 """
841 845 raise NotImplementedError
842 846
843 847 def get_file_annotate(self, path, pre_load=None):
844 848 """
845 849 Returns a generator of four element tuples with
846 850 lineno, sha, commit lazy loader and line
847 851
848 852 :param pre_load: Optional. List of commit attributes to load.
849 853 """
850 854 raise NotImplementedError
851 855
852 856 def get_nodes(self, path):
853 857 """
854 858 Returns combined ``DirNode`` and ``FileNode`` objects list representing
855 859 state of commit at the given ``path``.
856 860
857 861 :raises ``CommitError``: if node at the given ``path`` is not
858 862 instance of ``DirNode``
859 863 """
860 864 raise NotImplementedError
861 865
862 866 def get_node(self, path):
863 867 """
864 868 Returns ``Node`` object from the given ``path``.
865 869
866 870 :raises ``NodeDoesNotExistError``: if there is no node at the given
867 871 ``path``
868 872 """
869 873 raise NotImplementedError
870 874
871 875 def get_largefile_node(self, path):
872 876 """
873 877 Returns the path to largefile from Mercurial storage.
874 878 """
875 879 raise NotImplementedError
876 880
877 881 def archive_repo(self, file_path, kind='tgz', subrepos=None,
878 882 prefix=None, write_metadata=False, mtime=None):
879 883 """
880 884 Creates an archive containing the contents of the repository.
881 885
882 886 :param file_path: path to the file in which to create the archive.
883 887 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
884 888 :param prefix: name of root directory in archive.
885 889 Default is repository name and commit's short_id joined with dash:
886 890 ``"{repo_name}-{short_id}"``.
887 891 :param write_metadata: write a metadata file into archive.
888 892 :param mtime: custom modification time for archive creation, defaults
889 893 to time.time() if not given.
890 894
891 895 :raise VCSError: If prefix has a problem.
892 896 """
893 897 allowed_kinds = settings.ARCHIVE_SPECS.keys()
894 898 if kind not in allowed_kinds:
895 899 raise ImproperArchiveTypeError(
896 900 'Archive kind (%s) not supported; use one of %s' %
897 901 (kind, allowed_kinds))
898 902
899 903 prefix = self._validate_archive_prefix(prefix)
900 904
901 905 mtime = mtime or time.mktime(self.date.timetuple())
902 906
903 907 file_info = []
904 908 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
905 909 for _r, _d, files in cur_rev.walk('/'):
906 910 for f in files:
907 911 f_path = os.path.join(prefix, f.path)
908 912 file_info.append(
909 913 (f_path, f.mode, f.is_link(), f.raw_bytes))
910 914
911 915 if write_metadata:
912 916 metadata = [
913 917 ('repo_name', self.repository.name),
914 918 ('rev', self.raw_id),
915 919 ('create_time', mtime),
916 920 ('branch', self.branch),
917 921 ('tags', ','.join(self.tags)),
918 922 ]
919 923 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
920 924 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
921 925
922 926 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
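
A short sketch of the archive API above (``repo`` is an assumed backend instance; note the base implementation delegates the actual archiving to ``connection.Hg.archive_repo``):

    commit = repo.get_commit()
    commit.archive_repo(
        '/tmp/archive.tgz', kind='tgz',       # one of "tbz2", "tgz", "zip"
        prefix=None,                          # defaults to "{repo_name}-{short_id}"
        write_metadata=True)                  # adds an ".archival.txt" entry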
923 927
924 928 def _validate_archive_prefix(self, prefix):
925 929 if prefix is None:
926 930 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
927 931 repo_name=safe_str(self.repository.name),
928 932 short_id=self.short_id)
929 933 elif not isinstance(prefix, str):
930 934 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
931 935 elif prefix.startswith('/'):
932 936 raise VCSError("Prefix cannot start with leading slash")
933 937 elif prefix.strip() == '':
934 938 raise VCSError("Prefix cannot be empty")
935 939 return prefix
936 940
937 941 @LazyProperty
938 942 def root(self):
939 943 """
940 944 Returns ``RootNode`` object for this commit.
941 945 """
942 946 return self.get_node('')
943 947
944 948 def next(self, branch=None):
945 949 """
946 950 Returns the next commit from the current one; if branch is given it will
947 951 return the next commit belonging to this branch
948 952
949 953 :param branch: show commits within the given named branch
950 954 """
951 955 indexes = xrange(self.idx + 1, self.repository.count())
952 956 return self._find_next(indexes, branch)
953 957
954 958 def prev(self, branch=None):
955 959 """
956 960 Returns the previous commit from the current one; if branch is given it
957 961 will return the previous commit belonging to this branch
958 962
959 963 :param branch: show commit within the given named branch
960 964 """
961 965 indexes = xrange(self.idx - 1, -1, -1)
962 966 return self._find_next(indexes, branch)
963 967
964 968 def _find_next(self, indexes, branch=None):
965 969 if branch and self.branch != branch:
966 970 raise VCSError('Branch option used on commit not belonging '
967 971 'to that branch')
968 972
969 973 for next_idx in indexes:
970 974 commit = self.repository.get_commit(commit_idx=next_idx)
971 975 if branch and branch != commit.branch:
972 976 continue
973 977 return commit
974 978 raise CommitDoesNotExistError
975 979
976 980 def diff(self, ignore_whitespace=True, context=3):
977 981 """
978 982 Returns a `Diff` object representing the change made by this commit.
979 983 """
980 984 parent = (
981 985 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
982 986 diff = self.repository.get_diff(
983 987 parent, self,
984 988 ignore_whitespace=ignore_whitespace,
985 989 context=context)
986 990 return diff
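
A brief sketch tying this to the ``Diff``/``DiffChunk`` classes defined at the end of this module (``commit`` is an assumed backend commit):

    diff = commit.diff(ignore_whitespace=False, context=5)
    for chunk in diff.chunks():
        per_file_header = chunk.header        # parsed "diff --git ..." header fields
        per_file_patch = chunk.diff           # the chunk body without the header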
987 991
988 992 @LazyProperty
989 993 def added(self):
990 994 """
991 995 Returns list of added ``FileNode`` objects.
992 996 """
993 997 raise NotImplementedError
994 998
995 999 @LazyProperty
996 1000 def changed(self):
997 1001 """
998 1002 Returns list of modified ``FileNode`` objects.
999 1003 """
1000 1004 raise NotImplementedError
1001 1005
1002 1006 @LazyProperty
1003 1007 def removed(self):
1004 1008 """
1005 1009 Returns list of removed ``FileNode`` objects.
1006 1010 """
1007 1011 raise NotImplementedError
1008 1012
1009 1013 @LazyProperty
1010 1014 def size(self):
1011 1015 """
1012 1016 Returns total number of bytes from contents of all filenodes.
1013 1017 """
1014 1018 return sum((node.size for node in self.get_filenodes_generator()))
1015 1019
1016 1020 def walk(self, topurl=''):
1017 1021 """
1018 1022 Similar to the os.walk method. Instead of the filesystem, it walks
1019 1023 through the commit starting at the given ``topurl``. Returns a generator of tuples
1020 1024 (topnode, dirnodes, filenodes).
1021 1025 """
1022 1026 topnode = self.get_node(topurl)
1023 1027 if not topnode.is_dir():
1024 1028 return
1025 1029 yield (topnode, topnode.dirs, topnode.files)
1026 1030 for dirnode in topnode.dirs:
1027 1031 for tup in self.walk(dirnode.path):
1028 1032 yield tup
1029 1033
1030 1034 def get_filenodes_generator(self):
1031 1035 """
1032 1036 Returns generator that yields *all* file nodes.
1033 1037 """
1034 1038 for topnode, dirs, files in self.walk():
1035 1039 for node in files:
1036 1040 yield node
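
A small sketch of walking a commit's tree with the helpers above (``commit`` is an assumed backend commit):

    paths = []
    for topnode, dirnodes, filenodes in commit.walk('/'):
        paths.extend(filenode.path for filenode in filenodes)

    # Equivalent flat iteration over every file node:
    total_bytes = sum(node.size for node in commit.get_filenodes_generator())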
1037 1041
1038 1042 #
1039 1043 # Utilities for sub classes to support consistent behavior
1040 1044 #
1041 1045
1042 1046 def no_node_at_path(self, path):
1043 1047 return NodeDoesNotExistError(
1044 1048 "There is no file nor directory at the given path: "
1045 1049 "'%s' at commit %s" % (path, self.short_id))
1046 1050
1047 1051 def _fix_path(self, path):
1048 1052 """
1049 1053 Paths are stored without a trailing slash, so we need to get rid of it
1050 1054 if needed.
1051 1055 """
1052 1056 return path.rstrip('/')
1053 1057
1054 1058 #
1055 1059 # Deprecated API based on changesets
1056 1060 #
1057 1061
1058 1062 @property
1059 1063 def revision(self):
1060 1064 warnings.warn("Use idx instead", DeprecationWarning)
1061 1065 return self.idx
1062 1066
1063 1067 @revision.setter
1064 1068 def revision(self, value):
1065 1069 warnings.warn("Use idx instead", DeprecationWarning)
1066 1070 self.idx = value
1067 1071
1068 1072 def get_file_changeset(self, path):
1069 1073 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1070 1074 return self.get_file_commit(path)
1071 1075
1072 1076
1073 1077 class BaseChangesetClass(type):
1074 1078
1075 1079 def __instancecheck__(self, instance):
1076 1080 return isinstance(instance, BaseCommit)
1077 1081
1078 1082
1079 1083 class BaseChangeset(BaseCommit):
1080 1084
1081 1085 __metaclass__ = BaseChangesetClass
1082 1086
1083 1087 def __new__(cls, *args, **kwargs):
1084 1088 warnings.warn(
1085 1089 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1086 1090 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1087 1091
1088 1092
1089 1093 class BaseInMemoryCommit(object):
1090 1094 """
1091 1095 Represents differences between repository's state (most recent head) and
1092 1096 changes made *in place*.
1093 1097
1094 1098 **Attributes**
1095 1099
1096 1100 ``repository``
1097 1101 repository object for this in-memory-commit
1098 1102
1099 1103 ``added``
1100 1104 list of ``FileNode`` objects marked as *added*
1101 1105
1102 1106 ``changed``
1103 1107 list of ``FileNode`` objects marked as *changed*
1104 1108
1105 1109 ``removed``
1106 1110 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1107 1111 *removed*
1108 1112
1109 1113 ``parents``
1110 1114 list of :class:`BaseCommit` instances representing parents of
1111 1115 in-memory commit. Should always be a 2-element sequence.
1112 1116
1113 1117 """
1114 1118
1115 1119 def __init__(self, repository):
1116 1120 self.repository = repository
1117 1121 self.added = []
1118 1122 self.changed = []
1119 1123 self.removed = []
1120 1124 self.parents = []
1121 1125
1122 1126 def add(self, *filenodes):
1123 1127 """
1124 1128 Marks given ``FileNode`` objects as *to be committed*.
1125 1129
1126 1130 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1127 1131 latest commit
1128 1132 :raises ``NodeAlreadyAddedError``: if node with same path is already
1129 1133 marked as *added*
1130 1134 """
1131 1135 # Check if not already marked as *added* first
1132 1136 for node in filenodes:
1133 1137 if node.path in (n.path for n in self.added):
1134 1138 raise NodeAlreadyAddedError(
1135 1139 "Such FileNode %s is already marked for addition"
1136 1140 % node.path)
1137 1141 for node in filenodes:
1138 1142 self.added.append(node)
1139 1143
1140 1144 def change(self, *filenodes):
1141 1145 """
1142 1146 Marks given ``FileNode`` objects to be *changed* in next commit.
1143 1147
1144 1148 :raises ``EmptyRepositoryError``: if there are no commits yet
1145 1149 :raises ``NodeAlreadyExistsError``: if node with same path is already
1146 1150 marked to be *changed*
1147 1151 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1148 1152 marked to be *removed*
1149 1153 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1150 1154 commit
1151 1155 :raises ``NodeNotChangedError``: if node hasn't really been changed
1152 1156 """
1153 1157 for node in filenodes:
1154 1158 if node.path in (n.path for n in self.removed):
1155 1159 raise NodeAlreadyRemovedError(
1156 1160 "Node at %s is already marked as removed" % node.path)
1157 1161 try:
1158 1162 self.repository.get_commit()
1159 1163 except EmptyRepositoryError:
1160 1164 raise EmptyRepositoryError(
1161 1165 "Nothing to change - try to *add* new nodes rather than "
1162 1166 "changing them")
1163 1167 for node in filenodes:
1164 1168 if node.path in (n.path for n in self.changed):
1165 1169 raise NodeAlreadyChangedError(
1166 1170 "Node at '%s' is already marked as changed" % node.path)
1167 1171 self.changed.append(node)
1168 1172
1169 1173 def remove(self, *filenodes):
1170 1174 """
1171 1175 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1172 1176 *removed* in next commit.
1173 1177
1174 1178 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1175 1179 be *removed*
1176 1180 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1177 1181 be *changed*
1178 1182 """
1179 1183 for node in filenodes:
1180 1184 if node.path in (n.path for n in self.removed):
1181 1185 raise NodeAlreadyRemovedError(
1182 1186 "Node is already marked to for removal at %s" % node.path)
1183 1187 if node.path in (n.path for n in self.changed):
1184 1188 raise NodeAlreadyChangedError(
1185 1189 "Node is already marked to be changed at %s" % node.path)
1186 1190 # We only mark node as *removed* - real removal is done by
1187 1191 # commit method
1188 1192 self.removed.append(node)
1189 1193
1190 1194 def reset(self):
1191 1195 """
1192 1196 Resets this instance to initial state (cleans ``added``, ``changed``
1193 1197 and ``removed`` lists).
1194 1198 """
1195 1199 self.added = []
1196 1200 self.changed = []
1197 1201 self.removed = []
1198 1202 self.parents = []
1199 1203
1200 1204 def get_ipaths(self):
1201 1205 """
1202 1206 Returns generator of paths from nodes marked as added, changed or
1203 1207 removed.
1204 1208 """
1205 1209 for node in itertools.chain(self.added, self.changed, self.removed):
1206 1210 yield node.path
1207 1211
1208 1212 def get_paths(self):
1209 1213 """
1210 1214 Returns list of paths from nodes marked as added, changed or removed.
1211 1215 """
1212 1216 return list(self.get_ipaths())
1213 1217
1214 1218 def check_integrity(self, parents=None):
1215 1219 """
1216 1220 Checks in-memory commit's integrity. Also, sets parents if not
1217 1221 already set.
1218 1222
1219 1223 :raises CommitError: if any error occurs (e.g.
1220 1224 ``NodeDoesNotExistError``).
1221 1225 """
1222 1226 if not self.parents:
1223 1227 parents = parents or []
1224 1228 if len(parents) == 0:
1225 1229 try:
1226 1230 parents = [self.repository.get_commit(), None]
1227 1231 except EmptyRepositoryError:
1228 1232 parents = [None, None]
1229 1233 elif len(parents) == 1:
1230 1234 parents += [None]
1231 1235 self.parents = parents
1232 1236
1233 1237 # Local parents, only if not None
1234 1238 parents = [p for p in self.parents if p]
1235 1239
1236 1240 # Check nodes marked as added
1237 1241 for p in parents:
1238 1242 for node in self.added:
1239 1243 try:
1240 1244 p.get_node(node.path)
1241 1245 except NodeDoesNotExistError:
1242 1246 pass
1243 1247 else:
1244 1248 raise NodeAlreadyExistsError(
1245 1249 "Node `%s` already exists at %s" % (node.path, p))
1246 1250
1247 1251 # Check nodes marked as changed
1248 1252 missing = set(self.changed)
1249 1253 not_changed = set(self.changed)
1250 1254 if self.changed and not parents:
1251 1255 raise NodeDoesNotExistError(str(self.changed[0].path))
1252 1256 for p in parents:
1253 1257 for node in self.changed:
1254 1258 try:
1255 1259 old = p.get_node(node.path)
1256 1260 missing.remove(node)
1257 1261 # if content actually changed, remove node from not_changed
1258 1262 if old.content != node.content:
1259 1263 not_changed.remove(node)
1260 1264 except NodeDoesNotExistError:
1261 1265 pass
1262 1266 if self.changed and missing:
1263 1267 raise NodeDoesNotExistError(
1264 1268 "Node `%s` marked as modified but missing in parents: %s"
1265 1269 % (node.path, parents))
1266 1270
1267 1271 if self.changed and not_changed:
1268 1272 raise NodeNotChangedError(
1269 1273 "Node `%s` wasn't actually changed (parents: %s)"
1270 1274 % (not_changed.pop().path, parents))
1271 1275
1272 1276 # Check nodes marked as removed
1273 1277 if self.removed and not parents:
1274 1278 raise NodeDoesNotExistError(
1275 1279 "Cannot remove node at %s as there "
1276 1280 "were no parents specified" % self.removed[0].path)
1277 1281 really_removed = set()
1278 1282 for p in parents:
1279 1283 for node in self.removed:
1280 1284 try:
1281 1285 p.get_node(node.path)
1282 1286 really_removed.add(node)
1283 1287 except CommitError:
1284 1288 pass
1285 1289 not_removed = set(self.removed) - really_removed
1286 1290 if not_removed:
1287 1291 # TODO: johbo: This code branch does not seem to be covered
1288 1292 raise NodeDoesNotExistError(
1289 1293 "Cannot remove node at %s from "
1290 1294 "following parents: %s" % (not_removed, parents))
1291 1295
1292 1296 def commit(
1293 1297 self, message, author, parents=None, branch=None, date=None,
1294 1298 **kwargs):
1295 1299 """
1296 1300 Performs in-memory commit (doesn't check workdir in any way) and
1297 1301 returns newly created :class:`BaseCommit`. Updates repository's
1298 1302 attribute `commits`.
1299 1303
1300 1304 .. note::
1301 1305
1302 1306 While overriding this method each backend should call
1303 1307 ``self.check_integrity(parents)`` first.
1304 1308
1305 1309 :param message: message of the commit
1306 1310 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1307 1311 :param parents: single parent or sequence of parents from which commit
1308 1312 would be derived
1309 1313 :param date: ``datetime.datetime`` instance. Defaults to
1310 1314 ``datetime.datetime.now()``.
1311 1315 :param branch: branch name, as string. If none given, the backend's
1312 1316 default branch would be used.
1313 1317
1314 1318 :raises ``CommitError``: if any error occurs while committing
1315 1319 """
1316 1320 raise NotImplementedError
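
A hedged end-to-end sketch of the in-memory commit workflow; the ``FileNode`` import path and constructor usage are assumptions and are not part of this module (``repo`` is an assumed backend instance):

    from rhodecode.lib.vcs.nodes import FileNode   # assumed module path

    imc = repo.in_memory_commit
    imc.add(FileNode('docs/index.rst', content='hello'))
    new_commit = imc.commit(
        message=u'Add docs index',
        author=u'Joe Doe <joe.doe@example.com>',
        branch=repo.DEFAULT_BRANCH_NAME)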
1317 1321
1318 1322
1319 1323 class BaseInMemoryChangesetClass(type):
1320 1324
1321 1325 def __instancecheck__(self, instance):
1322 1326 return isinstance(instance, BaseInMemoryCommit)
1323 1327
1324 1328
1325 1329 class BaseInMemoryChangeset(BaseInMemoryCommit):
1326 1330
1327 1331 __metaclass__ = BaseInMemoryChangesetClass
1328 1332
1329 1333 def __new__(cls, *args, **kwargs):
1330 1334 warnings.warn(
1331 1335 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1332 1336 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1333 1337
1334 1338
1335 1339 class EmptyCommit(BaseCommit):
1336 1340 """
1337 1341 A dummy empty commit. It's possible to pass a hash when creating
1338 1342 an EmptyCommit.
1339 1343 """
1340 1344
1341 1345 def __init__(
1342 1346 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1343 1347 message='', author='', date=None):
1344 1348 self._empty_commit_id = commit_id
1345 1349 # TODO: johbo: Solve idx parameter, default value does not make
1346 1350 # too much sense
1347 1351 self.idx = idx
1348 1352 self.message = message
1349 1353 self.author = author
1350 1354 self.date = date or datetime.datetime.fromtimestamp(0)
1351 1355 self.repository = repo
1352 1356 self.alias = alias
1353 1357
1354 1358 @LazyProperty
1355 1359 def raw_id(self):
1356 1360 """
1357 1361 Returns raw string identifying this commit, useful for web
1358 1362 representation.
1359 1363 """
1360 1364
1361 1365 return self._empty_commit_id
1362 1366
1363 1367 @LazyProperty
1364 1368 def branch(self):
1365 1369 if self.alias:
1366 1370 from rhodecode.lib.vcs.backends import get_backend
1367 1371 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1368 1372
1369 1373 @LazyProperty
1370 1374 def short_id(self):
1371 1375 return self.raw_id[:12]
1372 1376
1373 1377 @LazyProperty
1374 1378 def id(self):
1375 1379 return self.raw_id
1376 1380
1377 1381 def get_file_commit(self, path):
1378 1382 return self
1379 1383
1380 1384 def get_file_content(self, path):
1381 1385 return u''
1382 1386
1383 1387 def get_file_size(self, path):
1384 1388 return 0
1385 1389
1386 1390
1387 1391 class EmptyChangesetClass(type):
1388 1392
1389 1393 def __instancecheck__(self, instance):
1390 1394 return isinstance(instance, EmptyCommit)
1391 1395
1392 1396
1393 1397 class EmptyChangeset(EmptyCommit):
1394 1398
1395 1399 __metaclass__ = EmptyChangesetClass
1396 1400
1397 1401 def __new__(cls, *args, **kwargs):
1398 1402 warnings.warn(
1399 1403 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1400 1404 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1401 1405
1402 1406 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1403 1407 alias=None, revision=-1, message='', author='', date=None):
1404 1408 if requested_revision is not None:
1405 1409 warnings.warn(
1406 1410 "Parameter requested_revision not supported anymore",
1407 1411 DeprecationWarning)
1408 1412 super(EmptyChangeset, self).__init__(
1409 1413 commit_id=cs, repo=repo, alias=alias, idx=revision,
1410 1414 message=message, author=author, date=date)
1411 1415
1412 1416 @property
1413 1417 def revision(self):
1414 1418 warnings.warn("Use idx instead", DeprecationWarning)
1415 1419 return self.idx
1416 1420
1417 1421 @revision.setter
1418 1422 def revision(self, value):
1419 1423 warnings.warn("Use idx instead", DeprecationWarning)
1420 1424 self.idx = value
1421 1425
1422 1426
1423 1427 class CollectionGenerator(object):
1424 1428
1425 1429 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1426 1430 self.repo = repo
1427 1431 self.commit_ids = commit_ids
1428 1432 # TODO: (oliver) this isn't currently hooked up
1429 1433 self.collection_size = None
1430 1434 self.pre_load = pre_load
1431 1435
1432 1436 def __len__(self):
1433 1437 if self.collection_size is not None:
1434 1438 return self.collection_size
1435 1439 return self.commit_ids.__len__()
1436 1440
1437 1441 def __iter__(self):
1438 1442 for commit_id in self.commit_ids:
1439 1443 # TODO: johbo: Mercurial passes in commit indices or commit ids
1440 1444 yield self._commit_factory(commit_id)
1441 1445
1442 1446 def _commit_factory(self, commit_id):
1443 1447 """
1444 1448 Allows backends to override the way commits are generated.
1445 1449 """
1446 1450 return self.repo.get_commit(commit_id=commit_id,
1447 1451 pre_load=self.pre_load)
1448 1452
1449 1453 def __getslice__(self, i, j):
1450 1454 """
1451 1455 Returns an iterator of sliced repository
1452 1456 """
1453 1457 commit_ids = self.commit_ids[i:j]
1454 1458 return self.__class__(
1455 1459 self.repo, commit_ids, pre_load=self.pre_load)
1456 1460
1457 1461 def __repr__(self):
1458 1462 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1459 1463
1460 1464
1461 1465 class Config(object):
1462 1466 """
1463 1467 Represents the configuration for a repository.
1464 1468
1465 1469 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1466 1470 standard library. It implements only the needed subset.
1467 1471 """
1468 1472
1469 1473 def __init__(self):
1470 1474 self._values = {}
1471 1475
1472 1476 def copy(self):
1473 1477 clone = Config()
1474 1478 for section, values in self._values.items():
1475 1479 clone._values[section] = values.copy()
1476 1480 return clone
1477 1481
1478 1482 def __repr__(self):
1479 1483 return '<Config(%s sections) at %s>' % (
1480 1484 len(self._values), hex(id(self)))
1481 1485
1482 1486 def items(self, section):
1483 1487 return self._values.get(section, {}).iteritems()
1484 1488
1485 1489 def get(self, section, option):
1486 1490 return self._values.get(section, {}).get(option)
1487 1491
1488 1492 def set(self, section, option, value):
1489 1493 section_values = self._values.setdefault(section, {})
1490 1494 section_values[option] = value
1491 1495
1492 1496 def clear_section(self, section):
1493 1497 self._values[section] = {}
1494 1498
1495 1499 def serialize(self):
1496 1500 """
1497 1501 Creates a list of (section, key, value) three-tuples representing
1498 1502 this config object.
1499 1503 """
1500 1504 items = []
1501 1505 for section in self._values:
1502 1506 for option, value in self._values[section].items():
1503 1507 items.append(
1504 1508 (safe_str(section), safe_str(option), safe_str(value)))
1505 1509 return items
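
The whole ``Config`` surface in one short sketch:

    config = Config()
    config.set('ui', 'username', 'Joe Doe <joe.doe@example.com>')
    assert config.get('ui', 'username') == 'Joe Doe <joe.doe@example.com>'

    for option, value in config.items('ui'):    # per-section iteration
        pass

    triples = config.serialize()                # [(section, option, value), ...]
    clone = config.copy()                       # copies every section's values
    config.clear_section('ui')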
1506 1510
1507 1511
1508 1512 class Diff(object):
1509 1513 """
1510 1514 Represents a diff result from a repository backend.
1511 1515
1512 1516 Subclasses have to provide a backend specific value for :attr:`_header_re`.
1513 1517 """
1514 1518
1515 1519 _header_re = None
1516 1520
1517 1521 def __init__(self, raw_diff):
1518 1522 self.raw = raw_diff
1519 1523
1520 1524 def chunks(self):
1521 1525 """
1522 1526 Splits the diff into chunks of separate ``diff --git a/file b/file`` chunks.
1523 1527 To make diffs consistent we must prepend them with \n, and make sure
1524 1528 we can detect the last chunk, as it also has a special rule
1525 1529 """
1526 1530 chunks = ('\n' + self.raw).split('\ndiff --git')[1:]
1527 1531 total_chunks = len(chunks)
1528 1532 return (DiffChunk(chunk, self, cur_chunk == total_chunks)
1529 1533 for cur_chunk, chunk in enumerate(chunks, start=1))
1530 1534
1531 1535
1532 1536 class DiffChunk(object):
1533 1537
1534 1538 def __init__(self, chunk, diff, last_chunk):
1535 1539 self._diff = diff
1536 1540
1537 1541 # since we split by \ndiff --git that part is lost from original diff
1538 1542 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1539 1543 if not last_chunk:
1540 1544 chunk += '\n'
1541 1545
1542 1546 match = self._diff._header_re.match(chunk)
1543 1547 self.header = match.groupdict()
1544 1548 self.diff = chunk[match.end():]
1545 1549 self.raw = chunk
@@ -1,1314 +1,1317 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31
32 32 from pylons.i18n.translation import _
33 33 from pylons.i18n.translation import lazy_ugettext
34 34 from sqlalchemy import or_
35 35
36 36 from rhodecode.lib import helpers as h, hooks_utils, diffs
37 37 from rhodecode.lib.compat import OrderedDict
38 38 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
39 39 from rhodecode.lib.markup_renderer import (
40 40 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
41 41 from rhodecode.lib.utils import action_logger
42 42 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
43 43 from rhodecode.lib.vcs.backends.base import (
44 44 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
45 45 from rhodecode.lib.vcs.conf import settings as vcs_settings
46 46 from rhodecode.lib.vcs.exceptions import (
47 47 CommitDoesNotExistError, EmptyRepositoryError)
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.changeset_status import ChangesetStatusModel
50 50 from rhodecode.model.comment import ChangesetCommentsModel
51 51 from rhodecode.model.db import (
52 52 PullRequest, PullRequestReviewers, ChangesetStatus,
53 53 PullRequestVersion, ChangesetComment)
54 54 from rhodecode.model.meta import Session
55 55 from rhodecode.model.notification import NotificationModel, \
56 56 EmailNotificationModel
57 57 from rhodecode.model.scm import ScmModel
58 58 from rhodecode.model.settings import VcsSettingsModel
59 59
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63
64 64 # Data structure to hold the response data when updating commits during a pull
65 65 # request update.
66 66 UpdateResponse = namedtuple(
67 67 'UpdateResponse', 'executed, reason, new, old, changes')
68 68
69 69
70 70 class PullRequestModel(BaseModel):
71 71
72 72 cls = PullRequest
73 73
74 74 DIFF_CONTEXT = 3
75 75
76 76 MERGE_STATUS_MESSAGES = {
77 77 MergeFailureReason.NONE: lazy_ugettext(
78 78 'This pull request can be automatically merged.'),
79 79 MergeFailureReason.UNKNOWN: lazy_ugettext(
80 80 'This pull request cannot be merged because of an unhandled'
81 81 ' exception.'),
82 82 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
83 83 'This pull request cannot be merged because of conflicts.'),
84 84 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
85 85 'This pull request could not be merged because push to target'
86 86 ' failed.'),
87 87 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
88 88 'This pull request cannot be merged because the target is not a'
89 89 ' head.'),
90 90 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
91 91 'This pull request cannot be merged because the source contains'
92 92 ' more branches than the target.'),
93 93 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
94 94 'This pull request cannot be merged because the target has'
95 95 ' multiple heads.'),
96 96 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
97 97 'This pull request cannot be merged because the target repository'
98 98 ' is locked.'),
99 99 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
100 100 'This pull request cannot be merged because the target or the '
101 101 'source reference is missing.'),
102 102 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
103 103 'This pull request cannot be merged because the target '
104 104 'reference is missing.'),
105 105 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
106 106 'This pull request cannot be merged because the source '
107 107 'reference is missing.'),
108 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
109 'This pull request cannot be merged because of conflicts related '
110 'to sub repositories.'),
108 111 }
109 112
110 113 UPDATE_STATUS_MESSAGES = {
111 114 UpdateFailureReason.NONE: lazy_ugettext(
112 115 'Pull request update successful.'),
113 116 UpdateFailureReason.UNKNOWN: lazy_ugettext(
114 117 'Pull request update failed because of an unknown error.'),
115 118 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
116 119 'No update needed because the source reference is already '
117 120 'up to date.'),
118 121 UpdateFailureReason.WRONG_REF_TPYE: lazy_ugettext(
119 122 'Pull request cannot be updated because the reference type is '
120 123 'not supported for an update.'),
121 124 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
122 125 'This pull request cannot be updated because the target '
123 126 'reference is missing.'),
124 127 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
125 128 'This pull request cannot be updated because the source '
126 129 'reference is missing.'),
127 130 }
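    # Illustrative sketch, not part of this change: the reason codes in the
    # two maps above are turned into user-facing text via
    # merge_status_message() (defined further down), roughly like:
    #
    #   >>> model = PullRequestModel()
    #   >>> model.merge_status_message(MergeFailureReason.SUBREPO_MERGE_FAILED)
    #   u'This pull request cannot be merged because of conflicts related to sub repositories.'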
128 131
129 132 def __get_pull_request(self, pull_request):
130 133 return self._get_instance(PullRequest, pull_request)
131 134
132 135 def _check_perms(self, perms, pull_request, user, api=False):
133 136 if not api:
134 137 return h.HasRepoPermissionAny(*perms)(
135 138 user=user, repo_name=pull_request.target_repo.repo_name)
136 139 else:
137 140 return h.HasRepoPermissionAnyApi(*perms)(
138 141 user=user, repo_name=pull_request.target_repo.repo_name)
139 142
140 143 def check_user_read(self, pull_request, user, api=False):
141 144 _perms = ('repository.admin', 'repository.write', 'repository.read',)
142 145 return self._check_perms(_perms, pull_request, user, api)
143 146
144 147 def check_user_merge(self, pull_request, user, api=False):
145 148 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
146 149 return self._check_perms(_perms, pull_request, user, api)
147 150
148 151 def check_user_update(self, pull_request, user, api=False):
149 152 owner = user.user_id == pull_request.user_id
150 153 return self.check_user_merge(pull_request, user, api) or owner
151 154
152 155 def check_user_delete(self, pull_request, user):
153 156 owner = user.user_id == pull_request.user_id
154 157 _perms = ('repository.admin',)
155 158 return self._check_perms(_perms, pull_request, user) or owner
156 159
157 160 def check_user_change_status(self, pull_request, user, api=False):
158 161 reviewer = user.user_id in [x.user_id for x in
159 162 pull_request.reviewers]
160 163 return self.check_user_update(pull_request, user, api) or reviewer
161 164
162 165 def get(self, pull_request):
163 166 return self.__get_pull_request(pull_request)
164 167
165 168 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
166 169 opened_by=None, order_by=None,
167 170 order_dir='desc'):
168 171 repo = None
169 172 if repo_name:
170 173 repo = self._get_repo(repo_name)
171 174
172 175 q = PullRequest.query()
173 176
174 177 # source or target
175 178 if repo and source:
176 179 q = q.filter(PullRequest.source_repo == repo)
177 180 elif repo:
178 181 q = q.filter(PullRequest.target_repo == repo)
179 182
180 183 # closed,opened
181 184 if statuses:
182 185 q = q.filter(PullRequest.status.in_(statuses))
183 186
184 187 # opened by filter
185 188 if opened_by:
186 189 q = q.filter(PullRequest.user_id.in_(opened_by))
187 190
188 191 if order_by:
189 192 order_map = {
190 193 'name_raw': PullRequest.pull_request_id,
191 194 'title': PullRequest.title,
192 195 'updated_on_raw': PullRequest.updated_on,
193 196 'target_repo': PullRequest.target_repo_id
194 197 }
195 198 if order_dir == 'asc':
196 199 q = q.order_by(order_map[order_by].asc())
197 200 else:
198 201 q = q.order_by(order_map[order_by].desc())
199 202
200 203 return q
201 204
202 205 def count_all(self, repo_name, source=False, statuses=None,
203 206 opened_by=None):
204 207 """
205 208 Count the number of pull requests for a specific repository.
206 209
207 210 :param repo_name: target or source repo
208 211 :param source: boolean flag to specify if repo_name refers to source
209 212 :param statuses: list of pull request statuses
210 213 :param opened_by: author user of the pull request
211 214 :returns: int number of pull requests
212 215 """
213 216 q = self._prepare_get_all_query(
214 217 repo_name, source=source, statuses=statuses, opened_by=opened_by)
215 218
216 219 return q.count()
217 220
218 221 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
219 222 offset=0, length=None, order_by=None, order_dir='desc'):
220 223 """
221 224 Get all pull requests for a specific repository.
222 225
223 226 :param repo_name: target or source repo
224 227 :param source: boolean flag to specify if repo_name refers to source
225 228 :param statuses: list of pull request statuses
226 229 :param opened_by: author user of the pull request
227 230 :param offset: pagination offset
228 231 :param length: length of returned list
229 232 :param order_by: order of the returned list
230 233 :param order_dir: 'asc' or 'desc' ordering direction
231 234 :returns: list of pull requests
232 235 """
233 236 q = self._prepare_get_all_query(
234 237 repo_name, source=source, statuses=statuses, opened_by=opened_by,
235 238 order_by=order_by, order_dir=order_dir)
236 239
237 240 if length:
238 241 pull_requests = q.limit(length).offset(offset).all()
239 242 else:
240 243 pull_requests = q.all()
241 244
242 245 return pull_requests
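    # Illustrative sketch, not part of this change: a typical paginated call,
    # assuming a repository named 'some-repo' exists.
    #
    #   >>> prs = PullRequestModel().get_all(
    #   ...     'some-repo', statuses=[PullRequest.STATUS_CLOSED],
    #   ...     offset=0, length=20, order_by='updated_on_raw', order_dir='desc')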
243 246
244 247 def count_awaiting_review(self, repo_name, source=False, statuses=None,
245 248 opened_by=None):
246 249 """
247 250 Count the number of pull requests for a specific repository that are
248 251 awaiting review.
249 252
250 253 :param repo_name: target or source repo
251 254 :param source: boolean flag to specify if repo_name refers to source
252 255 :param statuses: list of pull request statuses
253 256 :param opened_by: author user of the pull request
254 257 :returns: int number of pull requests
255 258 """
256 259 pull_requests = self.get_awaiting_review(
257 260 repo_name, source=source, statuses=statuses, opened_by=opened_by)
258 261
259 262 return len(pull_requests)
260 263
261 264 def get_awaiting_review(self, repo_name, source=False, statuses=None,
262 265 opened_by=None, offset=0, length=None,
263 266 order_by=None, order_dir='desc'):
264 267 """
265 268 Get all pull requests for a specific repository that are awaiting
266 269 review.
267 270
268 271 :param repo_name: target or source repo
269 272 :param source: boolean flag to specify if repo_name refers to source
270 273 :param statuses: list of pull request statuses
271 274 :param opened_by: author user of the pull request
272 275 :param offset: pagination offset
273 276 :param length: length of returned list
274 277 :param order_by: order of the returned list
275 278 :param order_dir: 'asc' or 'desc' ordering direction
276 279 :returns: list of pull requests
277 280 """
278 281 pull_requests = self.get_all(
279 282 repo_name, source=source, statuses=statuses, opened_by=opened_by,
280 283 order_by=order_by, order_dir=order_dir)
281 284
282 285 _filtered_pull_requests = []
283 286 for pr in pull_requests:
284 287 status = pr.calculated_review_status()
285 288 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
286 289 ChangesetStatus.STATUS_UNDER_REVIEW]:
287 290 _filtered_pull_requests.append(pr)
288 291 if length:
289 292 return _filtered_pull_requests[offset:offset+length]
290 293 else:
291 294 return _filtered_pull_requests
292 295
293 296 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
294 297 opened_by=None, user_id=None):
295 298 """
296 299 Count the number of pull requests for a specific repository that are
297 300 awaiting review from a specific user.
298 301
299 302 :param repo_name: target or source repo
300 303 :param source: boolean flag to specify if repo_name refers to source
301 304 :param statuses: list of pull request statuses
302 305 :param opened_by: author user of the pull request
303 306 :param user_id: reviewer user of the pull request
304 307 :returns: int number of pull requests
305 308 """
306 309 pull_requests = self.get_awaiting_my_review(
307 310 repo_name, source=source, statuses=statuses, opened_by=opened_by,
308 311 user_id=user_id)
309 312
310 313 return len(pull_requests)
311 314
312 315 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
313 316 opened_by=None, user_id=None, offset=0,
314 317 length=None, order_by=None, order_dir='desc'):
315 318 """
316 319 Get all pull requests for a specific repository that are awaiting
317 320 review from a specific user.
318 321
319 322 :param repo_name: target or source repo
320 323 :param source: boolean flag to specify if repo_name refers to source
321 324 :param statuses: list of pull request statuses
322 325 :param opened_by: author user of the pull request
323 326 :param user_id: reviewer user of the pull request
324 327 :param offset: pagination offset
325 328 :param length: length of returned list
326 329 :param order_by: order of the returned list
327 330 :param order_dir: 'asc' or 'desc' ordering direction
328 331 :returns: list of pull requests
329 332 """
330 333 pull_requests = self.get_all(
331 334 repo_name, source=source, statuses=statuses, opened_by=opened_by,
332 335 order_by=order_by, order_dir=order_dir)
333 336
334 337 _my = PullRequestModel().get_not_reviewed(user_id)
335 338 my_participation = []
336 339 for pr in pull_requests:
337 340 if pr in _my:
338 341 my_participation.append(pr)
339 342 _filtered_pull_requests = my_participation
340 343 if length:
341 344 return _filtered_pull_requests[offset:offset+length]
342 345 else:
343 346 return _filtered_pull_requests
344 347
345 348 def get_not_reviewed(self, user_id):
346 349 return [
347 350 x.pull_request for x in PullRequestReviewers.query().filter(
348 351 PullRequestReviewers.user_id == user_id).all()
349 352 ]
350 353
351 354 def _prepare_participating_query(self, user_id=None, statuses=None,
352 355 order_by=None, order_dir='desc'):
353 356 q = PullRequest.query()
354 357 if user_id:
355 358 reviewers_subquery = Session().query(
356 359 PullRequestReviewers.pull_request_id).filter(
357 360 PullRequestReviewers.user_id == user_id).subquery()
358 361 user_filter = or_(
359 362 PullRequest.user_id == user_id,
360 363 PullRequest.pull_request_id.in_(reviewers_subquery)
361 364 )
362 365 q = PullRequest.query().filter(user_filter)
363 366
364 367 # closed,opened
365 368 if statuses:
366 369 q = q.filter(PullRequest.status.in_(statuses))
367 370
368 371 if order_by:
369 372 order_map = {
370 373 'name_raw': PullRequest.pull_request_id,
371 374 'title': PullRequest.title,
372 375 'updated_on_raw': PullRequest.updated_on,
373 376 'target_repo': PullRequest.target_repo_id
374 377 }
375 378 if order_dir == 'asc':
376 379 q = q.order_by(order_map[order_by].asc())
377 380 else:
378 381 q = q.order_by(order_map[order_by].desc())
379 382
380 383 return q
381 384
382 385 def count_im_participating_in(self, user_id=None, statuses=None):
383 386 q = self._prepare_participating_query(user_id, statuses=statuses)
384 387 return q.count()
385 388
386 389 def get_im_participating_in(
387 390 self, user_id=None, statuses=None, offset=0,
388 391 length=None, order_by=None, order_dir='desc'):
389 392 """
390 393 Get all pull requests that I'm participating in or have opened
391 394 """
392 395
393 396 q = self._prepare_participating_query(
394 397 user_id, statuses=statuses, order_by=order_by,
395 398 order_dir=order_dir)
396 399
397 400 if length:
398 401 pull_requests = q.limit(length).offset(offset).all()
399 402 else:
400 403 pull_requests = q.all()
401 404
402 405 return pull_requests
403 406
404 407 def get_versions(self, pull_request):
405 408 """
406 409 Returns versions of this pull request, sorted by version ID ascending
407 410 """
408 411 return PullRequestVersion.query()\
409 412 .filter(PullRequestVersion.pull_request == pull_request)\
410 413 .order_by(PullRequestVersion.pull_request_version_id.asc())\
411 414 .all()
412 415
413 416 def create(self, created_by, source_repo, source_ref, target_repo,
414 417 target_ref, revisions, reviewers, title, description=None):
415 418 created_by_user = self._get_user(created_by)
416 419 source_repo = self._get_repo(source_repo)
417 420 target_repo = self._get_repo(target_repo)
418 421
419 422 pull_request = PullRequest()
420 423 pull_request.source_repo = source_repo
421 424 pull_request.source_ref = source_ref
422 425 pull_request.target_repo = target_repo
423 426 pull_request.target_ref = target_ref
424 427 pull_request.revisions = revisions
425 428 pull_request.title = title
426 429 pull_request.description = description
427 430 pull_request.author = created_by_user
428 431
429 432 Session().add(pull_request)
430 433 Session().flush()
431 434
432 435 reviewer_ids = set()
433 436 # members / reviewers
434 437 for reviewer_object in reviewers:
435 438 if isinstance(reviewer_object, tuple):
436 439 user_id, reasons = reviewer_object
437 440 else:
438 441 user_id, reasons = reviewer_object, []
439 442
440 443 user = self._get_user(user_id)
441 444 reviewer_ids.add(user.user_id)
442 445
443 446 reviewer = PullRequestReviewers(user, pull_request, reasons)
444 447 Session().add(reviewer)
445 448
446 449 # Set approval status to "Under Review" for all commits which are
447 450 # part of this pull request.
448 451 ChangesetStatusModel().set_status(
449 452 repo=target_repo,
450 453 status=ChangesetStatus.STATUS_UNDER_REVIEW,
451 454 user=created_by_user,
452 455 pull_request=pull_request
453 456 )
454 457
455 458 self.notify_reviewers(pull_request, reviewer_ids)
456 459 self._trigger_pull_request_hook(
457 460 pull_request, created_by_user, 'create')
458 461
459 462 return pull_request
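    # Illustrative sketch, not part of this change: `reviewers` accepts plain
    # user ids or (user_id, reasons) tuples, as handled in the loop above.
    # The repo names, refs and ids below are hypothetical placeholders.
    #
    #   >>> PullRequestModel().create(
    #   ...     created_by=admin_user, source_repo='fork', target_repo='origin',
    #   ...     source_ref='branch:feature:abc1', target_ref='branch:default:def2',
    #   ...     revisions=['abc1'], reviewers=[2, (3, ['original author'])],
    #   ...     title='Add feature')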
460 463
461 464 def _trigger_pull_request_hook(self, pull_request, user, action):
462 465 pull_request = self.__get_pull_request(pull_request)
463 466 target_scm = pull_request.target_repo.scm_instance()
464 467 if action == 'create':
465 468 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
466 469 elif action == 'merge':
467 470 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
468 471 elif action == 'close':
469 472 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
470 473 elif action == 'review_status_change':
471 474 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
472 475 elif action == 'update':
473 476 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
474 477 else:
475 478 return
476 479
477 480 trigger_hook(
478 481 username=user.username,
479 482 repo_name=pull_request.target_repo.repo_name,
480 483 repo_alias=target_scm.alias,
481 484 pull_request=pull_request)
482 485
483 486 def _get_commit_ids(self, pull_request):
484 487 """
485 488 Return the commit ids of the merged pull request.
486 489
487 490 This method does not yet deal correctly with the lack of autoupdates
488 491 or with implicit target updates.
489 492 For example: if a commit in the source repo is already in the target it
490 493 will still be reported.
491 494 """
492 495 merge_rev = pull_request.merge_rev
493 496 if merge_rev is None:
494 497 raise ValueError('This pull request was not merged yet')
495 498
496 499 commit_ids = list(pull_request.revisions)
497 500 if merge_rev not in commit_ids:
498 501 commit_ids.append(merge_rev)
499 502
500 503 return commit_ids
501 504
502 505 def merge(self, pull_request, user, extras):
503 506 log.debug("Merging pull request %s", pull_request.pull_request_id)
504 507 merge_state = self._merge_pull_request(pull_request, user, extras)
505 508 if merge_state.executed:
506 509 log.debug(
507 510 "Merge was successful, updating the pull request comments.")
508 511 self._comment_and_close_pr(pull_request, user, merge_state)
509 512 self._log_action('user_merged_pull_request', user, pull_request)
510 513 else:
511 514 log.warn("Merge failed, not updating the pull request.")
512 515 return merge_state
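    # Illustrative sketch, not part of this change: callers are expected to
    # inspect the returned MergeResponse; `extras` below is a hypothetical
    # placeholder for the hook extras dict.
    #
    #   >>> state = PullRequestModel().merge(pull_request, user, extras)
    #   >>> if not state.executed:
    #   ...     msg = PullRequestModel().merge_status_message(state.failure_reason)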
513 516
514 517 def _merge_pull_request(self, pull_request, user, extras):
515 518 target_vcs = pull_request.target_repo.scm_instance()
516 519 source_vcs = pull_request.source_repo.scm_instance()
517 520 target_ref = self._refresh_reference(
518 521 pull_request.target_ref_parts, target_vcs)
519 522
520 523 message = _(
521 524 'Merge pull request #%(pr_id)s from '
522 525 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
523 526 'pr_id': pull_request.pull_request_id,
524 527 'source_repo': source_vcs.name,
525 528 'source_ref_name': pull_request.source_ref_parts.name,
526 529 'pr_title': pull_request.title
527 530 }
528 531
529 532 workspace_id = self._workspace_id(pull_request)
530 533 use_rebase = self._use_rebase_for_merging(pull_request)
531 534
532 535 callback_daemon, extras = prepare_callback_daemon(
533 536 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
534 537 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
535 538
536 539 with callback_daemon:
537 540 # TODO: johbo: Implement a clean way to run a config_override
538 541 # for a single call.
539 542 target_vcs.config.set(
540 543 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
541 544 merge_state = target_vcs.merge(
542 545 target_ref, source_vcs, pull_request.source_ref_parts,
543 546 workspace_id, user_name=user.username,
544 547 user_email=user.email, message=message, use_rebase=use_rebase)
545 548 return merge_state
546 549
547 550 def _comment_and_close_pr(self, pull_request, user, merge_state):
548 551 pull_request.merge_rev = merge_state.merge_ref.commit_id
549 552 pull_request.updated_on = datetime.datetime.now()
550 553
551 554 ChangesetCommentsModel().create(
552 555 text=unicode(_('Pull request merged and closed')),
553 556 repo=pull_request.target_repo.repo_id,
554 557 user=user.user_id,
555 558 pull_request=pull_request.pull_request_id,
556 559 f_path=None,
557 560 line_no=None,
558 561 closing_pr=True
559 562 )
560 563
561 564 Session().add(pull_request)
562 565 Session().flush()
563 566 # TODO: paris: replace invalidation with less radical solution
564 567 ScmModel().mark_for_invalidation(
565 568 pull_request.target_repo.repo_name)
566 569 self._trigger_pull_request_hook(pull_request, user, 'merge')
567 570
568 571 def has_valid_update_type(self, pull_request):
569 572 source_ref_type = pull_request.source_ref_parts.type
570 573 return source_ref_type in ['book', 'branch', 'tag']
571 574
572 575 def update_commits(self, pull_request):
573 576 """
574 577 Get the updated list of commits for the pull request
575 578 and return the new pull request version and the list
576 579 of commits processed by this update action
577 580 """
578 581 pull_request = self.__get_pull_request(pull_request)
579 582 source_ref_type = pull_request.source_ref_parts.type
580 583 source_ref_name = pull_request.source_ref_parts.name
581 584 source_ref_id = pull_request.source_ref_parts.commit_id
582 585
583 586 if not self.has_valid_update_type(pull_request):
584 587 log.debug(
585 588 "Skipping update of pull request %s due to ref type: %s",
586 589 pull_request, source_ref_type)
587 590 return UpdateResponse(
588 591 executed=False,
589 592 reason=UpdateFailureReason.WRONG_REF_TPYE,
590 593 old=pull_request, new=None, changes=None)
591 594
592 595 source_repo = pull_request.source_repo.scm_instance()
593 596 try:
594 597 source_commit = source_repo.get_commit(commit_id=source_ref_name)
595 598 except CommitDoesNotExistError:
596 599 return UpdateResponse(
597 600 executed=False,
598 601 reason=UpdateFailureReason.MISSING_SOURCE_REF,
599 602 old=pull_request, new=None, changes=None)
600 603
601 604 if source_ref_id == source_commit.raw_id:
602 605 log.debug("Nothing changed in pull request %s", pull_request)
603 606 return UpdateResponse(
604 607 executed=False,
605 608 reason=UpdateFailureReason.NO_CHANGE,
606 609 old=pull_request, new=None, changes=None)
607 610
608 611 # Finally there is a need for an update
609 612 pull_request_version = self._create_version_from_snapshot(pull_request)
610 613 self._link_comments_to_version(pull_request_version)
611 614
612 615 target_ref_type = pull_request.target_ref_parts.type
613 616 target_ref_name = pull_request.target_ref_parts.name
614 617 target_ref_id = pull_request.target_ref_parts.commit_id
615 618 target_repo = pull_request.target_repo.scm_instance()
616 619
617 620 try:
618 621 if target_ref_type in ('tag', 'branch', 'book'):
619 622 target_commit = target_repo.get_commit(target_ref_name)
620 623 else:
621 624 target_commit = target_repo.get_commit(target_ref_id)
622 625 except CommitDoesNotExistError:
623 626 return UpdateResponse(
624 627 executed=False,
625 628 reason=UpdateFailureReason.MISSING_TARGET_REF,
626 629 old=pull_request, new=None, changes=None)
627 630
628 631 # re-compute commit ids
629 632 old_commit_ids = set(pull_request.revisions)
630 633 pre_load = ["author", "branch", "date", "message"]
631 634 commit_ranges = target_repo.compare(
632 635 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
633 636 pre_load=pre_load)
634 637
635 638 ancestor = target_repo.get_common_ancestor(
636 639 target_commit.raw_id, source_commit.raw_id, source_repo)
637 640
638 641 pull_request.source_ref = '%s:%s:%s' % (
639 642 source_ref_type, source_ref_name, source_commit.raw_id)
640 643 pull_request.target_ref = '%s:%s:%s' % (
641 644 target_ref_type, target_ref_name, ancestor)
642 645 pull_request.revisions = [
643 646 commit.raw_id for commit in reversed(commit_ranges)]
644 647 pull_request.updated_on = datetime.datetime.now()
645 648 Session().add(pull_request)
646 649 new_commit_ids = set(pull_request.revisions)
647 650
648 651 changes = self._calculate_commit_id_changes(
649 652 old_commit_ids, new_commit_ids)
650 653
651 654 old_diff_data, new_diff_data = self._generate_update_diffs(
652 655 pull_request, pull_request_version)
653 656
654 657 ChangesetCommentsModel().outdate_comments(
655 658 pull_request, old_diff_data=old_diff_data,
656 659 new_diff_data=new_diff_data)
657 660
658 661 file_changes = self._calculate_file_changes(
659 662 old_diff_data, new_diff_data)
660 663
661 664 # Add an automatic comment to the pull request
662 665 update_comment = ChangesetCommentsModel().create(
663 666 text=self._render_update_message(changes, file_changes),
664 667 repo=pull_request.target_repo,
665 668 user=pull_request.author,
666 669 pull_request=pull_request,
667 670 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
668 671
669 672 # Update status to "Under Review" for added commits
670 673 for commit_id in changes.added:
671 674 ChangesetStatusModel().set_status(
672 675 repo=pull_request.source_repo,
673 676 status=ChangesetStatus.STATUS_UNDER_REVIEW,
674 677 comment=update_comment,
675 678 user=pull_request.author,
676 679 pull_request=pull_request,
677 680 revision=commit_id)
678 681
679 682 log.debug(
680 683 'Updated pull request %s, added_ids: %s, common_ids: %s, '
681 684 'removed_ids: %s', pull_request.pull_request_id,
682 685 changes.added, changes.common, changes.removed)
683 686 log.debug('Updated pull request with the following file changes: %s',
684 687 file_changes)
685 688
686 689 log.info(
687 690 "Updated pull request %s from commit %s to commit %s, "
688 691 "stored new version %s of this pull request.",
689 692 pull_request.pull_request_id, source_ref_id,
690 693 pull_request.source_ref_parts.commit_id,
691 694 pull_request_version.pull_request_version_id)
692 695 Session().commit()
693 696 self._trigger_pull_request_hook(pull_request, pull_request.author,
694 697 'update')
695 698
696 699 return UpdateResponse(
697 700 executed=True, reason=UpdateFailureReason.NONE,
698 701 old=pull_request, new=pull_request_version, changes=changes)
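    # Illustrative sketch, not part of this change: how the UpdateResponse
    # returned above can be consumed.
    #
    #   >>> resp = PullRequestModel().update_commits(pull_request)
    #   >>> if not resp.executed:
    #   ...     reason_msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]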
699 702
700 703 def _create_version_from_snapshot(self, pull_request):
701 704 version = PullRequestVersion()
702 705 version.title = pull_request.title
703 706 version.description = pull_request.description
704 707 version.status = pull_request.status
705 708 version.created_on = pull_request.created_on
706 709 version.updated_on = pull_request.updated_on
707 710 version.user_id = pull_request.user_id
708 711 version.source_repo = pull_request.source_repo
709 712 version.source_ref = pull_request.source_ref
710 713 version.target_repo = pull_request.target_repo
711 714 version.target_ref = pull_request.target_ref
712 715
713 716 version._last_merge_source_rev = pull_request._last_merge_source_rev
714 717 version._last_merge_target_rev = pull_request._last_merge_target_rev
715 718 version._last_merge_status = pull_request._last_merge_status
716 719 version.shadow_merge_ref = pull_request.shadow_merge_ref
717 720 version.merge_rev = pull_request.merge_rev
718 721
719 722 version.revisions = pull_request.revisions
720 723 version.pull_request = pull_request
721 724 Session().add(version)
722 725 Session().flush()
723 726
724 727 return version
725 728
726 729 def _generate_update_diffs(self, pull_request, pull_request_version):
727 730 diff_context = (
728 731 self.DIFF_CONTEXT +
729 732 ChangesetCommentsModel.needed_extra_diff_context())
730 733 old_diff = self._get_diff_from_pr_or_version(
731 734 pull_request_version, context=diff_context)
732 735 new_diff = self._get_diff_from_pr_or_version(
733 736 pull_request, context=diff_context)
734 737
735 738 old_diff_data = diffs.DiffProcessor(old_diff)
736 739 old_diff_data.prepare()
737 740 new_diff_data = diffs.DiffProcessor(new_diff)
738 741 new_diff_data.prepare()
739 742
740 743 return old_diff_data, new_diff_data
741 744
742 745 def _link_comments_to_version(self, pull_request_version):
743 746 """
744 747 Link all unlinked comments of this pull request to the given version.
745 748
746 749 :param pull_request_version: The `PullRequestVersion` to which
747 750 the comments shall be linked.
748 751
749 752 """
750 753 pull_request = pull_request_version.pull_request
751 754 comments = ChangesetComment.query().filter(
752 755 # TODO: johbo: Should we query for the repo at all here?
753 756 # Pending decision on how comments of PRs are to be related
754 757 # to either the source repo, the target repo or no repo at all.
755 758 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
756 759 ChangesetComment.pull_request == pull_request,
757 760 ChangesetComment.pull_request_version == None)
758 761
759 762 # TODO: johbo: Find out why this breaks if it is done in a bulk
760 763 # operation.
761 764 for comment in comments:
762 765 comment.pull_request_version_id = (
763 766 pull_request_version.pull_request_version_id)
764 767 Session().add(comment)
765 768
766 769 def _calculate_commit_id_changes(self, old_ids, new_ids):
767 770 added = new_ids.difference(old_ids)
768 771 common = old_ids.intersection(new_ids)
769 772 removed = old_ids.difference(new_ids)
770 773 return ChangeTuple(added, common, removed)
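    # Worked example, not part of this change: with old_ids = {'a', 'b'} and
    # new_ids = {'b', 'c'} the result is
    # ChangeTuple(added={'c'}, common={'b'}, removed={'a'}).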
771 774
772 775 def _calculate_file_changes(self, old_diff_data, new_diff_data):
773 776
774 777 old_files = OrderedDict()
775 778 for diff_data in old_diff_data.parsed_diff:
776 779 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
777 780
778 781 added_files = []
779 782 modified_files = []
780 783 removed_files = []
781 784 for diff_data in new_diff_data.parsed_diff:
782 785 new_filename = diff_data['filename']
783 786 new_hash = md5_safe(diff_data['raw_diff'])
784 787
785 788 old_hash = old_files.get(new_filename)
786 789 if not old_hash:
787 790 # file is not present in old diff, means it's added
788 791 added_files.append(new_filename)
789 792 else:
790 793 if new_hash != old_hash:
791 794 modified_files.append(new_filename)
792 795 # now remove a file from old, since we have seen it already
793 796 del old_files[new_filename]
794 797
795 798 # removed files are those present in the old diff but not in the new one;
796 799 # since files already seen in the new diff were removed from old_files above,
797 800 # any left-overs are the removed files
798 801 removed_files.extend(old_files.keys())
799 802
800 803 return FileChangeTuple(added_files, modified_files, removed_files)
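    # Worked example, not part of this change: if the old diff contained
    # 'a.py' and 'b.py' and the new diff contains 'b.py' (with a different
    # hash) and 'c.py', the result is FileChangeTuple(added=['c.py'],
    # modified=['b.py'], removed=['a.py']).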
801 804
802 805 def _render_update_message(self, changes, file_changes):
803 806 """
804 807 Render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
805 808 so it always looks the same regardless of which default renderer
806 809 the system is using.
807 810
808 811 :param changes: changes named tuple
809 812 :param file_changes: file changes named tuple
810 813
811 814 """
812 815 new_status = ChangesetStatus.get_status_lbl(
813 816 ChangesetStatus.STATUS_UNDER_REVIEW)
814 817
815 818 changed_files = (
816 819 file_changes.added + file_changes.modified + file_changes.removed)
817 820
818 821 params = {
819 822 'under_review_label': new_status,
820 823 'added_commits': changes.added,
821 824 'removed_commits': changes.removed,
822 825 'changed_files': changed_files,
823 826 'added_files': file_changes.added,
824 827 'modified_files': file_changes.modified,
825 828 'removed_files': file_changes.removed,
826 829 }
827 830 renderer = RstTemplateRenderer()
828 831 return renderer.render('pull_request_update.mako', **params)
829 832
830 833 def edit(self, pull_request, title, description):
831 834 pull_request = self.__get_pull_request(pull_request)
832 835 if pull_request.is_closed():
833 836 raise ValueError('This pull request is closed')
834 837 if title:
835 838 pull_request.title = title
836 839 pull_request.description = description
837 840 pull_request.updated_on = datetime.datetime.now()
838 841 Session().add(pull_request)
839 842
840 843 def update_reviewers(self, pull_request, reviewer_data):
841 844 """
842 845 Update the reviewers in the pull request
843 846
844 847 :param pull_request: the pr to update
845 848 :param reviewer_data: list of tuples [(user, ['reason1', 'reason2'])]
846 849 """
847 850
848 851 reviewers_reasons = {}
849 852 for user_id, reasons in reviewer_data:
850 853 if isinstance(user_id, (int, basestring)):
851 854 user_id = self._get_user(user_id).user_id
852 855 reviewers_reasons[user_id] = reasons
853 856
854 857 reviewers_ids = set(reviewers_reasons.keys())
855 858 pull_request = self.__get_pull_request(pull_request)
856 859 current_reviewers = PullRequestReviewers.query()\
857 860 .filter(PullRequestReviewers.pull_request ==
858 861 pull_request).all()
859 862 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
860 863
861 864 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
862 865 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
863 866
864 867 log.debug("Adding %s reviewers", ids_to_add)
865 868 log.debug("Removing %s reviewers", ids_to_remove)
866 869 changed = False
867 870 for uid in ids_to_add:
868 871 changed = True
869 872 _usr = self._get_user(uid)
870 873 reasons = reviewers_reasons[uid]
871 874 reviewer = PullRequestReviewers(_usr, pull_request, reasons)
872 875 Session().add(reviewer)
873 876
874 877 self.notify_reviewers(pull_request, ids_to_add)
875 878
876 879 for uid in ids_to_remove:
877 880 changed = True
878 881 reviewer = PullRequestReviewers.query()\
879 882 .filter(PullRequestReviewers.user_id == uid,
880 883 PullRequestReviewers.pull_request == pull_request)\
881 884 .scalar()
882 885 if reviewer:
883 886 Session().delete(reviewer)
884 887 if changed:
885 888 pull_request.updated_on = datetime.datetime.now()
886 889 Session().add(pull_request)
887 890
888 891 return ids_to_add, ids_to_remove
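    # Illustrative sketch, not part of this change: `reviewer_data` uses the
    # same (user_id, reasons) tuple format as create(); the user ids below
    # are hypothetical.
    #
    #   >>> PullRequestModel().update_reviewers(
    #   ...     pull_request, [(2, ['original author']), (5, [])])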
889 892
890 893 def get_url(self, pull_request):
891 894 return h.url('pullrequest_show',
892 895 repo_name=safe_str(pull_request.target_repo.repo_name),
893 896 pull_request_id=pull_request.pull_request_id,
894 897 qualified=True)
895 898
896 899 def get_shadow_clone_url(self, pull_request):
897 900 """
898 901 Returns qualified url pointing to the shadow repository. If this pull
899 902 request is closed there is no shadow repository and ``None`` will be
900 903 returned.
901 904 """
902 905 if pull_request.is_closed():
903 906 return None
904 907 else:
905 908 pr_url = urllib.unquote(self.get_url(pull_request))
906 909 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
907 910
908 911 def notify_reviewers(self, pull_request, reviewers_ids):
909 912 # notification to reviewers
910 913 if not reviewers_ids:
911 914 return
912 915
913 916 pull_request_obj = pull_request
914 917 # get the current participants of this pull request
915 918 recipients = reviewers_ids
916 919 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
917 920
918 921 pr_source_repo = pull_request_obj.source_repo
919 922 pr_target_repo = pull_request_obj.target_repo
920 923
921 924 pr_url = h.url(
922 925 'pullrequest_show',
923 926 repo_name=pr_target_repo.repo_name,
924 927 pull_request_id=pull_request_obj.pull_request_id,
925 928 qualified=True,)
926 929
927 930 # set some variables for email notification
928 931 pr_target_repo_url = h.url(
929 932 'summary_home',
930 933 repo_name=pr_target_repo.repo_name,
931 934 qualified=True)
932 935
933 936 pr_source_repo_url = h.url(
934 937 'summary_home',
935 938 repo_name=pr_source_repo.repo_name,
936 939 qualified=True)
937 940
938 941 # pull request specifics
939 942 pull_request_commits = [
940 943 (x.raw_id, x.message)
941 944 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
942 945
943 946 kwargs = {
944 947 'user': pull_request.author,
945 948 'pull_request': pull_request_obj,
946 949 'pull_request_commits': pull_request_commits,
947 950
948 951 'pull_request_target_repo': pr_target_repo,
949 952 'pull_request_target_repo_url': pr_target_repo_url,
950 953
951 954 'pull_request_source_repo': pr_source_repo,
952 955 'pull_request_source_repo_url': pr_source_repo_url,
953 956
954 957 'pull_request_url': pr_url,
955 958 }
956 959
957 960 # pre-generate the subject for notification itself
958 961 (subject,
959 962 _h, _e, # we don't care about those
960 963 body_plaintext) = EmailNotificationModel().render_email(
961 964 notification_type, **kwargs)
962 965
963 966 # create notification objects, and emails
964 967 NotificationModel().create(
965 968 created_by=pull_request.author,
966 969 notification_subject=subject,
967 970 notification_body=body_plaintext,
968 971 notification_type=notification_type,
969 972 recipients=recipients,
970 973 email_kwargs=kwargs,
971 974 )
972 975
973 976 def delete(self, pull_request):
974 977 pull_request = self.__get_pull_request(pull_request)
975 978 self._cleanup_merge_workspace(pull_request)
976 979 Session().delete(pull_request)
977 980
978 981 def close_pull_request(self, pull_request, user):
979 982 pull_request = self.__get_pull_request(pull_request)
980 983 self._cleanup_merge_workspace(pull_request)
981 984 pull_request.status = PullRequest.STATUS_CLOSED
982 985 pull_request.updated_on = datetime.datetime.now()
983 986 Session().add(pull_request)
984 987 self._trigger_pull_request_hook(
985 988 pull_request, pull_request.author, 'close')
986 989 self._log_action('user_closed_pull_request', user, pull_request)
987 990
988 991 def close_pull_request_with_comment(self, pull_request, user, repo,
989 992 message=None):
990 993 status = ChangesetStatus.STATUS_REJECTED
991 994
992 995 if not message:
993 996 message = (
994 997 _('Status change %(transition_icon)s %(status)s') % {
995 998 'transition_icon': '>',
996 999 'status': ChangesetStatus.get_status_lbl(status)})
997 1000
998 1001 internal_message = _('Closing with') + ' ' + message
999 1002
1000 1003 comm = ChangesetCommentsModel().create(
1001 1004 text=internal_message,
1002 1005 repo=repo.repo_id,
1003 1006 user=user.user_id,
1004 1007 pull_request=pull_request.pull_request_id,
1005 1008 f_path=None,
1006 1009 line_no=None,
1007 1010 status_change=ChangesetStatus.get_status_lbl(status),
1008 1011 status_change_type=status,
1009 1012 closing_pr=True
1010 1013 )
1011 1014
1012 1015 ChangesetStatusModel().set_status(
1013 1016 repo.repo_id,
1014 1017 status,
1015 1018 user.user_id,
1016 1019 comm,
1017 1020 pull_request=pull_request.pull_request_id
1018 1021 )
1019 1022 Session().flush()
1020 1023
1021 1024 PullRequestModel().close_pull_request(
1022 1025 pull_request.pull_request_id, user)
1023 1026
1024 1027 def merge_status(self, pull_request):
1025 1028 if not self._is_merge_enabled(pull_request):
1026 1029 return False, _('Server-side pull request merging is disabled.')
1027 1030 if pull_request.is_closed():
1028 1031 return False, _('This pull request is closed.')
1029 1032 merge_possible, msg = self._check_repo_requirements(
1030 1033 target=pull_request.target_repo, source=pull_request.source_repo)
1031 1034 if not merge_possible:
1032 1035 return merge_possible, msg
1033 1036
1034 1037 try:
1035 1038 resp = self._try_merge(pull_request)
1036 1039 log.debug("Merge response: %s", resp)
1037 1040 status = resp.possible, self.merge_status_message(
1038 1041 resp.failure_reason)
1039 1042 except NotImplementedError:
1040 1043 status = False, _('Pull request merging is not supported.')
1041 1044
1042 1045 return status
1043 1046
1044 1047 def _check_repo_requirements(self, target, source):
1045 1048 """
1046 1049 Check if `target` and `source` have compatible requirements.
1047 1050
1048 1051 Currently this is just checking for largefiles.
1049 1052 """
1050 1053 target_has_largefiles = self._has_largefiles(target)
1051 1054 source_has_largefiles = self._has_largefiles(source)
1052 1055 merge_possible = True
1053 1056 message = u''
1054 1057
1055 1058 if target_has_largefiles != source_has_largefiles:
1056 1059 merge_possible = False
1057 1060 if source_has_largefiles:
1058 1061 message = _(
1059 1062 'Target repository large files support is disabled.')
1060 1063 else:
1061 1064 message = _(
1062 1065 'Source repository large files support is disabled.')
1063 1066
1064 1067 return merge_possible, message
1065 1068
1066 1069 def _has_largefiles(self, repo):
1067 1070 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1068 1071 'extensions', 'largefiles')
1069 1072 return largefiles_ui and largefiles_ui[0].active
1070 1073
1071 1074 def _try_merge(self, pull_request):
1072 1075 """
1073 1076 Try to merge the pull request and return the merge status.
1074 1077 """
1075 1078 log.debug(
1076 1079 "Trying out if the pull request %s can be merged.",
1077 1080 pull_request.pull_request_id)
1078 1081 target_vcs = pull_request.target_repo.scm_instance()
1079 1082
1080 1083 # Refresh the target reference.
1081 1084 try:
1082 1085 target_ref = self._refresh_reference(
1083 1086 pull_request.target_ref_parts, target_vcs)
1084 1087 except CommitDoesNotExistError:
1085 1088 merge_state = MergeResponse(
1086 1089 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1087 1090 return merge_state
1088 1091
1089 1092 target_locked = pull_request.target_repo.locked
1090 1093 if target_locked and target_locked[0]:
1091 1094 log.debug("The target repository is locked.")
1092 1095 merge_state = MergeResponse(
1093 1096 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1094 1097 elif self._needs_merge_state_refresh(pull_request, target_ref):
1095 1098 log.debug("Refreshing the merge status of the repository.")
1096 1099 merge_state = self._refresh_merge_state(
1097 1100 pull_request, target_vcs, target_ref)
1098 1101 else:
1099 1102 possible = pull_request.\
1100 1103 _last_merge_status == MergeFailureReason.NONE
1101 1104 merge_state = MergeResponse(
1102 1105 possible, False, None, pull_request._last_merge_status)
1103 1106
1104 1107 return merge_state
1105 1108
1106 1109 def _refresh_reference(self, reference, vcs_repository):
1107 1110 if reference.type in ('branch', 'book'):
1108 1111 name_or_id = reference.name
1109 1112 else:
1110 1113 name_or_id = reference.commit_id
1111 1114 refreshed_commit = vcs_repository.get_commit(name_or_id)
1112 1115 refreshed_reference = Reference(
1113 1116 reference.type, reference.name, refreshed_commit.raw_id)
1114 1117 return refreshed_reference
1115 1118
1116 1119 def _needs_merge_state_refresh(self, pull_request, target_reference):
1117 1120 return not(
1118 1121 pull_request.revisions and
1119 1122 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1120 1123 target_reference.commit_id == pull_request._last_merge_target_rev)
1121 1124
1122 1125 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1123 1126 workspace_id = self._workspace_id(pull_request)
1124 1127 source_vcs = pull_request.source_repo.scm_instance()
1125 1128 use_rebase = self._use_rebase_for_merging(pull_request)
1126 1129 merge_state = target_vcs.merge(
1127 1130 target_reference, source_vcs, pull_request.source_ref_parts,
1128 1131 workspace_id, dry_run=True, use_rebase=use_rebase)
1129 1132
1130 1133 # Do not store the response if there was an unknown error.
1131 1134 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1132 1135 pull_request._last_merge_source_rev = \
1133 1136 pull_request.source_ref_parts.commit_id
1134 1137 pull_request._last_merge_target_rev = target_reference.commit_id
1135 1138 pull_request._last_merge_status = merge_state.failure_reason
1136 1139 pull_request.shadow_merge_ref = merge_state.merge_ref
1137 1140 Session().add(pull_request)
1138 1141 Session().commit()
1139 1142
1140 1143 return merge_state
1141 1144
1142 1145 def _workspace_id(self, pull_request):
1143 1146 workspace_id = 'pr-%s' % pull_request.pull_request_id
1144 1147 return workspace_id
1145 1148
1146 1149 def merge_status_message(self, status_code):
1147 1150 """
1148 1151 Return a human friendly error message for the given merge status code.
1149 1152 """
1150 1153 return self.MERGE_STATUS_MESSAGES[status_code]
1151 1154
1152 1155 def generate_repo_data(self, repo, commit_id=None, branch=None,
1153 1156 bookmark=None):
1154 1157 all_refs, selected_ref = \
1155 1158 self._get_repo_pullrequest_sources(
1156 1159 repo.scm_instance(), commit_id=commit_id,
1157 1160 branch=branch, bookmark=bookmark)
1158 1161
1159 1162 refs_select2 = []
1160 1163 for element in all_refs:
1161 1164 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1162 1165 refs_select2.append({'text': element[1], 'children': children})
1163 1166
1164 1167 return {
1165 1168 'user': {
1166 1169 'user_id': repo.user.user_id,
1167 1170 'username': repo.user.username,
1168 1171 'firstname': repo.user.firstname,
1169 1172 'lastname': repo.user.lastname,
1170 1173 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1171 1174 },
1172 1175 'description': h.chop_at_smart(repo.description, '\n'),
1173 1176 'refs': {
1174 1177 'all_refs': all_refs,
1175 1178 'selected_ref': selected_ref,
1176 1179 'select2_refs': refs_select2
1177 1180 }
1178 1181 }
1179 1182
1180 1183 def generate_pullrequest_title(self, source, source_ref, target):
1181 1184 return u'{source}#{at_ref} to {target}'.format(
1182 1185 source=source,
1183 1186 at_ref=source_ref,
1184 1187 target=target,
1185 1188 )
1186 1189
1187 1190 def _cleanup_merge_workspace(self, pull_request):
1188 1191 # Merging related cleanup
1189 1192 target_scm = pull_request.target_repo.scm_instance()
1190 1193 workspace_id = 'pr-%s' % pull_request.pull_request_id
1191 1194
1192 1195 try:
1193 1196 target_scm.cleanup_merge_workspace(workspace_id)
1194 1197 except NotImplementedError:
1195 1198 pass
1196 1199
1197 1200 def _get_repo_pullrequest_sources(
1198 1201 self, repo, commit_id=None, branch=None, bookmark=None):
1199 1202 """
1200 1203 Return a structure with repo's interesting commits, suitable for
1201 1204 the selectors in pullrequest controller
1202 1205
1203 1206 :param commit_id: a commit that must be in the list somehow
1204 1207 and selected by default
1205 1208 :param branch: a branch that must be in the list and selected
1206 1209 by default - even if closed
1207 1210 :param bookmark: a bookmark that must be in the list and selected by default
1208 1211 """
1209 1212
1210 1213 commit_id = safe_str(commit_id) if commit_id else None
1211 1214 branch = safe_str(branch) if branch else None
1212 1215 bookmark = safe_str(bookmark) if bookmark else None
1213 1216
1214 1217 selected = None
1215 1218
1216 1219 # order matters: first source that has commit_id in it will be selected
1217 1220 sources = []
1218 1221 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1219 1222 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1220 1223
1221 1224 if commit_id:
1222 1225 ref_commit = (h.short_id(commit_id), commit_id)
1223 1226 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1224 1227
1225 1228 sources.append(
1226 1229 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1227 1230 )
1228 1231
1229 1232 groups = []
1230 1233 for group_key, ref_list, group_name, match in sources:
1231 1234 group_refs = []
1232 1235 for ref_name, ref_id in ref_list:
1233 1236 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1234 1237 group_refs.append((ref_key, ref_name))
1235 1238
1236 1239 if not selected:
1237 1240 if set([commit_id, match]) & set([ref_id, ref_name]):
1238 1241 selected = ref_key
1239 1242
1240 1243 if group_refs:
1241 1244 groups.append((group_refs, group_name))
1242 1245
1243 1246 if not selected:
1244 1247 ref = commit_id or branch or bookmark
1245 1248 if ref:
1246 1249 raise CommitDoesNotExistError(
1247 1250 'No commit refs could be found matching: %s' % ref)
1248 1251 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1249 1252 selected = 'branch:%s:%s' % (
1250 1253 repo.DEFAULT_BRANCH_NAME,
1251 1254 repo.branches[repo.DEFAULT_BRANCH_NAME]
1252 1255 )
1253 1256 elif repo.commit_ids:
1254 1257 rev = repo.commit_ids[0]
1255 1258 selected = 'rev:%s:%s' % (rev, rev)
1256 1259 else:
1257 1260 raise EmptyRepositoryError()
1258 1261 return groups, selected
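    # Illustrative sketch, not part of this change: the returned structure is
    # a list of (group_refs, group_name) pairs plus the selected ref key,
    # e.g. (commit id shortened, hypothetical):
    #
    #   groups = [([('branch:default:abc123', 'default')], u'Branches')]
    #   selected = 'branch:default:abc123'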
1259 1262
1260 1263 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1261 1264 pull_request = self.__get_pull_request(pull_request)
1262 1265 return self._get_diff_from_pr_or_version(pull_request, context=context)
1263 1266
1264 1267 def _get_diff_from_pr_or_version(self, pr_or_version, context):
1265 1268 source_repo = pr_or_version.source_repo
1266 1269
1267 1270 # we swap org/other ref since we run a simple diff on one repo
1268 1271 target_ref_id = pr_or_version.target_ref_parts.commit_id
1269 1272 source_ref_id = pr_or_version.source_ref_parts.commit_id
1270 1273 target_commit = source_repo.get_commit(
1271 1274 commit_id=safe_str(target_ref_id))
1272 1275 source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
1273 1276 vcs_repo = source_repo.scm_instance()
1274 1277
1275 1278 # TODO: johbo: In the context of an update, we cannot reach
1276 1279 # the old commit anymore with our normal mechanisms. It needs
1277 1280 # some sort of special support in the vcs layer to avoid this
1278 1281 # workaround.
1279 1282 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1280 1283 vcs_repo.alias == 'git'):
1281 1284 source_commit.raw_id = safe_str(source_ref_id)
1282 1285
1283 1286 log.debug('calculating diff between '
1284 1287 'source_ref:%s and target_ref:%s for repo `%s`',
1285 1288 target_ref_id, source_ref_id,
1286 1289 safe_unicode(vcs_repo.path))
1287 1290
1288 1291 vcs_diff = vcs_repo.get_diff(
1289 1292 commit1=target_commit, commit2=source_commit, context=context)
1290 1293 return vcs_diff
1291 1294
1292 1295 def _is_merge_enabled(self, pull_request):
1293 1296 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1294 1297 settings = settings_model.get_general_settings()
1295 1298 return settings.get('rhodecode_pr_merge_enabled', False)
1296 1299
1297 1300 def _use_rebase_for_merging(self, pull_request):
1298 1301 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1299 1302 settings = settings_model.get_general_settings()
1300 1303 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1301 1304
1302 1305 def _log_action(self, action, user, pull_request):
1303 1306 action_logger(
1304 1307 user,
1305 1308 '{action}:{pr_id}'.format(
1306 1309 action=action, pr_id=pull_request.pull_request_id),
1307 1310 pull_request.target_repo)
1308 1311
1309 1312
1310 1313 ChangeTuple = namedtuple('ChangeTuple',
1311 1314 ['added', 'common', 'removed'])
1312 1315
1313 1316 FileChangeTuple = namedtuple('FileChangeTuple',
1314 1317 ['added', 'modified', 'removed'])