vcs: Deprecate generic missing commit merge error reason....
Martin Bornhold -
r1081:6047da4a default
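This commit deprecates the generic ``MergeFailureReason.MISSING_COMMIT`` reason in favour of the more specific ``MISSING_TARGET_REF`` and ``MISSING_SOURCE_REF`` values. The old numeric value stays reachable as ``_DEPRECATED_MISSING_COMMIT`` because reasons may already be stored in the database. A minimal sketch of the intended migration for backend code, assuming hypothetical ``target_commit``/``source_commit`` variables that a backend resolved earlier:

    from rhodecode.lib.vcs.backends.base import MergeFailureReason, MergeResponse

    def missing_ref_response(target_commit, source_commit):
        # Previously a backend returned the generic (now deprecated) reason:
        #   MergeResponse(False, False, None, MergeFailureReason.MISSING_COMMIT)
        # After this change it reports which side is actually missing.
        if target_commit is None:
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF)
        if source_commit is None:
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
        return None  # both refs resolved; no failure response needed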
@@ -1,1544 +1,1545 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24
25 25 import collections
26 26 import datetime
27 27 import itertools
28 28 import logging
29 29 import os
30 30 import time
31 31 import warnings
32 32
33 33 from zope.cachedescriptors.property import Lazy as LazyProperty
34 34
35 35 from rhodecode.lib.utils2 import safe_str, safe_unicode
36 36 from rhodecode.lib.vcs import connection
37 37 from rhodecode.lib.vcs.utils import author_name, author_email
38 38 from rhodecode.lib.vcs.conf import settings
39 39 from rhodecode.lib.vcs.exceptions import (
40 40 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
41 41 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
42 42 NodeDoesNotExistError, NodeNotChangedError, VCSError,
43 43 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
44 44 RepositoryError)
45 45
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 FILEMODE_DEFAULT = 0100644
51 51 FILEMODE_EXECUTABLE = 0100755
52 52
53 53 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
54 54 MergeResponse = collections.namedtuple(
55 55 'MergeResponse',
56 56 ('possible', 'executed', 'merge_ref', 'failure_reason'))
57 57
58 58
59 59 class MergeFailureReason(object):
60 60 """
61 61 Enumeration with all the reasons why the server side merge could fail.
62 62
63 63 DO NOT change the number of the reasons, as they may be stored in the
64 64 database.
65 65
66 66 Changing the name of a reason is acceptable and encouraged to deprecate old
67 67 reasons.
68 68 """
69 69
70 70 # Everything went well.
71 71 NONE = 0
72 72
73 73 # An unexpected exception was raised. Check the logs for more details.
74 74 UNKNOWN = 1
75 75
76 76 # The merge was not successful, there are conflicts.
77 77 MERGE_FAILED = 2
78 78
79 79 # The merge succeeded but we could not push it to the target repository.
80 80 PUSH_FAILED = 3
81 81
82 82 # The specified target is not a head in the target repository.
83 83 TARGET_IS_NOT_HEAD = 4
84 84
85 85 # The source repository contains more branches than the target. Pushing
86 86 # the merge will create additional branches in the target.
87 87 HG_SOURCE_HAS_MORE_BRANCHES = 5
88 88
89 90 # The target reference has multiple heads. That does not allow us to
90 91 # correctly identify the target location. This could only happen for mercurial
91 91 # branches.
92 92 HG_TARGET_HAS_MULTIPLE_HEADS = 6
93 93
94 94 # The target repository is locked
95 95 TARGET_IS_LOCKED = 7
96 96
97 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
97 98 # An involved commit could not be found.
98 MISSING_COMMIT = 8
99 _DEPRECATED_MISSING_COMMIT = 8
99 100
100 101 # The target repo reference is missing.
101 102 MISSING_TARGET_REF = 9
102 103
103 104 # The source repo reference is missing.
104 105 MISSING_SOURCE_REF = 10
105 106
106 107
107 108 class UpdateFailureReason(object):
108 109 """
109 110 Enumeration with all the reasons why the pull request update could fail.
110 111
111 112 DO NOT change the number of the reasons, as they may be stored in the
112 113 database.
113 114
114 115 Changing the name of a reason is acceptable and encouraged to deprecate old
115 116 reasons.
116 117 """
117 118
118 119 # Everything went well.
119 120 NONE = 0
120 121
121 122 # An unexpected exception was raised. Check the logs for more details.
122 123 UNKNOWN = 1
123 124
124 125 # The pull request is up to date.
125 126 NO_CHANGE = 2
126 127
127 128 # The pull request has a reference type that is not supported for update.
128 129 WRONG_REF_TPYE = 3
129 130
130 131 # Update failed because the target reference is missing.
131 132 MISSING_TARGET_REF = 4
132 133
133 134 # Update failed because the source reference is missing.
134 135 MISSING_SOURCE_REF = 5
135 136
136 137
137 138 class BaseRepository(object):
138 139 """
139 140 Base Repository for final backends
140 141
141 142 .. attribute:: DEFAULT_BRANCH_NAME
142 143
143 144 name of default branch (i.e. "trunk" for svn, "master" for git etc.)
144 145
145 146 .. attribute:: commit_ids
146 147
147 148 list of all available commit ids, in ascending order
148 149
149 150 .. attribute:: path
150 151
151 152 absolute path to the repository
152 153
153 154 .. attribute:: bookmarks
154 155
155 156 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
156 157 there are no bookmarks or the backend implementation does not support
157 158 bookmarks.
158 159
159 160 .. attribute:: tags
160 161
161 162 Mapping from name to :term:`Commit ID` of the tag.
162 163
163 164 """
164 165
165 166 DEFAULT_BRANCH_NAME = None
166 167 DEFAULT_CONTACT = u"Unknown"
167 168 DEFAULT_DESCRIPTION = u"unknown"
168 169 EMPTY_COMMIT_ID = '0' * 40
169 170
170 171 path = None
171 172
172 173 def __init__(self, repo_path, config=None, create=False, **kwargs):
173 174 """
174 175 Initializes repository. Raises RepositoryError if repository could
175 176 not be found at the given ``repo_path`` or directory at ``repo_path``
176 177 exists and ``create`` is set to True.
177 178
178 179 :param repo_path: local path of the repository
179 180 :param config: repository configuration
180 181 :param create=False: if set to True, would try to create repository.
181 182 :param src_url=None: if set, should be proper url from which repository
182 183 would be cloned; requires ``create`` parameter to be set to True -
183 184 raises RepositoryError if src_url is set and create evaluates to
184 185 False
185 186 """
186 187 raise NotImplementedError
187 188
188 189 def __repr__(self):
189 190 return '<%s at %s>' % (self.__class__.__name__, self.path)
190 191
191 192 def __len__(self):
192 193 return self.count()
193 194
194 195 def __eq__(self, other):
195 196 same_instance = isinstance(other, self.__class__)
196 197 return same_instance and other.path == self.path
197 198
198 199 def __ne__(self, other):
199 200 return not self.__eq__(other)
200 201
201 202 @LazyProperty
202 203 def EMPTY_COMMIT(self):
203 204 return EmptyCommit(self.EMPTY_COMMIT_ID)
204 205
205 206 @LazyProperty
206 207 def alias(self):
207 208 for k, v in settings.BACKENDS.items():
208 209 if v.split('.')[-1] == str(self.__class__.__name__):
209 210 return k
210 211
211 212 @LazyProperty
212 213 def name(self):
213 214 return safe_unicode(os.path.basename(self.path))
214 215
215 216 @LazyProperty
216 217 def description(self):
217 218 raise NotImplementedError
218 219
219 220 def refs(self):
220 221 """
221 222 returns a `dict` with branches, bookmarks, tags, and closed_branches
222 223 for this repository
223 224 """
224 225 raise NotImplementedError
225 226
226 227 @LazyProperty
227 228 def branches(self):
228 229 """
229 230 A `dict` which maps branch names to commit ids.
230 231 """
231 232 raise NotImplementedError
232 233
233 234 @LazyProperty
234 235 def size(self):
235 236 """
236 237 Returns combined size in bytes for all repository files
237 238 """
238 239 tip = self.get_commit()
239 240 return tip.size
240 241
241 242 def size_at_commit(self, commit_id):
242 243 commit = self.get_commit(commit_id)
243 244 return commit.size
244 245
245 246 def is_empty(self):
246 247 return not bool(self.commit_ids)
247 248
248 249 @staticmethod
249 250 def check_url(url, config):
250 251 """
251 252 Function will check given url and try to verify if it's a valid
252 253 link.
253 254 """
254 255 raise NotImplementedError
255 256
256 257 @staticmethod
257 258 def is_valid_repository(path):
258 259 """
259 260 Check if given `path` contains a valid repository of this backend
260 261 """
261 262 raise NotImplementedError
262 263
263 264 # ==========================================================================
264 265 # COMMITS
265 266 # ==========================================================================
266 267
267 268 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
268 269 """
269 270 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
270 271 are both None, most recent commit is returned.
271 272
272 273 :param pre_load: Optional. List of commit attributes to load.
273 274
274 275 :raises ``EmptyRepositoryError``: if there are no commits
275 276 """
276 277 raise NotImplementedError
277 278
278 279 def __iter__(self):
279 280 for commit_id in self.commit_ids:
280 281 yield self.get_commit(commit_id=commit_id)
281 282
282 283 def get_commits(
283 284 self, start_id=None, end_id=None, start_date=None, end_date=None,
284 285 branch_name=None, pre_load=None):
285 286 """
286 287 Returns iterator of `BaseCommit` objects from start to end
287 288 not inclusive. This should behave just like a list, i.e. end is not
288 289 inclusive.
289 290
290 291 :param start_id: None or str, must be a valid commit id
291 292 :param end_id: None or str, must be a valid commit id
292 293 :param start_date:
293 294 :param end_date:
294 295 :param branch_name:
295 296 :param pre_load:
296 297 """
297 298 raise NotImplementedError
298 299
299 300 def __getitem__(self, key):
300 301 """
301 302 Allows index based access to the commit objects of this repository.
302 303 """
303 304 pre_load = ["author", "branch", "date", "message", "parents"]
304 305 if isinstance(key, slice):
305 306 return self._get_range(key, pre_load)
306 307 return self.get_commit(commit_idx=key, pre_load=pre_load)
307 308
308 309 def _get_range(self, slice_obj, pre_load):
309 310 for commit_id in self.commit_ids.__getitem__(slice_obj):
310 311 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
311 312
312 313 def count(self):
313 314 return len(self.commit_ids)
314 315
315 316 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
316 317 """
317 318 Creates and returns a tag for the given ``commit_id``.
318 319
319 320 :param name: name for new tag
320 321 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
321 322 :param commit_id: commit id for which new tag would be created
322 323 :param message: message of the tag's commit
323 324 :param date: date of tag's commit
324 325
325 326 :raises TagAlreadyExistError: if tag with same name already exists
326 327 """
327 328 raise NotImplementedError
328 329
329 330 def remove_tag(self, name, user, message=None, date=None):
330 331 """
331 332 Removes tag with the given ``name``.
332 333
333 334 :param name: name of the tag to be removed
334 335 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
335 336 :param message: message of the tag's removal commit
336 337 :param date: date of tag's removal commit
337 338
338 339 :raises TagDoesNotExistError: if tag with given name does not exist
339 340 """
340 341 raise NotImplementedError
341 342
342 343 def get_diff(
343 344 self, commit1, commit2, path=None, ignore_whitespace=False,
344 345 context=3, path1=None):
345 346 """
346 347 Returns (git like) *diff*, as plain text. Shows changes introduced by
347 348 `commit2` since `commit1`.
348 349
349 350 :param commit1: Entry point from which diff is shown. Can be
350 351 ``self.EMPTY_COMMIT`` - in this case, patch showing all
351 352 the changes since empty state of the repository until `commit2`
352 353 :param commit2: Until which commit changes should be shown.
353 354 :param path: Can be set to a path of a file to create a diff of that
354 355 file. If `path1` is also set, this value is only associated to
355 356 `commit2`.
356 357 :param ignore_whitespace: If set to ``True``, would not show whitespace
357 358 changes. Defaults to ``False``.
358 359 :param context: How many lines before/after changed lines should be
359 360 shown. Defaults to ``3``.
360 361 :param path1: Can be set to a path to associate with `commit1`. This
361 362 parameter works only for backends which support diff generation for
362 363 different paths. Other backends will raise a `ValueError` if `path1`
363 364 is set and has a different value than `path`.
364 365 """
365 366 raise NotImplementedError
366 367
367 368 def strip(self, commit_id, branch=None):
368 369 """
369 370 Strip given commit_id from the repository
370 371 """
371 372 raise NotImplementedError
372 373
373 374 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
374 375 """
375 376 Return a latest common ancestor commit if one exists for this repo
376 377 `commit_id1` vs `commit_id2` from `repo2`.
377 378
378 379 :param commit_id1: Commit id from this repository to use as a
379 380 target for the comparison.
380 381 :param commit_id2: Source commit id to use for comparison.
381 382 :param repo2: Source repository to use for comparison.
382 383 """
383 384 raise NotImplementedError
384 385
385 386 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
386 387 """
387 388 Compare this repository's revision `commit_id1` with `commit_id2`.
388 389
389 390 Returns a tuple(commits, ancestor) that would be merged from
390 391 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
391 392 will be returned as ancestor.
392 393
393 394 :param commit_id1: Commit id from this repository to use as a
394 395 target for the comparison.
395 396 :param commit_id2: Source commit id to use for comparison.
396 397 :param repo2: Source repository to use for comparison.
397 398 :param merge: If set to ``True`` will do a merge compare which also
398 399 returns the common ancestor.
399 400 :param pre_load: Optional. List of commit attributes to load.
400 401 """
401 402 raise NotImplementedError
402 403
403 404 def merge(self, target_ref, source_repo, source_ref, workspace_id,
404 405 user_name='', user_email='', message='', dry_run=False,
405 406 use_rebase=False):
406 407 """
407 408 Merge the revisions specified in `source_ref` from `source_repo`
408 409 onto the `target_ref` of this repository.
409 410
410 411 `source_ref` and `target_ref` are named tuples with the following
411 412 fields `type`, `name` and `commit_id`.
412 413
413 414 Returns a MergeResponse named tuple with the following fields
414 415 'possible', 'executed', 'merge_ref',
415 416 'failure_reason'.
416 417
417 418 :param target_ref: `target_ref` points to the commit on top of which
418 419 the `source_ref` should be merged.
419 420 :param source_repo: The repository that contains the commits to be
420 421 merged.
421 422 :param source_ref: `source_ref` points to the topmost commit from
422 423 the `source_repo` which should be merged.
423 424 :param workspace_id: `workspace_id` unique identifier.
424 425 :param user_name: Merge commit `user_name`.
425 426 :param user_email: Merge commit `user_email`.
426 427 :param message: Merge commit `message`.
427 428 :param dry_run: If `True` the merge will not take place.
428 429 :param use_rebase: If `True` commits from the source will be rebased
429 430 on top of the target instead of being merged.
430 431 """
431 432 if dry_run:
432 433 message = message or 'dry_run_merge_message'
433 434 user_email = user_email or 'dry-run-merge@rhodecode.com'
434 435 user_name = user_name or 'Dry-Run User'
435 436 else:
436 437 if not user_name:
437 438 raise ValueError('user_name cannot be empty')
438 439 if not user_email:
439 440 raise ValueError('user_email cannot be empty')
440 441 if not message:
441 442 raise ValueError('message cannot be empty')
442 443
443 444 shadow_repository_path = self._maybe_prepare_merge_workspace(
444 445 workspace_id, target_ref)
445 446
446 447 try:
447 448 return self._merge_repo(
448 449 shadow_repository_path, target_ref, source_repo,
449 450 source_ref, message, user_name, user_email, dry_run=dry_run,
450 451 use_rebase=use_rebase)
451 452 except RepositoryError:
452 453 log.exception(
453 454 'Unexpected failure when running merge, dry-run=%s',
454 455 dry_run)
455 456 return MergeResponse(
456 457 False, False, None, MergeFailureReason.UNKNOWN)
457 458
458 459 def _merge_repo(self, shadow_repository_path, target_ref,
459 460 source_repo, source_ref, merge_message,
460 461 merger_name, merger_email, dry_run=False, use_rebase=False):
461 462 """Internal implementation of merge."""
462 463 raise NotImplementedError
463 464
464 465 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref):
465 466 """
466 467 Create the merge workspace.
467 468
468 469 :param workspace_id: `workspace_id` unique identifier.
469 470 """
470 471 raise NotImplementedError
471 472
472 473 def cleanup_merge_workspace(self, workspace_id):
473 474 """
474 475 Remove merge workspace.
475 476
476 477 This function MUST not fail in case there is no workspace associated with
477 478 the given `workspace_id`.
478 479
479 480 :param workspace_id: `workspace_id` unique identifier.
480 481 """
481 482 raise NotImplementedError
482 483
483 484 # ========== #
484 485 # COMMIT API #
485 486 # ========== #
486 487
487 488 @LazyProperty
488 489 def in_memory_commit(self):
489 490 """
490 491 Returns :class:`InMemoryCommit` object for this repository.
491 492 """
492 493 raise NotImplementedError
493 494
494 495 # ======================== #
495 496 # UTILITIES FOR SUBCLASSES #
496 497 # ======================== #
497 498
498 499 def _validate_diff_commits(self, commit1, commit2):
499 500 """
500 501 Validates that the given commits are related to this repository.
501 502
502 503 Intended as a utility for sub classes to have a consistent validation
503 504 of input parameters in methods like :meth:`get_diff`.
504 505 """
505 506 self._validate_commit(commit1)
506 507 self._validate_commit(commit2)
507 508 if (isinstance(commit1, EmptyCommit) and
508 509 isinstance(commit2, EmptyCommit)):
509 510 raise ValueError("Cannot compare two empty commits")
510 511
511 512 def _validate_commit(self, commit):
512 513 if not isinstance(commit, BaseCommit):
513 514 raise TypeError(
514 515 "%s is not of type BaseCommit" % repr(commit))
515 516 if commit.repository != self and not isinstance(commit, EmptyCommit):
516 517 raise ValueError(
517 518 "Commit %s must be a valid commit from this repository %s, "
518 519 "related to this repository instead %s." %
519 520 (commit, self, commit.repository))
520 521
521 522 def _validate_commit_id(self, commit_id):
522 523 if not isinstance(commit_id, basestring):
523 524 raise TypeError("commit_id must be a string value")
524 525
525 526 def _validate_commit_idx(self, commit_idx):
526 527 if not isinstance(commit_idx, (int, long)):
527 528 raise TypeError("commit_idx must be a numeric value")
528 529
529 530 def _validate_branch_name(self, branch_name):
530 531 if branch_name and branch_name not in self.branches_all:
531 532 msg = ("Branch %s not found in %s" % (branch_name, self))
532 533 raise BranchDoesNotExistError(msg)
533 534
534 535 #
535 536 # Supporting deprecated API parts
536 537 # TODO: johbo: consider to move this into a mixin
537 538 #
538 539
539 540 @property
540 541 def EMPTY_CHANGESET(self):
541 542 warnings.warn(
542 543 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
543 544 return self.EMPTY_COMMIT_ID
544 545
545 546 @property
546 547 def revisions(self):
547 548 warnings.warn("Use commits attribute instead", DeprecationWarning)
548 549 return self.commit_ids
549 550
550 551 @revisions.setter
551 552 def revisions(self, value):
552 553 warnings.warn("Use commits attribute instead", DeprecationWarning)
553 554 self.commit_ids = value
554 555
555 556 def get_changeset(self, revision=None, pre_load=None):
556 557 warnings.warn("Use get_commit instead", DeprecationWarning)
557 558 commit_id = None
558 559 commit_idx = None
559 560 if isinstance(revision, basestring):
560 561 commit_id = revision
561 562 else:
562 563 commit_idx = revision
563 564 return self.get_commit(
564 565 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
565 566
566 567 def get_changesets(
567 568 self, start=None, end=None, start_date=None, end_date=None,
568 569 branch_name=None, pre_load=None):
569 570 warnings.warn("Use get_commits instead", DeprecationWarning)
570 571 start_id = self._revision_to_commit(start)
571 572 end_id = self._revision_to_commit(end)
572 573 return self.get_commits(
573 574 start_id=start_id, end_id=end_id, start_date=start_date,
574 575 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
575 576
576 577 def _revision_to_commit(self, revision):
577 578 """
578 579 Translates a revision to a commit_id
579 580
580 581 Helps to support the old changeset based API which allows to use
581 582 commit ids and commit indices interchangeable.
582 583 """
583 584 if revision is None:
584 585 return revision
585 586
586 587 if isinstance(revision, basestring):
587 588 commit_id = revision
588 589 else:
589 590 commit_id = self.commit_ids[revision]
590 591 return commit_id
591 592
592 593 @property
593 594 def in_memory_changeset(self):
594 595 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
595 596 return self.in_memory_commit
596 597
597 598
598 599 class BaseCommit(object):
599 600 """
600 601 Each backend should implement its commit representation.
601 602
602 603 **Attributes**
603 604
604 605 ``repository``
605 606 repository object within which commit exists
606 607
607 608 ``id``
608 609 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
609 610 just ``tip``.
610 611
611 612 ``raw_id``
612 613 raw commit representation (i.e. full 40 length sha for git
613 614 backend)
614 615
615 616 ``short_id``
616 617 shortened (if applicable) version of ``raw_id``; it would be a simple
617 618 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
618 619 as ``raw_id`` for subversion
619 620
620 621 ``idx``
621 622 commit index
622 623
623 624 ``files``
624 625 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
625 626
626 627 ``dirs``
627 628 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
628 629
629 630 ``nodes``
630 631 combined list of ``Node`` objects
631 632
632 633 ``author``
633 634 author of the commit, as unicode
634 635
635 636 ``message``
636 637 message of the commit, as unicode
637 638
638 639 ``parents``
639 640 list of parent commits
640 641
641 642 """
642 643
643 644 branch = None
644 645 """
645 646 Depending on the backend this should be set to the branch name of the
646 647 commit. Backends not supporting branches on commits should leave this
647 648 value as ``None``.
648 649 """
649 650
650 651 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
651 652 """
652 653 This template is used to generate a default prefix for repository archives
653 654 if no prefix has been specified.
654 655 """
655 656
656 657 def __str__(self):
657 658 return '<%s at %s:%s>' % (
658 659 self.__class__.__name__, self.idx, self.short_id)
659 660
660 661 def __repr__(self):
661 662 return self.__str__()
662 663
663 664 def __unicode__(self):
664 665 return u'%s:%s' % (self.idx, self.short_id)
665 666
666 667 def __eq__(self, other):
667 668 same_instance = isinstance(other, self.__class__)
668 669 return same_instance and self.raw_id == other.raw_id
669 670
670 671 def __json__(self):
671 672 parents = []
672 673 try:
673 674 for parent in self.parents:
674 675 parents.append({'raw_id': parent.raw_id})
675 676 except NotImplementedError:
676 677 # empty commit doesn't have parents implemented
677 678 pass
678 679
679 680 return {
680 681 'short_id': self.short_id,
681 682 'raw_id': self.raw_id,
682 683 'revision': self.idx,
683 684 'message': self.message,
684 685 'date': self.date,
685 686 'author': self.author,
686 687 'parents': parents,
687 688 'branch': self.branch
688 689 }
689 690
690 691 @LazyProperty
691 692 def last(self):
692 693 """
693 694 ``True`` if this is last commit in repository, ``False``
694 695 otherwise; trying to access this attribute while there is no
695 696 commits would raise `EmptyRepositoryError`
696 697 """
697 698 if self.repository is None:
698 699 raise CommitError("Cannot check if it's most recent commit")
699 700 return self.raw_id == self.repository.commit_ids[-1]
700 701
701 702 @LazyProperty
702 703 def parents(self):
703 704 """
704 705 Returns list of parent commits.
705 706 """
706 707 raise NotImplementedError
707 708
708 709 @property
709 710 def merge(self):
710 711 """
711 712 Returns boolean if commit is a merge.
712 713 """
713 714 return len(self.parents) > 1
714 715
715 716 @LazyProperty
716 717 def children(self):
717 718 """
718 719 Returns list of child commits.
719 720 """
720 721 raise NotImplementedError
721 722
722 723 @LazyProperty
723 724 def id(self):
724 725 """
725 726 Returns string identifying this commit.
726 727 """
727 728 raise NotImplementedError
728 729
729 730 @LazyProperty
730 731 def raw_id(self):
731 732 """
732 733 Returns raw string identifying this commit.
733 734 """
734 735 raise NotImplementedError
735 736
736 737 @LazyProperty
737 738 def short_id(self):
738 739 """
739 740 Returns shortened version of ``raw_id`` attribute, as string,
740 741 identifying this commit, useful for presentation to users.
741 742 """
742 743 raise NotImplementedError
743 744
744 745 @LazyProperty
745 746 def idx(self):
746 747 """
747 748 Returns integer identifying this commit.
748 749 """
749 750 raise NotImplementedError
750 751
751 752 @LazyProperty
752 753 def committer(self):
753 754 """
754 755 Returns committer for this commit
755 756 """
756 757 raise NotImplementedError
757 758
758 759 @LazyProperty
759 760 def committer_name(self):
760 761 """
761 762 Returns committer name for this commit
762 763 """
763 764
764 765 return author_name(self.committer)
765 766
766 767 @LazyProperty
767 768 def committer_email(self):
768 769 """
769 770 Returns committer email address for this commit
770 771 """
771 772
772 773 return author_email(self.committer)
773 774
774 775 @LazyProperty
775 776 def author(self):
776 777 """
777 778 Returns author for this commit
778 779 """
779 780
780 781 raise NotImplementedError
781 782
782 783 @LazyProperty
783 784 def author_name(self):
784 785 """
785 786 Returns author name for this commit
786 787 """
787 788
788 789 return author_name(self.author)
789 790
790 791 @LazyProperty
791 792 def author_email(self):
792 793 """
793 794 Returns author email address for this commit
794 795 """
795 796
796 797 return author_email(self.author)
797 798
798 799 def get_file_mode(self, path):
799 800 """
800 801 Returns stat mode of the file at `path`.
801 802 """
802 803 raise NotImplementedError
803 804
804 805 def is_link(self, path):
805 806 """
806 807 Returns ``True`` if given `path` is a symlink
807 808 """
808 809 raise NotImplementedError
809 810
810 811 def get_file_content(self, path):
811 812 """
812 813 Returns content of the file at the given `path`.
813 814 """
814 815 raise NotImplementedError
815 816
816 817 def get_file_size(self, path):
817 818 """
818 819 Returns size of the file at the given `path`.
819 820 """
820 821 raise NotImplementedError
821 822
822 823 def get_file_commit(self, path, pre_load=None):
823 824 """
824 825 Returns last commit of the file at the given `path`.
825 826
826 827 :param pre_load: Optional. List of commit attributes to load.
827 828 """
828 829 return self.get_file_history(path, limit=1, pre_load=pre_load)[0]
829 830
830 831 def get_file_history(self, path, limit=None, pre_load=None):
831 832 """
832 833 Returns history of file as reversed list of :class:`BaseCommit`
833 834 objects for which file at given `path` has been modified.
834 835
835 836 :param limit: Optional. Allows to limit the size of the returned
836 837 history. This is intended as a hint to the underlying backend, so
837 838 that it can apply optimizations depending on the limit.
838 839 :param pre_load: Optional. List of commit attributes to load.
839 840 """
840 841 raise NotImplementedError
841 842
842 843 def get_file_annotate(self, path, pre_load=None):
843 844 """
844 845 Returns a generator of four element tuples with
845 846 lineno, sha, commit lazy loader and line
846 847
847 848 :param pre_load: Optional. List of commit attributes to load.
848 849 """
849 850 raise NotImplementedError
850 851
851 852 def get_nodes(self, path):
852 853 """
853 854 Returns combined ``DirNode`` and ``FileNode`` objects list representing
854 855 state of commit at the given ``path``.
855 856
856 857 :raises ``CommitError``: if node at the given ``path`` is not an
857 858 instance of ``DirNode``
858 859 """
859 860 raise NotImplementedError
860 861
861 862 def get_node(self, path):
862 863 """
863 864 Returns ``Node`` object from the given ``path``.
864 865
865 866 :raises ``NodeDoesNotExistError``: if there is no node at the given
866 867 ``path``
867 868 """
868 869 raise NotImplementedError
869 870
870 871 def get_largefile_node(self, path):
871 872 """
872 873 Returns the path to largefile from Mercurial storage.
873 874 """
874 875 raise NotImplementedError
875 876
876 877 def archive_repo(self, file_path, kind='tgz', subrepos=None,
877 878 prefix=None, write_metadata=False, mtime=None):
878 879 """
879 880 Creates an archive containing the contents of the repository.
880 881
881 882 :param file_path: path to the file in which to create the archive.
882 883 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
883 884 :param prefix: name of root directory in archive.
884 885 Default is repository name and commit's short_id joined with dash:
885 886 ``"{repo_name}-{short_id}"``.
886 887 :param write_metadata: write a metadata file into archive.
887 888 :param mtime: custom modification time for archive creation, defaults
888 889 to time.time() if not given.
889 890
890 891 :raise VCSError: If prefix has a problem.
891 892 """
892 893 allowed_kinds = settings.ARCHIVE_SPECS.keys()
893 894 if kind not in allowed_kinds:
894 895 raise ImproperArchiveTypeError(
895 896 'Archive kind (%s) not supported use one of %s' %
896 897 (kind, allowed_kinds))
897 898
898 899 prefix = self._validate_archive_prefix(prefix)
899 900
900 901 mtime = mtime or time.mktime(self.date.timetuple())
901 902
902 903 file_info = []
903 904 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
904 905 for _r, _d, files in cur_rev.walk('/'):
905 906 for f in files:
906 907 f_path = os.path.join(prefix, f.path)
907 908 file_info.append(
908 909 (f_path, f.mode, f.is_link(), f.raw_bytes))
909 910
910 911 if write_metadata:
911 912 metadata = [
912 913 ('repo_name', self.repository.name),
913 914 ('rev', self.raw_id),
914 915 ('create_time', mtime),
915 916 ('branch', self.branch),
916 917 ('tags', ','.join(self.tags)),
917 918 ]
918 919 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
919 920 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
920 921
921 922 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
922 923
923 924 def _validate_archive_prefix(self, prefix):
924 925 if prefix is None:
925 926 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
926 927 repo_name=safe_str(self.repository.name),
927 928 short_id=self.short_id)
928 929 elif not isinstance(prefix, str):
929 930 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
930 931 elif prefix.startswith('/'):
931 932 raise VCSError("Prefix cannot start with leading slash")
932 933 elif prefix.strip() == '':
933 934 raise VCSError("Prefix cannot be empty")
934 935 return prefix
935 936
936 937 @LazyProperty
937 938 def root(self):
938 939 """
939 940 Returns ``RootNode`` object for this commit.
940 941 """
941 942 return self.get_node('')
942 943
943 944 def next(self, branch=None):
944 945 """
945 946 Returns next commit from current, if branch is given it will return
946 947 next commit belonging to this branch
947 948
948 949 :param branch: show commits within the given named branch
949 950 """
950 951 indexes = xrange(self.idx + 1, self.repository.count())
951 952 return self._find_next(indexes, branch)
952 953
953 954 def prev(self, branch=None):
954 955 """
955 956 Returns previous commit from current, if branch is given it will
956 957 return previous commit belonging to this branch
957 958
958 959 :param branch: show commit within the given named branch
959 960 """
960 961 indexes = xrange(self.idx - 1, -1, -1)
961 962 return self._find_next(indexes, branch)
962 963
963 964 def _find_next(self, indexes, branch=None):
964 965 if branch and self.branch != branch:
965 966 raise VCSError('Branch option used on commit not belonging '
966 967 'to that branch')
967 968
968 969 for next_idx in indexes:
969 970 commit = self.repository.get_commit(commit_idx=next_idx)
970 971 if branch and branch != commit.branch:
971 972 continue
972 973 return commit
973 974 raise CommitDoesNotExistError
974 975
975 976 def diff(self, ignore_whitespace=True, context=3):
976 977 """
977 978 Returns a `Diff` object representing the change made by this commit.
978 979 """
979 980 parent = (
980 981 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
981 982 diff = self.repository.get_diff(
982 983 parent, self,
983 984 ignore_whitespace=ignore_whitespace,
984 985 context=context)
985 986 return diff
986 987
987 988 @LazyProperty
988 989 def added(self):
989 990 """
990 991 Returns list of added ``FileNode`` objects.
991 992 """
992 993 raise NotImplementedError
993 994
994 995 @LazyProperty
995 996 def changed(self):
996 997 """
997 998 Returns list of modified ``FileNode`` objects.
998 999 """
999 1000 raise NotImplementedError
1000 1001
1001 1002 @LazyProperty
1002 1003 def removed(self):
1003 1004 """
1004 1005 Returns list of removed ``FileNode`` objects.
1005 1006 """
1006 1007 raise NotImplementedError
1007 1008
1008 1009 @LazyProperty
1009 1010 def size(self):
1010 1011 """
1011 1012 Returns total number of bytes from contents of all filenodes.
1012 1013 """
1013 1014 return sum((node.size for node in self.get_filenodes_generator()))
1014 1015
1015 1016 def walk(self, topurl=''):
1016 1017 """
1017 1018 Similar to os.walk method. Instead of the filesystem it walks through
1018 1019 commit starting at given ``topurl``. Returns generator of tuples
1019 1020 (topnode, dirnodes, filenodes).
1020 1021 """
1021 1022 topnode = self.get_node(topurl)
1022 1023 if not topnode.is_dir():
1023 1024 return
1024 1025 yield (topnode, topnode.dirs, topnode.files)
1025 1026 for dirnode in topnode.dirs:
1026 1027 for tup in self.walk(dirnode.path):
1027 1028 yield tup
1028 1029
1029 1030 def get_filenodes_generator(self):
1030 1031 """
1031 1032 Returns generator that yields *all* file nodes.
1032 1033 """
1033 1034 for topnode, dirs, files in self.walk():
1034 1035 for node in files:
1035 1036 yield node
1036 1037
1037 1038 #
1038 1039 # Utilities for sub classes to support consistent behavior
1039 1040 #
1040 1041
1041 1042 def no_node_at_path(self, path):
1042 1043 return NodeDoesNotExistError(
1043 1044 "There is no file or directory at the given path: "
1044 1045 "'%s' at commit %s" % (path, self.short_id))
1045 1046
1046 1047 def _fix_path(self, path):
1047 1048 """
1048 1049 Paths are stored without trailing slash so we need to get rid of it if
1049 1050 needed.
1050 1051 """
1051 1052 return path.rstrip('/')
1052 1053
1053 1054 #
1054 1055 # Deprecated API based on changesets
1055 1056 #
1056 1057
1057 1058 @property
1058 1059 def revision(self):
1059 1060 warnings.warn("Use idx instead", DeprecationWarning)
1060 1061 return self.idx
1061 1062
1062 1063 @revision.setter
1063 1064 def revision(self, value):
1064 1065 warnings.warn("Use idx instead", DeprecationWarning)
1065 1066 self.idx = value
1066 1067
1067 1068 def get_file_changeset(self, path):
1068 1069 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1069 1070 return self.get_file_commit(path)
1070 1071
1071 1072
1072 1073 class BaseChangesetClass(type):
1073 1074
1074 1075 def __instancecheck__(self, instance):
1075 1076 return isinstance(instance, BaseCommit)
1076 1077
1077 1078
1078 1079 class BaseChangeset(BaseCommit):
1079 1080
1080 1081 __metaclass__ = BaseChangesetClass
1081 1082
1082 1083 def __new__(cls, *args, **kwargs):
1083 1084 warnings.warn(
1084 1085 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1085 1086 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1086 1087
1087 1088
1088 1089 class BaseInMemoryCommit(object):
1089 1090 """
1090 1091 Represents differences between repository's state (most recent head) and
1091 1092 changes made *in place*.
1092 1093
1093 1094 **Attributes**
1094 1095
1095 1096 ``repository``
1096 1097 repository object for this in-memory-commit
1097 1098
1098 1099 ``added``
1099 1100 list of ``FileNode`` objects marked as *added*
1100 1101
1101 1102 ``changed``
1102 1103 list of ``FileNode`` objects marked as *changed*
1103 1104
1104 1105 ``removed``
1105 1106 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1106 1107 *removed*
1107 1108
1108 1109 ``parents``
1109 1110 list of :class:`BaseCommit` instances representing parents of
1110 1111 in-memory commit. Should always be 2-element sequence.
1111 1112
1112 1113 """
1113 1114
1114 1115 def __init__(self, repository):
1115 1116 self.repository = repository
1116 1117 self.added = []
1117 1118 self.changed = []
1118 1119 self.removed = []
1119 1120 self.parents = []
1120 1121
1121 1122 def add(self, *filenodes):
1122 1123 """
1123 1124 Marks given ``FileNode`` objects as *to be committed*.
1124 1125
1125 1126 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1126 1127 latest commit
1127 1128 :raises ``NodeAlreadyAddedError``: if node with same path is already
1128 1129 marked as *added*
1129 1130 """
1130 1131 # Check if not already marked as *added* first
1131 1132 for node in filenodes:
1132 1133 if node.path in (n.path for n in self.added):
1133 1134 raise NodeAlreadyAddedError(
1134 1135 "FileNode %s is already marked for addition"
1135 1136 % node.path)
1136 1137 for node in filenodes:
1137 1138 self.added.append(node)
1138 1139
1139 1140 def change(self, *filenodes):
1140 1141 """
1141 1142 Marks given ``FileNode`` objects to be *changed* in next commit.
1142 1143
1143 1144 :raises ``EmptyRepositoryError``: if there are no commits yet
1144 1145 :raises ``NodeAlreadyExistsError``: if node with same path is already
1145 1146 marked to be *changed*
1146 1147 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1147 1148 marked to be *removed*
1148 1149 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1149 1150 commit
1150 1151 :raises ``NodeNotChangedError``: if node hasn't really been changed
1151 1152 """
1152 1153 for node in filenodes:
1153 1154 if node.path in (n.path for n in self.removed):
1154 1155 raise NodeAlreadyRemovedError(
1155 1156 "Node at %s is already marked as removed" % node.path)
1156 1157 try:
1157 1158 self.repository.get_commit()
1158 1159 except EmptyRepositoryError:
1159 1160 raise EmptyRepositoryError(
1160 1161 "Nothing to change - try to *add* new nodes rather than "
1161 1162 "changing them")
1162 1163 for node in filenodes:
1163 1164 if node.path in (n.path for n in self.changed):
1164 1165 raise NodeAlreadyChangedError(
1165 1166 "Node at '%s' is already marked as changed" % node.path)
1166 1167 self.changed.append(node)
1167 1168
1168 1169 def remove(self, *filenodes):
1169 1170 """
1170 1171 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1171 1172 *removed* in next commit.
1172 1173
1173 1174 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1174 1175 be *removed*
1175 1176 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1176 1177 be *changed*
1177 1178 """
1178 1179 for node in filenodes:
1179 1180 if node.path in (n.path for n in self.removed):
1180 1181 raise NodeAlreadyRemovedError(
1181 1182 "Node is already marked for removal at %s" % node.path)
1182 1183 if node.path in (n.path for n in self.changed):
1183 1184 raise NodeAlreadyChangedError(
1184 1185 "Node is already marked to be changed at %s" % node.path)
1185 1186 # We only mark node as *removed* - real removal is done by
1186 1187 # commit method
1187 1188 self.removed.append(node)
1188 1189
1189 1190 def reset(self):
1190 1191 """
1191 1192 Resets this instance to initial state (cleans ``added``, ``changed``
1192 1193 and ``removed`` lists).
1193 1194 """
1194 1195 self.added = []
1195 1196 self.changed = []
1196 1197 self.removed = []
1197 1198 self.parents = []
1198 1199
1199 1200 def get_ipaths(self):
1200 1201 """
1201 1202 Returns generator of paths from nodes marked as added, changed or
1202 1203 removed.
1203 1204 """
1204 1205 for node in itertools.chain(self.added, self.changed, self.removed):
1205 1206 yield node.path
1206 1207
1207 1208 def get_paths(self):
1208 1209 """
1209 1210 Returns list of paths from nodes marked as added, changed or removed.
1210 1211 """
1211 1212 return list(self.get_ipaths())
1212 1213
1213 1214 def check_integrity(self, parents=None):
1214 1215 """
1215 1216 Checks in-memory commit's integrity. Also, sets parents if not
1216 1217 already set.
1217 1218
1218 1219 :raises CommitError: if any error occurs (i.e.
1219 1220 ``NodeDoesNotExistError``).
1220 1221 """
1221 1222 if not self.parents:
1222 1223 parents = parents or []
1223 1224 if len(parents) == 0:
1224 1225 try:
1225 1226 parents = [self.repository.get_commit(), None]
1226 1227 except EmptyRepositoryError:
1227 1228 parents = [None, None]
1228 1229 elif len(parents) == 1:
1229 1230 parents += [None]
1230 1231 self.parents = parents
1231 1232
1232 1233 # Local parents, only if not None
1233 1234 parents = [p for p in self.parents if p]
1234 1235
1235 1236 # Check nodes marked as added
1236 1237 for p in parents:
1237 1238 for node in self.added:
1238 1239 try:
1239 1240 p.get_node(node.path)
1240 1241 except NodeDoesNotExistError:
1241 1242 pass
1242 1243 else:
1243 1244 raise NodeAlreadyExistsError(
1244 1245 "Node `%s` already exists at %s" % (node.path, p))
1245 1246
1246 1247 # Check nodes marked as changed
1247 1248 missing = set(self.changed)
1248 1249 not_changed = set(self.changed)
1249 1250 if self.changed and not parents:
1250 1251 raise NodeDoesNotExistError(str(self.changed[0].path))
1251 1252 for p in parents:
1252 1253 for node in self.changed:
1253 1254 try:
1254 1255 old = p.get_node(node.path)
1255 1256 missing.remove(node)
1256 1257 # if content actually changed, remove node from not_changed
1257 1258 if old.content != node.content:
1258 1259 not_changed.remove(node)
1259 1260 except NodeDoesNotExistError:
1260 1261 pass
1261 1262 if self.changed and missing:
1262 1263 raise NodeDoesNotExistError(
1263 1264 "Node `%s` marked as modified but missing in parents: %s"
1264 1265 % (node.path, parents))
1265 1266
1266 1267 if self.changed and not_changed:
1267 1268 raise NodeNotChangedError(
1268 1269 "Node `%s` wasn't actually changed (parents: %s)"
1269 1270 % (not_changed.pop().path, parents))
1270 1271
1271 1272 # Check nodes marked as removed
1272 1273 if self.removed and not parents:
1273 1274 raise NodeDoesNotExistError(
1274 1275 "Cannot remove node at %s as there "
1275 1276 "were no parents specified" % self.removed[0].path)
1276 1277 really_removed = set()
1277 1278 for p in parents:
1278 1279 for node in self.removed:
1279 1280 try:
1280 1281 p.get_node(node.path)
1281 1282 really_removed.add(node)
1282 1283 except CommitError:
1283 1284 pass
1284 1285 not_removed = set(self.removed) - really_removed
1285 1286 if not_removed:
1286 1287 # TODO: johbo: This code branch does not seem to be covered
1287 1288 raise NodeDoesNotExistError(
1288 1289 "Cannot remove node at %s from "
1289 1290 "following parents: %s" % (not_removed, parents))
1290 1291
1291 1292 def commit(
1292 1293 self, message, author, parents=None, branch=None, date=None,
1293 1294 **kwargs):
1294 1295 """
1295 1296 Performs in-memory commit (doesn't check workdir in any way) and
1296 1297 returns newly created :class:`BaseCommit`. Updates repository's
1297 1298 attribute `commits`.
1298 1299
1299 1300 .. note::
1300 1301
1301 1302 While overriding this method each backend should call
1302 1303 ``self.check_integrity(parents)`` in the first place.
1303 1304
1304 1305 :param message: message of the commit
1305 1306 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1306 1307 :param parents: single parent or sequence of parents from which commit
1307 1308 would be derived
1308 1309 :param date: ``datetime.datetime`` instance. Defaults to
1309 1310 ``datetime.datetime.now()``.
1310 1311 :param branch: branch name, as string. If none given, default backend's
1311 1312 branch would be used.
1312 1313
1313 1314 :raises ``CommitError``: if any error occurs while committing
1314 1315 """
1315 1316 raise NotImplementedError
1316 1317
1317 1318
1318 1319 class BaseInMemoryChangesetClass(type):
1319 1320
1320 1321 def __instancecheck__(self, instance):
1321 1322 return isinstance(instance, BaseInMemoryCommit)
1322 1323
1323 1324
1324 1325 class BaseInMemoryChangeset(BaseInMemoryCommit):
1325 1326
1326 1327 __metaclass__ = BaseInMemoryChangesetClass
1327 1328
1328 1329 def __new__(cls, *args, **kwargs):
1329 1330 warnings.warn(
1330 1331 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1331 1332 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1332 1333
1333 1334
1334 1335 class EmptyCommit(BaseCommit):
1335 1336 """
1336 1337 A dummy empty commit. It's possible to pass a hash when creating
1337 1338 an EmptyCommit
1338 1339 """
1339 1340
1340 1341 def __init__(
1341 1342 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1342 1343 message='', author='', date=None):
1343 1344 self._empty_commit_id = commit_id
1344 1345 # TODO: johbo: Solve idx parameter, default value does not make
1345 1346 # too much sense
1346 1347 self.idx = idx
1347 1348 self.message = message
1348 1349 self.author = author
1349 1350 self.date = date or datetime.datetime.fromtimestamp(0)
1350 1351 self.repository = repo
1351 1352 self.alias = alias
1352 1353
1353 1354 @LazyProperty
1354 1355 def raw_id(self):
1355 1356 """
1356 1357 Returns raw string identifying this commit, useful for web
1357 1358 representation.
1358 1359 """
1359 1360
1360 1361 return self._empty_commit_id
1361 1362
1362 1363 @LazyProperty
1363 1364 def branch(self):
1364 1365 if self.alias:
1365 1366 from rhodecode.lib.vcs.backends import get_backend
1366 1367 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1367 1368
1368 1369 @LazyProperty
1369 1370 def short_id(self):
1370 1371 return self.raw_id[:12]
1371 1372
1372 1373 @LazyProperty
1373 1374 def id(self):
1374 1375 return self.raw_id
1375 1376
1376 1377 def get_file_commit(self, path):
1377 1378 return self
1378 1379
1379 1380 def get_file_content(self, path):
1380 1381 return u''
1381 1382
1382 1383 def get_file_size(self, path):
1383 1384 return 0
1384 1385
1385 1386
1386 1387 class EmptyChangesetClass(type):
1387 1388
1388 1389 def __instancecheck__(self, instance):
1389 1390 return isinstance(instance, EmptyCommit)
1390 1391
1391 1392
1392 1393 class EmptyChangeset(EmptyCommit):
1393 1394
1394 1395 __metaclass__ = EmptyChangesetClass
1395 1396
1396 1397 def __new__(cls, *args, **kwargs):
1397 1398 warnings.warn(
1398 1399 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1399 1400 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1400 1401
1401 1402 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1402 1403 alias=None, revision=-1, message='', author='', date=None):
1403 1404 if requested_revision is not None:
1404 1405 warnings.warn(
1405 1406 "Parameter requested_revision not supported anymore",
1406 1407 DeprecationWarning)
1407 1408 super(EmptyChangeset, self).__init__(
1408 1409 commit_id=cs, repo=repo, alias=alias, idx=revision,
1409 1410 message=message, author=author, date=date)
1410 1411
1411 1412 @property
1412 1413 def revision(self):
1413 1414 warnings.warn("Use idx instead", DeprecationWarning)
1414 1415 return self.idx
1415 1416
1416 1417 @revision.setter
1417 1418 def revision(self, value):
1418 1419 warnings.warn("Use idx instead", DeprecationWarning)
1419 1420 self.idx = value
1420 1421
1421 1422
1422 1423 class CollectionGenerator(object):
1423 1424
1424 1425 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1425 1426 self.repo = repo
1426 1427 self.commit_ids = commit_ids
1427 1428 # TODO: (oliver) this isn't currently hooked up
1428 1429 self.collection_size = None
1429 1430 self.pre_load = pre_load
1430 1431
1431 1432 def __len__(self):
1432 1433 if self.collection_size is not None:
1433 1434 return self.collection_size
1434 1435 return self.commit_ids.__len__()
1435 1436
1436 1437 def __iter__(self):
1437 1438 for commit_id in self.commit_ids:
1438 1439 # TODO: johbo: Mercurial passes in commit indices or commit ids
1439 1440 yield self._commit_factory(commit_id)
1440 1441
1441 1442 def _commit_factory(self, commit_id):
1442 1443 """
1443 1444 Allows backends to override the way commits are generated.
1444 1445 """
1445 1446 return self.repo.get_commit(commit_id=commit_id,
1446 1447 pre_load=self.pre_load)
1447 1448
1448 1449 def __getslice__(self, i, j):
1449 1450 """
1450 1451 Returns an iterator of sliced repository
1451 1452 """
1452 1453 commit_ids = self.commit_ids[i:j]
1453 1454 return self.__class__(
1454 1455 self.repo, commit_ids, pre_load=self.pre_load)
1455 1456
1456 1457 def __repr__(self):
1457 1458 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1458 1459
1459 1460
1460 1461 class Config(object):
1461 1462 """
1462 1463 Represents the configuration for a repository.
1463 1464
1464 1465 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1465 1466 standard library. It implements only the needed subset.
1466 1467 """
1467 1468
1468 1469 def __init__(self):
1469 1470 self._values = {}
1470 1471
1471 1472 def copy(self):
1472 1473 clone = Config()
1473 1474 for section, values in self._values.items():
1474 1475 clone._values[section] = values.copy()
1475 1476 return clone
1476 1477
1477 1478 def __repr__(self):
1478 1479 return '<Config(%s sections) at %s>' % (
1479 1480 len(self._values), hex(id(self)))
1480 1481
1481 1482 def items(self, section):
1482 1483 return self._values.get(section, {}).iteritems()
1483 1484
1484 1485 def get(self, section, option):
1485 1486 return self._values.get(section, {}).get(option)
1486 1487
1487 1488 def set(self, section, option, value):
1488 1489 section_values = self._values.setdefault(section, {})
1489 1490 section_values[option] = value
1490 1491
1491 1492 def clear_section(self, section):
1492 1493 self._values[section] = {}
1493 1494
1494 1495 def serialize(self):
1495 1496 """
1496 1497 Creates a list of three tuples (section, key, value) representing
1497 1498 this config object.
1498 1499 """
1499 1500 items = []
1500 1501 for section in self._values:
1501 1502 for option, value in self._values[section].items():
1502 1503 items.append(
1503 1504 (safe_str(section), safe_str(option), safe_str(value)))
1504 1505 return items
1505 1506
1506 1507
1507 1508 class Diff(object):
1508 1509 """
1509 1510 Represents a diff result from a repository backend.
1510 1511
1511 1512 Subclasses have to provide a backend specific value for :attr:`_header_re`.
1512 1513 """
1513 1514
1514 1515 _header_re = None
1515 1516
1516 1517 def __init__(self, raw_diff):
1517 1518 self.raw = raw_diff
1518 1519
1519 1520 def chunks(self):
1520 1521 """
1521 1522 split the diff into separate chunks, one per "diff --git a/file b/file"
1522 1523 header; to make diffs consistent we must prepend with \n, and make sure
1523 1524 we can detect the last chunk, as it also has a special rule
1524 1525 """
1525 1526 chunks = ('\n' + self.raw).split('\ndiff --git')[1:]
1526 1527 total_chunks = len(chunks)
1527 1528 return (DiffChunk(chunk, self, cur_chunk == total_chunks)
1528 1529 for cur_chunk, chunk in enumerate(chunks, start=1))
1529 1530
1530 1531
1531 1532 class DiffChunk(object):
1532 1533
1533 1534 def __init__(self, chunk, diff, last_chunk):
1534 1535 self._diff = diff
1535 1536
1536 1537 # since we split by \ndiff --git that part is lost from original diff
1537 1538 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1538 1539 if not last_chunk:
1539 1540 chunk += '\n'
1540 1541
1541 1542 match = self._diff._header_re.match(chunk)
1542 1543 self.header = match.groupdict()
1543 1544 self.diff = chunk[match.end():]
1544 1545 self.raw = chunk
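For context, a hedged usage sketch of the ``merge`` API defined above; ``repo``, ``source_repo``, ``target_ref``, ``source_ref`` and ``workspace_id`` are assumed to exist and are not part of this change:

    # Sketch only: the objects below are assumed to be prepared elsewhere,
    # e.g. target_ref = Reference('branch', 'default', '<commit id>').
    response = repo.merge(
        target_ref, source_repo, source_ref, workspace_id,
        user_name='Joe Doe', user_email='joe.doe@example.com',
        message='Merge source into target', dry_run=True)

    if not response.possible:
        # failure_reason is one of the MergeFailureReason values above; the
        # numeric values may be persisted, which is why they must not change.
        print('merge not possible, reason=%s' % response.failure_reason)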
@@ -1,1250 +1,1250 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31
32 32 from pylons.i18n.translation import _
33 33 from pylons.i18n.translation import lazy_ugettext
34 34
35 35 from rhodecode.lib import helpers as h, hooks_utils, diffs
36 36 from rhodecode.lib.compat import OrderedDict
37 37 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
38 38 from rhodecode.lib.markup_renderer import (
39 39 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
40 40 from rhodecode.lib.utils import action_logger
41 41 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
42 42 from rhodecode.lib.vcs.backends.base import (
43 43 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
44 44 from rhodecode.lib.vcs.conf import settings as vcs_settings
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 CommitDoesNotExistError, EmptyRepositoryError)
47 47 from rhodecode.model import BaseModel
48 48 from rhodecode.model.changeset_status import ChangesetStatusModel
49 49 from rhodecode.model.comment import ChangesetCommentsModel
50 50 from rhodecode.model.db import (
51 51 PullRequest, PullRequestReviewers, ChangesetStatus,
52 52 PullRequestVersion, ChangesetComment)
53 53 from rhodecode.model.meta import Session
54 54 from rhodecode.model.notification import NotificationModel, \
55 55 EmailNotificationModel
56 56 from rhodecode.model.scm import ScmModel
57 57 from rhodecode.model.settings import VcsSettingsModel
58 58
59 59
60 60 log = logging.getLogger(__name__)
61 61
62 62
63 63 # Data structure to hold the response data when updating commits during a pull
64 64 # request update.
65 65 UpdateResponse = namedtuple(
66 66 'UpdateResponse', 'success, reason, new, old, changes')
67 67
68 68
69 69 class PullRequestModel(BaseModel):
70 70
71 71 cls = PullRequest
72 72
73 73 DIFF_CONTEXT = 3
74 74
75 75 MERGE_STATUS_MESSAGES = {
76 76 MergeFailureReason.NONE: lazy_ugettext(
77 77 'This pull request can be automatically merged.'),
78 78 MergeFailureReason.UNKNOWN: lazy_ugettext(
79 79 'This pull request cannot be merged because of an unhandled'
80 80 ' exception.'),
81 81 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
82 82 'This pull request cannot be merged because of conflicts.'),
83 83 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
84 84 'This pull request could not be merged because push to target'
85 85 ' failed.'),
86 86 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
87 87 'This pull request cannot be merged because the target is not a'
88 88 ' head.'),
89 89 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
90 90 'This pull request cannot be merged because the source contains'
91 91 ' more branches than the target.'),
92 92 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
93 93 'This pull request cannot be merged because the target has'
94 94 ' multiple heads.'),
95 95 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
96 96 'This pull request cannot be merged because the target repository'
97 97 ' is locked.'),
98 MergeFailureReason.MISSING_COMMIT: lazy_ugettext(
98 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
99 99 'This pull request cannot be merged because the target or the '
100 100 'source reference is missing.'),
101 101 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
102 102 'This pull request cannot be merged because the target '
103 103 'reference is missing.'),
104 104 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
105 105 'This pull request cannot be merged because the source '
106 106 'reference is missing.'),
107 107 }
108 108
109 109 UPDATE_STATUS_MESSAGES = {
110 110 UpdateFailureReason.NONE: lazy_ugettext(
111 111 'Pull request update successful.'),
112 112 UpdateFailureReason.UNKNOWN: lazy_ugettext(
113 113 'Pull request update failed because of an unknown error.'),
114 114 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
115 115 'No update needed because the source reference is already '
116 116 'up to date.'),
117 117 UpdateFailureReason.WRONG_REF_TPYE: lazy_ugettext(
118 118 'Pull request cannot be updated because the reference type is '
119 119 'not supported for an update.'),
120 120 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
121 121 'This pull request cannot be updated because the target '
122 122 'reference is missing.'),
123 123 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
124 124 'This pull request cannot be updated because the source '
125 125 'reference is missing.'),
126 126 }
127 127
128 128 def __get_pull_request(self, pull_request):
129 129 return self._get_instance(PullRequest, pull_request)
130 130
131 131 def _check_perms(self, perms, pull_request, user, api=False):
132 132 if not api:
133 133 return h.HasRepoPermissionAny(*perms)(
134 134 user=user, repo_name=pull_request.target_repo.repo_name)
135 135 else:
136 136 return h.HasRepoPermissionAnyApi(*perms)(
137 137 user=user, repo_name=pull_request.target_repo.repo_name)
138 138
139 139 def check_user_read(self, pull_request, user, api=False):
140 140 _perms = ('repository.admin', 'repository.write', 'repository.read',)
141 141 return self._check_perms(_perms, pull_request, user, api)
142 142
143 143 def check_user_merge(self, pull_request, user, api=False):
144 144 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
145 145 return self._check_perms(_perms, pull_request, user, api)
146 146
147 147 def check_user_update(self, pull_request, user, api=False):
148 148 owner = user.user_id == pull_request.user_id
149 149 return self.check_user_merge(pull_request, user, api) or owner
150 150
151 151 def check_user_change_status(self, pull_request, user, api=False):
152 152 reviewer = user.user_id in [x.user_id for x in
153 153 pull_request.reviewers]
154 154 return self.check_user_update(pull_request, user, api) or reviewer
155 155
156 156 def get(self, pull_request):
157 157 return self.__get_pull_request(pull_request)
158 158
159 159 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
160 160 opened_by=None, order_by=None,
161 161 order_dir='desc'):
162 162 repo = self._get_repo(repo_name)
163 163 q = PullRequest.query()
164 164 # source or target
165 165 if source:
166 166 q = q.filter(PullRequest.source_repo == repo)
167 167 else:
168 168 q = q.filter(PullRequest.target_repo == repo)
169 169
170 170 # closed,opened
171 171 if statuses:
172 172 q = q.filter(PullRequest.status.in_(statuses))
173 173
174 174 # opened by filter
175 175 if opened_by:
176 176 q = q.filter(PullRequest.user_id.in_(opened_by))
177 177
178 178 if order_by:
179 179 order_map = {
180 180 'name_raw': PullRequest.pull_request_id,
181 181 'title': PullRequest.title,
182 182 'updated_on_raw': PullRequest.updated_on
183 183 }
184 184 if order_dir == 'asc':
185 185 q = q.order_by(order_map[order_by].asc())
186 186 else:
187 187 q = q.order_by(order_map[order_by].desc())
188 188
189 189 return q
190 190
191 191 def count_all(self, repo_name, source=False, statuses=None,
192 192 opened_by=None):
193 193 """
194 194 Count the number of pull requests for a specific repository.
195 195
196 196 :param repo_name: target or source repo
197 197 :param source: boolean flag to specify if repo_name refers to source
198 198 :param statuses: list of pull request statuses
199 199 :param opened_by: author user of the pull request
200 200 :returns: int number of pull requests
201 201 """
202 202 q = self._prepare_get_all_query(
203 203 repo_name, source=source, statuses=statuses, opened_by=opened_by)
204 204
205 205 return q.count()
206 206
207 207 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
208 208 offset=0, length=None, order_by=None, order_dir='desc'):
209 209 """
210 210 Get all pull requests for a specific repository.
211 211
212 212 :param repo_name: target or source repo
213 213 :param source: boolean flag to specify if repo_name refers to source
214 214 :param statuses: list of pull request statuses
215 215 :param opened_by: author user of the pull request
216 216 :param offset: pagination offset
217 217 :param length: length of returned list
218 218 :param order_by: order of the returned list
219 219 :param order_dir: 'asc' or 'desc' ordering direction
220 220 :returns: list of pull requests
221 221 """
222 222 q = self._prepare_get_all_query(
223 223 repo_name, source=source, statuses=statuses, opened_by=opened_by,
224 224 order_by=order_by, order_dir=order_dir)
225 225
226 226 if length:
227 227 pull_requests = q.limit(length).offset(offset).all()
228 228 else:
229 229 pull_requests = q.all()
230 230
231 231 return pull_requests
232 232
233 233 def count_awaiting_review(self, repo_name, source=False, statuses=None,
234 234 opened_by=None):
235 235 """
236 236 Count the number of pull requests for a specific repository that are
237 237 awaiting review.
238 238
239 239 :param repo_name: target or source repo
240 240 :param source: boolean flag to specify if repo_name refers to source
241 241 :param statuses: list of pull request statuses
242 242 :param opened_by: author user of the pull request
243 243 :returns: int number of pull requests
244 244 """
245 245 pull_requests = self.get_awaiting_review(
246 246 repo_name, source=source, statuses=statuses, opened_by=opened_by)
247 247
248 248 return len(pull_requests)
249 249
250 250 def get_awaiting_review(self, repo_name, source=False, statuses=None,
251 251 opened_by=None, offset=0, length=None,
252 252 order_by=None, order_dir='desc'):
253 253 """
254 254 Get all pull requests for a specific repository that are awaiting
255 255 review.
256 256
257 257 :param repo_name: target or source repo
258 258 :param source: boolean flag to specify if repo_name refers to source
259 259 :param statuses: list of pull request statuses
260 260 :param opened_by: author user of the pull request
261 261 :param offset: pagination offset
262 262 :param length: length of returned list
263 263 :param order_by: order of the returned list
264 264 :param order_dir: 'asc' or 'desc' ordering direction
265 265 :returns: list of pull requests
266 266 """
267 267 pull_requests = self.get_all(
268 268 repo_name, source=source, statuses=statuses, opened_by=opened_by,
269 269 order_by=order_by, order_dir=order_dir)
270 270
271 271 _filtered_pull_requests = []
272 272 for pr in pull_requests:
273 273 status = pr.calculated_review_status()
274 274 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
275 275 ChangesetStatus.STATUS_UNDER_REVIEW]:
276 276 _filtered_pull_requests.append(pr)
277 277 if length:
278 278 return _filtered_pull_requests[offset:offset+length]
279 279 else:
280 280 return _filtered_pull_requests
281 281
282 282 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
283 283 opened_by=None, user_id=None):
284 284 """
285 285 Count the number of pull requests for a specific repository that are
286 286 awaiting review from a specific user.
287 287
288 288 :param repo_name: target or source repo
289 289 :param source: boolean flag to specify if repo_name refers to source
290 290 :param statuses: list of pull request statuses
291 291 :param opened_by: author user of the pull request
292 292 :param user_id: reviewer user of the pull request
293 293 :returns: int number of pull requests
294 294 """
295 295 pull_requests = self.get_awaiting_my_review(
296 296 repo_name, source=source, statuses=statuses, opened_by=opened_by,
297 297 user_id=user_id)
298 298
299 299 return len(pull_requests)
300 300
301 301 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
302 302 opened_by=None, user_id=None, offset=0,
303 303 length=None, order_by=None, order_dir='desc'):
304 304 """
305 305 Get all pull requests for a specific repository that are awaiting
306 306 review from a specific user.
307 307
308 308 :param repo_name: target or source repo
309 309 :param source: boolean flag to specify if repo_name refers to source
310 310 :param statuses: list of pull request statuses
311 311 :param opened_by: author user of the pull request
312 312 :param user_id: reviewer user of the pull request
313 313 :param offset: pagination offset
314 314 :param length: length of returned list
315 315 :param order_by: order of the returned list
316 316 :param order_dir: 'asc' or 'desc' ordering direction
317 317 :returns: list of pull requests
318 318 """
319 319 pull_requests = self.get_all(
320 320 repo_name, source=source, statuses=statuses, opened_by=opened_by,
321 321 order_by=order_by, order_dir=order_dir)
322 322
323 323 _my = PullRequestModel().get_not_reviewed(user_id)
324 324 my_participation = []
325 325 for pr in pull_requests:
326 326 if pr in _my:
327 327 my_participation.append(pr)
328 328 _filtered_pull_requests = my_participation
329 329 if length:
330 330 return _filtered_pull_requests[offset:offset+length]
331 331 else:
332 332 return _filtered_pull_requests
333 333
334 334 def get_not_reviewed(self, user_id):
335 335 return [
336 336 x.pull_request for x in PullRequestReviewers.query().filter(
337 337 PullRequestReviewers.user_id == user_id).all()
338 338 ]
339 339
340 340 def get_versions(self, pull_request):
341 341 """
342 342 returns versions of the pull request sorted by ID ascending
343 343 """
344 344 return PullRequestVersion.query()\
345 345 .filter(PullRequestVersion.pull_request == pull_request)\
346 346 .order_by(PullRequestVersion.pull_request_version_id.asc())\
347 347 .all()
348 348
349 349 def create(self, created_by, source_repo, source_ref, target_repo,
350 350 target_ref, revisions, reviewers, title, description=None):
351 351 created_by_user = self._get_user(created_by)
352 352 source_repo = self._get_repo(source_repo)
353 353 target_repo = self._get_repo(target_repo)
354 354
355 355 pull_request = PullRequest()
356 356 pull_request.source_repo = source_repo
357 357 pull_request.source_ref = source_ref
358 358 pull_request.target_repo = target_repo
359 359 pull_request.target_ref = target_ref
360 360 pull_request.revisions = revisions
361 361 pull_request.title = title
362 362 pull_request.description = description
363 363 pull_request.author = created_by_user
364 364
365 365 Session().add(pull_request)
366 366 Session().flush()
367 367
368 368 reviewer_ids = set()
369 369 # members / reviewers
370 370 for reviewer_object in reviewers:
371 371 if isinstance(reviewer_object, tuple):
372 372 user_id, reasons = reviewer_object
373 373 else:
374 374 user_id, reasons = reviewer_object, []
375 375
376 376 user = self._get_user(user_id)
377 377 reviewer_ids.add(user.user_id)
378 378
379 379 reviewer = PullRequestReviewers(user, pull_request, reasons)
380 380 Session().add(reviewer)
381 381
382 382 # Set approval status to "Under Review" for all commits which are
383 383 # part of this pull request.
384 384 ChangesetStatusModel().set_status(
385 385 repo=target_repo,
386 386 status=ChangesetStatus.STATUS_UNDER_REVIEW,
387 387 user=created_by_user,
388 388 pull_request=pull_request
389 389 )
390 390
391 391 self.notify_reviewers(pull_request, reviewer_ids)
392 392 self._trigger_pull_request_hook(
393 393 pull_request, created_by_user, 'create')
394 394
395 395 return pull_request
396 396
397 397 def _trigger_pull_request_hook(self, pull_request, user, action):
398 398 pull_request = self.__get_pull_request(pull_request)
399 399 target_scm = pull_request.target_repo.scm_instance()
400 400 if action == 'create':
401 401 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
402 402 elif action == 'merge':
403 403 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
404 404 elif action == 'close':
405 405 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
406 406 elif action == 'review_status_change':
407 407 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
408 408 elif action == 'update':
409 409 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
410 410 else:
411 411 return
412 412
413 413 trigger_hook(
414 414 username=user.username,
415 415 repo_name=pull_request.target_repo.repo_name,
416 416 repo_alias=target_scm.alias,
417 417 pull_request=pull_request)
418 418
419 419 def _get_commit_ids(self, pull_request):
420 420 """
421 421 Return the commit ids of the merged pull request.
422 422
423 423 This method does not yet deal correctly with the lack of autoupdates
424 424 or with implicit target updates.
425 425 For example: if a commit in the source repo is already in the target, it
426 426 will be reported anyway.
427 427 """
428 428 merge_rev = pull_request.merge_rev
429 429 if merge_rev is None:
430 430 raise ValueError('This pull request was not merged yet')
431 431
432 432 commit_ids = list(pull_request.revisions)
433 433 if merge_rev not in commit_ids:
434 434 commit_ids.append(merge_rev)
435 435
436 436 return commit_ids
437 437
438 438 def merge(self, pull_request, user, extras):
439 439 log.debug("Merging pull request %s", pull_request.pull_request_id)
440 440 merge_state = self._merge_pull_request(pull_request, user, extras)
441 441 if merge_state.executed:
442 442 log.debug(
443 443 "Merge was successful, updating the pull request comments.")
444 444 self._comment_and_close_pr(pull_request, user, merge_state)
445 445 self._log_action('user_merged_pull_request', user, pull_request)
446 446 else:
447 447 log.warn("Merge failed, not updating the pull request.")
448 448 return merge_state
449 449
450 450 def _merge_pull_request(self, pull_request, user, extras):
451 451 target_vcs = pull_request.target_repo.scm_instance()
452 452 source_vcs = pull_request.source_repo.scm_instance()
453 453 target_ref = self._refresh_reference(
454 454 pull_request.target_ref_parts, target_vcs)
455 455
456 456 message = _(
457 457 'Merge pull request #%(pr_id)s from '
458 458 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
459 459 'pr_id': pull_request.pull_request_id,
460 460 'source_repo': source_vcs.name,
461 461 'source_ref_name': pull_request.source_ref_parts.name,
462 462 'pr_title': pull_request.title
463 463 }
464 464
465 465 workspace_id = self._workspace_id(pull_request)
466 466 use_rebase = self._use_rebase_for_merging(pull_request)
467 467
468 468 callback_daemon, extras = prepare_callback_daemon(
469 469 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
470 470 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
471 471
472 472 with callback_daemon:
473 473 # TODO: johbo: Implement a clean way to run a config_override
474 474 # for a single call.
475 475 target_vcs.config.set(
476 476 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
477 477 merge_state = target_vcs.merge(
478 478 target_ref, source_vcs, pull_request.source_ref_parts,
479 479 workspace_id, user_name=user.username,
480 480 user_email=user.email, message=message, use_rebase=use_rebase)
481 481 return merge_state
482 482
483 483 def _comment_and_close_pr(self, pull_request, user, merge_state):
484 484 pull_request.merge_rev = merge_state.merge_ref.commit_id
485 485 pull_request.updated_on = datetime.datetime.now()
486 486
487 487 ChangesetCommentsModel().create(
488 488 text=unicode(_('Pull request merged and closed')),
489 489 repo=pull_request.target_repo.repo_id,
490 490 user=user.user_id,
491 491 pull_request=pull_request.pull_request_id,
492 492 f_path=None,
493 493 line_no=None,
494 494 closing_pr=True
495 495 )
496 496
497 497 Session().add(pull_request)
498 498 Session().flush()
499 499 # TODO: paris: replace invalidation with less radical solution
500 500 ScmModel().mark_for_invalidation(
501 501 pull_request.target_repo.repo_name)
502 502 self._trigger_pull_request_hook(pull_request, user, 'merge')
503 503
504 504 def has_valid_update_type(self, pull_request):
505 505 source_ref_type = pull_request.source_ref_parts.type
506 506 return source_ref_type in ['book', 'branch', 'tag']
507 507
508 508 def update_commits(self, pull_request):
509 509 """
510 510 Get the updated list of commits for the pull request
511 511 and return the new pull request version and the list
512 512 of commits processed by this update action
513 513 """
514 514 pull_request = self.__get_pull_request(pull_request)
515 515 source_ref_type = pull_request.source_ref_parts.type
516 516 source_ref_name = pull_request.source_ref_parts.name
517 517 source_ref_id = pull_request.source_ref_parts.commit_id
518 518
519 519 if not self.has_valid_update_type(pull_request):
520 520 log.debug(
521 521 "Skipping update of pull request %s due to ref type: %s",
522 522 pull_request, source_ref_type)
523 523 return UpdateResponse(
524 524 success=False,
525 525 reason=UpdateFailureReason.WRONG_REF_TPYE,
526 526 old=pull_request, new=None, changes=None)
527 527
528 528 source_repo = pull_request.source_repo.scm_instance()
529 529 try:
530 530 source_commit = source_repo.get_commit(commit_id=source_ref_name)
531 531 except CommitDoesNotExistError:
532 532 return UpdateResponse(
533 533 success=False,
534 534 reason=UpdateFailureReason.MISSING_SOURCE_REF,
535 535 old=pull_request, new=None, changes=None)
536 536
537 537 if source_ref_id == source_commit.raw_id:
538 538 log.debug("Nothing changed in pull request %s", pull_request)
539 539 return UpdateResponse(
540 540 success=True,
541 541 reason=UpdateFailureReason.NO_CHANGE,
542 542 old=pull_request, new=None, changes=None)
543 543
544 544 # Finally there is a need for an update
545 545 pull_request_version = self._create_version_from_snapshot(pull_request)
546 546 self._link_comments_to_version(pull_request_version)
547 547
548 548 target_ref_type = pull_request.target_ref_parts.type
549 549 target_ref_name = pull_request.target_ref_parts.name
550 550 target_ref_id = pull_request.target_ref_parts.commit_id
551 551 target_repo = pull_request.target_repo.scm_instance()
552 552
553 553 try:
554 554 if target_ref_type in ('tag', 'branch', 'book'):
555 555 target_commit = target_repo.get_commit(target_ref_name)
556 556 else:
557 557 target_commit = target_repo.get_commit(target_ref_id)
558 558 except CommitDoesNotExistError:
559 559 return UpdateResponse(
560 560 success=False,
561 561 reason=UpdateFailureReason.MISSING_TARGET_REF,
562 562 old=pull_request, new=None, changes=None)
563 563
564 564 # re-compute commit ids
565 565 old_commit_ids = set(pull_request.revisions)
566 566 pre_load = ["author", "branch", "date", "message"]
567 567 commit_ranges = target_repo.compare(
568 568 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
569 569 pre_load=pre_load)
570 570
571 571 ancestor = target_repo.get_common_ancestor(
572 572 target_commit.raw_id, source_commit.raw_id, source_repo)
573 573
574 574 pull_request.source_ref = '%s:%s:%s' % (
575 575 source_ref_type, source_ref_name, source_commit.raw_id)
576 576 pull_request.target_ref = '%s:%s:%s' % (
577 577 target_ref_type, target_ref_name, ancestor)
578 578 pull_request.revisions = [
579 579 commit.raw_id for commit in reversed(commit_ranges)]
580 580 pull_request.updated_on = datetime.datetime.now()
581 581 Session().add(pull_request)
582 582 new_commit_ids = set(pull_request.revisions)
583 583
584 584 changes = self._calculate_commit_id_changes(
585 585 old_commit_ids, new_commit_ids)
586 586
587 587 old_diff_data, new_diff_data = self._generate_update_diffs(
588 588 pull_request, pull_request_version)
589 589
590 590 ChangesetCommentsModel().outdate_comments(
591 591 pull_request, old_diff_data=old_diff_data,
592 592 new_diff_data=new_diff_data)
593 593
594 594 file_changes = self._calculate_file_changes(
595 595 old_diff_data, new_diff_data)
596 596
597 597 # Add an automatic comment to the pull request
598 598 update_comment = ChangesetCommentsModel().create(
599 599 text=self._render_update_message(changes, file_changes),
600 600 repo=pull_request.target_repo,
601 601 user=pull_request.author,
602 602 pull_request=pull_request,
603 603 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
604 604
605 605 # Update status to "Under Review" for added commits
606 606 for commit_id in changes.added:
607 607 ChangesetStatusModel().set_status(
608 608 repo=pull_request.source_repo,
609 609 status=ChangesetStatus.STATUS_UNDER_REVIEW,
610 610 comment=update_comment,
611 611 user=pull_request.author,
612 612 pull_request=pull_request,
613 613 revision=commit_id)
614 614
615 615 log.debug(
616 616 'Updated pull request %s, added_ids: %s, common_ids: %s, '
617 617 'removed_ids: %s', pull_request.pull_request_id,
618 618 changes.added, changes.common, changes.removed)
619 619 log.debug('Updated pull request with the following file changes: %s',
620 620 file_changes)
621 621
622 622 log.info(
623 623 "Updated pull request %s from commit %s to commit %s, "
624 624 "stored new version %s of this pull request.",
625 625 pull_request.pull_request_id, source_ref_id,
626 626 pull_request.source_ref_parts.commit_id,
627 627 pull_request_version.pull_request_version_id)
628 628 Session().commit()
629 629 self._trigger_pull_request_hook(pull_request, pull_request.author,
630 630 'update')
631 631
632 632 return UpdateResponse(
633 633 success=True, reason=UpdateFailureReason.NONE,
634 634 old=pull_request, new=pull_request_version, changes=changes)
635 635
636 636 def _create_version_from_snapshot(self, pull_request):
637 637 version = PullRequestVersion()
638 638 version.title = pull_request.title
639 639 version.description = pull_request.description
640 640 version.status = pull_request.status
641 641 version.created_on = pull_request.created_on
642 642 version.updated_on = pull_request.updated_on
643 643 version.user_id = pull_request.user_id
644 644 version.source_repo = pull_request.source_repo
645 645 version.source_ref = pull_request.source_ref
646 646 version.target_repo = pull_request.target_repo
647 647 version.target_ref = pull_request.target_ref
648 648
649 649 version._last_merge_source_rev = pull_request._last_merge_source_rev
650 650 version._last_merge_target_rev = pull_request._last_merge_target_rev
651 651 version._last_merge_status = pull_request._last_merge_status
652 652 version.shadow_merge_ref = pull_request.shadow_merge_ref
653 653 version.merge_rev = pull_request.merge_rev
654 654
655 655 version.revisions = pull_request.revisions
656 656 version.pull_request = pull_request
657 657 Session().add(version)
658 658 Session().flush()
659 659
660 660 return version
661 661
662 662 def _generate_update_diffs(self, pull_request, pull_request_version):
663 663 diff_context = (
664 664 self.DIFF_CONTEXT +
665 665 ChangesetCommentsModel.needed_extra_diff_context())
666 666 old_diff = self._get_diff_from_pr_or_version(
667 667 pull_request_version, context=diff_context)
668 668 new_diff = self._get_diff_from_pr_or_version(
669 669 pull_request, context=diff_context)
670 670
671 671 old_diff_data = diffs.DiffProcessor(old_diff)
672 672 old_diff_data.prepare()
673 673 new_diff_data = diffs.DiffProcessor(new_diff)
674 674 new_diff_data.prepare()
675 675
676 676 return old_diff_data, new_diff_data
677 677
678 678 def _link_comments_to_version(self, pull_request_version):
679 679 """
680 680 Link all unlinked comments of this pull request to the given version.
681 681
682 682 :param pull_request_version: The `PullRequestVersion` to which
683 683 the comments shall be linked.
684 684
685 685 """
686 686 pull_request = pull_request_version.pull_request
687 687 comments = ChangesetComment.query().filter(
688 688 # TODO: johbo: Should we query for the repo at all here?
689 689 # Pending decision on how comments of PRs are to be related
690 690 # to either the source repo, the target repo or no repo at all.
691 691 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
692 692 ChangesetComment.pull_request == pull_request,
693 693 ChangesetComment.pull_request_version == None)
694 694
695 695 # TODO: johbo: Find out why this breaks if it is done in a bulk
696 696 # operation.
697 697 for comment in comments:
698 698 comment.pull_request_version_id = (
699 699 pull_request_version.pull_request_version_id)
700 700 Session().add(comment)
701 701
702 702 def _calculate_commit_id_changes(self, old_ids, new_ids):
703 703 added = new_ids.difference(old_ids)
704 704 common = old_ids.intersection(new_ids)
705 705 removed = old_ids.difference(new_ids)
706 706 return ChangeTuple(added, common, removed)
707 707
708 708 def _calculate_file_changes(self, old_diff_data, new_diff_data):
709 709
710 710 old_files = OrderedDict()
711 711 for diff_data in old_diff_data.parsed_diff:
712 712 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
713 713
714 714 added_files = []
715 715 modified_files = []
716 716 removed_files = []
717 717 for diff_data in new_diff_data.parsed_diff:
718 718 new_filename = diff_data['filename']
719 719 new_hash = md5_safe(diff_data['raw_diff'])
720 720
721 721 old_hash = old_files.get(new_filename)
722 722 if not old_hash:
723 723 # file is not present in old diff, means it's added
724 724 added_files.append(new_filename)
725 725 else:
726 726 if new_hash != old_hash:
727 727 modified_files.append(new_filename)
728 728 # now remove a file from old, since we have seen it already
729 729 del old_files[new_filename]
730 730
731 731 # removed files are those present in old but not in NEW; since we
732 732 # delete old entries that also appear in the new diff, any left-overs
733 733 # are the removed files
734 734 removed_files.extend(old_files.keys())
735 735
736 736 return FileChangeTuple(added_files, modified_files, removed_files)
737 737
738 738 def _render_update_message(self, changes, file_changes):
739 739 """
740 740 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
741 741 so it always looks the same regardless of which default
742 742 renderer the system is using.
743 743
744 744 :param changes: changes named tuple
745 745 :param file_changes: file changes named tuple
746 746
747 747 """
748 748 new_status = ChangesetStatus.get_status_lbl(
749 749 ChangesetStatus.STATUS_UNDER_REVIEW)
750 750
751 751 changed_files = (
752 752 file_changes.added + file_changes.modified + file_changes.removed)
753 753
754 754 params = {
755 755 'under_review_label': new_status,
756 756 'added_commits': changes.added,
757 757 'removed_commits': changes.removed,
758 758 'changed_files': changed_files,
759 759 'added_files': file_changes.added,
760 760 'modified_files': file_changes.modified,
761 761 'removed_files': file_changes.removed,
762 762 }
763 763 renderer = RstTemplateRenderer()
764 764 return renderer.render('pull_request_update.mako', **params)
765 765
766 766 def edit(self, pull_request, title, description):
767 767 pull_request = self.__get_pull_request(pull_request)
768 768 if pull_request.is_closed():
769 769 raise ValueError('This pull request is closed')
770 770 if title:
771 771 pull_request.title = title
772 772 pull_request.description = description
773 773 pull_request.updated_on = datetime.datetime.now()
774 774 Session().add(pull_request)
775 775
776 776 def update_reviewers(self, pull_request, reviewer_data):
777 777 """
778 778 Update the reviewers in the pull request
779 779
780 780 :param pull_request: the pr to update
781 781 :param reviewer_data: list of tuples [(user, ['reason1', 'reason2'])]
782 782 """
783 783
784 784 reviewers_reasons = {}
785 785 for user_id, reasons in reviewer_data:
786 786 if isinstance(user_id, (int, basestring)):
787 787 user_id = self._get_user(user_id).user_id
788 788 reviewers_reasons[user_id] = reasons
789 789
790 790 reviewers_ids = set(reviewers_reasons.keys())
791 791 pull_request = self.__get_pull_request(pull_request)
792 792 current_reviewers = PullRequestReviewers.query()\
793 793 .filter(PullRequestReviewers.pull_request ==
794 794 pull_request).all()
795 795 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
796 796
797 797 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
798 798 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
799 799
800 800 log.debug("Adding %s reviewers", ids_to_add)
801 801 log.debug("Removing %s reviewers", ids_to_remove)
802 802 changed = False
803 803 for uid in ids_to_add:
804 804 changed = True
805 805 _usr = self._get_user(uid)
806 806 reasons = reviewers_reasons[uid]
807 807 reviewer = PullRequestReviewers(_usr, pull_request, reasons)
808 808 Session().add(reviewer)
809 809
810 810 self.notify_reviewers(pull_request, ids_to_add)
811 811
812 812 for uid in ids_to_remove:
813 813 changed = True
814 814 reviewer = PullRequestReviewers.query()\
815 815 .filter(PullRequestReviewers.user_id == uid,
816 816 PullRequestReviewers.pull_request == pull_request)\
817 817 .scalar()
818 818 if reviewer:
819 819 Session().delete(reviewer)
820 820 if changed:
821 821 pull_request.updated_on = datetime.datetime.now()
822 822 Session().add(pull_request)
823 823
824 824 return ids_to_add, ids_to_remove
825 825
826 826 def get_url(self, pull_request):
827 827 return h.url('pullrequest_show',
828 828 repo_name=safe_str(pull_request.target_repo.repo_name),
829 829 pull_request_id=pull_request.pull_request_id,
830 830 qualified=True)
831 831
832 832 def get_shadow_clone_url(self, pull_request):
833 833 """
834 834 Returns qualified url pointing to the shadow repository. If this pull
835 835 request is closed there is no shadow repository and ``None`` will be
836 836 returned.
837 837 """
838 838 if pull_request.is_closed():
839 839 return None
840 840 else:
841 841 pr_url = urllib.unquote(self.get_url(pull_request))
842 842 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
843 843
844 844 def notify_reviewers(self, pull_request, reviewers_ids):
845 845 # notification to reviewers
846 846 if not reviewers_ids:
847 847 return
848 848
849 849 pull_request_obj = pull_request
850 850 # get the current participants of this pull request
851 851 recipients = reviewers_ids
852 852 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
853 853
854 854 pr_source_repo = pull_request_obj.source_repo
855 855 pr_target_repo = pull_request_obj.target_repo
856 856
857 857 pr_url = h.url(
858 858 'pullrequest_show',
859 859 repo_name=pr_target_repo.repo_name,
860 860 pull_request_id=pull_request_obj.pull_request_id,
861 861 qualified=True,)
862 862
863 863 # set some variables for email notification
864 864 pr_target_repo_url = h.url(
865 865 'summary_home',
866 866 repo_name=pr_target_repo.repo_name,
867 867 qualified=True)
868 868
869 869 pr_source_repo_url = h.url(
870 870 'summary_home',
871 871 repo_name=pr_source_repo.repo_name,
872 872 qualified=True)
873 873
874 874 # pull request specifics
875 875 pull_request_commits = [
876 876 (x.raw_id, x.message)
877 877 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
878 878
879 879 kwargs = {
880 880 'user': pull_request.author,
881 881 'pull_request': pull_request_obj,
882 882 'pull_request_commits': pull_request_commits,
883 883
884 884 'pull_request_target_repo': pr_target_repo,
885 885 'pull_request_target_repo_url': pr_target_repo_url,
886 886
887 887 'pull_request_source_repo': pr_source_repo,
888 888 'pull_request_source_repo_url': pr_source_repo_url,
889 889
890 890 'pull_request_url': pr_url,
891 891 }
892 892
893 893 # pre-generate the subject for notification itself
894 894 (subject,
895 895 _h, _e, # we don't care about those
896 896 body_plaintext) = EmailNotificationModel().render_email(
897 897 notification_type, **kwargs)
898 898
899 899 # create notification objects, and emails
900 900 NotificationModel().create(
901 901 created_by=pull_request.author,
902 902 notification_subject=subject,
903 903 notification_body=body_plaintext,
904 904 notification_type=notification_type,
905 905 recipients=recipients,
906 906 email_kwargs=kwargs,
907 907 )
908 908
909 909 def delete(self, pull_request):
910 910 pull_request = self.__get_pull_request(pull_request)
911 911 self._cleanup_merge_workspace(pull_request)
912 912 Session().delete(pull_request)
913 913
914 914 def close_pull_request(self, pull_request, user):
915 915 pull_request = self.__get_pull_request(pull_request)
916 916 self._cleanup_merge_workspace(pull_request)
917 917 pull_request.status = PullRequest.STATUS_CLOSED
918 918 pull_request.updated_on = datetime.datetime.now()
919 919 Session().add(pull_request)
920 920 self._trigger_pull_request_hook(
921 921 pull_request, pull_request.author, 'close')
922 922 self._log_action('user_closed_pull_request', user, pull_request)
923 923
924 924 def close_pull_request_with_comment(self, pull_request, user, repo,
925 925 message=None):
926 926 status = ChangesetStatus.STATUS_REJECTED
927 927
928 928 if not message:
929 929 message = (
930 930 _('Status change %(transition_icon)s %(status)s') % {
931 931 'transition_icon': '>',
932 932 'status': ChangesetStatus.get_status_lbl(status)})
933 933
934 934 internal_message = _('Closing with') + ' ' + message
935 935
936 936 comm = ChangesetCommentsModel().create(
937 937 text=internal_message,
938 938 repo=repo.repo_id,
939 939 user=user.user_id,
940 940 pull_request=pull_request.pull_request_id,
941 941 f_path=None,
942 942 line_no=None,
943 943 status_change=ChangesetStatus.get_status_lbl(status),
944 944 status_change_type=status,
945 945 closing_pr=True
946 946 )
947 947
948 948 ChangesetStatusModel().set_status(
949 949 repo.repo_id,
950 950 status,
951 951 user.user_id,
952 952 comm,
953 953 pull_request=pull_request.pull_request_id
954 954 )
955 955 Session().flush()
956 956
957 957 PullRequestModel().close_pull_request(
958 958 pull_request.pull_request_id, user)
959 959
960 960 def merge_status(self, pull_request):
961 961 if not self._is_merge_enabled(pull_request):
962 962 return False, _('Server-side pull request merging is disabled.')
963 963 if pull_request.is_closed():
964 964 return False, _('This pull request is closed.')
965 965 merge_possible, msg = self._check_repo_requirements(
966 966 target=pull_request.target_repo, source=pull_request.source_repo)
967 967 if not merge_possible:
968 968 return merge_possible, msg
969 969
970 970 try:
971 971 resp = self._try_merge(pull_request)
972 972 log.debug("Merge response: %s", resp)
973 973 status = resp.possible, self.merge_status_message(
974 974 resp.failure_reason)
975 975 except NotImplementedError:
976 976 status = False, _('Pull request merging is not supported.')
977 977
978 978 return status
979 979
980 980 def _check_repo_requirements(self, target, source):
981 981 """
982 982 Check if `target` and `source` have compatible requirements.
983 983
984 984 Currently this is just checking for largefiles.
985 985 """
986 986 target_has_largefiles = self._has_largefiles(target)
987 987 source_has_largefiles = self._has_largefiles(source)
988 988 merge_possible = True
989 989 message = u''
990 990
991 991 if target_has_largefiles != source_has_largefiles:
992 992 merge_possible = False
993 993 if source_has_largefiles:
994 994 message = _(
995 995 'Target repository large files support is disabled.')
996 996 else:
997 997 message = _(
998 998 'Source repository large files support is disabled.')
999 999
1000 1000 return merge_possible, message
1001 1001
1002 1002 def _has_largefiles(self, repo):
1003 1003 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1004 1004 'extensions', 'largefiles')
1005 1005 return largefiles_ui and largefiles_ui[0].active
1006 1006
1007 1007 def _try_merge(self, pull_request):
1008 1008 """
1009 1009 Try to merge the pull request and return the merge status.
1010 1010 """
1011 1011 log.debug(
1012 1012 "Trying out if the pull request %s can be merged.",
1013 1013 pull_request.pull_request_id)
1014 1014 target_vcs = pull_request.target_repo.scm_instance()
1015 1015
1016 1016 # Refresh the target reference.
1017 1017 try:
1018 1018 target_ref = self._refresh_reference(
1019 1019 pull_request.target_ref_parts, target_vcs)
1020 1020 except CommitDoesNotExistError:
1021 1021 merge_state = MergeResponse(
1022 1022 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1023 1023 return merge_state
1024 1024
1025 1025 target_locked = pull_request.target_repo.locked
1026 1026 if target_locked and target_locked[0]:
1027 1027 log.debug("The target repository is locked.")
1028 1028 merge_state = MergeResponse(
1029 1029 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1030 1030 elif self._needs_merge_state_refresh(pull_request, target_ref):
1031 1031 log.debug("Refreshing the merge status of the repository.")
1032 1032 merge_state = self._refresh_merge_state(
1033 1033 pull_request, target_vcs, target_ref)
1034 1034 else:
1035 1035 possible = pull_request.\
1036 1036 _last_merge_status == MergeFailureReason.NONE
1037 1037 merge_state = MergeResponse(
1038 1038 possible, False, None, pull_request._last_merge_status)
1039 1039
1040 1040 return merge_state
1041 1041
1042 1042 def _refresh_reference(self, reference, vcs_repository):
1043 1043 if reference.type in ('branch', 'book'):
1044 1044 name_or_id = reference.name
1045 1045 else:
1046 1046 name_or_id = reference.commit_id
1047 1047 refreshed_commit = vcs_repository.get_commit(name_or_id)
1048 1048 refreshed_reference = Reference(
1049 1049 reference.type, reference.name, refreshed_commit.raw_id)
1050 1050 return refreshed_reference
1051 1051
1052 1052 def _needs_merge_state_refresh(self, pull_request, target_reference):
1053 1053 return not(
1054 1054 pull_request.revisions and
1055 1055 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1056 1056 target_reference.commit_id == pull_request._last_merge_target_rev)
1057 1057
1058 1058 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1059 1059 workspace_id = self._workspace_id(pull_request)
1060 1060 source_vcs = pull_request.source_repo.scm_instance()
1061 1061 use_rebase = self._use_rebase_for_merging(pull_request)
1062 1062 merge_state = target_vcs.merge(
1063 1063 target_reference, source_vcs, pull_request.source_ref_parts,
1064 1064 workspace_id, dry_run=True, use_rebase=use_rebase)
1065 1065
1066 1066 # Do not store the response if there was an unknown error.
1067 1067 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1068 1068 pull_request._last_merge_source_rev = \
1069 1069 pull_request.source_ref_parts.commit_id
1070 1070 pull_request._last_merge_target_rev = target_reference.commit_id
1071 1071 pull_request._last_merge_status = merge_state.failure_reason
1072 1072 pull_request.shadow_merge_ref = merge_state.merge_ref
1073 1073 Session().add(pull_request)
1074 1074 Session().commit()
1075 1075
1076 1076 return merge_state
1077 1077
1078 1078 def _workspace_id(self, pull_request):
1079 1079 workspace_id = 'pr-%s' % pull_request.pull_request_id
1080 1080 return workspace_id
1081 1081
1082 1082 def merge_status_message(self, status_code):
1083 1083 """
1084 1084 Return a human friendly error message for the given merge status code.
1085 1085 """
1086 1086 return self.MERGE_STATUS_MESSAGES[status_code]
1087 1087
1088 1088 def generate_repo_data(self, repo, commit_id=None, branch=None,
1089 1089 bookmark=None):
1090 1090 all_refs, selected_ref = \
1091 1091 self._get_repo_pullrequest_sources(
1092 1092 repo.scm_instance(), commit_id=commit_id,
1093 1093 branch=branch, bookmark=bookmark)
1094 1094
1095 1095 refs_select2 = []
1096 1096 for element in all_refs:
1097 1097 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1098 1098 refs_select2.append({'text': element[1], 'children': children})
1099 1099
1100 1100 return {
1101 1101 'user': {
1102 1102 'user_id': repo.user.user_id,
1103 1103 'username': repo.user.username,
1104 1104 'firstname': repo.user.firstname,
1105 1105 'lastname': repo.user.lastname,
1106 1106 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1107 1107 },
1108 1108 'description': h.chop_at_smart(repo.description, '\n'),
1109 1109 'refs': {
1110 1110 'all_refs': all_refs,
1111 1111 'selected_ref': selected_ref,
1112 1112 'select2_refs': refs_select2
1113 1113 }
1114 1114 }
1115 1115
1116 1116 def generate_pullrequest_title(self, source, source_ref, target):
1117 1117 return u'{source}#{at_ref} to {target}'.format(
1118 1118 source=source,
1119 1119 at_ref=source_ref,
1120 1120 target=target,
1121 1121 )
1122 1122
1123 1123 def _cleanup_merge_workspace(self, pull_request):
1124 1124 # Merging related cleanup
1125 1125 target_scm = pull_request.target_repo.scm_instance()
1126 1126 workspace_id = 'pr-%s' % pull_request.pull_request_id
1127 1127
1128 1128 try:
1129 1129 target_scm.cleanup_merge_workspace(workspace_id)
1130 1130 except NotImplementedError:
1131 1131 pass
1132 1132
1133 1133 def _get_repo_pullrequest_sources(
1134 1134 self, repo, commit_id=None, branch=None, bookmark=None):
1135 1135 """
1136 1136 Return a structure with repo's interesting commits, suitable for
1137 1137 the selectors in pullrequest controller
1138 1138
1139 1139 :param commit_id: a commit that must be in the list somehow
1140 1140 and selected by default
1141 1141 :param branch: a branch that must be in the list and selected
1142 1142 by default - even if closed
1143 1143 :param bookmark: a bookmark that must be in the list and selected
1144 1144 """
1145 1145
1146 1146 commit_id = safe_str(commit_id) if commit_id else None
1147 1147 branch = safe_str(branch) if branch else None
1148 1148 bookmark = safe_str(bookmark) if bookmark else None
1149 1149
1150 1150 selected = None
1151 1151
1152 1152 # order matters: first source that has commit_id in it will be selected
1153 1153 sources = []
1154 1154 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1155 1155 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1156 1156
1157 1157 if commit_id:
1158 1158 ref_commit = (h.short_id(commit_id), commit_id)
1159 1159 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1160 1160
1161 1161 sources.append(
1162 1162 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1163 1163 )
1164 1164
1165 1165 groups = []
1166 1166 for group_key, ref_list, group_name, match in sources:
1167 1167 group_refs = []
1168 1168 for ref_name, ref_id in ref_list:
1169 1169 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1170 1170 group_refs.append((ref_key, ref_name))
1171 1171
1172 1172 if not selected:
1173 1173 if set([commit_id, match]) & set([ref_id, ref_name]):
1174 1174 selected = ref_key
1175 1175
1176 1176 if group_refs:
1177 1177 groups.append((group_refs, group_name))
1178 1178
1179 1179 if not selected:
1180 1180 ref = commit_id or branch or bookmark
1181 1181 if ref:
1182 1182 raise CommitDoesNotExistError(
1183 1183 'No commit refs could be found matching: %s' % ref)
1184 1184 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1185 1185 selected = 'branch:%s:%s' % (
1186 1186 repo.DEFAULT_BRANCH_NAME,
1187 1187 repo.branches[repo.DEFAULT_BRANCH_NAME]
1188 1188 )
1189 1189 elif repo.commit_ids:
1190 1190 rev = repo.commit_ids[0]
1191 1191 selected = 'rev:%s:%s' % (rev, rev)
1192 1192 else:
1193 1193 raise EmptyRepositoryError()
1194 1194 return groups, selected
1195 1195
1196 1196 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1197 1197 pull_request = self.__get_pull_request(pull_request)
1198 1198 return self._get_diff_from_pr_or_version(pull_request, context=context)
1199 1199
1200 1200 def _get_diff_from_pr_or_version(self, pr_or_version, context):
1201 1201 source_repo = pr_or_version.source_repo
1202 1202
1203 1203 # we swap org/other ref since we run a simple diff on one repo
1204 1204 target_ref_id = pr_or_version.target_ref_parts.commit_id
1205 1205 source_ref_id = pr_or_version.source_ref_parts.commit_id
1206 1206 target_commit = source_repo.get_commit(
1207 1207 commit_id=safe_str(target_ref_id))
1208 1208 source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
1209 1209 vcs_repo = source_repo.scm_instance()
1210 1210
1211 1211 # TODO: johbo: In the context of an update, we cannot reach
1212 1212 # the old commit anymore with our normal mechanisms. It needs
1213 1213 # some sort of special support in the vcs layer to avoid this
1214 1214 # workaround.
1215 1215 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1216 1216 vcs_repo.alias == 'git'):
1217 1217 source_commit.raw_id = safe_str(source_ref_id)
1218 1218
1219 1219 log.debug('calculating diff between '
1220 1220 'source_ref:%s and target_ref:%s for repo `%s`',
1221 1221 target_ref_id, source_ref_id,
1222 1222 safe_unicode(vcs_repo.path))
1223 1223
1224 1224 vcs_diff = vcs_repo.get_diff(
1225 1225 commit1=target_commit, commit2=source_commit, context=context)
1226 1226 return vcs_diff
1227 1227
1228 1228 def _is_merge_enabled(self, pull_request):
1229 1229 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1230 1230 settings = settings_model.get_general_settings()
1231 1231 return settings.get('rhodecode_pr_merge_enabled', False)
1232 1232
1233 1233 def _use_rebase_for_merging(self, pull_request):
1234 1234 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1235 1235 settings = settings_model.get_general_settings()
1236 1236 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1237 1237
1238 1238 def _log_action(self, action, user, pull_request):
1239 1239 action_logger(
1240 1240 user,
1241 1241 '{action}:{pr_id}'.format(
1242 1242 action=action, pr_id=pull_request.pull_request_id),
1243 1243 pull_request.target_repo)
1244 1244
1245 1245
1246 1246 ChangeTuple = namedtuple('ChangeTuple',
1247 1247 ['added', 'common', 'removed'])
1248 1248
1249 1249 FileChangeTuple = namedtuple('FileChangeTuple',
1250 1250 ['added', 'modified', 'removed'])
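For reference, here is a minimal, self-contained sketch (not RhodeCode code) of the lookup that PullRequestModel.merge_status_message performs above: a stored merge failure reason code resolves to a user-facing message, and the deprecated generic reason keeps resolving for rows persisted before the rename. The class and numeric values below are hypothetical stand-ins; the real enumeration lives in rhodecode.lib.vcs.backends.base.

class MergeFailureReason(object):
    # Hypothetical values for illustration only.
    NONE = 0
    _DEPRECATED_MISSING_COMMIT = 99

MERGE_STATUS_MESSAGES = {
    MergeFailureReason.NONE:
        'This pull request can be automatically merged.',
    MergeFailureReason._DEPRECATED_MISSING_COMMIT:
        'This pull request cannot be merged because the target or the '
        'source reference is missing.',
}

def merge_status_message(status_code):
    # Same dictionary lookup as PullRequestModel.merge_status_message above.
    return MERGE_STATUS_MESSAGES[status_code]

print(merge_status_message(MergeFailureReason._DEPRECATED_MISSING_COMMIT))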