vcs: use a real two part name for merge operation....
marcink -
r3040:dcaa9d67 stable
@@ -1,1749 +1,1749 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24
25 25 import collections
26 26 import datetime
27 27 import fnmatch
28 28 import itertools
29 29 import logging
30 30 import os
31 31 import re
32 32 import time
33 33 import warnings
34 34 import shutil
35 35
36 36 from zope.cachedescriptors.property import Lazy as LazyProperty
37 37
38 38 from rhodecode.lib.utils2 import safe_str, safe_unicode
39 39 from rhodecode.lib.vcs import connection
40 40 from rhodecode.lib.vcs.utils import author_name, author_email
41 41 from rhodecode.lib.vcs.conf import settings
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
44 44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
45 45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
46 46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
47 47 RepositoryError)
48 48
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
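# Note (descriptive comment, not part of the upstream module): the constants
# below are Python 2 octal literals; 0100644 and 0100755 are the regular-file
# and executable-file blob modes as used by git/mercurial.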
53 53 FILEMODE_DEFAULT = 0100644
54 54 FILEMODE_EXECUTABLE = 0100755
55 55
56 56 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
57 57 MergeResponse = collections.namedtuple(
58 58 'MergeResponse',
59 59 ('possible', 'executed', 'merge_ref', 'failure_reason'))
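
# Illustrative sketch (not part of the upstream module): constructing the two
# named tuples defined above; the commit id here is a made-up placeholder.
#
#   ref = Reference('branch', 'master', 'a' * 40)
#   resp = MergeResponse(
#       possible=True, executed=False, merge_ref=ref,
#       failure_reason=MergeFailureReason.NONE)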
60 60
61 61
62 62 class MergeFailureReason(object):
63 63 """
64 64 Enumeration with all the reasons why the server side merge could fail.
65 65
66 66 DO NOT change the number of the reasons, as they may be stored in the
67 67 database.
68 68
69 69 Changing the name of a reason is acceptable and encouraged to deprecate old
70 70 reasons.
71 71 """
72 72
73 73 # Everything went well.
74 74 NONE = 0
75 75
76 76 # An unexpected exception was raised. Check the logs for more details.
77 77 UNKNOWN = 1
78 78
79 79 # The merge was not successful, there are conflicts.
80 80 MERGE_FAILED = 2
81 81
82 82 # The merge succeeded but we could not push it to the target repository.
83 83 PUSH_FAILED = 3
84 84
85 85 # The specified target is not a head in the target repository.
86 86 TARGET_IS_NOT_HEAD = 4
87 87
88 88 # The source repository contains more branches than the target. Pushing
89 89 # the merge will create additional branches in the target.
90 90 HG_SOURCE_HAS_MORE_BRANCHES = 5
91 91
92 92 # The target reference has multiple heads, which makes it impossible to
93 93 # correctly identify the target location. This can only happen for
94 94 # mercurial branches.
95 95 HG_TARGET_HAS_MULTIPLE_HEADS = 6
96 96
97 97 # The target repository is locked
98 98 TARGET_IS_LOCKED = 7
99 99
100 100 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
101 101 # An involved commit could not be found.
102 102 _DEPRECATED_MISSING_COMMIT = 8
103 103
104 104 # The target repo reference is missing.
105 105 MISSING_TARGET_REF = 9
106 106
107 107 # The source repo reference is missing.
108 108 MISSING_SOURCE_REF = 10
109 109
110 110 # The merge was not successful, there are conflicts related to sub
111 111 # repositories.
112 112 SUBREPO_MERGE_FAILED = 11
113 113
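# Illustrative sketch (an assumption, not part of the upstream module): since
# the numeric values above may be stored in the database, a reverse lookup
# from a stored value back to the reason name can help when reading logs:
#
#   MERGE_FAILURE_NAMES = dict(
#       (value, name) for name, value in vars(MergeFailureReason).items()
#       if isinstance(value, int))
#   MERGE_FAILURE_NAMES[2]  # -> 'MERGE_FAILED'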
114 114
115 115 class UpdateFailureReason(object):
116 116 """
117 117 Enumeration with all the reasons why the pull request update could fail.
118 118
119 119 DO NOT change the number of the reasons, as they may be stored in the
120 120 database.
121 121
122 122 Changing the name of a reason is acceptable and encouraged to deprecate old
123 123 reasons.
124 124 """
125 125
126 126 # Everything went well.
127 127 NONE = 0
128 128
129 129 # An unexpected exception was raised. Check the logs for more details.
130 130 UNKNOWN = 1
131 131
132 132 # The pull request is up to date.
133 133 NO_CHANGE = 2
134 134
135 135 # The pull request has a reference type that is not supported for update.
136 136 WRONG_REF_TYPE = 3
137 137
138 138 # Update failed because the target reference is missing.
139 139 MISSING_TARGET_REF = 4
140 140
141 141 # Update failed because the source reference is missing.
142 142 MISSING_SOURCE_REF = 5
143 143
144 144
145 145 class BaseRepository(object):
146 146 """
147 147 Base Repository for final backends
148 148
149 149 .. attribute:: DEFAULT_BRANCH_NAME
150 150
151 151 name of the default branch (e.g. "trunk" for svn, "master" for git, etc.)
152 152
153 153 .. attribute:: commit_ids
154 154
155 155 list of all available commit ids, in ascending order
156 156
157 157 .. attribute:: path
158 158
159 159 absolute path to the repository
160 160
161 161 .. attribute:: bookmarks
162 162
163 163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
164 164 there are no bookmarks or the backend implementation does not support
165 165 bookmarks.
166 166
167 167 .. attribute:: tags
168 168
169 169 Mapping from name to :term:`Commit ID` of the tag.
170 170
171 171 """
172 172
173 173 DEFAULT_BRANCH_NAME = None
174 174 DEFAULT_CONTACT = u"Unknown"
175 175 DEFAULT_DESCRIPTION = u"unknown"
176 176 EMPTY_COMMIT_ID = '0' * 40
177 177
178 178 path = None
179 179
180 180 def __init__(self, repo_path, config=None, create=False, **kwargs):
181 181 """
182 182 Initializes the repository. Raises RepositoryError if the repository
183 183 could not be found at the given ``repo_path``, or if a directory at
184 184 ``repo_path`` exists and ``create`` is set to True.
185 185
186 186 :param repo_path: local path of the repository
187 187 :param config: repository configuration
188 188 :param create=False: if set to True, will try to create the repository.
189 189 :param src_url=None: if set, should be proper url from which repository
190 190 would be cloned; requires ``create`` parameter to be set to True -
191 191 raises RepositoryError if src_url is set and create evaluates to
192 192 False
193 193 """
194 194 raise NotImplementedError
195 195
196 196 def __repr__(self):
197 197 return '<%s at %s>' % (self.__class__.__name__, self.path)
198 198
199 199 def __len__(self):
200 200 return self.count()
201 201
202 202 def __eq__(self, other):
203 203 same_instance = isinstance(other, self.__class__)
204 204 return same_instance and other.path == self.path
205 205
206 206 def __ne__(self, other):
207 207 return not self.__eq__(other)
208 208
209 209 def get_create_shadow_cache_pr_path(self, db_repo):
210 210 path = db_repo.cached_diffs_dir
211 211 if not os.path.exists(path):
212 212 os.makedirs(path, 0755)
213 213 return path
214 214
215 215 @classmethod
216 216 def get_default_config(cls, default=None):
217 217 config = Config()
218 218 if default and isinstance(default, list):
219 219 for section, key, val in default:
220 220 config.set(section, key, val)
221 221 return config
222 222
223 223 @LazyProperty
224 224 def _remote(self):
225 225 raise NotImplementedError
226 226
227 227 @LazyProperty
228 228 def EMPTY_COMMIT(self):
229 229 return EmptyCommit(self.EMPTY_COMMIT_ID)
230 230
231 231 @LazyProperty
232 232 def alias(self):
233 233 for k, v in settings.BACKENDS.items():
234 234 if v.split('.')[-1] == str(self.__class__.__name__):
235 235 return k
236 236
237 237 @LazyProperty
238 238 def name(self):
239 239 return safe_unicode(os.path.basename(self.path))
240 240
241 241 @LazyProperty
242 242 def description(self):
243 243 raise NotImplementedError
244 244
245 245 def refs(self):
246 246 """
247 247 returns a `dict` with branches, branches_closed, tags, and bookmarks
248 248 for this repository
249 249 """
250 250 return dict(
251 251 branches=self.branches,
252 252 branches_closed=self.branches_closed,
253 253 tags=self.tags,
254 254 bookmarks=self.bookmarks
255 255 )
256 256
257 257 @LazyProperty
258 258 def branches(self):
259 259 """
260 260 A `dict` which maps branch names to commit ids.
261 261 """
262 262 raise NotImplementedError
263 263
264 264 @LazyProperty
265 265 def branches_closed(self):
266 266 """
267 267 A `dict` which maps closed branch names to commit ids.
268 268 """
269 269 raise NotImplementedError
270 270
271 271 @LazyProperty
272 272 def bookmarks(self):
273 273 """
274 274 A `dict` which maps bookmark names to commit ids.
275 275 """
276 276 raise NotImplementedError
277 277
278 278 @LazyProperty
279 279 def tags(self):
280 280 """
281 281 A `dict` which maps tag names to commit ids.
282 282 """
283 283 raise NotImplementedError
284 284
285 285 @LazyProperty
286 286 def size(self):
287 287 """
288 288 Returns combined size in bytes for all repository files
289 289 """
290 290 tip = self.get_commit()
291 291 return tip.size
292 292
293 293 def size_at_commit(self, commit_id):
294 294 commit = self.get_commit(commit_id)
295 295 return commit.size
296 296
297 297 def is_empty(self):
298 298 return not bool(self.commit_ids)
299 299
300 300 @staticmethod
301 301 def check_url(url, config):
302 302 """
303 303 Checks the given url and tries to verify that it points to a valid
304 304 repository.
305 305 """
306 306 raise NotImplementedError
307 307
308 308 @staticmethod
309 309 def is_valid_repository(path):
310 310 """
311 311 Check if given `path` contains a valid repository of this backend
312 312 """
313 313 raise NotImplementedError
314 314
315 315 # ==========================================================================
316 316 # COMMITS
317 317 # ==========================================================================
318 318
319 319 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
320 320 """
321 321 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
322 322 are both None, most recent commit is returned.
323 323
324 324 :param pre_load: Optional. List of commit attributes to load.
325 325
326 326 :raises ``EmptyRepositoryError``: if there are no commits
327 327 """
328 328 raise NotImplementedError
329 329
330 330 def __iter__(self):
331 331 for commit_id in self.commit_ids:
332 332 yield self.get_commit(commit_id=commit_id)
333 333
334 334 def get_commits(
335 335 self, start_id=None, end_id=None, start_date=None, end_date=None,
336 336 branch_name=None, show_hidden=False, pre_load=None):
337 337 """
338 338 Returns an iterator of `BaseCommit` objects from start to end.
339 339 This behaves like a list slice, i.e. `end` is not inclusive.
341 341
342 342 :param start_id: None or str, must be a valid commit id
343 343 :param end_id: None or str, must be a valid commit id
344 344 :param start_date:
345 345 :param end_date:
346 346 :param branch_name:
347 347 :param show_hidden:
348 348 :param pre_load:
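
        Example (illustrative; assumes ``repo`` is a concrete backend
        instance)::

            for commit in repo.get_commits(branch_name=repo.DEFAULT_BRANCH_NAME):
                log.debug('%s %s', commit.short_id, commit.author)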
349 349 """
350 350 raise NotImplementedError
351 351
352 352 def __getitem__(self, key):
353 353 """
354 354 Allows index based access to the commit objects of this repository.
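
        Example (illustrative)::

            first = repo[0]              # commit at index 0
            some = list(repo[0:10])      # a slice yields a commit generator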
355 355 """
356 356 pre_load = ["author", "branch", "date", "message", "parents"]
357 357 if isinstance(key, slice):
358 358 return self._get_range(key, pre_load)
359 359 return self.get_commit(commit_idx=key, pre_load=pre_load)
360 360
361 361 def _get_range(self, slice_obj, pre_load):
362 362 for commit_id in self.commit_ids.__getitem__(slice_obj):
363 363 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
364 364
365 365 def count(self):
366 366 return len(self.commit_ids)
367 367
368 368 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
369 369 """
370 370 Creates and returns a tag for the given ``commit_id``.
371 371
372 372 :param name: name for new tag
373 373 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
374 374 :param commit_id: commit id for which new tag would be created
375 375 :param message: message of the tag's commit
376 376 :param date: date of tag's commit
377 377
378 378 :raises TagAlreadyExistError: if tag with same name already exists
379 379 """
380 380 raise NotImplementedError
381 381
382 382 def remove_tag(self, name, user, message=None, date=None):
383 383 """
384 384 Removes tag with the given ``name``.
385 385
386 386 :param name: name of the tag to be removed
387 387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
388 388 :param message: message of the tag's removal commit
389 389 :param date: date of tag's removal commit
390 390
391 391 :raises TagDoesNotExistError: if tag with given name does not exist
392 392 """
393 393 raise NotImplementedError
394 394
395 395 def get_diff(
396 396 self, commit1, commit2, path=None, ignore_whitespace=False,
397 397 context=3, path1=None):
398 398 """
399 399 Returns (git like) *diff*, as plain text. Shows changes introduced by
400 400 `commit2` since `commit1`.
401 401
402 402 :param commit1: Entry point from which diff is shown. Can be
403 403 ``self.EMPTY_COMMIT`` - in this case, patch showing all
404 404 the changes since empty state of the repository until `commit2`
405 405 :param commit2: Until which commit changes should be shown.
406 406 :param path: Can be set to a path of a file to create a diff of that
407 407 file. If `path1` is also set, this value is only associated to
408 408 `commit2`.
409 409 :param ignore_whitespace: If set to ``True``, would not show whitespace
410 410 changes. Defaults to ``False``.
411 411 :param context: How many lines before/after changed lines should be
412 412 shown. Defaults to ``3``.
413 413 :param path1: Can be set to a path to associate with `commit1`. This
414 414 parameter works only for backends which support diff generation for
415 415 different paths. Other backends will raise a `ValueError` if `path1`
416 416 is set and has a different value than `path`.
417 417 :param file_path: filter this diff by given path pattern
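
        Example (illustrative; assumes ``repo`` is a concrete backend
        instance)::

            text_diff = repo.get_diff(
                repo.EMPTY_COMMIT, repo.get_commit(), path='README.rst')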
418 418 """
419 419 raise NotImplementedError
420 420
421 421 def strip(self, commit_id, branch=None):
422 422 """
423 423 Strip given commit_id from the repository
424 424 """
425 425 raise NotImplementedError
426 426
427 427 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
428 428 """
429 429 Returns the latest common ancestor commit, if one exists, between
430 430 this repo's `commit_id1` and `commit_id2` from `repo2`.
431 431
432 432 :param commit_id1: Commit id from this repository to use as a
433 433 target for the comparison.
434 434 :param commit_id2: Source commit id to use for comparison.
435 435 :param repo2: Source repository to use for comparison.
436 436 """
437 437 raise NotImplementedError
438 438
439 439 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
440 440 """
441 441 Compare this repository's revision `commit_id1` with `commit_id2`.
442 442
443 443 Returns a tuple(commits, ancestor) that would be merged from
444 444 `commit_id2`. For a normal compare (``merge=False``), ``None``
445 445 is returned as the ancestor.
446 446
447 447 :param commit_id1: Commit id from this repository to use as a
448 448 target for the comparison.
449 449 :param commit_id2: Source commit id to use for comparison.
450 450 :param repo2: Source repository to use for comparison.
451 451 :param merge: If set to ``True`` will do a merge compare which also
452 452 returns the common ancestor.
453 453 :param pre_load: Optional. List of commit attributes to load.
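
        Example (illustrative; ``source_repo`` is assumed to be another
        backend instance and the commit ids are placeholders)::

            commits, ancestor = repo.compare(
                target_commit_id, source_commit_id, source_repo, merge=True)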
454 454 """
455 455 raise NotImplementedError
456 456
457 457 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
458 458 user_name='', user_email='', message='', dry_run=False,
459 459 use_rebase=False, close_branch=False):
460 460 """
461 461 Merge the revisions specified in `source_ref` from `source_repo`
462 462 onto the `target_ref` of this repository.
463 463
464 464 `source_ref` and `target_ref` are named tuples with the following
465 465 fields: `type`, `name` and `commit_id`.
466 466
467 467 Returns a MergeResponse named tuple with the following fields:
468 468 'possible', 'executed', 'merge_ref', 'failure_reason'.
470 470
471 471 :param repo_id: `repo_id` target repo id.
472 472 :param workspace_id: `workspace_id` unique identifier.
473 473 :param target_ref: `target_ref` points to the commit on top of which
474 474 the `source_ref` should be merged.
475 475 :param source_repo: The repository that contains the commits to be
476 476 merged.
477 477 :param source_ref: `source_ref` points to the topmost commit from
478 478 the `source_repo` which should be merged.
479 479 :param user_name: Merge commit `user_name`.
480 480 :param user_email: Merge commit `user_email`.
481 481 :param message: Merge commit `message`.
482 482 :param dry_run: If `True` the merge will not take place.
483 483 :param use_rebase: If `True` commits from the source will be rebased
484 484 on top of the target instead of being merged.
485 485 :param close_branch: If `True` the branch will be closed before merging it
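
        Example (illustrative; the references and ids below are placeholders)::

            target_ref = Reference('branch', 'master', target_commit_id)
            source_ref = Reference('branch', 'feature', source_commit_id)
            response = repo.merge(
                repo_id, workspace_id, target_ref, source_repo, source_ref,
                user_name='Joe Doe', user_email='joe.doe@example.com',
                message='Merge feature into master', dry_run=True)
            if response.possible:
                ...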
486 486 """
487 487 if dry_run:
488 message = message or 'dry_run_merge_message'
489 user_email = user_email or 'dry-run-merge@rhodecode.com'
490 user_name = user_name or 'Dry-Run User'
488 message = message or settings.MERGE_DRY_RUN_MESSAGE
489 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
490 user_name = user_name or settings.MERGE_DRY_RUN_USER
491 491 else:
492 492 if not user_name:
493 493 raise ValueError('user_name cannot be empty')
494 494 if not user_email:
495 495 raise ValueError('user_email cannot be empty')
496 496 if not message:
497 497 raise ValueError('message cannot be empty')
498 498
499 499 try:
500 500 return self._merge_repo(
501 501 repo_id, workspace_id, target_ref, source_repo,
502 502 source_ref, message, user_name, user_email, dry_run=dry_run,
503 503 use_rebase=use_rebase, close_branch=close_branch)
504 504 except RepositoryError:
505 505 log.exception(
506 506 'Unexpected failure when running merge, dry-run=%s',
507 507 dry_run)
508 508 return MergeResponse(
509 509 False, False, None, MergeFailureReason.UNKNOWN)
510 510
511 511 def _merge_repo(self, repo_id, workspace_id, target_ref,
512 512 source_repo, source_ref, merge_message,
513 513 merger_name, merger_email, dry_run=False,
514 514 use_rebase=False, close_branch=False):
515 515 """Internal implementation of merge."""
516 516 raise NotImplementedError
517 517
518 518 def _maybe_prepare_merge_workspace(
519 519 self, repo_id, workspace_id, target_ref, source_ref):
520 520 """
521 521 Create the merge workspace.
522 522
523 523 :param workspace_id: `workspace_id` unique identifier.
524 524 """
525 525 raise NotImplementedError
526 526
527 527 def _get_legacy_shadow_repository_path(self, workspace_id):
528 528 """
529 529 Legacy version of the shadow repository path. We still need it
530 530 for backward compatibility.
531 531 """
532 532 return os.path.join(
533 533 os.path.dirname(self.path),
534 534 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
535 535
536 536 def _get_shadow_repository_path(self, repo_id, workspace_id):
537 537 # The name of the shadow repository must start with '.', so it is
538 538 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
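        # For example (illustrative): a repository at /repos/foo with
        # repo_id=3 and workspace_id='pr-7' resolves to
        # /repos/.__shadow_repo_3_pr-7, unless the legacy path
        # /repos/.__shadow_foo_pr-7 already exists.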
539 539 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
540 540 if os.path.exists(legacy_repository_path):
541 541 return legacy_repository_path
542 542 else:
543 543 return os.path.join(
544 544 os.path.dirname(self.path),
545 545 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
546 546
547 547 def cleanup_merge_workspace(self, repo_id, workspace_id):
548 548 """
549 549 Remove merge workspace.
550 550
551 551 This function MUST not fail in case there is no workspace associated with
552 552 the given `workspace_id`.
553 553
554 554 :param workspace_id: `workspace_id` unique identifier.
555 555 """
556 556 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
557 557 shadow_repository_path_del = '{}.{}.delete'.format(
558 558 shadow_repository_path, time.time())
559 559
560 560 # move the shadow repo, so it never conflicts with the one used.
561 561 # we use this method because shutil.rmtree had some edge case problems
562 562 # removing symlinked repositories
563 563 if not os.path.isdir(shadow_repository_path):
564 564 return
565 565
566 566 shutil.move(shadow_repository_path, shadow_repository_path_del)
567 567 try:
568 568 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
569 569 except Exception:
570 570 log.exception('Failed to gracefully remove shadow repo under %s',
571 571 shadow_repository_path_del)
572 572 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
573 573
574 574 # ========== #
575 575 # COMMIT API #
576 576 # ========== #
577 577
578 578 @LazyProperty
579 579 def in_memory_commit(self):
580 580 """
581 581 Returns :class:`InMemoryCommit` object for this repository.
582 582 """
583 583 raise NotImplementedError
584 584
585 585 # ======================== #
586 586 # UTILITIES FOR SUBCLASSES #
587 587 # ======================== #
588 588
589 589 def _validate_diff_commits(self, commit1, commit2):
590 590 """
591 591 Validates that the given commits are related to this repository.
592 592
593 593 Intended as a utility for subclasses to have a consistent validation
594 594 of input parameters in methods like :meth:`get_diff`.
595 595 """
596 596 self._validate_commit(commit1)
597 597 self._validate_commit(commit2)
598 598 if (isinstance(commit1, EmptyCommit) and
599 599 isinstance(commit2, EmptyCommit)):
600 600 raise ValueError("Cannot compare two empty commits")
601 601
602 602 def _validate_commit(self, commit):
603 603 if not isinstance(commit, BaseCommit):
604 604 raise TypeError(
605 605 "%s is not of type BaseCommit" % repr(commit))
606 606 if commit.repository != self and not isinstance(commit, EmptyCommit):
607 607 raise ValueError(
608 608 "Commit %s must be a valid commit from this repository %s, "
609 609 "related to this repository instead %s." %
610 610 (commit, self, commit.repository))
611 611
612 612 def _validate_commit_id(self, commit_id):
613 613 if not isinstance(commit_id, basestring):
614 614 raise TypeError("commit_id must be a string value")
615 615
616 616 def _validate_commit_idx(self, commit_idx):
617 617 if not isinstance(commit_idx, (int, long)):
618 618 raise TypeError("commit_idx must be a numeric value")
619 619
620 620 def _validate_branch_name(self, branch_name):
621 621 if branch_name and branch_name not in self.branches_all:
622 622 msg = ("Branch %s not found in %s" % (branch_name, self))
623 623 raise BranchDoesNotExistError(msg)
624 624
625 625 #
626 626 # Supporting deprecated API parts
627 627 # TODO: johbo: consider to move this into a mixin
628 628 #
629 629
630 630 @property
631 631 def EMPTY_CHANGESET(self):
632 632 warnings.warn(
633 633 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
634 634 return self.EMPTY_COMMIT_ID
635 635
636 636 @property
637 637 def revisions(self):
638 638 warnings.warn("Use commits attribute instead", DeprecationWarning)
639 639 return self.commit_ids
640 640
641 641 @revisions.setter
642 642 def revisions(self, value):
643 643 warnings.warn("Use commits attribute instead", DeprecationWarning)
644 644 self.commit_ids = value
645 645
646 646 def get_changeset(self, revision=None, pre_load=None):
647 647 warnings.warn("Use get_commit instead", DeprecationWarning)
648 648 commit_id = None
649 649 commit_idx = None
650 650 if isinstance(revision, basestring):
651 651 commit_id = revision
652 652 else:
653 653 commit_idx = revision
654 654 return self.get_commit(
655 655 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
656 656
657 657 def get_changesets(
658 658 self, start=None, end=None, start_date=None, end_date=None,
659 659 branch_name=None, pre_load=None):
660 660 warnings.warn("Use get_commits instead", DeprecationWarning)
661 661 start_id = self._revision_to_commit(start)
662 662 end_id = self._revision_to_commit(end)
663 663 return self.get_commits(
664 664 start_id=start_id, end_id=end_id, start_date=start_date,
665 665 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
666 666
667 667 def _revision_to_commit(self, revision):
668 668 """
669 669 Translates a revision to a commit_id
670 670
671 671 Helps to support the old changeset based API which allows to use
672 672 commit ids and commit indices interchangeable.
673 673 """
674 674 if revision is None:
675 675 return revision
676 676
677 677 if isinstance(revision, basestring):
678 678 commit_id = revision
679 679 else:
680 680 commit_id = self.commit_ids[revision]
681 681 return commit_id
682 682
683 683 @property
684 684 def in_memory_changeset(self):
685 685 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
686 686 return self.in_memory_commit
687 687
688 688 def get_path_permissions(self, username):
689 689 """
690 690 Returns a path permission checker or None if not supported
691 691
692 692 :param username: session user name
693 693 :return: an instance of BasePathPermissionChecker or None
694 694 """
695 695 return None
696 696
697 697 def install_hooks(self, force=False):
698 698 return self._remote.install_hooks(force)
699 699
700 700
701 701 class BaseCommit(object):
702 702 """
703 703 Each backend should implement its commit representation.
704 704
705 705 **Attributes**
706 706
707 707 ``repository``
708 708 repository object within which commit exists
709 709
710 710 ``id``
711 711 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
712 712 just ``tip``.
713 713
714 714 ``raw_id``
715 715 raw commit representation (i.e. full 40 length sha for git
716 716 backend)
717 717
718 718 ``short_id``
719 719 shortened (if applicable) version of ``raw_id``; a simple
720 720 shortcut for ``raw_id[:12]`` for git/mercurial backends, or the
721 721 same as ``raw_id`` for subversion
722 722
723 723 ``idx``
724 724 commit index
725 725
726 726 ``files``
727 727 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
728 728
729 729 ``dirs``
730 730 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
731 731
732 732 ``nodes``
733 733 combined list of ``Node`` objects
734 734
735 735 ``author``
736 736 author of the commit, as unicode
737 737
738 738 ``message``
739 739 message of the commit, as unicode
740 740
741 741 ``parents``
742 742 list of parent commits
743 743
744 744 """
745 745
746 746 branch = None
747 747 """
748 748 Depending on the backend this should be set to the branch name of the
749 749 commit. Backends not supporting branches on commits should leave this
750 750 value as ``None``.
751 751 """
752 752
753 753 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
754 754 """
755 755 This template is used to generate a default prefix for repository archives
756 756 if no prefix has been specified.
757 757 """
758 758
759 759 def __str__(self):
760 760 return '<%s at %s:%s>' % (
761 761 self.__class__.__name__, self.idx, self.short_id)
762 762
763 763 def __repr__(self):
764 764 return self.__str__()
765 765
766 766 def __unicode__(self):
767 767 return u'%s:%s' % (self.idx, self.short_id)
768 768
769 769 def __eq__(self, other):
770 770 same_instance = isinstance(other, self.__class__)
771 771 return same_instance and self.raw_id == other.raw_id
772 772
773 773 def __json__(self):
774 774 parents = []
775 775 try:
776 776 for parent in self.parents:
777 777 parents.append({'raw_id': parent.raw_id})
778 778 except NotImplementedError:
779 779 # empty commit doesn't have parents implemented
780 780 pass
781 781
782 782 return {
783 783 'short_id': self.short_id,
784 784 'raw_id': self.raw_id,
785 785 'revision': self.idx,
786 786 'message': self.message,
787 787 'date': self.date,
788 788 'author': self.author,
789 789 'parents': parents,
790 790 'branch': self.branch
791 791 }
792 792
793 793 def __getstate__(self):
794 794 d = self.__dict__.copy()
795 795 d.pop('_remote', None)
796 796 d.pop('repository', None)
797 797 return d
798 798
799 799 def _get_refs(self):
800 800 return {
801 801 'branches': [self.branch] if self.branch else [],
802 802 'bookmarks': getattr(self, 'bookmarks', []),
803 803 'tags': self.tags
804 804 }
805 805
806 806 @LazyProperty
807 807 def last(self):
808 808 """
808 808 ``True`` if this is the last commit in the repository, ``False``
809 809 otherwise; trying to access this attribute while there are no
810 810 commits raises `EmptyRepositoryError`
812 812 """
813 813 if self.repository is None:
814 814 raise CommitError("Cannot check if it's most recent commit")
815 815 return self.raw_id == self.repository.commit_ids[-1]
816 816
817 817 @LazyProperty
818 818 def parents(self):
819 819 """
820 820 Returns list of parent commits.
821 821 """
822 822 raise NotImplementedError
823 823
824 824 @property
825 825 def merge(self):
826 826 """
827 827 Returns ``True`` if the commit is a merge commit.
828 828 """
829 829 return len(self.parents) > 1
830 830
831 831 @LazyProperty
832 832 def children(self):
833 833 """
834 834 Returns list of child commits.
835 835 """
836 836 raise NotImplementedError
837 837
838 838 @LazyProperty
839 839 def id(self):
840 840 """
841 841 Returns string identifying this commit.
842 842 """
843 843 raise NotImplementedError
844 844
845 845 @LazyProperty
846 846 def raw_id(self):
847 847 """
848 848 Returns raw string identifying this commit.
849 849 """
850 850 raise NotImplementedError
851 851
852 852 @LazyProperty
853 853 def short_id(self):
854 854 """
855 855 Returns shortened version of ``raw_id`` attribute, as string,
856 856 identifying this commit, useful for presentation to users.
857 857 """
858 858 raise NotImplementedError
859 859
860 860 @LazyProperty
861 861 def idx(self):
862 862 """
863 863 Returns integer identifying this commit.
864 864 """
865 865 raise NotImplementedError
866 866
867 867 @LazyProperty
868 868 def committer(self):
869 869 """
870 870 Returns committer for this commit
871 871 """
872 872 raise NotImplementedError
873 873
874 874 @LazyProperty
875 875 def committer_name(self):
876 876 """
877 877 Returns committer name for this commit
878 878 """
879 879
880 880 return author_name(self.committer)
881 881
882 882 @LazyProperty
883 883 def committer_email(self):
884 884 """
885 885 Returns committer email address for this commit
886 886 """
887 887
888 888 return author_email(self.committer)
889 889
890 890 @LazyProperty
891 891 def author(self):
892 892 """
893 893 Returns author for this commit
894 894 """
895 895
896 896 raise NotImplementedError
897 897
898 898 @LazyProperty
899 899 def author_name(self):
900 900 """
901 901 Returns author name for this commit
902 902 """
903 903
904 904 return author_name(self.author)
905 905
906 906 @LazyProperty
907 907 def author_email(self):
908 908 """
909 909 Returns author email address for this commit
910 910 """
911 911
912 912 return author_email(self.author)
913 913
914 914 def get_file_mode(self, path):
915 915 """
916 916 Returns stat mode of the file at `path`.
917 917 """
918 918 raise NotImplementedError
919 919
920 920 def is_link(self, path):
921 921 """
922 922 Returns ``True`` if given `path` is a symlink
923 923 """
924 924 raise NotImplementedError
925 925
926 926 def get_file_content(self, path):
927 927 """
928 928 Returns content of the file at the given `path`.
929 929 """
930 930 raise NotImplementedError
931 931
932 932 def get_file_size(self, path):
933 933 """
934 934 Returns size of the file at the given `path`.
935 935 """
936 936 raise NotImplementedError
937 937
938 938 def get_file_commit(self, path, pre_load=None):
939 939 """
940 940 Returns last commit of the file at the given `path`.
941 941
942 942 :param pre_load: Optional. List of commit attributes to load.
943 943 """
944 944 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
945 945 if not commits:
946 946 raise RepositoryError(
947 947 'Failed to fetch history for path {}. '
948 948 'Please check if such path exists in your repository'.format(
949 949 path))
950 950 return commits[0]
951 951
952 952 def get_file_history(self, path, limit=None, pre_load=None):
953 953 """
954 954 Returns history of file as reversed list of :class:`BaseCommit`
955 955 objects for which file at given `path` has been modified.
956 956
957 957 :param limit: Optional. Allows to limit the size of the returned
958 958 history. This is intended as a hint to the underlying backend, so
959 959 that it can apply optimizations depending on the limit.
960 960 :param pre_load: Optional. List of commit attributes to load.
961 961 """
962 962 raise NotImplementedError
963 963
964 964 def get_file_annotate(self, path, pre_load=None):
965 965 """
966 966 Returns a generator of four element tuples with
967 967 lineno, sha, commit lazy loader and line
968 968
969 969 :param pre_load: Optional. List of commit attributes to load.
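
        Example (illustrative)::

            for line_no, sha, commit_loader, line in \
                    commit.get_file_annotate('setup.py'):
                ...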
970 970 """
971 971 raise NotImplementedError
972 972
973 973 def get_nodes(self, path):
974 974 """
975 975 Returns combined ``DirNode`` and ``FileNode`` objects list representing
976 976 state of commit at the given ``path``.
977 977
978 978 :raises ``CommitError``: if node at the given ``path`` is not
979 979 instance of ``DirNode``
980 980 """
981 981 raise NotImplementedError
982 982
983 983 def get_node(self, path):
984 984 """
985 985 Returns ``Node`` object from the given ``path``.
986 986
987 987 :raises ``NodeDoesNotExistError``: if there is no node at the given
988 988 ``path``
989 989 """
990 990 raise NotImplementedError
991 991
992 992 def get_largefile_node(self, path):
993 993 """
994 994 Returns the path to a largefile from Mercurial largefiles / Git LFS
995 995 storage, or None if it's not a largefile node.
996 996 """
997 997 return None
998 998
999 999 def archive_repo(self, file_path, kind='tgz', subrepos=None,
1000 1000 prefix=None, write_metadata=False, mtime=None):
1001 1001 """
1002 1002 Creates an archive containing the contents of the repository.
1003 1003
1004 1004 :param file_path: path of the file into which the archive is written.
1005 1005 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1006 1006 :param prefix: name of root directory in archive.
1007 1007 Default is repository name and commit's short_id joined with dash:
1008 1008 ``"{repo_name}-{short_id}"``.
1009 1009 :param write_metadata: write a metadata file into archive.
1010 1010 :param mtime: custom modification time for archive creation, defaults
1011 1011 to time.time() if not given.
1012 1012
1013 1013 :raise VCSError: If prefix has a problem.
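
        Example (illustrative)::

            commit.archive_repo(
                '/tmp/repo.tgz', kind='tgz',
                prefix='myrepo-%s' % commit.short_id)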
1014 1014 """
1015 1015 allowed_kinds = settings.ARCHIVE_SPECS.keys()
1016 1016 if kind not in allowed_kinds:
1017 1017 raise ImproperArchiveTypeError(
1018 1018 'Archive kind (%s) not supported, use one of %s' %
1019 1019 (kind, allowed_kinds))
1020 1020
1021 1021 prefix = self._validate_archive_prefix(prefix)
1022 1022
1023 1023 mtime = mtime or time.mktime(self.date.timetuple())
1024 1024
1025 1025 file_info = []
1026 1026 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1027 1027 for _r, _d, files in cur_rev.walk('/'):
1028 1028 for f in files:
1029 1029 f_path = os.path.join(prefix, f.path)
1030 1030 file_info.append(
1031 1031 (f_path, f.mode, f.is_link(), f.raw_bytes))
1032 1032
1033 1033 if write_metadata:
1034 1034 metadata = [
1035 1035 ('repo_name', self.repository.name),
1036 1036 ('rev', self.raw_id),
1037 1037 ('create_time', mtime),
1038 1038 ('branch', self.branch),
1039 1039 ('tags', ','.join(self.tags)),
1040 1040 ]
1041 1041 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1042 1042 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
1043 1043
1044 1044 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1045 1045
1046 1046 def _validate_archive_prefix(self, prefix):
1047 1047 if prefix is None:
1048 1048 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1049 1049 repo_name=safe_str(self.repository.name),
1050 1050 short_id=self.short_id)
1051 1051 elif not isinstance(prefix, str):
1052 1052 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1053 1053 elif prefix.startswith('/'):
1054 1054 raise VCSError("Prefix cannot start with leading slash")
1055 1055 elif prefix.strip() == '':
1056 1056 raise VCSError("Prefix cannot be empty")
1057 1057 return prefix
1058 1058
1059 1059 @LazyProperty
1060 1060 def root(self):
1061 1061 """
1062 1062 Returns ``RootNode`` object for this commit.
1063 1063 """
1064 1064 return self.get_node('')
1065 1065
1066 1066 def next(self, branch=None):
1067 1067 """
1068 1068 Returns the next commit after the current one; if `branch` is given,
1069 1069 it will return the next commit belonging to that branch
1070 1070
1071 1071 :param branch: show commits within the given named branch
1072 1072 """
1073 1073 indexes = xrange(self.idx + 1, self.repository.count())
1074 1074 return self._find_next(indexes, branch)
1075 1075
1076 1076 def prev(self, branch=None):
1077 1077 """
1078 1078 Returns the previous commit before the current one; if `branch` is
1079 1079 given, it will return the previous commit belonging to that branch
1080 1080
1081 1081 :param branch: show commit within the given named branch
1082 1082 """
1083 1083 indexes = xrange(self.idx - 1, -1, -1)
1084 1084 return self._find_next(indexes, branch)
1085 1085
1086 1086 def _find_next(self, indexes, branch=None):
1087 1087 if branch and self.branch != branch:
1088 1088 raise VCSError('Branch option used on commit not belonging '
1089 1089 'to that branch')
1090 1090
1091 1091 for next_idx in indexes:
1092 1092 commit = self.repository.get_commit(commit_idx=next_idx)
1093 1093 if branch and branch != commit.branch:
1094 1094 continue
1095 1095 return commit
1096 1096 raise CommitDoesNotExistError
1097 1097
1098 1098 def diff(self, ignore_whitespace=True, context=3):
1099 1099 """
1100 1100 Returns a `Diff` object representing the change made by this commit.
1101 1101 """
1102 1102 parent = (
1103 1103 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1104 1104 diff = self.repository.get_diff(
1105 1105 parent, self,
1106 1106 ignore_whitespace=ignore_whitespace,
1107 1107 context=context)
1108 1108 return diff
1109 1109
1110 1110 @LazyProperty
1111 1111 def added(self):
1112 1112 """
1113 1113 Returns list of added ``FileNode`` objects.
1114 1114 """
1115 1115 raise NotImplementedError
1116 1116
1117 1117 @LazyProperty
1118 1118 def changed(self):
1119 1119 """
1120 1120 Returns list of modified ``FileNode`` objects.
1121 1121 """
1122 1122 raise NotImplementedError
1123 1123
1124 1124 @LazyProperty
1125 1125 def removed(self):
1126 1126 """
1127 1127 Returns list of removed ``FileNode`` objects.
1128 1128 """
1129 1129 raise NotImplementedError
1130 1130
1131 1131 @LazyProperty
1132 1132 def size(self):
1133 1133 """
1134 1134 Returns total number of bytes from contents of all filenodes.
1135 1135 """
1136 1136 return sum((node.size for node in self.get_filenodes_generator()))
1137 1137
1138 1138 def walk(self, topurl=''):
1139 1139 """
1140 1140 Similar to the os.walk method. Instead of the filesystem, it walks
1141 1141 through the commit, starting at the given ``topurl``. Returns a generator of tuples
1142 1142 (topnode, dirnodes, filenodes).
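
        Example (illustrative)::

            for topnode, dirnodes, filenodes in commit.walk(''):
                for filenode in filenodes:
                    log.debug(filenode.path)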
1143 1143 """
1144 1144 topnode = self.get_node(topurl)
1145 1145 if not topnode.is_dir():
1146 1146 return
1147 1147 yield (topnode, topnode.dirs, topnode.files)
1148 1148 for dirnode in topnode.dirs:
1149 1149 for tup in self.walk(dirnode.path):
1150 1150 yield tup
1151 1151
1152 1152 def get_filenodes_generator(self):
1153 1153 """
1154 1154 Returns generator that yields *all* file nodes.
1155 1155 """
1156 1156 for topnode, dirs, files in self.walk():
1157 1157 for node in files:
1158 1158 yield node
1159 1159
1160 1160 #
1161 1161 # Utilities for subclasses to support consistent behavior
1162 1162 #
1163 1163
1164 1164 def no_node_at_path(self, path):
1165 1165 return NodeDoesNotExistError(
1166 1166 u"There is no file nor directory at the given path: "
1167 1167 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1168 1168
1169 1169 def _fix_path(self, path):
1170 1170 """
1171 1171 Paths are stored without a trailing slash, so we need to get rid of
1172 1172 it if needed.
1173 1173 """
1174 1174 return path.rstrip('/')
1175 1175
1176 1176 #
1177 1177 # Deprecated API based on changesets
1178 1178 #
1179 1179
1180 1180 @property
1181 1181 def revision(self):
1182 1182 warnings.warn("Use idx instead", DeprecationWarning)
1183 1183 return self.idx
1184 1184
1185 1185 @revision.setter
1186 1186 def revision(self, value):
1187 1187 warnings.warn("Use idx instead", DeprecationWarning)
1188 1188 self.idx = value
1189 1189
1190 1190 def get_file_changeset(self, path):
1191 1191 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1192 1192 return self.get_file_commit(path)
1193 1193
1194 1194
1195 1195 class BaseChangesetClass(type):
1196 1196
1197 1197 def __instancecheck__(self, instance):
1198 1198 return isinstance(instance, BaseCommit)
1199 1199
1200 1200
1201 1201 class BaseChangeset(BaseCommit):
1202 1202
1203 1203 __metaclass__ = BaseChangesetClass
1204 1204
1205 1205 def __new__(cls, *args, **kwargs):
1206 1206 warnings.warn(
1207 1207 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1208 1208 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1209 1209
1210 1210
1211 1211 class BaseInMemoryCommit(object):
1212 1212 """
1213 1213 Represents differences between repository's state (most recent head) and
1214 1214 changes made *in place*.
1215 1215
1216 1216 **Attributes**
1217 1217
1218 1218 ``repository``
1219 1219 repository object for this in-memory-commit
1220 1220
1221 1221 ``added``
1222 1222 list of ``FileNode`` objects marked as *added*
1223 1223
1224 1224 ``changed``
1225 1225 list of ``FileNode`` objects marked as *changed*
1226 1226
1227 1227 ``removed``
1228 1228 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1229 1229 *removed*
1230 1230
1231 1231 ``parents``
1232 1232 list of :class:`BaseCommit` instances representing parents of
1233 1233 in-memory commit. Should always be 2-element sequence.
1234 1234
1235 1235 """
1236 1236
1237 1237 def __init__(self, repository):
1238 1238 self.repository = repository
1239 1239 self.added = []
1240 1240 self.changed = []
1241 1241 self.removed = []
1242 1242 self.parents = []
1243 1243
1244 1244 def add(self, *filenodes):
1245 1245 """
1246 1246 Marks given ``FileNode`` objects as *to be committed*.
1247 1247
1248 1248 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1249 1249 latest commit
1250 1250 :raises ``NodeAlreadyAddedError``: if node with same path is already
1251 1251 marked as *added*
1252 1252 """
1253 1253 # Check if not already marked as *added* first
1254 1254 for node in filenodes:
1255 1255 if node.path in (n.path for n in self.added):
1256 1256 raise NodeAlreadyAddedError(
1257 1257 "Such FileNode %s is already marked for addition"
1258 1258 % node.path)
1259 1259 for node in filenodes:
1260 1260 self.added.append(node)
1261 1261
1262 1262 def change(self, *filenodes):
1263 1263 """
1264 1264 Marks given ``FileNode`` objects to be *changed* in next commit.
1265 1265
1266 1266 :raises ``EmptyRepositoryError``: if there are no commits yet
1267 1267 :raises ``NodeAlreadyChangedError``: if node with same path is already
1268 1268 marked to be *changed*
1269 1269 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1270 1270 marked to be *removed*
1271 1271 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1272 1272 commit
1273 1273 :raises ``NodeNotChangedError``: if node hasn't really been changed
1274 1274 """
1275 1275 for node in filenodes:
1276 1276 if node.path in (n.path for n in self.removed):
1277 1277 raise NodeAlreadyRemovedError(
1278 1278 "Node at %s is already marked as removed" % node.path)
1279 1279 try:
1280 1280 self.repository.get_commit()
1281 1281 except EmptyRepositoryError:
1282 1282 raise EmptyRepositoryError(
1283 1283 "Nothing to change - try to *add* new nodes rather than "
1284 1284 "changing them")
1285 1285 for node in filenodes:
1286 1286 if node.path in (n.path for n in self.changed):
1287 1287 raise NodeAlreadyChangedError(
1288 1288 "Node at '%s' is already marked as changed" % node.path)
1289 1289 self.changed.append(node)
1290 1290
1291 1291 def remove(self, *filenodes):
1292 1292 """
1293 1293 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1294 1294 *removed* in next commit.
1295 1295
1296 1296 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1297 1297 be *removed*
1298 1298 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1299 1299 be *changed*
1300 1300 """
1301 1301 for node in filenodes:
1302 1302 if node.path in (n.path for n in self.removed):
1303 1303 raise NodeAlreadyRemovedError(
1304 1304 "Node is already marked to for removal at %s" % node.path)
1305 1305 if node.path in (n.path for n in self.changed):
1306 1306 raise NodeAlreadyChangedError(
1307 1307 "Node is already marked to be changed at %s" % node.path)
1308 1308 # We only mark node as *removed* - real removal is done by
1309 1309 # commit method
1310 1310 self.removed.append(node)
1311 1311
1312 1312 def reset(self):
1313 1313 """
1314 1314 Resets this instance to initial state (cleans ``added``, ``changed``
1315 1315 and ``removed`` lists).
1316 1316 """
1317 1317 self.added = []
1318 1318 self.changed = []
1319 1319 self.removed = []
1320 1320 self.parents = []
1321 1321
1322 1322 def get_ipaths(self):
1323 1323 """
1324 1324 Returns generator of paths from nodes marked as added, changed or
1325 1325 removed.
1326 1326 """
1327 1327 for node in itertools.chain(self.added, self.changed, self.removed):
1328 1328 yield node.path
1329 1329
1330 1330 def get_paths(self):
1331 1331 """
1332 1332 Returns list of paths from nodes marked as added, changed or removed.
1333 1333 """
1334 1334 return list(self.get_ipaths())
1335 1335
1336 1336 def check_integrity(self, parents=None):
1337 1337 """
1338 1338 Checks in-memory commit's integrity. Also, sets parents if not
1339 1339 already set.
1340 1340
1341 1341 :raises CommitError: if any error occurs (e.g.
1342 1342 ``NodeDoesNotExistError``).
1343 1343 """
1344 1344 if not self.parents:
1345 1345 parents = parents or []
1346 1346 if len(parents) == 0:
1347 1347 try:
1348 1348 parents = [self.repository.get_commit(), None]
1349 1349 except EmptyRepositoryError:
1350 1350 parents = [None, None]
1351 1351 elif len(parents) == 1:
1352 1352 parents += [None]
1353 1353 self.parents = parents
1354 1354
1355 1355 # Local parents, only if not None
1356 1356 parents = [p for p in self.parents if p]
1357 1357
1358 1358 # Check nodes marked as added
1359 1359 for p in parents:
1360 1360 for node in self.added:
1361 1361 try:
1362 1362 p.get_node(node.path)
1363 1363 except NodeDoesNotExistError:
1364 1364 pass
1365 1365 else:
1366 1366 raise NodeAlreadyExistsError(
1367 1367 "Node `%s` already exists at %s" % (node.path, p))
1368 1368
1369 1369 # Check nodes marked as changed
1370 1370 missing = set(self.changed)
1371 1371 not_changed = set(self.changed)
1372 1372 if self.changed and not parents:
1373 1373 raise NodeDoesNotExistError(str(self.changed[0].path))
1374 1374 for p in parents:
1375 1375 for node in self.changed:
1376 1376 try:
1377 1377 old = p.get_node(node.path)
1378 1378 missing.remove(node)
1379 1379 # if content actually changed, remove node from not_changed
1380 1380 if old.content != node.content:
1381 1381 not_changed.remove(node)
1382 1382 except NodeDoesNotExistError:
1383 1383 pass
1384 1384 if self.changed and missing:
1385 1385 raise NodeDoesNotExistError(
1386 1386 "Node `%s` marked as modified but missing in parents: %s"
1387 1387 % (node.path, parents))
1388 1388
1389 1389 if self.changed and not_changed:
1390 1390 raise NodeNotChangedError(
1391 1391 "Node `%s` wasn't actually changed (parents: %s)"
1392 1392 % (not_changed.pop().path, parents))
1393 1393
1394 1394 # Check nodes marked as removed
1395 1395 if self.removed and not parents:
1396 1396 raise NodeDoesNotExistError(
1397 1397 "Cannot remove node at %s as there "
1398 1398 "were no parents specified" % self.removed[0].path)
1399 1399 really_removed = set()
1400 1400 for p in parents:
1401 1401 for node in self.removed:
1402 1402 try:
1403 1403 p.get_node(node.path)
1404 1404 really_removed.add(node)
1405 1405 except CommitError:
1406 1406 pass
1407 1407 not_removed = set(self.removed) - really_removed
1408 1408 if not_removed:
1409 1409 # TODO: johbo: This code branch does not seem to be covered
1410 1410 raise NodeDoesNotExistError(
1411 1411 "Cannot remove node at %s from "
1412 1412 "following parents: %s" % (not_removed, parents))
1413 1413
1414 1414 def commit(
1415 1415 self, message, author, parents=None, branch=None, date=None,
1416 1416 **kwargs):
1417 1417 """
1418 1418 Performs in-memory commit (doesn't check workdir in any way) and
1419 1419 returns newly created :class:`BaseCommit`. Updates repository's
1420 1420 attribute `commits`.
1421 1421
1422 1422 .. note::
1423 1423
1424 1424 While overriding this method, each backend should call
1425 1425 ``self.check_integrity(parents)`` first.
1426 1426
1427 1427 :param message: message of the commit
1428 1428 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1429 1429 :param parents: single parent or sequence of parents from which commit
1430 1430 would be derived
1431 1431 :param date: ``datetime.datetime`` instance. Defaults to
1432 1432 ``datetime.datetime.now()``.
1433 1433 :param branch: branch name, as string. If none given, default backend's
1434 1434 branch would be used.
1435 1435
1436 1436 :raises ``CommitError``: if any error occurs while committing
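
        Example (illustrative; the ``FileNode`` import path is an
        assumption)::

            from rhodecode.lib.vcs.nodes import FileNode

            imc = repo.in_memory_commit
            imc.add(FileNode('docs/intro.rst', content='Hello'))
            new_commit = imc.commit(
                message=u'Add intro docs',
                author=u'Joe Doe <joe.doe@example.com>')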
1437 1437 """
1438 1438 raise NotImplementedError
1439 1439
1440 1440
1441 1441 class BaseInMemoryChangesetClass(type):
1442 1442
1443 1443 def __instancecheck__(self, instance):
1444 1444 return isinstance(instance, BaseInMemoryCommit)
1445 1445
1446 1446
1447 1447 class BaseInMemoryChangeset(BaseInMemoryCommit):
1448 1448
1449 1449 __metaclass__ = BaseInMemoryChangesetClass
1450 1450
1451 1451 def __new__(cls, *args, **kwargs):
1452 1452 warnings.warn(
1453 1453 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1454 1454 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1455 1455
1456 1456
1457 1457 class EmptyCommit(BaseCommit):
1458 1458 """
1459 1459 A dummy empty commit. It's possible to pass a hash when creating
1460 1460 an EmptyCommit.
1461 1461 """
1462 1462
1463 1463 def __init__(
1464 1464 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1465 1465 message='', author='', date=None):
1466 1466 self._empty_commit_id = commit_id
1467 1467 # TODO: johbo: Solve idx parameter, default value does not make
1468 1468 # too much sense
1469 1469 self.idx = idx
1470 1470 self.message = message
1471 1471 self.author = author
1472 1472 self.date = date or datetime.datetime.fromtimestamp(0)
1473 1473 self.repository = repo
1474 1474 self.alias = alias
1475 1475
1476 1476 @LazyProperty
1477 1477 def raw_id(self):
1478 1478 """
1479 1479 Returns raw string identifying this commit, useful for web
1480 1480 representation.
1481 1481 """
1482 1482
1483 1483 return self._empty_commit_id
1484 1484
1485 1485 @LazyProperty
1486 1486 def branch(self):
1487 1487 if self.alias:
1488 1488 from rhodecode.lib.vcs.backends import get_backend
1489 1489 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1490 1490
1491 1491 @LazyProperty
1492 1492 def short_id(self):
1493 1493 return self.raw_id[:12]
1494 1494
1495 1495 @LazyProperty
1496 1496 def id(self):
1497 1497 return self.raw_id
1498 1498
1499 1499 def get_file_commit(self, path):
1500 1500 return self
1501 1501
1502 1502 def get_file_content(self, path):
1503 1503 return u''
1504 1504
1505 1505 def get_file_size(self, path):
1506 1506 return 0
1507 1507
1508 1508
1509 1509 class EmptyChangesetClass(type):
1510 1510
1511 1511 def __instancecheck__(self, instance):
1512 1512 return isinstance(instance, EmptyCommit)
1513 1513
1514 1514
1515 1515 class EmptyChangeset(EmptyCommit):
1516 1516
1517 1517 __metaclass__ = EmptyChangesetClass
1518 1518
1519 1519 def __new__(cls, *args, **kwargs):
1520 1520 warnings.warn(
1521 1521 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1522 1522 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1523 1523
1524 1524 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1525 1525 alias=None, revision=-1, message='', author='', date=None):
1526 1526 if requested_revision is not None:
1527 1527 warnings.warn(
1528 1528 "Parameter requested_revision not supported anymore",
1529 1529 DeprecationWarning)
1530 1530 super(EmptyChangeset, self).__init__(
1531 1531 commit_id=cs, repo=repo, alias=alias, idx=revision,
1532 1532 message=message, author=author, date=date)
1533 1533
1534 1534 @property
1535 1535 def revision(self):
1536 1536 warnings.warn("Use idx instead", DeprecationWarning)
1537 1537 return self.idx
1538 1538
1539 1539 @revision.setter
1540 1540 def revision(self, value):
1541 1541 warnings.warn("Use idx instead", DeprecationWarning)
1542 1542 self.idx = value
1543 1543
1544 1544
1545 1545 class EmptyRepository(BaseRepository):
1546 1546 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1547 1547 pass
1548 1548
1549 1549 def get_diff(self, *args, **kwargs):
1550 1550 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1551 1551 return GitDiff('')
1552 1552
1553 1553
1554 1554 class CollectionGenerator(object):
1555 1555
1556 1556 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1557 1557 self.repo = repo
1558 1558 self.commit_ids = commit_ids
1559 1559 # TODO: (oliver) this isn't currently hooked up
1560 1560 self.collection_size = None
1561 1561 self.pre_load = pre_load
1562 1562
1563 1563 def __len__(self):
1564 1564 if self.collection_size is not None:
1565 1565 return self.collection_size
1566 1566 return self.commit_ids.__len__()
1567 1567
1568 1568 def __iter__(self):
1569 1569 for commit_id in self.commit_ids:
1570 1570 # TODO: johbo: Mercurial passes in commit indices or commit ids
1571 1571 yield self._commit_factory(commit_id)
1572 1572
1573 1573 def _commit_factory(self, commit_id):
1574 1574 """
1575 1575 Allows backends to override the way commits are generated.
1576 1576 """
1577 1577 return self.repo.get_commit(commit_id=commit_id,
1578 1578 pre_load=self.pre_load)
1579 1579
1580 1580 def __getslice__(self, i, j):
1581 1581 """
1582 1582 Returns an iterator of sliced repository
1583 1583 """
1584 1584 commit_ids = self.commit_ids[i:j]
1585 1585 return self.__class__(
1586 1586 self.repo, commit_ids, pre_load=self.pre_load)
1587 1587
1588 1588 def __repr__(self):
1589 1589 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1590 1590
1591 1591
1592 1592 class Config(object):
1593 1593 """
1594 1594 Represents the configuration for a repository.
1595 1595
1596 1596 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1597 1597 standard library. It implements only the needed subset.
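
    Example (illustrative)::

        config = Config()
        config.set('ui', 'username', 'Joe Doe <joe.doe@example.com>')
        config.get('ui', 'username')  # -> 'Joe Doe <joe.doe@example.com>'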
1598 1598 """
1599 1599
1600 1600 def __init__(self):
1601 1601 self._values = {}
1602 1602
1603 1603 def copy(self):
1604 1604 clone = Config()
1605 1605 for section, values in self._values.items():
1606 1606 clone._values[section] = values.copy()
1607 1607 return clone
1608 1608
1609 1609 def __repr__(self):
1610 1610 return '<Config(%s sections) at %s>' % (
1611 1611 len(self._values), hex(id(self)))
1612 1612
1613 1613 def items(self, section):
1614 1614 return self._values.get(section, {}).iteritems()
1615 1615
1616 1616 def get(self, section, option):
1617 1617 return self._values.get(section, {}).get(option)
1618 1618
1619 1619 def set(self, section, option, value):
1620 1620 section_values = self._values.setdefault(section, {})
1621 1621 section_values[option] = value
1622 1622
1623 1623 def clear_section(self, section):
1624 1624 self._values[section] = {}
1625 1625
1626 1626 def serialize(self):
1627 1627 """
1628 1628 Creates a list of three tuples (section, key, value) representing
1629 1629 this config object.
1630 1630 """
1631 1631 items = []
1632 1632 for section in self._values:
1633 1633 for option, value in self._values[section].items():
1634 1634 items.append(
1635 1635 (safe_str(section), safe_str(option), safe_str(value)))
1636 1636 return items
1637 1637
1638 1638
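A short sketch of the Config API above, with placeholder section and option names:

    config = Config()
    config.set('ui', 'username', 'RhodeCode <admin@example.com>')
    config.set('extensions', 'largefiles', '')
    print(config.get('ui', 'username'))

    # serialize() flattens the nested mapping into (section, option, value) tuples
    for section, option, value in config.serialize():
        print('%s.%s=%s' % (section, option, value))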
1639 1639 class Diff(object):
1640 1640 """
1641 1641 Represents a diff result from a repository backend.
1642 1642
1643 1643 Subclasses have to provide a backend specific value for
1644 1644 :attr:`_header_re` and :attr:`_meta_re`.
1645 1645 """
1646 1646 _meta_re = None
1647 1647 _header_re = None
1648 1648
1649 1649 def __init__(self, raw_diff):
1650 1650 self.raw = raw_diff
1651 1651
1652 1652 def chunks(self):
1653 1653 """
1654 1654 split the diff into separate chunks, one per `diff --git a/file b/file`
1655 1655 section. To make the chunks consistent we must prepend a \n, and make sure
1656 1656 we can detect the last chunk, as it follows a special rule
1657 1657 """
1658 1658
1659 1659 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1660 1660 header = diff_parts[0]
1661 1661
1662 1662 if self._meta_re:
1663 1663 match = self._meta_re.match(header)
1664 1664
1665 1665 chunks = diff_parts[1:]
1666 1666 total_chunks = len(chunks)
1667 1667
1668 1668 return (
1669 1669 DiffChunk(chunk, self, cur_chunk == total_chunks)
1670 1670 for cur_chunk, chunk in enumerate(chunks, start=1))
1671 1671
1672 1672
1673 1673 class DiffChunk(object):
1674 1674
1675 1675 def __init__(self, chunk, diff, last_chunk):
1676 1676 self._diff = diff
1677 1677
1678 1678 # since we split by \ndiff --git, that separator is lost from the original diff;
1679 1679 # we need to re-apply it at the end, EXCEPT when it's the last chunk
1680 1680 if not last_chunk:
1681 1681 chunk += '\n'
1682 1682
1683 1683 match = self._diff._header_re.match(chunk)
1684 1684 self.header = match.groupdict()
1685 1685 self.diff = chunk[match.end():]
1686 1686 self.raw = chunk
1687 1687
1688 1688
1689 1689 class BasePathPermissionChecker(object):
1690 1690
1691 1691 @staticmethod
1692 1692 def create_from_patterns(includes, excludes):
1693 1693 if includes and '*' in includes and not excludes:
1694 1694 return AllPathPermissionChecker()
1695 1695 elif excludes and '*' in excludes:
1696 1696 return NonePathPermissionChecker()
1697 1697 else:
1698 1698 return PatternPathPermissionChecker(includes, excludes)
1699 1699
1700 1700 @property
1701 1701 def has_full_access(self):
1702 1702 raise NotImplementedError()
1703 1703
1704 1704 def has_access(self, path):
1705 1705 raise NotImplementedError()
1706 1706
1707 1707
1708 1708 class AllPathPermissionChecker(BasePathPermissionChecker):
1709 1709
1710 1710 @property
1711 1711 def has_full_access(self):
1712 1712 return True
1713 1713
1714 1714 def has_access(self, path):
1715 1715 return True
1716 1716
1717 1717
1718 1718 class NonePathPermissionChecker(BasePathPermissionChecker):
1719 1719
1720 1720 @property
1721 1721 def has_full_access(self):
1722 1722 return False
1723 1723
1724 1724 def has_access(self, path):
1725 1725 return False
1726 1726
1727 1727
1728 1728 class PatternPathPermissionChecker(BasePathPermissionChecker):
1729 1729
1730 1730 def __init__(self, includes, excludes):
1731 1731 self.includes = includes
1732 1732 self.excludes = excludes
1733 1733 self.includes_re = [] if not includes else [
1734 1734 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1735 1735 self.excludes_re = [] if not excludes else [
1736 1736 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1737 1737
1738 1738 @property
1739 1739 def has_full_access(self):
1740 1740 return '*' in self.includes and not self.excludes
1741 1741
1742 1742 def has_access(self, path):
1743 1743 for regex in self.excludes_re:
1744 1744 if regex.match(path):
1745 1745 return False
1746 1746 for regex in self.includes_re:
1747 1747 if regex.match(path):
1748 1748 return True
1749 1749 return False
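To illustrate how the factory at the top of this hierarchy picks a checker, a hedged sketch with made-up patterns (note that has_access above evaluates excludes before includes):

    checker = BasePathPermissionChecker.create_from_patterns(
        includes=['docs/*', 'README.rst'], excludes=['docs/secret/*'])
    assert isinstance(checker, PatternPathPermissionChecker)
    assert not checker.has_full_access
    assert checker.has_access('docs/index.rst')
    assert not checker.has_access('docs/secret/plan.txt')  # exclude wins
    assert not checker.has_access('setup.py')  # no include matches

    # '*' alone in includes (with no excludes) short-circuits to AllPathPermissionChecker
    assert BasePathPermissionChecker.create_from_patterns(['*'], None).has_full_access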
@@ -1,67 +1,75 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Internal settings for vcs-lib
23 23 """
24 24
25 25 # list of default encodings used by the safe_unicode/safe_str methods
26 26 DEFAULT_ENCODINGS = ['utf8']
27 27
28 28 # Optional arguments to rev-filter; it has to be a list
29 29 # It can also be ['--branches', '--tags']
30 30 GIT_REV_FILTER = ['--all']
31 31
32 32 # Compatibility version when creating SVN repositories. None means newest.
33 33 # Other available options are: pre-1.4-compatible, pre-1.5-compatible,
34 34 # pre-1.6-compatible, pre-1.8-compatible
35 35 SVN_COMPATIBLE_VERSION = None
36 36
37 37 ALIASES = ['hg', 'git', 'svn']
38 38
39 39 BACKENDS = {
40 40 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
41 41 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
42 42 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
43 43 }
44 44
45 45 # TODO: Remove once controllers/files.py is adjusted
46 46 ARCHIVE_SPECS = {
47 47 'tbz2': ('application/x-bzip2', '.tar.bz2'),
48 48 'tgz': ('application/x-gzip', '.tar.gz'),
49 49 'zip': ('application/zip', '.zip'),
50 50 }
51 51
52 52 HOOKS_PROTOCOL = None
53 53 HOOKS_DIRECT_CALLS = False
54 54 HOOKS_HOST = '127.0.0.1'
55 55
56 56
57 MERGE_MESSAGE_TMPL = (
58 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}\n\n '
59 u'{pr_title}')
60 MERGE_DRY_RUN_MESSAGE = 'dry_run_merge_message_from_rhodecode'
61 MERGE_DRY_RUN_USER = 'Dry-Run User'
62 MERGE_DRY_RUN_EMAIL = 'dry-run-merge@rhodecode.com'
63
64
57 65 def available_aliases():
58 66 """
59 67 Mercurial is required for the system to work, so in case vcs.backends does
60 68 not include it, we make sure it will be available internally.
61 69 TODO: anderson: refactor vcs.backends so this won't be necessary; the VCS
62 70 server should be responsible for dictating the available backends.
63 71 """
64 72 aliases = ALIASES[:]
65 73 if 'hg' not in aliases:
66 74 aliases += ['hg']
67 75 return aliases
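The merge message template added above is rendered with str.format at merge time; a sketch with placeholder values (the real call site passes pull request and repository attributes, including target_repo/target_ref_name, which this template simply ignores since str.format drops unused keyword arguments):

    msg = MERGE_MESSAGE_TMPL.format(
        pr_id=1,
        source_repo='group/fork-of-repo',
        source_ref_name='feature-branch',
        pr_title='Add feature X')
    # msg == u'Merge pull request #1 from group/fork-of-repo feature-branch\n\n Add feature X'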
@@ -1,1727 +1,1730 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid.threadlocal import get_current_request
34 34
35 35 from rhodecode import events
36 36 from rhodecode.translation import lazy_ugettext#, _
37 37 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 38 from rhodecode.lib import audit_logger
39 39 from rhodecode.lib.compat import OrderedDict
40 40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 41 from rhodecode.lib.markup_renderer import (
42 42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 44 from rhodecode.lib.vcs.backends.base import (
45 45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 46 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 47 from rhodecode.lib.vcs.exceptions import (
48 48 CommitDoesNotExistError, EmptyRepositoryError)
49 49 from rhodecode.model import BaseModel
50 50 from rhodecode.model.changeset_status import ChangesetStatusModel
51 51 from rhodecode.model.comment import CommentsModel
52 52 from rhodecode.model.db import (
53 53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 55 from rhodecode.model.meta import Session
56 56 from rhodecode.model.notification import NotificationModel, \
57 57 EmailNotificationModel
58 58 from rhodecode.model.scm import ScmModel
59 59 from rhodecode.model.settings import VcsSettingsModel
60 60
61 61
62 62 log = logging.getLogger(__name__)
63 63
64 64
65 65 # Data structure to hold the response data when updating commits during a pull
66 66 # request update.
67 67 UpdateResponse = collections.namedtuple('UpdateResponse', [
68 68 'executed', 'reason', 'new', 'old', 'changes',
69 69 'source_changed', 'target_changed'])
70 70
71 71
72 72 class PullRequestModel(BaseModel):
73 73
74 74 cls = PullRequest
75 75
76 76 DIFF_CONTEXT = 3
77 77
78 78 MERGE_STATUS_MESSAGES = {
79 79 MergeFailureReason.NONE: lazy_ugettext(
80 80 'This pull request can be automatically merged.'),
81 81 MergeFailureReason.UNKNOWN: lazy_ugettext(
82 82 'This pull request cannot be merged because of an unhandled'
83 83 ' exception.'),
84 84 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
85 85 'This pull request cannot be merged because of merge conflicts.'),
86 86 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
87 87 'This pull request could not be merged because push to target'
88 88 ' failed.'),
89 89 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
90 90 'This pull request cannot be merged because the target is not a'
91 91 ' head.'),
92 92 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
93 93 'This pull request cannot be merged because the source contains'
94 94 ' more branches than the target.'),
95 95 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
96 96 'This pull request cannot be merged because the target has'
97 97 ' multiple heads.'),
98 98 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
99 99 'This pull request cannot be merged because the target repository'
100 100 ' is locked.'),
101 101 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
102 102 'This pull request cannot be merged because the target or the '
103 103 'source reference is missing.'),
104 104 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
105 105 'This pull request cannot be merged because the target '
106 106 'reference is missing.'),
107 107 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
108 108 'This pull request cannot be merged because the source '
109 109 'reference is missing.'),
110 110 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
111 111 'This pull request cannot be merged because of conflicts related '
112 112 'to sub repositories.'),
113 113 }
114 114
115 115 UPDATE_STATUS_MESSAGES = {
116 116 UpdateFailureReason.NONE: lazy_ugettext(
117 117 'Pull request update successful.'),
118 118 UpdateFailureReason.UNKNOWN: lazy_ugettext(
119 119 'Pull request update failed because of an unknown error.'),
120 120 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
121 121 'No update needed because the source and target have not changed.'),
122 122 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
123 123 'Pull request cannot be updated because the reference type is '
124 124 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
125 125 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
126 126 'This pull request cannot be updated because the target '
127 127 'reference is missing.'),
128 128 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
129 129 'This pull request cannot be updated because the source '
130 130 'reference is missing.'),
131 131 }
132 132
133 133 def __get_pull_request(self, pull_request):
134 134 return self._get_instance((
135 135 PullRequest, PullRequestVersion), pull_request)
136 136
137 137 def _check_perms(self, perms, pull_request, user, api=False):
138 138 if not api:
139 139 return h.HasRepoPermissionAny(*perms)(
140 140 user=user, repo_name=pull_request.target_repo.repo_name)
141 141 else:
142 142 return h.HasRepoPermissionAnyApi(*perms)(
143 143 user=user, repo_name=pull_request.target_repo.repo_name)
144 144
145 145 def check_user_read(self, pull_request, user, api=False):
146 146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
147 147 return self._check_perms(_perms, pull_request, user, api)
148 148
149 149 def check_user_merge(self, pull_request, user, api=False):
150 150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
151 151 return self._check_perms(_perms, pull_request, user, api)
152 152
153 153 def check_user_update(self, pull_request, user, api=False):
154 154 owner = user.user_id == pull_request.user_id
155 155 return self.check_user_merge(pull_request, user, api) or owner
156 156
157 157 def check_user_delete(self, pull_request, user):
158 158 owner = user.user_id == pull_request.user_id
159 159 _perms = ('repository.admin',)
160 160 return self._check_perms(_perms, pull_request, user) or owner
161 161
162 162 def check_user_change_status(self, pull_request, user, api=False):
163 163 reviewer = user.user_id in [x.user_id for x in
164 164 pull_request.reviewers]
165 165 return self.check_user_update(pull_request, user, api) or reviewer
166 166
167 167 def check_user_comment(self, pull_request, user):
168 168 owner = user.user_id == pull_request.user_id
169 169 return self.check_user_read(pull_request, user) or owner
170 170
171 171 def get(self, pull_request):
172 172 return self.__get_pull_request(pull_request)
173 173
174 174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
175 175 opened_by=None, order_by=None,
176 176 order_dir='desc'):
177 177 repo = None
178 178 if repo_name:
179 179 repo = self._get_repo(repo_name)
180 180
181 181 q = PullRequest.query()
182 182
183 183 # source or target
184 184 if repo and source:
185 185 q = q.filter(PullRequest.source_repo == repo)
186 186 elif repo:
187 187 q = q.filter(PullRequest.target_repo == repo)
188 188
189 189 # closed,opened
190 190 if statuses:
191 191 q = q.filter(PullRequest.status.in_(statuses))
192 192
193 193 # opened by filter
194 194 if opened_by:
195 195 q = q.filter(PullRequest.user_id.in_(opened_by))
196 196
197 197 if order_by:
198 198 order_map = {
199 199 'name_raw': PullRequest.pull_request_id,
200 200 'title': PullRequest.title,
201 201 'updated_on_raw': PullRequest.updated_on,
202 202 'target_repo': PullRequest.target_repo_id
203 203 }
204 204 if order_dir == 'asc':
205 205 q = q.order_by(order_map[order_by].asc())
206 206 else:
207 207 q = q.order_by(order_map[order_by].desc())
208 208
209 209 return q
210 210
211 211 def count_all(self, repo_name, source=False, statuses=None,
212 212 opened_by=None):
213 213 """
214 214 Count the number of pull requests for a specific repository.
215 215
216 216 :param repo_name: target or source repo
217 217 :param source: boolean flag to specify if repo_name refers to source
218 218 :param statuses: list of pull request statuses
219 219 :param opened_by: author user of the pull request
220 220 :returns: int number of pull requests
221 221 """
222 222 q = self._prepare_get_all_query(
223 223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
224 224
225 225 return q.count()
226 226
227 227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
228 228 offset=0, length=None, order_by=None, order_dir='desc'):
229 229 """
230 230 Get all pull requests for a specific repository.
231 231
232 232 :param repo_name: target or source repo
233 233 :param source: boolean flag to specify if repo_name refers to source
234 234 :param statuses: list of pull request statuses
235 235 :param opened_by: author user of the pull request
236 236 :param offset: pagination offset
237 237 :param length: length of returned list
238 238 :param order_by: order of the returned list
239 239 :param order_dir: 'asc' or 'desc' ordering direction
240 240 :returns: list of pull requests
241 241 """
242 242 q = self._prepare_get_all_query(
243 243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
244 244 order_by=order_by, order_dir=order_dir)
245 245
246 246 if length:
247 247 pull_requests = q.limit(length).offset(offset).all()
248 248 else:
249 249 pull_requests = q.all()
250 250
251 251 return pull_requests
252 252
253 253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
254 254 opened_by=None):
255 255 """
256 256 Count the number of pull requests for a specific repository that are
257 257 awaiting review.
258 258
259 259 :param repo_name: target or source repo
260 260 :param source: boolean flag to specify if repo_name refers to source
261 261 :param statuses: list of pull request statuses
262 262 :param opened_by: author user of the pull request
263 263 :returns: int number of pull requests
264 264 """
265 265 pull_requests = self.get_awaiting_review(
266 266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
267 267
268 268 return len(pull_requests)
269 269
270 270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
271 271 opened_by=None, offset=0, length=None,
272 272 order_by=None, order_dir='desc'):
273 273 """
274 274 Get all pull requests for a specific repository that are awaiting
275 275 review.
276 276
277 277 :param repo_name: target or source repo
278 278 :param source: boolean flag to specify if repo_name refers to source
279 279 :param statuses: list of pull request statuses
280 280 :param opened_by: author user of the pull request
281 281 :param offset: pagination offset
282 282 :param length: length of returned list
283 283 :param order_by: order of the returned list
284 284 :param order_dir: 'asc' or 'desc' ordering direction
285 285 :returns: list of pull requests
286 286 """
287 287 pull_requests = self.get_all(
288 288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 289 order_by=order_by, order_dir=order_dir)
290 290
291 291 _filtered_pull_requests = []
292 292 for pr in pull_requests:
293 293 status = pr.calculated_review_status()
294 294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
295 295 ChangesetStatus.STATUS_UNDER_REVIEW]:
296 296 _filtered_pull_requests.append(pr)
297 297 if length:
298 298 return _filtered_pull_requests[offset:offset+length]
299 299 else:
300 300 return _filtered_pull_requests
301 301
302 302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
303 303 opened_by=None, user_id=None):
304 304 """
305 305 Count the number of pull requests for a specific repository that are
306 306 awaiting review from a specific user.
307 307
308 308 :param repo_name: target or source repo
309 309 :param source: boolean flag to specify if repo_name refers to source
310 310 :param statuses: list of pull request statuses
311 311 :param opened_by: author user of the pull request
312 312 :param user_id: reviewer user of the pull request
313 313 :returns: int number of pull requests
314 314 """
315 315 pull_requests = self.get_awaiting_my_review(
316 316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
317 317 user_id=user_id)
318 318
319 319 return len(pull_requests)
320 320
321 321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
322 322 opened_by=None, user_id=None, offset=0,
323 323 length=None, order_by=None, order_dir='desc'):
324 324 """
325 325 Get all pull requests for a specific repository that are awaiting
326 326 review from a specific user.
327 327
328 328 :param repo_name: target or source repo
329 329 :param source: boolean flag to specify if repo_name refers to source
330 330 :param statuses: list of pull request statuses
331 331 :param opened_by: author user of the pull request
332 332 :param user_id: reviewer user of the pull request
333 333 :param offset: pagination offset
334 334 :param length: length of returned list
335 335 :param order_by: order of the returned list
336 336 :param order_dir: 'asc' or 'desc' ordering direction
337 337 :returns: list of pull requests
338 338 """
339 339 pull_requests = self.get_all(
340 340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
341 341 order_by=order_by, order_dir=order_dir)
342 342
343 343 _my = PullRequestModel().get_not_reviewed(user_id)
344 344 my_participation = []
345 345 for pr in pull_requests:
346 346 if pr in _my:
347 347 my_participation.append(pr)
348 348 _filtered_pull_requests = my_participation
349 349 if length:
350 350 return _filtered_pull_requests[offset:offset+length]
351 351 else:
352 352 return _filtered_pull_requests
353 353
354 354 def get_not_reviewed(self, user_id):
355 355 return [
356 356 x.pull_request for x in PullRequestReviewers.query().filter(
357 357 PullRequestReviewers.user_id == user_id).all()
358 358 ]
359 359
360 360 def _prepare_participating_query(self, user_id=None, statuses=None,
361 361 order_by=None, order_dir='desc'):
362 362 q = PullRequest.query()
363 363 if user_id:
364 364 reviewers_subquery = Session().query(
365 365 PullRequestReviewers.pull_request_id).filter(
366 366 PullRequestReviewers.user_id == user_id).subquery()
367 367 user_filter = or_(
368 368 PullRequest.user_id == user_id,
369 369 PullRequest.pull_request_id.in_(reviewers_subquery)
370 370 )
371 371 q = PullRequest.query().filter(user_filter)
372 372
373 373 # closed,opened
374 374 if statuses:
375 375 q = q.filter(PullRequest.status.in_(statuses))
376 376
377 377 if order_by:
378 378 order_map = {
379 379 'name_raw': PullRequest.pull_request_id,
380 380 'title': PullRequest.title,
381 381 'updated_on_raw': PullRequest.updated_on,
382 382 'target_repo': PullRequest.target_repo_id
383 383 }
384 384 if order_dir == 'asc':
385 385 q = q.order_by(order_map[order_by].asc())
386 386 else:
387 387 q = q.order_by(order_map[order_by].desc())
388 388
389 389 return q
390 390
391 391 def count_im_participating_in(self, user_id=None, statuses=None):
392 392 q = self._prepare_participating_query(user_id, statuses=statuses)
393 393 return q.count()
394 394
395 395 def get_im_participating_in(
396 396 self, user_id=None, statuses=None, offset=0,
397 397 length=None, order_by=None, order_dir='desc'):
398 398 """
399 399 Get all pull requests that I'm participating in or have opened
400 400 """
401 401
402 402 q = self._prepare_participating_query(
403 403 user_id, statuses=statuses, order_by=order_by,
404 404 order_dir=order_dir)
405 405
406 406 if length:
407 407 pull_requests = q.limit(length).offset(offset).all()
408 408 else:
409 409 pull_requests = q.all()
410 410
411 411 return pull_requests
412 412
413 413 def get_versions(self, pull_request):
414 414 """
415 415 returns the versions of a pull request sorted by ID ascending
416 416 """
417 417 return PullRequestVersion.query()\
418 418 .filter(PullRequestVersion.pull_request == pull_request)\
419 419 .order_by(PullRequestVersion.pull_request_version_id.asc())\
420 420 .all()
421 421
422 422 def get_pr_version(self, pull_request_id, version=None):
423 423 at_version = None
424 424
425 425 if version and version == 'latest':
426 426 pull_request_ver = PullRequest.get(pull_request_id)
427 427 pull_request_obj = pull_request_ver
428 428 _org_pull_request_obj = pull_request_obj
429 429 at_version = 'latest'
430 430 elif version:
431 431 pull_request_ver = PullRequestVersion.get_or_404(version)
432 432 pull_request_obj = pull_request_ver
433 433 _org_pull_request_obj = pull_request_ver.pull_request
434 434 at_version = pull_request_ver.pull_request_version_id
435 435 else:
436 436 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
437 437 pull_request_id)
438 438
439 439 pull_request_display_obj = PullRequest.get_pr_display_object(
440 440 pull_request_obj, _org_pull_request_obj)
441 441
442 442 return _org_pull_request_obj, pull_request_obj, \
443 443 pull_request_display_obj, at_version
444 444
445 445 def create(self, created_by, source_repo, source_ref, target_repo,
446 446 target_ref, revisions, reviewers, title, description=None,
447 447 description_renderer=None,
448 448 reviewer_data=None, translator=None, auth_user=None):
449 449 translator = translator or get_current_request().translate
450 450
451 451 created_by_user = self._get_user(created_by)
452 452 auth_user = auth_user or created_by_user.AuthUser()
453 453 source_repo = self._get_repo(source_repo)
454 454 target_repo = self._get_repo(target_repo)
455 455
456 456 pull_request = PullRequest()
457 457 pull_request.source_repo = source_repo
458 458 pull_request.source_ref = source_ref
459 459 pull_request.target_repo = target_repo
460 460 pull_request.target_ref = target_ref
461 461 pull_request.revisions = revisions
462 462 pull_request.title = title
463 463 pull_request.description = description
464 464 pull_request.description_renderer = description_renderer
465 465 pull_request.author = created_by_user
466 466 pull_request.reviewer_data = reviewer_data
467 467
468 468 Session().add(pull_request)
469 469 Session().flush()
470 470
471 471 reviewer_ids = set()
472 472 # members / reviewers
473 473 for reviewer_object in reviewers:
474 474 user_id, reasons, mandatory, rules = reviewer_object
475 475 user = self._get_user(user_id)
476 476
477 477 # skip duplicates
478 478 if user.user_id in reviewer_ids:
479 479 continue
480 480
481 481 reviewer_ids.add(user.user_id)
482 482
483 483 reviewer = PullRequestReviewers()
484 484 reviewer.user = user
485 485 reviewer.pull_request = pull_request
486 486 reviewer.reasons = reasons
487 487 reviewer.mandatory = mandatory
488 488
489 489 # NOTE(marcink): pick only first rule for now
490 490 rule_id = list(rules)[0] if rules else None
491 491 rule = RepoReviewRule.get(rule_id) if rule_id else None
492 492 if rule:
493 493 review_group = rule.user_group_vote_rule(user_id)
494 494 # we check if this particular reviewer is member of a voting group
495 495 if review_group:
496 496 # NOTE(marcink):
497 497 # it can be that the user is a member of more groups, but we pick
498 498 # the first one, same as the default reviewers algo
499 499 review_group = review_group[0]
500 500
501 501 rule_data = {
502 502 'rule_name':
503 503 rule.review_rule_name,
504 504 'rule_user_group_entry_id':
505 505 review_group.repo_review_rule_users_group_id,
506 506 'rule_user_group_name':
507 507 review_group.users_group.users_group_name,
508 508 'rule_user_group_members':
509 509 [x.user.username for x in review_group.users_group.members],
510 510 'rule_user_group_members_id':
511 511 [x.user.user_id for x in review_group.users_group.members],
512 512 }
513 513 # e.g {'vote_rule': -1, 'mandatory': True}
514 514 rule_data.update(review_group.rule_data())
515 515
516 516 reviewer.rule_data = rule_data
517 517
518 518 Session().add(reviewer)
519 519 Session().flush()
520 520
521 521 # Set approval status to "Under Review" for all commits which are
522 522 # part of this pull request.
523 523 ChangesetStatusModel().set_status(
524 524 repo=target_repo,
525 525 status=ChangesetStatus.STATUS_UNDER_REVIEW,
526 526 user=created_by_user,
527 527 pull_request=pull_request
528 528 )
529 529 # we commit early at this point. This has to do with the fact
530 530 # that the queries above do some row-locking. Because of that
531 531 # we need to commit and finish the transaction before the validate call
532 532 # below, which for large repos could take long, resulting in long row locks
533 533 Session().commit()
534 534
535 535 # prepare workspace, and run initial merge simulation
536 536 MergeCheck.validate(
537 537 pull_request, auth_user=auth_user, translator=translator)
538 538
539 539 self.notify_reviewers(pull_request, reviewer_ids)
540 540 self._trigger_pull_request_hook(
541 541 pull_request, created_by_user, 'create')
542 542
543 543 creation_data = pull_request.get_api_data(with_merge_state=False)
544 544 self._log_audit_action(
545 545 'repo.pull_request.create', {'data': creation_data},
546 546 auth_user, pull_request)
547 547
548 548 return pull_request
549 549
550 550 def _trigger_pull_request_hook(self, pull_request, user, action):
551 551 pull_request = self.__get_pull_request(pull_request)
552 552 target_scm = pull_request.target_repo.scm_instance()
553 553 if action == 'create':
554 554 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
555 555 elif action == 'merge':
556 556 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
557 557 elif action == 'close':
558 558 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
559 559 elif action == 'review_status_change':
560 560 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
561 561 elif action == 'update':
562 562 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
563 563 else:
564 564 return
565 565
566 566 trigger_hook(
567 567 username=user.username,
568 568 repo_name=pull_request.target_repo.repo_name,
569 569 repo_alias=target_scm.alias,
570 570 pull_request=pull_request)
571 571
572 572 def _get_commit_ids(self, pull_request):
573 573 """
574 574 Return the commit ids of the merged pull request.
575 575
576 576 This method does not yet deal correctly with the lack of autoupdates
577 577 nor with implicit target updates.
578 578 For example: if a commit in the source repo is already in the target, it
579 579 will be reported anyway.
580 580 """
581 581 merge_rev = pull_request.merge_rev
582 582 if merge_rev is None:
583 583 raise ValueError('This pull request was not merged yet')
584 584
585 585 commit_ids = list(pull_request.revisions)
586 586 if merge_rev not in commit_ids:
587 587 commit_ids.append(merge_rev)
588 588
589 589 return commit_ids
590 590
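A worked example of the logic above, with shortened placeholder ids:

    revisions = ['aaa111', 'bbb222']   # pull_request.revisions
    merge_rev = 'ccc333'               # pull_request.merge_rev
    commit_ids = list(revisions)
    if merge_rev not in commit_ids:
        commit_ids.append(merge_rev)
    # commit_ids == ['aaa111', 'bbb222', 'ccc333']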
591 591 def merge_repo(self, pull_request, user, extras):
592 592 log.debug("Merging pull request %s", pull_request.pull_request_id)
593 593 merge_state = self._merge_pull_request(pull_request, user, extras)
594 594 if merge_state.executed:
595 595 log.debug(
596 596 "Merge was successful, updating the pull request comments.")
597 597 self._comment_and_close_pr(pull_request, user, merge_state)
598 598
599 599 self._log_audit_action(
600 600 'repo.pull_request.merge',
601 601 {'merge_state': merge_state.__dict__},
602 602 user, pull_request)
603 603
604 604 else:
605 605 log.warn("Merge failed, not updating the pull request.")
606 606 return merge_state
607 607
608 608 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
609 609 target_vcs = pull_request.target_repo.scm_instance()
610 610 source_vcs = pull_request.source_repo.scm_instance()
611 target_ref = self._refresh_reference(
612 pull_request.target_ref_parts, target_vcs)
613 611
614 message = merge_msg or (
615 'Merge pull request #%(pr_id)s from '
616 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
617 'pr_id': pull_request.pull_request_id,
618 'source_repo': source_vcs.name,
619 'source_ref_name': pull_request.source_ref_parts.name,
620 'pr_title': pull_request.title
621 }
612 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
613 pr_id=pull_request.pull_request_id,
614 pr_title=pull_request.title,
615 source_repo=source_vcs.name,
616 source_ref_name=pull_request.source_ref_parts.name,
617 target_repo=target_vcs.name,
618 target_ref_name=pull_request.target_ref_parts.name,
619 )
622 620
623 621 workspace_id = self._workspace_id(pull_request)
624 622 repo_id = pull_request.target_repo.repo_id
625 623 use_rebase = self._use_rebase_for_merging(pull_request)
626 624 close_branch = self._close_branch_before_merging(pull_request)
627 625
626 target_ref = self._refresh_reference(
627 pull_request.target_ref_parts, target_vcs)
628
628 629 callback_daemon, extras = prepare_callback_daemon(
629 630 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
630 631 host=vcs_settings.HOOKS_HOST,
631 632 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
632 633
633 634 with callback_daemon:
634 635 # TODO: johbo: Implement a clean way to run a config_override
635 636 # for a single call.
636 637 target_vcs.config.set(
637 638 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
639
640 user_name = user.short_contact
638 641 merge_state = target_vcs.merge(
639 642 repo_id, workspace_id, target_ref, source_vcs,
640 643 pull_request.source_ref_parts,
641 user_name=user.username, user_email=user.email,
644 user_name=user_name, user_email=user.email,
642 645 message=message, use_rebase=use_rebase,
643 646 close_branch=close_branch)
644 647 return merge_state
645 648
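The switch from user.username to user.short_contact above is the point of this change: the merge commit gets a real two part author name instead of a bare login. A hedged sketch of the resulting author line, assuming short_contact renders as 'FirstName LastName' (the exact rendering is defined by the User model):

    user_name = user.short_contact          # e.g. u'John Doe' (assumed rendering)
    # backends combine the name and email into a standard author line:
    author = u'%s <%s>' % (user_name, user.email)
    # -> u'John Doe <john@example.com>'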
646 649 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
647 650 pull_request.merge_rev = merge_state.merge_ref.commit_id
648 651 pull_request.updated_on = datetime.datetime.now()
649 652 close_msg = close_msg or 'Pull request merged and closed'
650 653
651 654 CommentsModel().create(
652 655 text=safe_unicode(close_msg),
653 656 repo=pull_request.target_repo.repo_id,
654 657 user=user.user_id,
655 658 pull_request=pull_request.pull_request_id,
656 659 f_path=None,
657 660 line_no=None,
658 661 closing_pr=True
659 662 )
660 663
661 664 Session().add(pull_request)
662 665 Session().flush()
663 666 # TODO: paris: replace invalidation with a less radical solution
664 667 ScmModel().mark_for_invalidation(
665 668 pull_request.target_repo.repo_name)
666 669 self._trigger_pull_request_hook(pull_request, user, 'merge')
667 670
668 671 def has_valid_update_type(self, pull_request):
669 672 source_ref_type = pull_request.source_ref_parts.type
670 673 return source_ref_type in ['book', 'branch', 'tag']
671 674
672 675 def update_commits(self, pull_request):
673 676 """
674 677 Get the updated list of commits for the pull request
675 678 and return the new pull request version and the list
676 679 of commits processed by this update action
677 680 """
678 681 pull_request = self.__get_pull_request(pull_request)
679 682 source_ref_type = pull_request.source_ref_parts.type
680 683 source_ref_name = pull_request.source_ref_parts.name
681 684 source_ref_id = pull_request.source_ref_parts.commit_id
682 685
683 686 target_ref_type = pull_request.target_ref_parts.type
684 687 target_ref_name = pull_request.target_ref_parts.name
685 688 target_ref_id = pull_request.target_ref_parts.commit_id
686 689
687 690 if not self.has_valid_update_type(pull_request):
688 691 log.debug(
689 692 "Skipping update of pull request %s due to ref type: %s",
690 693 pull_request, source_ref_type)
691 694 return UpdateResponse(
692 695 executed=False,
693 696 reason=UpdateFailureReason.WRONG_REF_TYPE,
694 697 old=pull_request, new=None, changes=None,
695 698 source_changed=False, target_changed=False)
696 699
697 700 # source repo
698 701 source_repo = pull_request.source_repo.scm_instance()
699 702 try:
700 703 source_commit = source_repo.get_commit(commit_id=source_ref_name)
701 704 except CommitDoesNotExistError:
702 705 return UpdateResponse(
703 706 executed=False,
704 707 reason=UpdateFailureReason.MISSING_SOURCE_REF,
705 708 old=pull_request, new=None, changes=None,
706 709 source_changed=False, target_changed=False)
707 710
708 711 source_changed = source_ref_id != source_commit.raw_id
709 712
710 713 # target repo
711 714 target_repo = pull_request.target_repo.scm_instance()
712 715 try:
713 716 target_commit = target_repo.get_commit(commit_id=target_ref_name)
714 717 except CommitDoesNotExistError:
715 718 return UpdateResponse(
716 719 executed=False,
717 720 reason=UpdateFailureReason.MISSING_TARGET_REF,
718 721 old=pull_request, new=None, changes=None,
719 722 source_changed=False, target_changed=False)
720 723 target_changed = target_ref_id != target_commit.raw_id
721 724
722 725 if not (source_changed or target_changed):
723 726 log.debug("Nothing changed in pull request %s", pull_request)
724 727 return UpdateResponse(
725 728 executed=False,
726 729 reason=UpdateFailureReason.NO_CHANGE,
727 730 old=pull_request, new=None, changes=None,
728 731 source_changed=source_changed, target_changed=target_changed)
729 732
730 733 change_in_found = 'target repo' if target_changed else 'source repo'
731 734 log.debug('Updating pull request because of change in %s detected',
732 735 change_in_found)
733 736
734 737 # Finally there is a need for an update, in case of source change
735 738 # we create a new version, else just an update
736 739 if source_changed:
737 740 pull_request_version = self._create_version_from_snapshot(pull_request)
738 741 self._link_comments_to_version(pull_request_version)
739 742 else:
740 743 try:
741 744 ver = pull_request.versions[-1]
742 745 except IndexError:
743 746 ver = None
744 747
745 748 pull_request.pull_request_version_id = \
746 749 ver.pull_request_version_id if ver else None
747 750 pull_request_version = pull_request
748 751
749 752 try:
750 753 if target_ref_type in ('tag', 'branch', 'book'):
751 754 target_commit = target_repo.get_commit(target_ref_name)
752 755 else:
753 756 target_commit = target_repo.get_commit(target_ref_id)
754 757 except CommitDoesNotExistError:
755 758 return UpdateResponse(
756 759 executed=False,
757 760 reason=UpdateFailureReason.MISSING_TARGET_REF,
758 761 old=pull_request, new=None, changes=None,
759 762 source_changed=source_changed, target_changed=target_changed)
760 763
761 764 # re-compute commit ids
762 765 old_commit_ids = pull_request.revisions
763 766 pre_load = ["author", "branch", "date", "message"]
764 767 commit_ranges = target_repo.compare(
765 768 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
766 769 pre_load=pre_load)
767 770
768 771 ancestor = target_repo.get_common_ancestor(
769 772 target_commit.raw_id, source_commit.raw_id, source_repo)
770 773
771 774 pull_request.source_ref = '%s:%s:%s' % (
772 775 source_ref_type, source_ref_name, source_commit.raw_id)
773 776 pull_request.target_ref = '%s:%s:%s' % (
774 777 target_ref_type, target_ref_name, ancestor)
775 778
776 779 pull_request.revisions = [
777 780 commit.raw_id for commit in reversed(commit_ranges)]
778 781 pull_request.updated_on = datetime.datetime.now()
779 782 Session().add(pull_request)
780 783 new_commit_ids = pull_request.revisions
781 784
782 785 old_diff_data, new_diff_data = self._generate_update_diffs(
783 786 pull_request, pull_request_version)
784 787
785 788 # calculate commit and file changes
786 789 changes = self._calculate_commit_id_changes(
787 790 old_commit_ids, new_commit_ids)
788 791 file_changes = self._calculate_file_changes(
789 792 old_diff_data, new_diff_data)
790 793
791 794 # set comments as outdated if DIFFS changed
792 795 CommentsModel().outdate_comments(
793 796 pull_request, old_diff_data=old_diff_data,
794 797 new_diff_data=new_diff_data)
795 798
796 799 commit_changes = (changes.added or changes.removed)
797 800 file_node_changes = (
798 801 file_changes.added or file_changes.modified or file_changes.removed)
799 802 pr_has_changes = commit_changes or file_node_changes
800 803
801 804 # Add an automatic comment to the pull request, in case
802 805 # anything has changed
803 806 if pr_has_changes:
804 807 update_comment = CommentsModel().create(
805 808 text=self._render_update_message(changes, file_changes),
806 809 repo=pull_request.target_repo,
807 810 user=pull_request.author,
808 811 pull_request=pull_request,
809 812 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
810 813
811 814 # Update status to "Under Review" for added commits
812 815 for commit_id in changes.added:
813 816 ChangesetStatusModel().set_status(
814 817 repo=pull_request.source_repo,
815 818 status=ChangesetStatus.STATUS_UNDER_REVIEW,
816 819 comment=update_comment,
817 820 user=pull_request.author,
818 821 pull_request=pull_request,
819 822 revision=commit_id)
820 823
821 824 log.debug(
822 825 'Updated pull request %s, added_ids: %s, common_ids: %s, '
823 826 'removed_ids: %s', pull_request.pull_request_id,
824 827 changes.added, changes.common, changes.removed)
825 828 log.debug(
826 829 'Updated pull request with the following file changes: %s',
827 830 file_changes)
828 831
829 832 log.info(
830 833 "Updated pull request %s from commit %s to commit %s, "
831 834 "stored new version %s of this pull request.",
832 835 pull_request.pull_request_id, source_ref_id,
833 836 pull_request.source_ref_parts.commit_id,
834 837 pull_request_version.pull_request_version_id)
835 838 Session().commit()
836 839 self._trigger_pull_request_hook(
837 840 pull_request, pull_request.author, 'update')
838 841
839 842 return UpdateResponse(
840 843 executed=True, reason=UpdateFailureReason.NONE,
841 844 old=pull_request, new=pull_request_version, changes=changes,
842 845 source_changed=source_changed, target_changed=target_changed)
843 846
844 847 def _create_version_from_snapshot(self, pull_request):
845 848 version = PullRequestVersion()
846 849 version.title = pull_request.title
847 850 version.description = pull_request.description
848 851 version.status = pull_request.status
849 852 version.created_on = datetime.datetime.now()
850 853 version.updated_on = pull_request.updated_on
851 854 version.user_id = pull_request.user_id
852 855 version.source_repo = pull_request.source_repo
853 856 version.source_ref = pull_request.source_ref
854 857 version.target_repo = pull_request.target_repo
855 858 version.target_ref = pull_request.target_ref
856 859
857 860 version._last_merge_source_rev = pull_request._last_merge_source_rev
858 861 version._last_merge_target_rev = pull_request._last_merge_target_rev
859 862 version.last_merge_status = pull_request.last_merge_status
860 863 version.shadow_merge_ref = pull_request.shadow_merge_ref
861 864 version.merge_rev = pull_request.merge_rev
862 865 version.reviewer_data = pull_request.reviewer_data
863 866
864 867 version.revisions = pull_request.revisions
865 868 version.pull_request = pull_request
866 869 Session().add(version)
867 870 Session().flush()
868 871
869 872 return version
870 873
871 874 def _generate_update_diffs(self, pull_request, pull_request_version):
872 875
873 876 diff_context = (
874 877 self.DIFF_CONTEXT +
875 878 CommentsModel.needed_extra_diff_context())
876 879
877 880 source_repo = pull_request_version.source_repo
878 881 source_ref_id = pull_request_version.source_ref_parts.commit_id
879 882 target_ref_id = pull_request_version.target_ref_parts.commit_id
880 883 old_diff = self._get_diff_from_pr_or_version(
881 884 source_repo, source_ref_id, target_ref_id, context=diff_context)
882 885
883 886 source_repo = pull_request.source_repo
884 887 source_ref_id = pull_request.source_ref_parts.commit_id
885 888 target_ref_id = pull_request.target_ref_parts.commit_id
886 889
887 890 new_diff = self._get_diff_from_pr_or_version(
888 891 source_repo, source_ref_id, target_ref_id, context=diff_context)
889 892
890 893 old_diff_data = diffs.DiffProcessor(old_diff)
891 894 old_diff_data.prepare()
892 895 new_diff_data = diffs.DiffProcessor(new_diff)
893 896 new_diff_data.prepare()
894 897
895 898 return old_diff_data, new_diff_data
896 899
897 900 def _link_comments_to_version(self, pull_request_version):
898 901 """
899 902 Link all unlinked comments of this pull request to the given version.
900 903
901 904 :param pull_request_version: The `PullRequestVersion` to which
902 905 the comments shall be linked.
903 906
904 907 """
905 908 pull_request = pull_request_version.pull_request
906 909 comments = ChangesetComment.query()\
907 910 .filter(
908 911 # TODO: johbo: Should we query for the repo at all here?
909 912 # Pending decision on how comments of PRs are to be related
910 913 # to either the source repo, the target repo or no repo at all.
911 914 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
912 915 ChangesetComment.pull_request == pull_request,
913 916 ChangesetComment.pull_request_version == None)\
914 917 .order_by(ChangesetComment.comment_id.asc())
915 918
916 919 # TODO: johbo: Find out why this breaks if it is done in a bulk
917 920 # operation.
918 921 for comment in comments:
919 922 comment.pull_request_version_id = (
920 923 pull_request_version.pull_request_version_id)
921 924 Session().add(comment)
922 925
923 926 def _calculate_commit_id_changes(self, old_ids, new_ids):
924 927 added = [x for x in new_ids if x not in old_ids]
925 928 common = [x for x in new_ids if x in old_ids]
926 929 removed = [x for x in old_ids if x not in new_ids]
927 930 total = new_ids
928 931 return ChangeTuple(added, common, removed, total)
929 932
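A worked example of the change tuple computed above:

    old_ids = ['a1', 'b2', 'c3']
    new_ids = ['b2', 'c3', 'd4']
    added = [x for x in new_ids if x not in old_ids]    # ['d4']
    common = [x for x in new_ids if x in old_ids]       # ['b2', 'c3']
    removed = [x for x in old_ids if x not in new_ids]  # ['a1']
    # ChangeTuple(added, common, removed, total=new_ids)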
930 933 def _calculate_file_changes(self, old_diff_data, new_diff_data):
931 934
932 935 old_files = OrderedDict()
933 936 for diff_data in old_diff_data.parsed_diff:
934 937 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
935 938
936 939 added_files = []
937 940 modified_files = []
938 941 removed_files = []
939 942 for diff_data in new_diff_data.parsed_diff:
940 943 new_filename = diff_data['filename']
941 944 new_hash = md5_safe(diff_data['raw_diff'])
942 945
943 946 old_hash = old_files.get(new_filename)
944 947 if not old_hash:
945 948 # file is not present in old diff, means it's added
946 949 added_files.append(new_filename)
947 950 else:
948 951 if new_hash != old_hash:
949 952 modified_files.append(new_filename)
950 953 # now remove a file from old, since we have seen it already
951 954 del old_files[new_filename]
952 955
953 956 # removed files are those present in old, but not in NEW;
954 957 # since we remove old files that are present in the new diff, any
955 958 # left-overs should be the removed files
956 959 removed_files.extend(old_files.keys())
957 960
958 961 return FileChangeTuple(added_files, modified_files, removed_files)
959 962
960 963 def _render_update_message(self, changes, file_changes):
961 964 """
962 965 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
963 966 so it always looks the same regardless of which default
964 967 renderer the system is using.
965 968
966 969 :param changes: changes named tuple
967 970 :param file_changes: file changes named tuple
968 971
969 972 """
970 973 new_status = ChangesetStatus.get_status_lbl(
971 974 ChangesetStatus.STATUS_UNDER_REVIEW)
972 975
973 976 changed_files = (
974 977 file_changes.added + file_changes.modified + file_changes.removed)
975 978
976 979 params = {
977 980 'under_review_label': new_status,
978 981 'added_commits': changes.added,
979 982 'removed_commits': changes.removed,
980 983 'changed_files': changed_files,
981 984 'added_files': file_changes.added,
982 985 'modified_files': file_changes.modified,
983 986 'removed_files': file_changes.removed,
984 987 }
985 988 renderer = RstTemplateRenderer()
986 989 return renderer.render('pull_request_update.mako', **params)
987 990
988 991 def edit(self, pull_request, title, description, description_renderer, user):
989 992 pull_request = self.__get_pull_request(pull_request)
990 993 old_data = pull_request.get_api_data(with_merge_state=False)
991 994 if pull_request.is_closed():
992 995 raise ValueError('This pull request is closed')
993 996 if title:
994 997 pull_request.title = title
995 998 pull_request.description = description
996 999 pull_request.updated_on = datetime.datetime.now()
997 1000 pull_request.description_renderer = description_renderer
998 1001 Session().add(pull_request)
999 1002 self._log_audit_action(
1000 1003 'repo.pull_request.edit', {'old_data': old_data},
1001 1004 user, pull_request)
1002 1005
1003 1006 def update_reviewers(self, pull_request, reviewer_data, user):
1004 1007 """
1005 1008 Update the reviewers in the pull request
1006 1009
1007 1010 :param pull_request: the pr to update
1008 1011 :param reviewer_data: list of tuples
1009 1012 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1010 1013 """
1011 1014 pull_request = self.__get_pull_request(pull_request)
1012 1015 if pull_request.is_closed():
1013 1016 raise ValueError('This pull request is closed')
1014 1017
1015 1018 reviewers = {}
1016 1019 for user_id, reasons, mandatory, rules in reviewer_data:
1017 1020 if isinstance(user_id, (int, basestring)):
1018 1021 user_id = self._get_user(user_id).user_id
1019 1022 reviewers[user_id] = {
1020 1023 'reasons': reasons, 'mandatory': mandatory}
1021 1024
1022 1025 reviewers_ids = set(reviewers.keys())
1023 1026 current_reviewers = PullRequestReviewers.query()\
1024 1027 .filter(PullRequestReviewers.pull_request ==
1025 1028 pull_request).all()
1026 1029 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1027 1030
1028 1031 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1029 1032 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1030 1033
1031 1034 log.debug("Adding %s reviewers", ids_to_add)
1032 1035 log.debug("Removing %s reviewers", ids_to_remove)
1033 1036 changed = False
1034 1037 for uid in ids_to_add:
1035 1038 changed = True
1036 1039 _usr = self._get_user(uid)
1037 1040 reviewer = PullRequestReviewers()
1038 1041 reviewer.user = _usr
1039 1042 reviewer.pull_request = pull_request
1040 1043 reviewer.reasons = reviewers[uid]['reasons']
1041 1044 # NOTE(marcink): mandatory shouldn't be changed now
1042 1045 # reviewer.mandatory = reviewers[uid]['reasons']
1043 1046 Session().add(reviewer)
1044 1047 self._log_audit_action(
1045 1048 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1046 1049 user, pull_request)
1047 1050
1048 1051 for uid in ids_to_remove:
1049 1052 changed = True
1050 1053 reviewers = PullRequestReviewers.query()\
1051 1054 .filter(PullRequestReviewers.user_id == uid,
1052 1055 PullRequestReviewers.pull_request == pull_request)\
1053 1056 .all()
1054 1057 # use .all() in case we accidentally added the same person twice
1055 1058 # this CAN happen due to the lack of DB checks
1056 1059 for obj in reviewers:
1057 1060 old_data = obj.get_dict()
1058 1061 Session().delete(obj)
1059 1062 self._log_audit_action(
1060 1063 'repo.pull_request.reviewer.delete',
1061 1064 {'old_data': old_data}, user, pull_request)
1062 1065
1063 1066 if changed:
1064 1067 pull_request.updated_on = datetime.datetime.now()
1065 1068 Session().add(pull_request)
1066 1069
1067 1070 self.notify_reviewers(pull_request, ids_to_add)
1068 1071 return ids_to_add, ids_to_remove
1069 1072
1070 1073 def get_url(self, pull_request, request=None, permalink=False):
1071 1074 if not request:
1072 1075 request = get_current_request()
1073 1076
1074 1077 if permalink:
1075 1078 return request.route_url(
1076 1079 'pull_requests_global',
1077 1080 pull_request_id=pull_request.pull_request_id,)
1078 1081 else:
1079 1082 return request.route_url('pullrequest_show',
1080 1083 repo_name=safe_str(pull_request.target_repo.repo_name),
1081 1084 pull_request_id=pull_request.pull_request_id,)
1082 1085
1083 1086 def get_shadow_clone_url(self, pull_request, request=None):
1084 1087 """
1085 1088 Returns the qualified url pointing to the shadow repository. If this pull
1086 1089 request is closed there is no shadow repository and ``None`` will be
1087 1090 returned.
1088 1091 """
1089 1092 if pull_request.is_closed():
1090 1093 return None
1091 1094 else:
1092 1095 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1093 1096 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1094 1097
1095 1098 def notify_reviewers(self, pull_request, reviewers_ids):
1096 1099 # notification to reviewers
1097 1100 if not reviewers_ids:
1098 1101 return
1099 1102
1100 1103 pull_request_obj = pull_request
1101 1104 # get the current participants of this pull request
1102 1105 recipients = reviewers_ids
1103 1106 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1104 1107
1105 1108 pr_source_repo = pull_request_obj.source_repo
1106 1109 pr_target_repo = pull_request_obj.target_repo
1107 1110
1108 1111 pr_url = h.route_url('pullrequest_show',
1109 1112 repo_name=pr_target_repo.repo_name,
1110 1113 pull_request_id=pull_request_obj.pull_request_id,)
1111 1114
1112 1115 # set some variables for email notification
1113 1116 pr_target_repo_url = h.route_url(
1114 1117 'repo_summary', repo_name=pr_target_repo.repo_name)
1115 1118
1116 1119 pr_source_repo_url = h.route_url(
1117 1120 'repo_summary', repo_name=pr_source_repo.repo_name)
1118 1121
1119 1122 # pull request specifics
1120 1123 pull_request_commits = [
1121 1124 (x.raw_id, x.message)
1122 1125 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1123 1126
1124 1127 kwargs = {
1125 1128 'user': pull_request.author,
1126 1129 'pull_request': pull_request_obj,
1127 1130 'pull_request_commits': pull_request_commits,
1128 1131
1129 1132 'pull_request_target_repo': pr_target_repo,
1130 1133 'pull_request_target_repo_url': pr_target_repo_url,
1131 1134
1132 1135 'pull_request_source_repo': pr_source_repo,
1133 1136 'pull_request_source_repo_url': pr_source_repo_url,
1134 1137
1135 1138 'pull_request_url': pr_url,
1136 1139 }
1137 1140
1138 1141 # pre-generate the subject for notification itself
1139 1142 (subject,
1140 1143 _h, _e, # we don't care about those
1141 1144 body_plaintext) = EmailNotificationModel().render_email(
1142 1145 notification_type, **kwargs)
1143 1146
1144 1147 # create notification objects, and emails
1145 1148 NotificationModel().create(
1146 1149 created_by=pull_request.author,
1147 1150 notification_subject=subject,
1148 1151 notification_body=body_plaintext,
1149 1152 notification_type=notification_type,
1150 1153 recipients=recipients,
1151 1154 email_kwargs=kwargs,
1152 1155 )
1153 1156
1154 1157 def delete(self, pull_request, user):
1155 1158 pull_request = self.__get_pull_request(pull_request)
1156 1159 old_data = pull_request.get_api_data(with_merge_state=False)
1157 1160 self._cleanup_merge_workspace(pull_request)
1158 1161 self._log_audit_action(
1159 1162 'repo.pull_request.delete', {'old_data': old_data},
1160 1163 user, pull_request)
1161 1164 Session().delete(pull_request)
1162 1165
1163 1166 def close_pull_request(self, pull_request, user):
1164 1167 pull_request = self.__get_pull_request(pull_request)
1165 1168 self._cleanup_merge_workspace(pull_request)
1166 1169 pull_request.status = PullRequest.STATUS_CLOSED
1167 1170 pull_request.updated_on = datetime.datetime.now()
1168 1171 Session().add(pull_request)
1169 1172 self._trigger_pull_request_hook(
1170 1173 pull_request, pull_request.author, 'close')
1171 1174
1172 1175 pr_data = pull_request.get_api_data(with_merge_state=False)
1173 1176 self._log_audit_action(
1174 1177 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1175 1178
1176 1179 def close_pull_request_with_comment(
1177 1180 self, pull_request, user, repo, message=None, auth_user=None):
1178 1181
1179 1182 pull_request_review_status = pull_request.calculated_review_status()
1180 1183
1181 1184 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1182 1185 # approved only if we have voting consent
1183 1186 status = ChangesetStatus.STATUS_APPROVED
1184 1187 else:
1185 1188 status = ChangesetStatus.STATUS_REJECTED
1186 1189 status_lbl = ChangesetStatus.get_status_lbl(status)
1187 1190
1188 1191 default_message = (
1189 1192 'Closing with status change {transition_icon} {status}.'
1190 1193 ).format(transition_icon='>', status=status_lbl)
1191 1194 text = message or default_message
1192 1195
1193 1196 # create a comment, and link it to new status
1194 1197 comment = CommentsModel().create(
1195 1198 text=text,
1196 1199 repo=repo.repo_id,
1197 1200 user=user.user_id,
1198 1201 pull_request=pull_request.pull_request_id,
1199 1202 status_change=status_lbl,
1200 1203 status_change_type=status,
1201 1204 closing_pr=True,
1202 1205 auth_user=auth_user,
1203 1206 )
1204 1207
1205 1208 # calculate old status before we change it
1206 1209 old_calculated_status = pull_request.calculated_review_status()
1207 1210 ChangesetStatusModel().set_status(
1208 1211 repo.repo_id,
1209 1212 status,
1210 1213 user.user_id,
1211 1214 comment=comment,
1212 1215 pull_request=pull_request.pull_request_id
1213 1216 )
1214 1217
1215 1218 Session().flush()
1216 1219 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1217 1220 # we now calculate the status of the pull request again, and based on
1218 1221 # that calculation trigger a status change. This can happen when a
1219 1222 # non-reviewer admin closes a pr: their vote doesn't change the
1220 1223 # status, while if they're a reviewer it might.
1221 1224 calculated_status = pull_request.calculated_review_status()
1222 1225 if old_calculated_status != calculated_status:
1223 1226 self._trigger_pull_request_hook(
1224 1227 pull_request, user, 'review_status_change')
1225 1228
1226 1229 # finally close the PR
1227 1230 PullRequestModel().close_pull_request(
1228 1231 pull_request.pull_request_id, user)
1229 1232
1230 1233 return comment, status
1231 1234
1232 1235 def merge_status(self, pull_request, translator=None,
1233 1236 force_shadow_repo_refresh=False):
1234 1237 _ = translator or get_current_request().translate
1235 1238
1236 1239 if not self._is_merge_enabled(pull_request):
1237 1240 return False, _('Server-side pull request merging is disabled.')
1238 1241 if pull_request.is_closed():
1239 1242 return False, _('This pull request is closed.')
1240 1243 merge_possible, msg = self._check_repo_requirements(
1241 1244 target=pull_request.target_repo, source=pull_request.source_repo,
1242 1245 translator=_)
1243 1246 if not merge_possible:
1244 1247 return merge_possible, msg
1245 1248
1246 1249 try:
1247 1250 resp = self._try_merge(
1248 1251 pull_request,
1249 1252 force_shadow_repo_refresh=force_shadow_repo_refresh)
1250 1253 log.debug("Merge response: %s", resp)
1251 1254 status = resp.possible, self.merge_status_message(
1252 1255 resp.failure_reason)
1253 1256 except NotImplementedError:
1254 1257 status = False, _('Pull request merging is not supported.')
1255 1258
1256 1259 return status
1257 1260
1258 1261 def _check_repo_requirements(self, target, source, translator):
1259 1262 """
1260 1263 Check if `target` and `source` have compatible requirements.
1261 1264
1262 1265 Currently this is just checking for largefiles.
1263 1266 """
1264 1267 _ = translator
1265 1268 target_has_largefiles = self._has_largefiles(target)
1266 1269 source_has_largefiles = self._has_largefiles(source)
1267 1270 merge_possible = True
1268 1271 message = u''
1269 1272
1270 1273 if target_has_largefiles != source_has_largefiles:
1271 1274 merge_possible = False
1272 1275 if source_has_largefiles:
1273 1276 message = _(
1274 1277 'Target repository large files support is disabled.')
1275 1278 else:
1276 1279 message = _(
1277 1280 'Source repository large files support is disabled.')
1278 1281
1279 1282 return merge_possible, message
1280 1283
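For illustration, a mismatch in largefiles support blocks the merge; a sketch assuming only the target repo has the extension enabled (the repo objects are invented):

    merge_possible, msg = PullRequestModel()._check_repo_requirements(
        target=target_repo, source=source_repo, translator=_)
    # -> (False, u'Source repository large files support is disabled.')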
1281 1284 def _has_largefiles(self, repo):
1282 1285 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1283 1286 'extensions', 'largefiles')
1284 1287 return largefiles_ui and largefiles_ui[0].active
1285 1288
1286 1289 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1287 1290 """
1288 1291 Try to merge the pull request and return the merge status.
1289 1292 """
1290 1293 log.debug(
1291 1294 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1292 1295 pull_request.pull_request_id, force_shadow_repo_refresh)
1293 1296 target_vcs = pull_request.target_repo.scm_instance()
1294 1297
1295 1298 # Refresh the target reference.
1296 1299 try:
1297 1300 target_ref = self._refresh_reference(
1298 1301 pull_request.target_ref_parts, target_vcs)
1299 1302 except CommitDoesNotExistError:
1300 1303 merge_state = MergeResponse(
1301 1304 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1302 1305 return merge_state
1303 1306
1304 1307 target_locked = pull_request.target_repo.locked
1305 1308 if target_locked and target_locked[0]:
1306 1309 log.debug("The target repository is locked.")
1307 1310 merge_state = MergeResponse(
1308 1311 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1309 1312 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1310 1313 pull_request, target_ref):
1311 1314 log.debug("Refreshing the merge status of the repository.")
1312 1315 merge_state = self._refresh_merge_state(
1313 1316 pull_request, target_vcs, target_ref)
1314 1317 else:
1315 1318 possible = (pull_request.last_merge_status ==
1316 1319 MergeFailureReason.NONE)
1317 1320 merge_state = MergeResponse(
1318 1321 possible, False, None, pull_request.last_merge_status)
1319 1322
1320 1323 return merge_state
1321 1324
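The MergeResponse namedtuple used above comes from rhodecode.lib.vcs.backends.base; a successful dry run would carry values like this sketch:

    MergeResponse(possible=True, executed=False, merge_ref=None,
                  failure_reason=MergeFailureReason.NONE)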
1322 1325 def _refresh_reference(self, reference, vcs_repository):
1323 1326 if reference.type in ('branch', 'book'):
1324 1327 name_or_id = reference.name
1325 1328 else:
1326 1329 name_or_id = reference.commit_id
1327 1330 refreshed_commit = vcs_repository.get_commit(name_or_id)
1328 1331 refreshed_reference = Reference(
1329 1332 reference.type, reference.name, refreshed_commit.raw_id)
1330 1333 return refreshed_reference
1331 1334
1332 1335 def _needs_merge_state_refresh(self, pull_request, target_reference):
1333 1336 return not (
1334 1337 pull_request.revisions and
1335 1338 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1336 1339 target_reference.commit_id == pull_request._last_merge_target_rev)
1337 1340
1338 1341 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1339 1342 workspace_id = self._workspace_id(pull_request)
1340 1343 source_vcs = pull_request.source_repo.scm_instance()
1341 1344 repo_id = pull_request.target_repo.repo_id
1342 1345 use_rebase = self._use_rebase_for_merging(pull_request)
1343 1346 close_branch = self._close_branch_before_merging(pull_request)
1344 1347 merge_state = target_vcs.merge(
1345 1348 repo_id, workspace_id,
1346 1349 target_reference, source_vcs, pull_request.source_ref_parts,
1347 1350 dry_run=True, use_rebase=use_rebase,
1348 1351 close_branch=close_branch)
1349 1352
1350 1353 # Do not store the response if there was an unknown error.
1351 1354 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1352 1355 pull_request._last_merge_source_rev = \
1353 1356 pull_request.source_ref_parts.commit_id
1354 1357 pull_request._last_merge_target_rev = target_reference.commit_id
1355 1358 pull_request.last_merge_status = merge_state.failure_reason
1356 1359 pull_request.shadow_merge_ref = merge_state.merge_ref
1357 1360 Session().add(pull_request)
1358 1361 Session().commit()
1359 1362
1360 1363 return merge_state
1361 1364
1362 1365 def _workspace_id(self, pull_request):
1363 1366 workspace_id = 'pr-%s' % pull_request.pull_request_id
1364 1367 return workspace_id
1365 1368
1366 1369 def merge_status_message(self, status_code):
1367 1370 """
1368 1371 Return a human-friendly error message for the given merge status code.
1369 1372 """
1370 1373 return self.MERGE_STATUS_MESSAGES[status_code]
1371 1374
1372 1375 def generate_repo_data(self, repo, commit_id=None, branch=None,
1373 1376 bookmark=None, translator=None):
1374 1377 from rhodecode.model.repo import RepoModel
1375 1378
1376 1379 all_refs, selected_ref = \
1377 1380 self._get_repo_pullrequest_sources(
1378 1381 repo.scm_instance(), commit_id=commit_id,
1379 1382 branch=branch, bookmark=bookmark, translator=translator)
1380 1383
1381 1384 refs_select2 = []
1382 1385 for element in all_refs:
1383 1386 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1384 1387 refs_select2.append({'text': element[1], 'children': children})
1385 1388
1386 1389 return {
1387 1390 'user': {
1388 1391 'user_id': repo.user.user_id,
1389 1392 'username': repo.user.username,
1390 1393 'firstname': repo.user.first_name,
1391 1394 'lastname': repo.user.last_name,
1392 1395 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1393 1396 },
1394 1397 'name': repo.repo_name,
1395 1398 'link': RepoModel().get_url(repo),
1396 1399 'description': h.chop_at_smart(repo.description_safe, '\n'),
1397 1400 'refs': {
1398 1401 'all_refs': all_refs,
1399 1402 'selected_ref': selected_ref,
1400 1403 'select2_refs': refs_select2
1401 1404 }
1402 1405 }
1403 1406
1404 1407 def generate_pullrequest_title(self, source, source_ref, target):
1405 1408 return u'{source}#{at_ref} to {target}'.format(
1406 1409 source=source,
1407 1410 at_ref=source_ref,
1408 1411 target=target,
1409 1412 )
1410 1413
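A quick example of the resulting title, with invented repo and ref names:

    PullRequestModel().generate_pullrequest_title(
        source='lib-utils', source_ref='feature-x', target='lib-utils')
    # -> u'lib-utils#feature-x to lib-utils'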
1411 1414 def _cleanup_merge_workspace(self, pull_request):
1412 1415 # Merging related cleanup
1413 1416 repo_id = pull_request.target_repo.repo_id
1414 1417 target_scm = pull_request.target_repo.scm_instance()
1415 1418 workspace_id = self._workspace_id(pull_request)
1416 1419
1417 1420 try:
1418 1421 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1419 1422 except NotImplementedError:
1420 1423 pass
1421 1424
1422 1425 def _get_repo_pullrequest_sources(
1423 1426 self, repo, commit_id=None, branch=None, bookmark=None,
1424 1427 translator=None):
1425 1428 """
1426 1429 Return a structure with the repo's interesting commits, suitable for
1427 1430 the selectors in the pullrequest controller
1428 1431
1429 1432 :param commit_id: a commit that must be in the list and selected
1430 1433 by default
1431 1434 :param branch: a branch that must be in the list and selected
1432 1435 by default - even if closed
1433 1436 :param bookmark: a bookmark that must be in the list and selected
1434 1437 """
1435 1438 _ = translator or get_current_request().translate
1436 1439
1437 1440 commit_id = safe_str(commit_id) if commit_id else None
1438 1441 branch = safe_str(branch) if branch else None
1439 1442 bookmark = safe_str(bookmark) if bookmark else None
1440 1443
1441 1444 selected = None
1442 1445
1443 1446 # order matters: first source that has commit_id in it will be selected
1444 1447 sources = []
1445 1448 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1446 1449 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1447 1450
1448 1451 if commit_id:
1449 1452 ref_commit = (h.short_id(commit_id), commit_id)
1450 1453 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1451 1454
1452 1455 sources.append(
1453 1456 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1454 1457 )
1455 1458
1456 1459 groups = []
1457 1460 for group_key, ref_list, group_name, match in sources:
1458 1461 group_refs = []
1459 1462 for ref_name, ref_id in ref_list:
1460 1463 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1461 1464 group_refs.append((ref_key, ref_name))
1462 1465
1463 1466 if not selected:
1464 1467 if set([commit_id, match]) & set([ref_id, ref_name]):
1465 1468 selected = ref_key
1466 1469
1467 1470 if group_refs:
1468 1471 groups.append((group_refs, group_name))
1469 1472
1470 1473 if not selected:
1471 1474 ref = commit_id or branch or bookmark
1472 1475 if ref:
1473 1476 raise CommitDoesNotExistError(
1474 1477 'No commit refs could be found matching: %s' % ref)
1475 1478 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1476 1479 selected = 'branch:%s:%s' % (
1477 1480 repo.DEFAULT_BRANCH_NAME,
1478 1481 repo.branches[repo.DEFAULT_BRANCH_NAME]
1479 1482 )
1480 1483 elif repo.commit_ids:
1481 1484 # make the user select in this case
1482 1485 selected = None
1483 1486 else:
1484 1487 raise EmptyRepositoryError()
1485 1488 return groups, selected
1486 1489
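A sketch of the returned structure, with an invented branch name and a shortened commit id; each group is a (group_refs, group_name) pair, and selected is a 'type:name:commit_id' key:

    groups, selected = self._get_repo_pullrequest_sources(
        repo.scm_instance(), branch='default', translator=_)
    # groups ~ [([('branch:default:deadbeef', 'default')], u'Branches')]
    # selected ~ 'branch:default:deadbeef'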
1487 1490 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1488 1491 return self._get_diff_from_pr_or_version(
1489 1492 source_repo, source_ref_id, target_ref_id, context=context)
1490 1493
1491 1494 def _get_diff_from_pr_or_version(
1492 1495 self, source_repo, source_ref_id, target_ref_id, context):
1493 1496 target_commit = source_repo.get_commit(
1494 1497 commit_id=safe_str(target_ref_id))
1495 1498 source_commit = source_repo.get_commit(
1496 1499 commit_id=safe_str(source_ref_id))
1497 1500 if isinstance(source_repo, Repository):
1498 1501 vcs_repo = source_repo.scm_instance()
1499 1502 else:
1500 1503 vcs_repo = source_repo
1501 1504
1502 1505 # TODO: johbo: In the context of an update, we cannot reach
1503 1506 # the old commit anymore with our normal mechanisms. It needs
1504 1507 # some sort of special support in the vcs layer to avoid this
1505 1508 # workaround.
1506 1509 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1507 1510 vcs_repo.alias == 'git'):
1508 1511 source_commit.raw_id = safe_str(source_ref_id)
1509 1512
1510 1513 log.debug('calculating diff between '
1511 1514 'source_ref:%s and target_ref:%s for repo `%s`',
1512 1515 source_ref_id, target_ref_id,
1513 1516 safe_unicode(vcs_repo.path))
1514 1517
1515 1518 vcs_diff = vcs_repo.get_diff(
1516 1519 commit1=target_commit, commit2=source_commit, context=context)
1517 1520 return vcs_diff
1518 1521
1519 1522 def _is_merge_enabled(self, pull_request):
1520 1523 return self._get_general_setting(
1521 1524 pull_request, 'rhodecode_pr_merge_enabled')
1522 1525
1523 1526 def _use_rebase_for_merging(self, pull_request):
1524 1527 repo_type = pull_request.target_repo.repo_type
1525 1528 if repo_type == 'hg':
1526 1529 return self._get_general_setting(
1527 1530 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1528 1531 elif repo_type == 'git':
1529 1532 return self._get_general_setting(
1530 1533 pull_request, 'rhodecode_git_use_rebase_for_merging')
1531 1534
1532 1535 return False
1533 1536
1534 1537 def _close_branch_before_merging(self, pull_request):
1535 1538 repo_type = pull_request.target_repo.repo_type
1536 1539 if repo_type == 'hg':
1537 1540 return self._get_general_setting(
1538 1541 pull_request, 'rhodecode_hg_close_branch_before_merging')
1539 1542 elif repo_type == 'git':
1540 1543 return self._get_general_setting(
1541 1544 pull_request, 'rhodecode_git_close_branch_before_merging')
1542 1545
1543 1546 return False
1544 1547
1545 1548 def _get_general_setting(self, pull_request, settings_key, default=False):
1546 1549 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1547 1550 settings = settings_model.get_general_settings()
1548 1551 return settings.get(settings_key, default)
1549 1552
1550 1553 def _log_audit_action(self, action, action_data, user, pull_request):
1551 1554 audit_logger.store(
1552 1555 action=action,
1553 1556 action_data=action_data,
1554 1557 user=user,
1555 1558 repo=pull_request.target_repo)
1556 1559
1557 1560 def get_reviewer_functions(self):
1558 1561 """
1559 1562 Fetches functions for validation and fetching default reviewers.
1560 1563 If available we use the EE package, else we fall back to the CE
1561 1564 package functions
1562 1565 """
1563 1566 try:
1564 1567 from rc_reviewers.utils import get_default_reviewers_data
1565 1568 from rc_reviewers.utils import validate_default_reviewers
1566 1569 except ImportError:
1567 1570 from rhodecode.apps.repository.utils import \
1568 1571 get_default_reviewers_data
1569 1572 from rhodecode.apps.repository.utils import \
1570 1573 validate_default_reviewers
1571 1574
1572 1575 return get_default_reviewers_data, validate_default_reviewers
1573 1576
1574 1577
1575 1578 class MergeCheck(object):
1576 1579 """
1577 1580 Perform merge checks and return a check object which stores
1578 1581 information about merge errors and merge conditions
1579 1582 """
1580 1583 TODO_CHECK = 'todo'
1581 1584 PERM_CHECK = 'perm'
1582 1585 REVIEW_CHECK = 'review'
1583 1586 MERGE_CHECK = 'merge'
1584 1587
1585 1588 def __init__(self):
1586 1589 self.review_status = None
1587 1590 self.merge_possible = None
1588 1591 self.merge_msg = ''
1589 1592 self.failed = None
1590 1593 self.errors = []
1591 1594 self.error_details = OrderedDict()
1592 1595
1593 1596 def push_error(self, error_type, message, error_key, details):
1594 1597 self.failed = True
1595 1598 self.errors.append([error_type, message])
1596 1599 self.error_details[error_key] = dict(
1597 1600 details=details,
1598 1601 error_type=error_type,
1599 1602 message=message
1600 1603 )
1601 1604
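After a push_error call like the one sketched below (message and the unresolved_todos value are invented), the check object carries both the flat error list and the keyed details:

    check = MergeCheck()
    check.push_error('warning', 'Cannot merge, 2 TODOs still not resolved.',
                     MergeCheck.TODO_CHECK, unresolved_todos)
    # check.failed is True
    # check.errors == [['warning', 'Cannot merge, 2 TODOs still not resolved.']]
    # check.error_details['todo']['details'] is unresolved_todos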
1602 1605 @classmethod
1603 1606 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1604 1607 force_shadow_repo_refresh=False):
1605 1608 _ = translator
1606 1609 merge_check = cls()
1607 1610
1608 1611 # permissions to merge
1609 1612 user_allowed_to_merge = PullRequestModel().check_user_merge(
1610 1613 pull_request, auth_user)
1611 1614 if not user_allowed_to_merge:
1612 1615 log.debug("MergeCheck: cannot merge, no merge permission.")
1613 1616
1614 1617 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1615 1618 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1616 1619 if fail_early:
1617 1620 return merge_check
1618 1621
1619 1622 # permission to merge into the target branch
1620 1623 target_commit_id = pull_request.target_ref_parts.commit_id
1621 1624 if pull_request.target_ref_parts.type == 'branch':
1622 1625 branch_name = pull_request.target_ref_parts.name
1623 1626 else:
1624 1627 # for mercurial, in case of a bookmark, we can always figure
1625 1628 # out the branch from the commit
1626 1629 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1627 1630 branch_name = target_commit.branch
1628 1631
1629 1632 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1630 1633 pull_request.target_repo.repo_name, branch_name)
1631 1634 if branch_perm and branch_perm == 'branch.none':
1632 1635 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1633 1636 branch_name, rule)
1634 1637 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1635 1638 if fail_early:
1636 1639 return merge_check
1637 1640
1638 1641 # review status, must always be present
1639 1642 review_status = pull_request.calculated_review_status()
1640 1643 merge_check.review_status = review_status
1641 1644
1642 1645 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1643 1646 if not status_approved:
1644 1647 log.debug("MergeCheck: cannot merge, approval is pending.")
1645 1648
1646 1649 msg = _('Pull request reviewer approval is pending.')
1647 1650
1648 1651 merge_check.push_error(
1649 1652 'warning', msg, cls.REVIEW_CHECK, review_status)
1650 1653
1651 1654 if fail_early:
1652 1655 return merge_check
1653 1656
1654 1657 # leftover TODOs
1655 1658 todos = CommentsModel().get_unresolved_todos(pull_request)
1656 1659 if todos:
1657 1660 log.debug("MergeCheck: cannot merge, %s "
1658 1661 "unresolved TODOs left.", len(todos))
1659 1662
1660 1663 if len(todos) == 1:
1661 1664 msg = _('Cannot merge, {} TODO still not resolved.').format(
1662 1665 len(todos))
1663 1666 else:
1664 1667 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1665 1668 len(todos))
1666 1669
1667 1670 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1668 1671
1669 1672 if fail_early:
1670 1673 return merge_check
1671 1674
1672 1675 # merge possibility check, done via filesystem simulation in the shadow repo
1673 1676 merge_status, msg = PullRequestModel().merge_status(
1674 1677 pull_request, translator=translator,
1675 1678 force_shadow_repo_refresh=force_shadow_repo_refresh)
1676 1679 merge_check.merge_possible = merge_status
1677 1680 merge_check.merge_msg = msg
1678 1681 if not merge_status:
1679 1682 log.debug(
1680 1683 "MergeCheck: cannot merge, pull request merge not possible.")
1681 1684 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1682 1685
1683 1686 if fail_early:
1684 1687 return merge_check
1685 1688
1686 1689 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1687 1690 return merge_check
1688 1691
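A typical call site for validate(), sketched; request and pull_request are assumed to be in scope:

    merge_check = MergeCheck.validate(
        pull_request, auth_user=request.user,
        translator=request.translate, fail_early=True)
    if merge_check.failed:
        for error_type, message in merge_check.errors:
            log.warning('merge check %s: %s', error_type, message)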
1689 1692 @classmethod
1690 1693 def get_merge_conditions(cls, pull_request, translator):
1691 1694 _ = translator
1692 1695 merge_details = {}
1693 1696
1694 1697 model = PullRequestModel()
1695 1698 use_rebase = model._use_rebase_for_merging(pull_request)
1696 1699
1697 1700 if use_rebase:
1698 1701 merge_details['merge_strategy'] = dict(
1699 1702 details={},
1700 1703 message=_('Merge strategy: rebase')
1701 1704 )
1702 1705 else:
1703 1706 merge_details['merge_strategy'] = dict(
1704 1707 details={},
1705 1708 message=_('Merge strategy: explicit merge commit')
1706 1709 )
1707 1710
1708 1711 close_branch = model._close_branch_before_merging(pull_request)
1709 1712 if close_branch:
1710 1713 repo_type = pull_request.target_repo.repo_type
1711 1714 if repo_type == 'hg':
1712 1715 close_msg = _('Source branch will be closed after merge.')
1713 1716 elif repo_type == 'git':
1714 1717 close_msg = _('Source branch will be deleted after merge.')
1715 1718
1716 1719 merge_details['close_branch'] = dict(
1717 1720 details={},
1718 1721 message=close_msg
1719 1722 )
1720 1723
1721 1724 return merge_details
1722 1725
1723 1726 ChangeTuple = collections.namedtuple(
1724 1727 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1725 1728
1726 1729 FileChangeTuple = collections.namedtuple(
1727 1730 'FileChangeTuple', ['added', 'modified', 'removed'])
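These tuples mirror what _calculate_commit_id_changes and _calculate_file_changes return; for example, values matching the assertions in the tests further down would be:

    ChangeTuple(added=[4, 5], common=[1, 3], removed=[2], total=[1, 3, 4, 5])
    FileChangeTuple(added=['file_c'], modified=[], removed=[])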
@@ -1,868 +1,868 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import textwrap
24 24
25 25 import rhodecode
26 26 from rhodecode.lib.utils2 import safe_unicode
27 27 from rhodecode.lib.vcs.backends import get_backend
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 MergeResponse, MergeFailureReason, Reference)
30 30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 31 from rhodecode.lib.vcs.nodes import FileNode
32 32 from rhodecode.model.comment import CommentsModel
33 33 from rhodecode.model.db import PullRequest, Session
34 34 from rhodecode.model.pull_request import PullRequestModel
35 35 from rhodecode.model.user import UserModel
36 36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37 37
38 38
39 39 pytestmark = [
40 40 pytest.mark.backends("git", "hg"),
41 41 ]
42 42
43 43
44 44 @pytest.mark.usefixtures('config_stub')
45 45 class TestPullRequestModel(object):
46 46
47 47 @pytest.fixture
48 48 def pull_request(self, request, backend, pr_util):
49 49 """
50 50 A pull request combined with multiples patches.
51 51 """
52 52 BackendClass = get_backend(backend.alias)
53 53 self.merge_patcher = mock.patch.object(
54 54 BackendClass, 'merge', return_value=MergeResponse(
55 55 False, False, None, MergeFailureReason.UNKNOWN))
56 56 self.workspace_remove_patcher = mock.patch.object(
57 57 BackendClass, 'cleanup_merge_workspace')
58 58
59 59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
60 60 self.merge_mock = self.merge_patcher.start()
61 61 self.comment_patcher = mock.patch(
62 62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
63 63 self.comment_patcher.start()
64 64 self.notification_patcher = mock.patch(
65 65 'rhodecode.model.notification.NotificationModel.create')
66 66 self.notification_patcher.start()
67 67 self.helper_patcher = mock.patch(
68 68 'rhodecode.lib.helpers.route_path')
69 69 self.helper_patcher.start()
70 70
71 71 self.hook_patcher = mock.patch.object(PullRequestModel,
72 72 '_trigger_pull_request_hook')
73 73 self.hook_mock = self.hook_patcher.start()
74 74
75 75 self.invalidation_patcher = mock.patch(
76 76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
77 77 self.invalidation_mock = self.invalidation_patcher.start()
78 78
79 79 self.pull_request = pr_util.create_pull_request(
80 80 mergeable=True, name_suffix=u'ąć')
81 81 self.source_commit = self.pull_request.source_ref_parts.commit_id
82 82 self.target_commit = self.pull_request.target_ref_parts.commit_id
83 83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
84 84 self.repo_id = self.pull_request.target_repo.repo_id
85 85
86 86 @request.addfinalizer
87 87 def cleanup_pull_request():
88 88 calls = [mock.call(
89 89 self.pull_request, self.pull_request.author, 'create')]
90 90 self.hook_mock.assert_has_calls(calls)
91 91
92 92 self.workspace_remove_patcher.stop()
93 93 self.merge_patcher.stop()
94 94 self.comment_patcher.stop()
95 95 self.notification_patcher.stop()
96 96 self.helper_patcher.stop()
97 97 self.hook_patcher.stop()
98 98 self.invalidation_patcher.stop()
99 99
100 100 return self.pull_request
101 101
102 102 def test_get_all(self, pull_request):
103 103 prs = PullRequestModel().get_all(pull_request.target_repo)
104 104 assert isinstance(prs, list)
105 105 assert len(prs) == 1
106 106
107 107 def test_count_all(self, pull_request):
108 108 pr_count = PullRequestModel().count_all(pull_request.target_repo)
109 109 assert pr_count == 1
110 110
111 111 def test_get_awaiting_review(self, pull_request):
112 112 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
113 113 assert isinstance(prs, list)
114 114 assert len(prs) == 1
115 115
116 116 def test_count_awaiting_review(self, pull_request):
117 117 pr_count = PullRequestModel().count_awaiting_review(
118 118 pull_request.target_repo)
119 119 assert pr_count == 1
120 120
121 121 def test_get_awaiting_my_review(self, pull_request):
122 122 PullRequestModel().update_reviewers(
123 123 pull_request, [(pull_request.author, ['author'], False, [])],
124 124 pull_request.author)
125 125 prs = PullRequestModel().get_awaiting_my_review(
126 126 pull_request.target_repo, user_id=pull_request.author.user_id)
127 127 assert isinstance(prs, list)
128 128 assert len(prs) == 1
129 129
130 130 def test_count_awaiting_my_review(self, pull_request):
131 131 PullRequestModel().update_reviewers(
132 132 pull_request, [(pull_request.author, ['author'], False, [])],
133 133 pull_request.author)
134 134 pr_count = PullRequestModel().count_awaiting_my_review(
135 135 pull_request.target_repo, user_id=pull_request.author.user_id)
136 136 assert pr_count == 1
137 137
138 138 def test_delete_calls_cleanup_merge(self, pull_request):
139 139 repo_id = pull_request.target_repo.repo_id
140 140 PullRequestModel().delete(pull_request, pull_request.author)
141 141
142 142 self.workspace_remove_mock.assert_called_once_with(
143 143 repo_id, self.workspace_id)
144 144
145 145 def test_close_calls_cleanup_and_hook(self, pull_request):
146 146 PullRequestModel().close_pull_request(
147 147 pull_request, pull_request.author)
148 148 repo_id = pull_request.target_repo.repo_id
149 149
150 150 self.workspace_remove_mock.assert_called_once_with(
151 151 repo_id, self.workspace_id)
152 152 self.hook_mock.assert_called_with(
153 153 self.pull_request, self.pull_request.author, 'close')
154 154
155 155 def test_merge_status(self, pull_request):
156 156 self.merge_mock.return_value = MergeResponse(
157 157 True, False, None, MergeFailureReason.NONE)
158 158
159 159 assert pull_request._last_merge_source_rev is None
160 160 assert pull_request._last_merge_target_rev is None
161 161 assert pull_request.last_merge_status is None
162 162
163 163 status, msg = PullRequestModel().merge_status(pull_request)
164 164 assert status is True
165 165 assert msg.eval() == 'This pull request can be automatically merged.'
166 166 self.merge_mock.assert_called_with(
167 167 self.repo_id, self.workspace_id,
168 168 pull_request.target_ref_parts,
169 169 pull_request.source_repo.scm_instance(),
170 170 pull_request.source_ref_parts, dry_run=True,
171 171 use_rebase=False, close_branch=False)
172 172
173 173 assert pull_request._last_merge_source_rev == self.source_commit
174 174 assert pull_request._last_merge_target_rev == self.target_commit
175 175 assert pull_request.last_merge_status is MergeFailureReason.NONE
176 176
177 177 self.merge_mock.reset_mock()
178 178 status, msg = PullRequestModel().merge_status(pull_request)
179 179 assert status is True
180 180 assert msg.eval() == 'This pull request can be automatically merged.'
181 181 assert self.merge_mock.called is False
182 182
183 183 def test_merge_status_known_failure(self, pull_request):
184 184 self.merge_mock.return_value = MergeResponse(
185 185 False, False, None, MergeFailureReason.MERGE_FAILED)
186 186
187 187 assert pull_request._last_merge_source_rev is None
188 188 assert pull_request._last_merge_target_rev is None
189 189 assert pull_request.last_merge_status is None
190 190
191 191 status, msg = PullRequestModel().merge_status(pull_request)
192 192 assert status is False
193 193 assert (
194 194 msg.eval() ==
195 195 'This pull request cannot be merged because of merge conflicts.')
196 196 self.merge_mock.assert_called_with(
197 197 self.repo_id, self.workspace_id,
198 198 pull_request.target_ref_parts,
199 199 pull_request.source_repo.scm_instance(),
200 200 pull_request.source_ref_parts, dry_run=True,
201 201 use_rebase=False, close_branch=False)
202 202
203 203 assert pull_request._last_merge_source_rev == self.source_commit
204 204 assert pull_request._last_merge_target_rev == self.target_commit
205 205 assert (
206 206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
207 207
208 208 self.merge_mock.reset_mock()
209 209 status, msg = PullRequestModel().merge_status(pull_request)
210 210 assert status is False
211 211 assert (
212 212 msg.eval() ==
213 213 'This pull request cannot be merged because of merge conflicts.')
214 214 assert self.merge_mock.called is False
215 215
216 216 def test_merge_status_unknown_failure(self, pull_request):
217 217 self.merge_mock.return_value = MergeResponse(
218 218 False, False, None, MergeFailureReason.UNKNOWN)
219 219
220 220 assert pull_request._last_merge_source_rev is None
221 221 assert pull_request._last_merge_target_rev is None
222 222 assert pull_request.last_merge_status is None
223 223
224 224 status, msg = PullRequestModel().merge_status(pull_request)
225 225 assert status is False
226 226 assert msg.eval() == (
227 227 'This pull request cannot be merged because of an unhandled'
228 228 ' exception.')
229 229 self.merge_mock.assert_called_with(
230 230 self.repo_id, self.workspace_id,
231 231 pull_request.target_ref_parts,
232 232 pull_request.source_repo.scm_instance(),
233 233 pull_request.source_ref_parts, dry_run=True,
234 234 use_rebase=False, close_branch=False)
235 235
236 236 assert pull_request._last_merge_source_rev is None
237 237 assert pull_request._last_merge_target_rev is None
238 238 assert pull_request.last_merge_status is None
239 239
240 240 self.merge_mock.reset_mock()
241 241 status, msg = PullRequestModel().merge_status(pull_request)
242 242 assert status is False
243 243 assert msg.eval() == (
244 244 'This pull request cannot be merged because of an unhandled'
245 245 ' exception.')
246 246 assert self.merge_mock.called is True
247 247
248 248 def test_merge_status_when_target_is_locked(self, pull_request):
249 249 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
250 250 status, msg = PullRequestModel().merge_status(pull_request)
251 251 assert status is False
252 252 assert msg.eval() == (
253 253 'This pull request cannot be merged because the target repository'
254 254 ' is locked.')
255 255
256 256 def test_merge_status_requirements_check_target(self, pull_request):
257 257
258 258 def has_largefiles(self, repo):
259 259 return repo == pull_request.source_repo
260 260
261 261 patcher = mock.patch.object(
262 262 PullRequestModel, '_has_largefiles', has_largefiles)
263 263 with patcher:
264 264 status, msg = PullRequestModel().merge_status(pull_request)
265 265
266 266 assert status is False
267 267 assert msg == 'Target repository large files support is disabled.'
268 268
269 269 def test_merge_status_requirements_check_source(self, pull_request):
270 270
271 271 def has_largefiles(self, repo):
272 272 return repo == pull_request.target_repo
273 273
274 274 patcher = mock.patch.object(
275 275 PullRequestModel, '_has_largefiles', has_largefiles)
276 276 with patcher:
277 277 status, msg = PullRequestModel().merge_status(pull_request)
278 278
279 279 assert status is False
280 280 assert msg == 'Source repository large files support is disabled.'
281 281
282 282 def test_merge(self, pull_request, merge_extras):
283 283 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
284 284 merge_ref = Reference(
285 285 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
286 286 self.merge_mock.return_value = MergeResponse(
287 287 True, True, merge_ref, MergeFailureReason.NONE)
288 288
289 289 merge_extras['repository'] = pull_request.target_repo.repo_name
290 290 PullRequestModel().merge_repo(
291 291 pull_request, pull_request.author, extras=merge_extras)
292 292
293 293 message = (
294 294 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
295 295 u'\n\n {pr_title}'.format(
296 296 pr_id=pull_request.pull_request_id,
297 297 source_repo=safe_unicode(
298 298 pull_request.source_repo.scm_instance().name),
299 299 source_ref_name=pull_request.source_ref_parts.name,
300 300 pr_title=safe_unicode(pull_request.title)
301 301 )
302 302 )
303 303 self.merge_mock.assert_called_with(
304 304 self.repo_id, self.workspace_id,
305 305 pull_request.target_ref_parts,
306 306 pull_request.source_repo.scm_instance(),
307 307 pull_request.source_ref_parts,
308 user_name=user.username, user_email=user.email, message=message,
308 user_name=user.short_contact, user_email=user.email, message=message,
309 309 use_rebase=False, close_branch=False
310 310 )
311 311 self.invalidation_mock.assert_called_once_with(
312 312 pull_request.target_repo.repo_name)
313 313
314 314 self.hook_mock.assert_called_with(
315 315 self.pull_request, self.pull_request.author, 'merge')
316 316
317 317 pull_request = PullRequest.get(pull_request.pull_request_id)
318 318 assert (
319 319 pull_request.merge_rev ==
320 320 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
321 321
322 322 def test_merge_failed(self, pull_request, merge_extras):
323 323 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
324 324 merge_ref = Reference(
325 325 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
326 326 self.merge_mock.return_value = MergeResponse(
327 327 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
328 328
329 329 merge_extras['repository'] = pull_request.target_repo.repo_name
330 330 PullRequestModel().merge_repo(
331 331 pull_request, pull_request.author, extras=merge_extras)
332 332
333 333 message = (
334 334 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
335 335 u'\n\n {pr_title}'.format(
336 336 pr_id=pull_request.pull_request_id,
337 337 source_repo=safe_unicode(
338 338 pull_request.source_repo.scm_instance().name),
339 339 source_ref_name=pull_request.source_ref_parts.name,
340 340 pr_title=safe_unicode(pull_request.title)
341 341 )
342 342 )
343 343 self.merge_mock.assert_called_with(
344 344 self.repo_id, self.workspace_id,
345 345 pull_request.target_ref_parts,
346 346 pull_request.source_repo.scm_instance(),
347 347 pull_request.source_ref_parts,
348 user_name=user.username, user_email=user.email, message=message,
348 user_name=user.short_contact, user_email=user.email, message=message,
349 349 use_rebase=False, close_branch=False
350 350 )
351 351
352 352 pull_request = PullRequest.get(pull_request.pull_request_id)
353 353 assert self.invalidation_mock.called is False
354 354 assert pull_request.merge_rev is None
355 355
356 356 def test_get_commit_ids(self, pull_request):
357 357 # The PR has not been merged yet, so expect an exception
358 358 with pytest.raises(ValueError):
359 359 PullRequestModel()._get_commit_ids(pull_request)
360 360
361 361 # Merge revision is in the revisions list
362 362 pull_request.merge_rev = pull_request.revisions[0]
363 363 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
364 364 assert commit_ids == pull_request.revisions
365 365
366 366 # Merge revision is not in the revisions list
367 367 pull_request.merge_rev = 'f000' * 10
368 368 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
369 369 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
370 370
371 371 def test_get_diff_from_pr_version(self, pull_request):
372 372 source_repo = pull_request.source_repo
373 373 source_ref_id = pull_request.source_ref_parts.commit_id
374 374 target_ref_id = pull_request.target_ref_parts.commit_id
375 375 diff = PullRequestModel()._get_diff_from_pr_or_version(
376 376 source_repo, source_ref_id, target_ref_id, context=6)
377 377 assert 'file_1' in diff.raw
378 378
379 379 def test_generate_title_returns_unicode(self):
380 380 title = PullRequestModel().generate_pullrequest_title(
381 381 source='source-dummy',
382 382 source_ref='source-ref-dummy',
383 383 target='target-dummy',
384 384 )
385 385 assert type(title) == unicode
386 386
387 387
388 388 @pytest.mark.usefixtures('config_stub')
389 389 class TestIntegrationMerge(object):
390 390 @pytest.mark.parametrize('extra_config', (
391 391 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
392 392 ))
393 393 def test_merge_triggers_push_hooks(
394 394 self, pr_util, user_admin, capture_rcextensions, merge_extras,
395 395 extra_config):
396 396 pull_request = pr_util.create_pull_request(
397 397 approved=True, mergeable=True)
398 398 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
399 399 merge_extras['repository'] = pull_request.target_repo.repo_name
400 400 Session().commit()
401 401
402 402 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
403 403 merge_state = PullRequestModel().merge_repo(
404 404 pull_request, user_admin, extras=merge_extras)
405 405
406 406 assert merge_state.executed
407 407 assert 'pre_push' in capture_rcextensions
408 408 assert 'post_push' in capture_rcextensions
409 409
410 410 def test_merge_can_be_rejected_by_pre_push_hook(
411 411 self, pr_util, user_admin, capture_rcextensions, merge_extras):
412 412 pull_request = pr_util.create_pull_request(
413 413 approved=True, mergeable=True)
414 414 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
415 415 merge_extras['repository'] = pull_request.target_repo.repo_name
416 416 Session().commit()
417 417
418 418 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
419 419 pre_pull.side_effect = RepositoryError("Disallow push!")
420 420 merge_status = PullRequestModel().merge_repo(
421 421 pull_request, user_admin, extras=merge_extras)
422 422
423 423 assert not merge_status.executed
424 424 assert 'pre_push' not in capture_rcextensions
425 425 assert 'post_push' not in capture_rcextensions
426 426
427 427 def test_merge_fails_if_target_is_locked(
428 428 self, pr_util, user_regular, merge_extras):
429 429 pull_request = pr_util.create_pull_request(
430 430 approved=True, mergeable=True)
431 431 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
432 432 pull_request.target_repo.locked = locked_by
433 433 # TODO: johbo: Check if this can work based on the database, currently
434 434 # all data is pre-computed, that's why just updating the DB is not
435 435 # enough.
436 436 merge_extras['locked_by'] = locked_by
437 437 merge_extras['repository'] = pull_request.target_repo.repo_name
438 438 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
439 439 Session().commit()
440 440 merge_status = PullRequestModel().merge_repo(
441 441 pull_request, user_regular, extras=merge_extras)
442 442 assert not merge_status.executed
443 443
444 444
445 445 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
446 446 (False, 1, 0),
447 447 (True, 0, 1),
448 448 ])
449 449 def test_outdated_comments(
450 450 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
451 451 pull_request = pr_util.create_pull_request()
452 452 pr_util.create_inline_comment(file_path='not_in_updated_diff')
453 453
454 454 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
455 455 pr_util.add_one_commit()
456 456 assert_inline_comments(
457 457 pull_request, visible=inlines_count, outdated=outdated_count)
458 458 outdated_comment_mock.assert_called_with(pull_request)
459 459
460 460
461 461 @pytest.fixture
462 462 def merge_extras(user_regular):
463 463 """
464 464 Context for the vcs operation when running a merge.
465 465 """
466 466 extras = {
467 467 'ip': '127.0.0.1',
468 468 'username': user_regular.username,
469 469 'user_id': user_regular.user_id,
470 470 'action': 'push',
471 471 'repository': 'fake_target_repo_name',
472 472 'scm': 'git',
473 473 'config': 'fake_config_ini_path',
474 474 'make_lock': None,
475 475 'locked_by': [None, None, None],
476 476 'server_url': 'http://test.example.com:5000',
477 477 'hooks': ['push', 'pull'],
478 478 'is_shadow_repo': False,
479 479 }
480 480 return extras
481 481
482 482
483 483 @pytest.mark.usefixtures('config_stub')
484 484 class TestUpdateCommentHandling(object):
485 485
486 486 @pytest.fixture(autouse=True, scope='class')
487 487 def enable_outdated_comments(self, request, baseapp):
488 488 config_patch = mock.patch.dict(
489 489 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
490 490 config_patch.start()
491 491
492 492 @request.addfinalizer
493 493 def cleanup():
494 494 config_patch.stop()
495 495
496 496 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
497 497 commits = [
498 498 {'message': 'a'},
499 499 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
500 500 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
501 501 ]
502 502 pull_request = pr_util.create_pull_request(
503 503 commits=commits, target_head='a', source_head='b', revisions=['b'])
504 504 pr_util.create_inline_comment(file_path='file_b')
505 505 pr_util.add_one_commit(head='c')
506 506
507 507 assert_inline_comments(pull_request, visible=1, outdated=0)
508 508
509 509 def test_comment_stays_unflagged_on_change_above(self, pr_util):
510 510 original_content = ''.join(
511 511 ['line {}\n'.format(x) for x in range(1, 11)])
512 512 updated_content = 'new_line_at_top\n' + original_content
513 513 commits = [
514 514 {'message': 'a'},
515 515 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
516 516 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
517 517 ]
518 518 pull_request = pr_util.create_pull_request(
519 519 commits=commits, target_head='a', source_head='b', revisions=['b'])
520 520
521 521 with outdated_comments_patcher():
522 522 comment = pr_util.create_inline_comment(
523 523 line_no=u'n8', file_path='file_b')
524 524 pr_util.add_one_commit(head='c')
525 525
526 526 assert_inline_comments(pull_request, visible=1, outdated=0)
527 527 assert comment.line_no == u'n9'
528 528
529 529 def test_comment_stays_unflagged_on_change_below(self, pr_util):
530 530 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
531 531 updated_content = original_content + 'new_line_at_end\n'
532 532 commits = [
533 533 {'message': 'a'},
534 534 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
535 535 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
536 536 ]
537 537 pull_request = pr_util.create_pull_request(
538 538 commits=commits, target_head='a', source_head='b', revisions=['b'])
539 539 pr_util.create_inline_comment(file_path='file_b')
540 540 pr_util.add_one_commit(head='c')
541 541
542 542 assert_inline_comments(pull_request, visible=1, outdated=0)
543 543
544 544 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
545 545 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
546 546 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
547 547 change_lines = list(base_lines)
548 548 change_lines.insert(6, 'line 6a added\n')
549 549
550 550 # Changes on the last line of sight
551 551 update_lines = list(change_lines)
552 552 update_lines[0] = 'line 1 changed\n'
553 553 update_lines[-1] = 'line 12 changed\n'
554 554
555 555 def file_b(lines):
556 556 return FileNode('file_b', ''.join(lines))
557 557
558 558 commits = [
559 559 {'message': 'a', 'added': [file_b(base_lines)]},
560 560 {'message': 'b', 'changed': [file_b(change_lines)]},
561 561 {'message': 'c', 'changed': [file_b(update_lines)]},
562 562 ]
563 563
564 564 pull_request = pr_util.create_pull_request(
565 565 commits=commits, target_head='a', source_head='b', revisions=['b'])
566 566 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
567 567
568 568 with outdated_comments_patcher():
569 569 pr_util.add_one_commit(head='c')
570 570 assert_inline_comments(pull_request, visible=0, outdated=1)
571 571
572 572 @pytest.mark.parametrize("change, content", [
573 573 ('changed', 'changed\n'),
574 574 ('removed', ''),
575 575 ], ids=['changed', 'removed'])
576 576 def test_comment_flagged_on_change(self, pr_util, change, content):
577 577 commits = [
578 578 {'message': 'a'},
579 579 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
580 580 {'message': 'c', change: [FileNode('file_b', content)]},
581 581 ]
582 582 pull_request = pr_util.create_pull_request(
583 583 commits=commits, target_head='a', source_head='b', revisions=['b'])
584 584 pr_util.create_inline_comment(file_path='file_b')
585 585
586 586 with outdated_comments_patcher():
587 587 pr_util.add_one_commit(head='c')
588 588 assert_inline_comments(pull_request, visible=0, outdated=1)
589 589
590 590
591 591 @pytest.mark.usefixtures('config_stub')
592 592 class TestUpdateChangedFiles(object):
593 593
594 594 def test_no_changes_on_unchanged_diff(self, pr_util):
595 595 commits = [
596 596 {'message': 'a'},
597 597 {'message': 'b',
598 598 'added': [FileNode('file_b', 'test_content b\n')]},
599 599 {'message': 'c',
600 600 'added': [FileNode('file_c', 'test_content c\n')]},
601 601 ]
602 602 # open a PR from a to b, adding file_b
603 603 pull_request = pr_util.create_pull_request(
604 604 commits=commits, target_head='a', source_head='b', revisions=['b'],
605 605 name_suffix='per-file-review')
606 606
607 607 # modify PR adding new file file_c
608 608 pr_util.add_one_commit(head='c')
609 609
610 610 assert_pr_file_changes(
611 611 pull_request,
612 612 added=['file_c'],
613 613 modified=[],
614 614 removed=[])
615 615
616 616 def test_modify_and_undo_modification_diff(self, pr_util):
617 617 commits = [
618 618 {'message': 'a'},
619 619 {'message': 'b',
620 620 'added': [FileNode('file_b', 'test_content b\n')]},
621 621 {'message': 'c',
622 622 'changed': [FileNode('file_b', 'test_content b modified\n')]},
623 623 {'message': 'd',
624 624 'changed': [FileNode('file_b', 'test_content b\n')]},
625 625 ]
626 626 # open a PR from a to b, adding file_b
627 627 pull_request = pr_util.create_pull_request(
628 628 commits=commits, target_head='a', source_head='b', revisions=['b'],
629 629 name_suffix='per-file-review')
630 630
631 631 # modify PR modifying file file_b
632 632 pr_util.add_one_commit(head='c')
633 633
634 634 assert_pr_file_changes(
635 635 pull_request,
636 636 added=[],
637 637 modified=['file_b'],
638 638 removed=[])
639 639
640 640 # move the head again to d, which rolls back the change,
641 641 # meaning we should indicate no changes
642 642 pr_util.add_one_commit(head='d')
643 643
644 644 assert_pr_file_changes(
645 645 pull_request,
646 646 added=[],
647 647 modified=[],
648 648 removed=[])
649 649
650 650 def test_updated_all_files_in_pr(self, pr_util):
651 651 commits = [
652 652 {'message': 'a'},
653 653 {'message': 'b', 'added': [
654 654 FileNode('file_a', 'test_content a\n'),
655 655 FileNode('file_b', 'test_content b\n'),
656 656 FileNode('file_c', 'test_content c\n')]},
657 657 {'message': 'c', 'changed': [
658 658 FileNode('file_a', 'test_content a changed\n'),
659 659 FileNode('file_b', 'test_content b changed\n'),
660 660 FileNode('file_c', 'test_content c changed\n')]},
661 661 ]
662 662 # open a PR from a to b, changing 3 files
663 663 pull_request = pr_util.create_pull_request(
664 664 commits=commits, target_head='a', source_head='b', revisions=['b'],
665 665 name_suffix='per-file-review')
666 666
667 667 pr_util.add_one_commit(head='c')
668 668
669 669 assert_pr_file_changes(
670 670 pull_request,
671 671 added=[],
672 672 modified=['file_a', 'file_b', 'file_c'],
673 673 removed=[])
674 674
675 675 def test_updated_and_removed_all_files_in_pr(self, pr_util):
676 676 commits = [
677 677 {'message': 'a'},
678 678 {'message': 'b', 'added': [
679 679 FileNode('file_a', 'test_content a\n'),
680 680 FileNode('file_b', 'test_content b\n'),
681 681 FileNode('file_c', 'test_content c\n')]},
682 682 {'message': 'c', 'removed': [
683 683 FileNode('file_a', 'test_content a changed\n'),
684 684 FileNode('file_b', 'test_content b changed\n'),
685 685 FileNode('file_c', 'test_content c changed\n')]},
686 686 ]
687 687 # open a PR from a to b, removing 3 files
688 688 pull_request = pr_util.create_pull_request(
689 689 commits=commits, target_head='a', source_head='b', revisions=['b'],
690 690 name_suffix='per-file-review')
691 691
692 692 pr_util.add_one_commit(head='c')
693 693
694 694 assert_pr_file_changes(
695 695 pull_request,
696 696 added=[],
697 697 modified=[],
698 698 removed=['file_a', 'file_b', 'file_c'])
699 699
700 700
701 701 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
702 702 model = PullRequestModel()
703 703 pull_request = pr_util.create_pull_request()
704 704 pr_util.update_source_repository()
705 705
706 706 model.update_commits(pull_request)
707 707
708 708 # Expect that it has a version entry now
709 709 assert len(model.get_versions(pull_request)) == 1
710 710
711 711
712 712 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
713 713 pull_request = pr_util.create_pull_request()
714 714 model = PullRequestModel()
715 715 model.update_commits(pull_request)
716 716
717 717 # Expect that it still has no versions
718 718 assert len(model.get_versions(pull_request)) == 0
719 719
720 720
721 721 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
722 722 model = PullRequestModel()
723 723 pull_request = pr_util.create_pull_request()
724 724 comment = pr_util.create_comment()
725 725 pr_util.update_source_repository()
726 726
727 727 model.update_commits(pull_request)
728 728
729 729 # Expect that the comment is linked to the pr version now
730 730 assert comment.pull_request_version == model.get_versions(pull_request)[0]
731 731
732 732
733 733 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
734 734 model = PullRequestModel()
735 735 pull_request = pr_util.create_pull_request()
736 736 pr_util.update_source_repository()
737 737 pr_util.update_source_repository()
738 738
739 739 model.update_commits(pull_request)
740 740
741 741 # Expect to find a new comment about the change
742 742 expected_message = textwrap.dedent(
743 743 """\
744 744 Pull request updated. Auto status change to |under_review|
745 745
746 746 .. role:: added
747 747 .. role:: removed
748 748 .. parsed-literal::
749 749
750 750 Changed commits:
751 751 * :added:`1 added`
752 752 * :removed:`0 removed`
753 753
754 754 Changed files:
755 755 * `A file_2 <#a_c--92ed3b5f07b4>`_
756 756
757 757 .. |under_review| replace:: *"Under Review"*"""
758 758 )
759 759 pull_request_comments = sorted(
760 760 pull_request.comments, key=lambda c: c.modified_at)
761 761 update_comment = pull_request_comments[-1]
762 762 assert update_comment.text == expected_message
763 763
764 764
765 765 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
766 766 pull_request = pr_util.create_pull_request()
767 767
768 768 # Avoiding default values
769 769 pull_request.status = PullRequest.STATUS_CLOSED
770 770 pull_request._last_merge_source_rev = "0" * 40
771 771 pull_request._last_merge_target_rev = "1" * 40
772 772 pull_request.last_merge_status = 1
773 773 pull_request.merge_rev = "2" * 40
774 774
775 775 # Remember automatic values
776 776 created_on = pull_request.created_on
777 777 updated_on = pull_request.updated_on
778 778
779 779 # Create a new version of the pull request
780 780 version = PullRequestModel()._create_version_from_snapshot(pull_request)
781 781
782 782 # Check attributes
783 783 assert version.title == pr_util.create_parameters['title']
784 784 assert version.description == pr_util.create_parameters['description']
785 785 assert version.status == PullRequest.STATUS_CLOSED
786 786
787 787 # versions get an updated created_on
788 788 assert version.created_on != created_on
789 789
790 790 assert version.updated_on == updated_on
791 791 assert version.user_id == pull_request.user_id
792 792 assert version.revisions == pr_util.create_parameters['revisions']
793 793 assert version.source_repo == pr_util.source_repository
794 794 assert version.source_ref == pr_util.create_parameters['source_ref']
795 795 assert version.target_repo == pr_util.target_repository
796 796 assert version.target_ref == pr_util.create_parameters['target_ref']
797 797 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
798 798 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
799 799 assert version.last_merge_status == pull_request.last_merge_status
800 800 assert version.merge_rev == pull_request.merge_rev
801 801 assert version.pull_request == pull_request
802 802
803 803
804 804 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
805 805 version1 = pr_util.create_version_of_pull_request()
806 806 comment_linked = pr_util.create_comment(linked_to=version1)
807 807 comment_unlinked = pr_util.create_comment()
808 808 version2 = pr_util.create_version_of_pull_request()
809 809
810 810 PullRequestModel()._link_comments_to_version(version2)
811 811
812 812 # Expect that only the new comment is linked to version2
813 813 assert (
814 814 comment_unlinked.pull_request_version_id ==
815 815 version2.pull_request_version_id)
816 816 assert (
817 817 comment_linked.pull_request_version_id ==
818 818 version1.pull_request_version_id)
819 819 assert (
820 820 comment_unlinked.pull_request_version_id !=
821 821 comment_linked.pull_request_version_id)
822 822
823 823
824 824 def test_calculate_commits():
825 825 old_ids = [1, 2, 3]
826 826 new_ids = [1, 3, 4, 5]
827 827 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
828 828 assert change.added == [4, 5]
829 829 assert change.common == [1, 3]
830 830 assert change.removed == [2]
831 831 assert change.total == [1, 3, 4, 5]
832 832
833 833
834 834 def assert_inline_comments(pull_request, visible=None, outdated=None):
835 835 if visible is not None:
836 836 inline_comments = CommentsModel().get_inline_comments(
837 837 pull_request.target_repo.repo_id, pull_request=pull_request)
838 838 inline_cnt = CommentsModel().get_inline_comments_count(
839 839 inline_comments)
840 840 assert inline_cnt == visible
841 841 if outdated is not None:
842 842 outdated_comments = CommentsModel().get_outdated_comments(
843 843 pull_request.target_repo.repo_id, pull_request)
844 844 assert len(outdated_comments) == outdated
845 845
846 846
847 847 def assert_pr_file_changes(
848 848 pull_request, added=None, modified=None, removed=None):
849 849 pr_versions = PullRequestModel().get_versions(pull_request)
850 850 # always use the first version, i.e. the original PR, to calculate changes
851 851 pull_request_version = pr_versions[0]
852 852 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
853 853 pull_request, pull_request_version)
854 854 file_changes = PullRequestModel()._calculate_file_changes(
855 855 old_diff_data, new_diff_data)
856 856
857 857 assert added == file_changes.added, \
858 858 'expected added:%s vs value:%s' % (added, file_changes.added)
859 859 assert modified == file_changes.modified, \
860 860 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
861 861 assert removed == file_changes.removed, \
862 862 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
863 863
864 864
865 865 def outdated_comments_patcher(use_outdated=True):
866 866 return mock.patch.object(
867 867 CommentsModel, 'use_outdated_comments',
868 868 return_value=use_outdated)