pull-requests: expose unresolved files in merge response.
marcink
r4080:df62e32a default
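
The change in the hunk below threads a new `{unresolved_files}` placeholder into the MERGE_FAILED status message, filled from `MergeResponse.metadata`. A minimal sketch of the resulting behaviour, assuming the module path `rhodecode.lib.vcs.backends.base`; the metadata value is illustrative, since the real list is supplied by the concrete backends' merge code (not part of this hunk):

    from rhodecode.lib.vcs.backends.base import (
        MergeFailureReason, MergeResponse)

    # 'unresolved_files' is an illustrative value; backends populate it
    # when a merge cannot be completed cleanly
    response = MergeResponse(
        False, False, None, MergeFailureReason.MERGE_FAILED,
        metadata={'unresolved_files': '`foo.py`, `bar.txt`'})

    # MERGE_STATUS_MESSAGES entries are rendered via .format(**metadata),
    # so the new placeholder picks up the metadata entry:
    print(response.merge_status_message)
    # This pull request cannot be merged because of merge conflicts. `foo.py`, `bar.txt`
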
@@ -1,1899 +1,1899 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24 import os
25 25 import re
26 26 import time
27 27 import shutil
28 28 import datetime
29 29 import fnmatch
30 30 import itertools
31 31 import logging
32 32 import collections
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from pyramid import compat
38 38
39 39 import rhodecode
40 40 from rhodecode.translation import lazy_ugettext
41 41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 42 from rhodecode.lib.vcs import connection
43 43 from rhodecode.lib.vcs.utils import author_name, author_email
44 44 from rhodecode.lib.vcs.conf import settings
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 50 RepositoryError)
51 51
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 FILEMODE_DEFAULT = 0o100644
57 57 FILEMODE_EXECUTABLE = 0o100755
58 58 EMPTY_COMMIT_ID = '0' * 40
59 59
60 60 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61 61
62 62
63 63 class MergeFailureReason(object):
64 64 """
65 65 Enumeration with all the reasons why the server side merge could fail.
66 66
67 67 DO NOT change the number of the reasons, as they may be stored in the
68 68 database.
69 69
70 70 Changing the name of a reason is acceptable and encouraged to deprecate old
71 71 reasons.
72 72 """
73 73
74 74 # Everything went well.
75 75 NONE = 0
76 76
77 77 # An unexpected exception was raised. Check the logs for more details.
78 78 UNKNOWN = 1
79 79
80 80 # The merge was not successful, there are conflicts.
81 81 MERGE_FAILED = 2
82 82
83 83 # The merge succeeded but we could not push it to the target repository.
84 84 PUSH_FAILED = 3
85 85
86 86 # The specified target is not a head in the target repository.
87 87 TARGET_IS_NOT_HEAD = 4
88 88
89 89 # The source repository contains more branches than the target. Pushing
90 90 # the merge will create additional branches in the target.
91 91 HG_SOURCE_HAS_MORE_BRANCHES = 5
92 92
93 93 # The target reference has multiple heads, which makes it impossible to
94 94 # correctly identify the target location. This can only happen for
95 95 # mercurial branches.
96 96 HG_TARGET_HAS_MULTIPLE_HEADS = 6
97 97
98 98 # The target repository is locked
99 99 TARGET_IS_LOCKED = 7
100 100
101 101 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
102 102 # An involved commit could not be found.
103 103 _DEPRECATED_MISSING_COMMIT = 8
104 104
105 105 # The target repo reference is missing.
106 106 MISSING_TARGET_REF = 9
107 107
108 108 # The source repo reference is missing.
109 109 MISSING_SOURCE_REF = 10
110 110
111 111 # The merge was not successful, there are conflicts related to sub
112 112 # repositories.
113 113 SUBREPO_MERGE_FAILED = 11
114 114
115 115
116 116 class UpdateFailureReason(object):
117 117 """
118 118 Enumeration with all the reasons why the pull request update could fail.
119 119
120 120 DO NOT change the number of the reasons, as they may be stored in the
121 121 database.
122 122
123 123 Changing the name of a reason is acceptable and encouraged to deprecate old
124 124 reasons.
125 125 """
126 126
127 127 # Everything went well.
128 128 NONE = 0
129 129
130 130 # An unexpected exception was raised. Check the logs for more details.
131 131 UNKNOWN = 1
132 132
133 133 # The pull request is up to date.
134 134 NO_CHANGE = 2
135 135
136 136 # The pull request has a reference type that is not supported for update.
137 137 WRONG_REF_TYPE = 3
138 138
139 139 # Update failed because the target reference is missing.
140 140 MISSING_TARGET_REF = 4
141 141
142 142 # Update failed because the source reference is missing.
143 143 MISSING_SOURCE_REF = 5
144 144
145 145
146 146 class MergeResponse(object):
147 147
148 148 # uses .format(**metadata) for variables
149 149 MERGE_STATUS_MESSAGES = {
150 150 MergeFailureReason.NONE: lazy_ugettext(
151 151 u'This pull request can be automatically merged.'),
152 152 MergeFailureReason.UNKNOWN: lazy_ugettext(
153 153 u'This pull request cannot be merged because of an unhandled exception. '
154 154 u'{exception}'),
155 155 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
156 u'This pull request cannot be merged because of merge conflicts.'),
156 u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
157 157 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
158 158 u'This pull request could not be merged because push to '
159 159 u'target:`{target}@{merge_commit}` failed.'),
160 160 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
161 161 u'This pull request cannot be merged because the target '
162 162 u'`{target_ref.name}` is not a head.'),
163 163 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
164 164 u'This pull request cannot be merged because the source contains '
165 165 u'more branches than the target.'),
166 166 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
167 167 u'This pull request cannot be merged because the target `{target_ref.name}` '
168 168 u'has multiple heads: `{heads}`.'),
169 169 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
170 170 u'This pull request cannot be merged because the target repository is '
171 171 u'locked by {locked_by}.'),
172 172
173 173 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
174 174 u'This pull request cannot be merged because the target '
175 175 u'reference `{target_ref.name}` is missing.'),
176 176 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
177 177 u'This pull request cannot be merged because the source '
178 178 u'reference `{source_ref.name}` is missing.'),
179 179 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
180 180 u'This pull request cannot be merged because of conflicts related '
181 181 u'to sub repositories.'),
182 182
183 183 # Deprecations
184 184 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
185 185 u'This pull request cannot be merged because the target or the '
186 186 u'source reference is missing.'),
187 187
188 188 }
189 189
190 190 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
191 191 self.possible = possible
192 192 self.executed = executed
193 193 self.merge_ref = merge_ref
194 194 self.failure_reason = failure_reason
195 195 self.metadata = metadata or {}
196 196
197 197 def __repr__(self):
198 198 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
199 199
200 200 def __eq__(self, other):
201 201 same_instance = isinstance(other, self.__class__)
202 202 return same_instance \
203 203 and self.possible == other.possible \
204 204 and self.executed == other.executed \
205 205 and self.failure_reason == other.failure_reason
206 206
207 207 @property
208 208 def label(self):
209 209 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
210 210 not k.startswith('_'))
211 211 return label_dict.get(self.failure_reason)
212 212
213 213 @property
214 214 def merge_status_message(self):
215 215 """
216 216 Return a human friendly error message for the given merge status code.
217 217 """
218 218 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
219 219 try:
220 220 return msg.format(**self.metadata)
221 221 except Exception:
222 222 log.exception('Failed to format %s message', self)
223 223 return msg
224 224
225 225 def asdict(self):
226 226 data = {}
227 227 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
228 228 'merge_status_message']:
229 229 data[k] = getattr(self, k)
230 230 return data
231 231
232 232
233 233 class BaseRepository(object):
234 234 """
235 235 Base Repository for final backends
236 236
237 237 .. attribute:: DEFAULT_BRANCH_NAME
238 238
239 239 name of default branch (i.e. "trunk" for svn, "master" for git, etc.)
240 240
241 241 .. attribute:: commit_ids
242 242
243 243 list of all available commit ids, in ascending order
244 244
245 245 .. attribute:: path
246 246
247 247 absolute path to the repository
248 248
249 249 .. attribute:: bookmarks
250 250
251 251 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
252 252 there are no bookmarks or the backend implementation does not support
253 253 bookmarks.
254 254
255 255 .. attribute:: tags
256 256
257 257 Mapping from name to :term:`Commit ID` of the tag.
258 258
259 259 """
260 260
261 261 DEFAULT_BRANCH_NAME = None
262 262 DEFAULT_CONTACT = u"Unknown"
263 263 DEFAULT_DESCRIPTION = u"unknown"
264 264 EMPTY_COMMIT_ID = '0' * 40
265 265
266 266 path = None
267 267
268 268 _is_empty = None
269 269 _commit_ids = {}
270 270
271 271 def __init__(self, repo_path, config=None, create=False, **kwargs):
272 272 """
273 273 Initializes repository. Raises RepositoryError if repository could
274 274 not be found at the given ``repo_path`` or directory at ``repo_path``
275 275 exists and ``create`` is set to True.
276 276
277 277 :param repo_path: local path of the repository
278 278 :param config: repository configuration
279 279 :param create=False: if set to True, will try to create the repository.
280 280 :param src_url=None: if set, should be proper url from which repository
281 281 would be cloned; requires ``create`` parameter to be set to True -
282 282 raises RepositoryError if src_url is set and create evaluates to
283 283 False
284 284 """
285 285 raise NotImplementedError
286 286
287 287 def __repr__(self):
288 288 return '<%s at %s>' % (self.__class__.__name__, self.path)
289 289
290 290 def __len__(self):
291 291 return self.count()
292 292
293 293 def __eq__(self, other):
294 294 same_instance = isinstance(other, self.__class__)
295 295 return same_instance and other.path == self.path
296 296
297 297 def __ne__(self, other):
298 298 return not self.__eq__(other)
299 299
300 300 def get_create_shadow_cache_pr_path(self, db_repo):
301 301 path = db_repo.cached_diffs_dir
302 302 if not os.path.exists(path):
303 303 os.makedirs(path, 0o755)
304 304 return path
305 305
306 306 @classmethod
307 307 def get_default_config(cls, default=None):
308 308 config = Config()
309 309 if default and isinstance(default, list):
310 310 for section, key, val in default:
311 311 config.set(section, key, val)
312 312 return config
313 313
314 314 @LazyProperty
315 315 def _remote(self):
316 316 raise NotImplementedError
317 317
318 318 def _heads(self, branch=None):
319 319 return []
320 320
321 321 @LazyProperty
322 322 def EMPTY_COMMIT(self):
323 323 return EmptyCommit(self.EMPTY_COMMIT_ID)
324 324
325 325 @LazyProperty
326 326 def alias(self):
327 327 for k, v in settings.BACKENDS.items():
328 328 if v.split('.')[-1] == str(self.__class__.__name__):
329 329 return k
330 330
331 331 @LazyProperty
332 332 def name(self):
333 333 return safe_unicode(os.path.basename(self.path))
334 334
335 335 @LazyProperty
336 336 def description(self):
337 337 raise NotImplementedError
338 338
339 339 def refs(self):
340 340 """
341 341 returns a `dict` with branches, bookmarks, tags, and closed_branches
342 342 for this repository
343 343 """
344 344 return dict(
345 345 branches=self.branches,
346 346 branches_closed=self.branches_closed,
347 347 tags=self.tags,
348 348 bookmarks=self.bookmarks
349 349 )
350 350
351 351 @LazyProperty
352 352 def branches(self):
353 353 """
354 354 A `dict` which maps branch names to commit ids.
355 355 """
356 356 raise NotImplementedError
357 357
358 358 @LazyProperty
359 359 def branches_closed(self):
360 360 """
361 361 A `dict` which maps closed branch names to commit ids.
362 362 """
363 363 raise NotImplementedError
364 364
365 365 @LazyProperty
366 366 def bookmarks(self):
367 367 """
368 368 A `dict` which maps bookmark names to commit ids.
369 369 """
370 370 raise NotImplementedError
371 371
372 372 @LazyProperty
373 373 def tags(self):
374 374 """
375 375 A `dict` which maps tag names to commit ids.
376 376 """
377 377 raise NotImplementedError
378 378
379 379 @LazyProperty
380 380 def size(self):
381 381 """
382 382 Returns combined size in bytes for all repository files
383 383 """
384 384 tip = self.get_commit()
385 385 return tip.size
386 386
387 387 def size_at_commit(self, commit_id):
388 388 commit = self.get_commit(commit_id)
389 389 return commit.size
390 390
391 391 def _check_for_empty(self):
392 392 no_commits = len(self._commit_ids) == 0
393 393 if no_commits:
394 394 # check on remote to be sure
395 395 return self._remote.is_empty()
396 396 else:
397 397 return False
398 398
399 399 def is_empty(self):
400 400 if rhodecode.is_test:
401 401 return self._check_for_empty()
402 402
403 403 if self._is_empty is None:
404 404 # cache empty for production, but not tests
405 405 self._is_empty = self._check_for_empty()
406 406
407 407 return self._is_empty
408 408
409 409 @staticmethod
410 410 def check_url(url, config):
411 411 """
412 412 Function will check given url and try to verify if it's a valid
413 413 link.
414 414 """
415 415 raise NotImplementedError
416 416
417 417 @staticmethod
418 418 def is_valid_repository(path):
419 419 """
420 420 Check if given `path` contains a valid repository of this backend
421 421 """
422 422 raise NotImplementedError
423 423
424 424 # ==========================================================================
425 425 # COMMITS
426 426 # ==========================================================================
427 427
428 428 @CachedProperty
429 429 def commit_ids(self):
430 430 raise NotImplementedError
431 431
432 432 def append_commit_id(self, commit_id):
433 433 if commit_id not in self.commit_ids:
434 434 self._rebuild_cache(self.commit_ids + [commit_id])
435 435
436 436 # clear cache
437 437 self._invalidate_prop_cache('commit_ids')
438 438 self._is_empty = False
439 439
440 440 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
441 441 """
442 442 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
443 443 are both None, most recent commit is returned.
444 444
445 445 :param pre_load: Optional. List of commit attributes to load.
446 446
447 447 :raises ``EmptyRepositoryError``: if there are no commits
448 448 """
449 449 raise NotImplementedError
450 450
451 451 def __iter__(self):
452 452 for commit_id in self.commit_ids:
453 453 yield self.get_commit(commit_id=commit_id)
454 454
455 455 def get_commits(
456 456 self, start_id=None, end_id=None, start_date=None, end_date=None,
457 457 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
458 458 """
459 459 Returns an iterator of `BaseCommit` objects from start to end.
460 460 This should behave just like a list slice, i.e. the end is not
461 461 inclusive.
462 462
463 463 :param start_id: None or str, must be a valid commit id
464 464 :param end_id: None or str, must be a valid commit id
465 465 :param start_date:
466 466 :param end_date:
467 467 :param branch_name:
468 468 :param show_hidden:
469 469 :param pre_load:
470 470 :param translate_tags:
471 471 """
472 472 raise NotImplementedError
473 473
474 474 def __getitem__(self, key):
475 475 """
476 476 Allows index based access to the commit objects of this repository.
477 477 """
478 478 pre_load = ["author", "branch", "date", "message", "parents"]
479 479 if isinstance(key, slice):
480 480 return self._get_range(key, pre_load)
481 481 return self.get_commit(commit_idx=key, pre_load=pre_load)
482 482
483 483 def _get_range(self, slice_obj, pre_load):
484 484 for commit_id in self.commit_ids.__getitem__(slice_obj):
485 485 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
486 486
487 487 def count(self):
488 488 return len(self.commit_ids)
489 489
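
`get_commits()`, `__getitem__` and `count()` give list-like access to the history. A hedged usage sketch, assuming `repo` is an instance of a concrete backend (the methods on this base class raise NotImplementedError):

    first = repo[0]               # get_commit(commit_idx=0) with pre_load set
    for commit in repo[:3]:       # slices are served lazily by _get_range()
        print(commit.short_id)

    # explicit range queries go through get_commits(); end is not inclusive
    for commit in repo.get_commits(branch_name=repo.DEFAULT_BRANCH_NAME,
                                   pre_load=['author', 'date', 'message']):
        print(commit.author)

    print(len(repo))              # __len__ delegates to count()
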
490 490 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
491 491 """
492 492 Creates and returns a tag for the given ``commit_id``.
493 493
494 494 :param name: name for new tag
495 495 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
496 496 :param commit_id: commit id for which new tag would be created
497 497 :param message: message of the tag's commit
498 498 :param date: date of tag's commit
499 499
500 500 :raises TagAlreadyExistError: if tag with same name already exists
501 501 """
502 502 raise NotImplementedError
503 503
504 504 def remove_tag(self, name, user, message=None, date=None):
505 505 """
506 506 Removes tag with the given ``name``.
507 507
508 508 :param name: name of the tag to be removed
509 509 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
510 510 :param message: message of the tag's removal commit
511 511 :param date: date of tag's removal commit
512 512
513 513 :raises TagDoesNotExistError: if tag with given name does not exist
514 514 """
515 515 raise NotImplementedError
516 516
517 517 def get_diff(
518 518 self, commit1, commit2, path=None, ignore_whitespace=False,
519 519 context=3, path1=None):
520 520 """
521 521 Returns (git like) *diff*, as plain text. Shows changes introduced by
522 522 `commit2` since `commit1`.
523 523
524 524 :param commit1: Entry point from which diff is shown. Can be
525 525 ``self.EMPTY_COMMIT`` - in this case, patch showing all
526 526 the changes since empty state of the repository until `commit2`
527 527 :param commit2: Until which commit changes should be shown.
528 528 :param path: Can be set to a path of a file to create a diff of that
529 529 file. If `path1` is also set, this value is only associated to
530 530 `commit2`.
531 531 :param ignore_whitespace: If set to ``True``, would not show whitespace
532 532 changes. Defaults to ``False``.
533 533 :param context: How many lines before/after changed lines should be
534 534 shown. Defaults to ``3``.
535 535 :param path1: Can be set to a path to associate with `commit1`. This
536 536 parameter works only for backends which support diff generation for
537 537 different paths. Other backends will raise a `ValueError` if `path1`
538 538 is set and has a different value than `path`.
539 539 :param file_path: filter this diff by given path pattern
540 540 """
541 541 raise NotImplementedError
542 542
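
A short `get_diff()` usage sketch; passing `EMPTY_COMMIT` as the first argument produces a patch of everything up to the second commit (the file path is illustrative):

    tip = repo.get_commit()
    # full diff of the tip against the empty state, with 5 context lines
    full_diff = repo.get_diff(repo.EMPTY_COMMIT, tip, context=5)
    # diff of a single file between the tip and its first parent
    file_diff = repo.get_diff(tip.first_parent, tip, path='setup.py')
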
543 543 def strip(self, commit_id, branch=None):
544 544 """
545 545 Strip given commit_id from the repository
546 546 """
547 547 raise NotImplementedError
548 548
549 549 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
550 550 """
551 551 Return the latest common ancestor commit if one exists for this repo
552 552 `commit_id1` vs `commit_id2` from `repo2`.
553 553
554 554 :param commit_id1: Commit id from this repository to use as a
555 555 target for the comparison.
556 556 :param commit_id2: Source commit id to use for comparison.
557 557 :param repo2: Source repository to use for comparison.
558 558 """
559 559 raise NotImplementedError
560 560
561 561 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
562 562 """
563 563 Compare this repository's revision `commit_id1` with `commit_id2`.
564 564
565 565 Returns a tuple(commits, ancestor) that would be merged from
566 566 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
567 567 will be returned as ancestor.
568 568
569 569 :param commit_id1: Commit id from this repository to use as a
570 570 target for the comparison.
571 571 :param commit_id2: Source commit id to use for comparison.
572 572 :param repo2: Source repository to use for comparison.
573 573 :param merge: If set to ``True`` will do a merge compare which also
574 574 returns the common ancestor.
575 575 :param pre_load: Optional. List of commit attributes to load.
576 576 """
577 577 raise NotImplementedError
578 578
579 579 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
580 580 user_name='', user_email='', message='', dry_run=False,
581 581 use_rebase=False, close_branch=False):
582 582 """
583 583 Merge the revisions specified in `source_ref` from `source_repo`
584 584 onto the `target_ref` of this repository.
585 585
586 586 `source_ref` and `target_ref` are named tuples with the following
587 587 fields `type`, `name` and `commit_id`.
588 588
589 589 Returns a MergeResponse object with the following fields:
590 590 'possible', 'executed', 'merge_ref', 'failure_reason' and
591 591 'metadata'.
592 592
593 593 :param repo_id: `repo_id` target repo id.
594 594 :param workspace_id: `workspace_id` unique identifier.
595 595 :param target_ref: `target_ref` points to the commit on top of which
596 596 the `source_ref` should be merged.
597 597 :param source_repo: The repository that contains the commits to be
598 598 merged.
599 599 :param source_ref: `source_ref` points to the topmost commit from
600 600 the `source_repo` which should be merged.
601 601 :param user_name: Merge commit `user_name`.
602 602 :param user_email: Merge commit `user_email`.
603 603 :param message: Merge commit `message`.
604 604 :param dry_run: If `True` the merge will not take place.
605 605 :param use_rebase: If `True` commits from the source will be rebased
606 606 on top of the target instead of being merged.
607 607 :param close_branch: If `True` branch will be closed before merging it
608 608 """
609 609 if dry_run:
610 610 message = message or settings.MERGE_DRY_RUN_MESSAGE
611 611 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
612 612 user_name = user_name or settings.MERGE_DRY_RUN_USER
613 613 else:
614 614 if not user_name:
615 615 raise ValueError('user_name cannot be empty')
616 616 if not user_email:
617 617 raise ValueError('user_email cannot be empty')
618 618 if not message:
619 619 raise ValueError('message cannot be empty')
620 620
621 621 try:
622 622 return self._merge_repo(
623 623 repo_id, workspace_id, target_ref, source_repo,
624 624 source_ref, message, user_name, user_email, dry_run=dry_run,
625 625 use_rebase=use_rebase, close_branch=close_branch)
626 626 except RepositoryError as exc:
627 627 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
628 628 return MergeResponse(
629 629 False, False, None, MergeFailureReason.UNKNOWN,
630 630 metadata={'exception': str(exc)})
631 631
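
A hedged sketch of a dry-run `merge()` call; `Reference` is the named tuple defined at the top of this module, while the repository objects and commit ids are illustrative:

    target_ref = Reference('branch', 'master', '1' * 40)
    source_ref = Reference('branch', 'feature', '2' * 40)

    # dry_run=True fills in the MERGE_DRY_RUN_* defaults for user/message
    response = target_repo.merge(
        repo_id=1, workspace_id='pr-1', target_ref=target_ref,
        source_repo=source_repo, source_ref=source_ref, dry_run=True)

    if not response.possible:
        # e.g. MERGE_FAILED now reports the unresolved files (see above)
        print(response.merge_status_message)
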
632 632 def _merge_repo(self, repo_id, workspace_id, target_ref,
633 633 source_repo, source_ref, merge_message,
634 634 merger_name, merger_email, dry_run=False,
635 635 use_rebase=False, close_branch=False):
636 636 """Internal implementation of merge."""
637 637 raise NotImplementedError
638 638
639 639 def _maybe_prepare_merge_workspace(
640 640 self, repo_id, workspace_id, target_ref, source_ref):
641 641 """
642 642 Create the merge workspace.
643 643
644 644 :param workspace_id: `workspace_id` unique identifier.
645 645 """
646 646 raise NotImplementedError
647 647
648 648 @classmethod
649 649 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
650 650 """
651 651 Legacy version that was used before. We still need it for
652 652 backward compat
653 653 """
654 654 return os.path.join(
655 655 os.path.dirname(repo_path),
656 656 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
657 657
658 658 @classmethod
659 659 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
660 660 # The name of the shadow repository must start with '.', so it is
661 661 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
662 662 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
663 663 if os.path.exists(legacy_repository_path):
664 664 return legacy_repository_path
665 665 else:
666 666 return os.path.join(
667 667 os.path.dirname(repo_path),
668 668 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
669 669
670 670 def cleanup_merge_workspace(self, repo_id, workspace_id):
671 671 """
672 672 Remove merge workspace.
673 673
674 674 This function MUST not fail in case there is no workspace associated with
675 675 the given `workspace_id`.
676 676
677 677 :param workspace_id: `workspace_id` unique identifier.
678 678 """
679 679 shadow_repository_path = self._get_shadow_repository_path(
680 680 self.path, repo_id, workspace_id)
681 681 shadow_repository_path_del = '{}.{}.delete'.format(
682 682 shadow_repository_path, time.time())
683 683
684 684 # move the shadow repo, so it never conflicts with the one used.
685 685 # we use this method because shutil.rmtree had some edge case problems
686 686 # removing symlinked repositories
687 687 if not os.path.isdir(shadow_repository_path):
688 688 return
689 689
690 690 shutil.move(shadow_repository_path, shadow_repository_path_del)
691 691 try:
692 692 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
693 693 except Exception:
694 694 log.exception('Failed to gracefully remove shadow repo under %s',
695 695 shadow_repository_path_del)
696 696 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
697 697
698 698 # ========== #
699 699 # COMMIT API #
700 700 # ========== #
701 701
702 702 @LazyProperty
703 703 def in_memory_commit(self):
704 704 """
705 705 Returns :class:`InMemoryCommit` object for this repository.
706 706 """
707 707 raise NotImplementedError
708 708
709 709 # ======================== #
710 710 # UTILITIES FOR SUBCLASSES #
711 711 # ======================== #
712 712
713 713 def _validate_diff_commits(self, commit1, commit2):
714 714 """
715 715 Validates that the given commits are related to this repository.
716 716
717 717 Intended as a utility for sub classes to have a consistent validation
718 718 of input parameters in methods like :meth:`get_diff`.
719 719 """
720 720 self._validate_commit(commit1)
721 721 self._validate_commit(commit2)
722 722 if (isinstance(commit1, EmptyCommit) and
723 723 isinstance(commit2, EmptyCommit)):
724 724 raise ValueError("Cannot compare two empty commits")
725 725
726 726 def _validate_commit(self, commit):
727 727 if not isinstance(commit, BaseCommit):
728 728 raise TypeError(
729 729 "%s is not of type BaseCommit" % repr(commit))
730 730 if commit.repository != self and not isinstance(commit, EmptyCommit):
731 731 raise ValueError(
732 732 "Commit %s must be a valid commit from this repository %s, "
733 733 "related to this repository instead %s." %
734 734 (commit, self, commit.repository))
735 735
736 736 def _validate_commit_id(self, commit_id):
737 737 if not isinstance(commit_id, compat.string_types):
738 738 raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id)))
739 739
740 740 def _validate_commit_idx(self, commit_idx):
741 741 if not isinstance(commit_idx, (int, long)):
742 742 raise TypeError("commit_idx must be a numeric value")
743 743
744 744 def _validate_branch_name(self, branch_name):
745 745 if branch_name and branch_name not in self.branches_all:
746 746 msg = ("Branch %s not found in %s" % (branch_name, self))
747 747 raise BranchDoesNotExistError(msg)
748 748
749 749 #
750 750 # Supporting deprecated API parts
751 751 # TODO: johbo: consider to move this into a mixin
752 752 #
753 753
754 754 @property
755 755 def EMPTY_CHANGESET(self):
756 756 warnings.warn(
757 757 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
758 758 return self.EMPTY_COMMIT_ID
759 759
760 760 @property
761 761 def revisions(self):
762 762 warnings.warn("Use commits attribute instead", DeprecationWarning)
763 763 return self.commit_ids
764 764
765 765 @revisions.setter
766 766 def revisions(self, value):
767 767 warnings.warn("Use commits attribute instead", DeprecationWarning)
768 768 self.commit_ids = value
769 769
770 770 def get_changeset(self, revision=None, pre_load=None):
771 771 warnings.warn("Use get_commit instead", DeprecationWarning)
772 772 commit_id = None
773 773 commit_idx = None
774 774 if isinstance(revision, compat.string_types):
775 775 commit_id = revision
776 776 else:
777 777 commit_idx = revision
778 778 return self.get_commit(
779 779 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
780 780
781 781 def get_changesets(
782 782 self, start=None, end=None, start_date=None, end_date=None,
783 783 branch_name=None, pre_load=None):
784 784 warnings.warn("Use get_commits instead", DeprecationWarning)
785 785 start_id = self._revision_to_commit(start)
786 786 end_id = self._revision_to_commit(end)
787 787 return self.get_commits(
788 788 start_id=start_id, end_id=end_id, start_date=start_date,
789 789 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
790 790
791 791 def _revision_to_commit(self, revision):
792 792 """
793 793 Translates a revision to a commit_id
794 794
795 795 Helps to support the old changeset based API which allows to use
796 796 commit ids and commit indices interchangeably.
797 797 """
798 798 if revision is None:
799 799 return revision
800 800
801 801 if isinstance(revision, compat.string_types):
802 802 commit_id = revision
803 803 else:
804 804 commit_id = self.commit_ids[revision]
805 805 return commit_id
806 806
807 807 @property
808 808 def in_memory_changeset(self):
809 809 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
810 810 return self.in_memory_commit
811 811
812 812 def get_path_permissions(self, username):
813 813 """
814 814 Returns a path permission checker or None if not supported
815 815
816 816 :param username: session user name
817 817 :return: an instance of BasePathPermissionChecker or None
818 818 """
819 819 return None
820 820
821 821 def install_hooks(self, force=False):
822 822 return self._remote.install_hooks(force)
823 823
824 824 def get_hooks_info(self):
825 825 return self._remote.get_hooks_info()
826 826
827 827
828 828 class BaseCommit(object):
829 829 """
830 830 Each backend should implement its commit representation.
831 831
832 832 **Attributes**
833 833
834 834 ``repository``
835 835 repository object within which commit exists
836 836
837 837 ``id``
838 838 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
839 839 just ``tip``.
840 840
841 841 ``raw_id``
842 842 raw commit representation (i.e. full 40 length sha for git
843 843 backend)
844 844
845 845 ``short_id``
846 846 shortened (if applicable) version of ``raw_id``; it would be a simple
847 847 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
848 848 as ``raw_id`` for subversion
849 849
850 850 ``idx``
851 851 commit index
852 852
853 853 ``files``
854 854 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
855 855
856 856 ``dirs``
857 857 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
858 858
859 859 ``nodes``
860 860 combined list of ``Node`` objects
861 861
862 862 ``author``
863 863 author of the commit, as unicode
864 864
865 865 ``message``
866 866 message of the commit, as unicode
867 867
868 868 ``parents``
869 869 list of parent commits
870 870
871 871 """
872 872
873 873 branch = None
874 874 """
875 875 Depending on the backend this should be set to the branch name of the
876 876 commit. Backends not supporting branches on commits should leave this
877 877 value as ``None``.
878 878 """
879 879
880 880 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
881 881 """
882 882 This template is used to generate a default prefix for repository archives
883 883 if no prefix has been specified.
884 884 """
885 885
886 886 def __str__(self):
887 887 return '<%s at %s:%s>' % (
888 888 self.__class__.__name__, self.idx, self.short_id)
889 889
890 890 def __repr__(self):
891 891 return self.__str__()
892 892
893 893 def __unicode__(self):
894 894 return u'%s:%s' % (self.idx, self.short_id)
895 895
896 896 def __eq__(self, other):
897 897 same_instance = isinstance(other, self.__class__)
898 898 return same_instance and self.raw_id == other.raw_id
899 899
900 900 def __json__(self):
901 901 parents = []
902 902 try:
903 903 for parent in self.parents:
904 904 parents.append({'raw_id': parent.raw_id})
905 905 except NotImplementedError:
906 906 # empty commit doesn't have parents implemented
907 907 pass
908 908
909 909 return {
910 910 'short_id': self.short_id,
911 911 'raw_id': self.raw_id,
912 912 'revision': self.idx,
913 913 'message': self.message,
914 914 'date': self.date,
915 915 'author': self.author,
916 916 'parents': parents,
917 917 'branch': self.branch
918 918 }
919 919
920 920 def __getstate__(self):
921 921 d = self.__dict__.copy()
922 922 d.pop('_remote', None)
923 923 d.pop('repository', None)
924 924 return d
925 925
926 926 def _get_refs(self):
927 927 return {
928 928 'branches': [self.branch] if self.branch else [],
929 929 'bookmarks': getattr(self, 'bookmarks', []),
930 930 'tags': self.tags
931 931 }
932 932
933 933 @LazyProperty
934 934 def last(self):
935 935 """
936 936 ``True`` if this is the last commit in the repository, ``False``
937 937 otherwise; trying to access this attribute while there are no
938 938 commits raises `EmptyRepositoryError`
939 939 """
940 940 if self.repository is None:
941 941 raise CommitError("Cannot check if it's most recent commit")
942 942 return self.raw_id == self.repository.commit_ids[-1]
943 943
944 944 @LazyProperty
945 945 def parents(self):
946 946 """
947 947 Returns list of parent commits.
948 948 """
949 949 raise NotImplementedError
950 950
951 951 @LazyProperty
952 952 def first_parent(self):
953 953 """
954 954 Returns the first parent commit, or ``EmptyCommit`` if none exists.
955 955 """
956 956 return self.parents[0] if self.parents else EmptyCommit()
957 957
958 958 @property
959 959 def merge(self):
960 960 """
961 961 Returns ``True`` if the commit is a merge commit.
962 962 """
963 963 return len(self.parents) > 1
964 964
965 965 @LazyProperty
966 966 def children(self):
967 967 """
968 968 Returns list of child commits.
969 969 """
970 970 raise NotImplementedError
971 971
972 972 @LazyProperty
973 973 def id(self):
974 974 """
975 975 Returns string identifying this commit.
976 976 """
977 977 raise NotImplementedError
978 978
979 979 @LazyProperty
980 980 def raw_id(self):
981 981 """
982 982 Returns raw string identifying this commit.
983 983 """
984 984 raise NotImplementedError
985 985
986 986 @LazyProperty
987 987 def short_id(self):
988 988 """
989 989 Returns shortened version of ``raw_id`` attribute, as string,
990 990 identifying this commit, useful for presentation to users.
991 991 """
992 992 raise NotImplementedError
993 993
994 994 @LazyProperty
995 995 def idx(self):
996 996 """
997 997 Returns integer identifying this commit.
998 998 """
999 999 raise NotImplementedError
1000 1000
1001 1001 @LazyProperty
1002 1002 def committer(self):
1003 1003 """
1004 1004 Returns committer for this commit
1005 1005 """
1006 1006 raise NotImplementedError
1007 1007
1008 1008 @LazyProperty
1009 1009 def committer_name(self):
1010 1010 """
1011 1011 Returns committer name for this commit
1012 1012 """
1013 1013
1014 1014 return author_name(self.committer)
1015 1015
1016 1016 @LazyProperty
1017 1017 def committer_email(self):
1018 1018 """
1019 1019 Returns committer email address for this commit
1020 1020 """
1021 1021
1022 1022 return author_email(self.committer)
1023 1023
1024 1024 @LazyProperty
1025 1025 def author(self):
1026 1026 """
1027 1027 Returns author for this commit
1028 1028 """
1029 1029
1030 1030 raise NotImplementedError
1031 1031
1032 1032 @LazyProperty
1033 1033 def author_name(self):
1034 1034 """
1035 1035 Returns author name for this commit
1036 1036 """
1037 1037
1038 1038 return author_name(self.author)
1039 1039
1040 1040 @LazyProperty
1041 1041 def author_email(self):
1042 1042 """
1043 1043 Returns author email address for this commit
1044 1044 """
1045 1045
1046 1046 return author_email(self.author)
1047 1047
1048 1048 def get_file_mode(self, path):
1049 1049 """
1050 1050 Returns stat mode of the file at `path`.
1051 1051 """
1052 1052 raise NotImplementedError
1053 1053
1054 1054 def is_link(self, path):
1055 1055 """
1056 1056 Returns ``True`` if given `path` is a symlink
1057 1057 """
1058 1058 raise NotImplementedError
1059 1059
1060 1060 def is_node_binary(self, path):
1061 1061 """
1062 1062 Returns ``True`` if given path is a binary file
1063 1063 """
1064 1064 raise NotImplementedError
1065 1065
1066 1066 def get_file_content(self, path):
1067 1067 """
1068 1068 Returns content of the file at the given `path`.
1069 1069 """
1070 1070 raise NotImplementedError
1071 1071
1072 1072 def get_file_content_streamed(self, path):
1073 1073 """
1074 1074 returns a streaming response from vcsserver with file content
1075 1075 """
1076 1076 raise NotImplementedError
1077 1077
1078 1078 def get_file_size(self, path):
1079 1079 """
1080 1080 Returns size of the file at the given `path`.
1081 1081 """
1082 1082 raise NotImplementedError
1083 1083
1084 1084 def get_path_commit(self, path, pre_load=None):
1085 1085 """
1086 1086 Returns last commit of the file at the given `path`.
1087 1087
1088 1088 :param pre_load: Optional. List of commit attributes to load.
1089 1089 """
1090 1090 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1091 1091 if not commits:
1092 1092 raise RepositoryError(
1093 1093 'Failed to fetch history for path {}. '
1094 1094 'Please check if such path exists in your repository'.format(
1095 1095 path))
1096 1096 return commits[0]
1097 1097
1098 1098 def get_path_history(self, path, limit=None, pre_load=None):
1099 1099 """
1100 1100 Returns history of file as reversed list of :class:`BaseCommit`
1101 1101 objects for which file at given `path` has been modified.
1102 1102
1103 1103 :param limit: Optional. Allows to limit the size of the returned
1104 1104 history. This is intended as a hint to the underlying backend, so
1105 1105 that it can apply optimizations depending on the limit.
1106 1106 :param pre_load: Optional. List of commit attributes to load.
1107 1107 """
1108 1108 raise NotImplementedError
1109 1109
1110 1110 def get_file_annotate(self, path, pre_load=None):
1111 1111 """
1112 1112 Returns a generator of four element tuples with
1113 1113 lineno, sha, commit lazy loader and line
1114 1114
1115 1115 :param pre_load: Optional. List of commit attributes to load.
1116 1116 """
1117 1117 raise NotImplementedError
1118 1118
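
A small consumption sketch for `get_file_annotate()`; the path is illustrative, and the third tuple element is a lazy loader that only fetches the full commit object when called:

    for line_no, sha, commit_loader, line in commit.get_file_annotate('setup.py'):
        print('%4d %s %s' % (line_no, sha[:12], line))
        # commit_loader() would return the full BaseCommit when needed
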
1119 1119 def get_nodes(self, path):
1120 1120 """
1121 1121 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1122 1122 state of commit at the given ``path``.
1123 1123
1124 1124 :raises ``CommitError``: if node at the given ``path`` is not
1125 1125 instance of ``DirNode``
1126 1126 """
1127 1127 raise NotImplementedError
1128 1128
1129 1129 def get_node(self, path):
1130 1130 """
1131 1131 Returns ``Node`` object from the given ``path``.
1132 1132
1133 1133 :raises ``NodeDoesNotExistError``: if there is no node at the given
1134 1134 ``path``
1135 1135 """
1136 1136 raise NotImplementedError
1137 1137
1138 1138 def get_largefile_node(self, path):
1139 1139 """
1140 1140 Returns the path to the largefile from Mercurial/Git-lfs storage,
1141 1141 or None if it's not a largefile node.
1142 1142 """
1143 1143 return None
1144 1144
1145 1145 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1146 1146 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1147 1147 """
1148 1148 Creates an archive containing the contents of the repository.
1149 1149
1150 1150 :param archive_dest_path: path of the file in which to create the archive.
1151 1151 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1152 1152 :param prefix: name of root directory in archive.
1153 1153 Default is repository name and commit's short_id joined with dash:
1154 1154 ``"{repo_name}-{short_id}"``.
1155 1155 :param write_metadata: write a metadata file into archive.
1156 1156 :param mtime: custom modification time for archive creation, defaults
1157 1157 to the commit date if not given.
1158 1158 :param archive_at_path: pack files at this path (default '/')
1159 1159
1160 1160 :raise VCSError: If prefix has a problem.
1161 1161 """
1162 1162 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1163 1163 if kind not in allowed_kinds:
1164 1164 raise ImproperArchiveTypeError(
1165 1165 'Archive kind (%s) not supported use one of %s' %
1166 1166 (kind, allowed_kinds))
1167 1167
1168 1168 prefix = self._validate_archive_prefix(prefix)
1169 1169
1170 1170 mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
1171 1171
1172 1172 file_info = []
1173 1173 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1174 1174 for _r, _d, files in cur_rev.walk(archive_at_path):
1175 1175 for f in files:
1176 1176 f_path = os.path.join(prefix, f.path)
1177 1177 file_info.append(
1178 1178 (f_path, f.mode, f.is_link(), f.raw_bytes))
1179 1179
1180 1180 if write_metadata:
1181 1181 metadata = [
1182 1182 ('repo_name', self.repository.name),
1183 1183 ('commit_id', self.raw_id),
1184 1184 ('mtime', mtime),
1185 1185 ('branch', self.branch),
1186 1186 ('tags', ','.join(self.tags)),
1187 1187 ]
1188 1188 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1189 1189 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1190 1190
1191 1191 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1192 1192
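
A usage sketch for `archive_repo()` on a concrete backend commit; the destination path is illustrative, and the default prefix comes from `_ARCHIVE_PREFIX_TEMPLATE`:

    commit = repo.get_commit()
    # writes a gzipped tarball, including a .archival.txt metadata file
    commit.archive_repo('/tmp/repo-archive.tgz', kind='tgz',
                        write_metadata=True)
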
1193 1193 def _validate_archive_prefix(self, prefix):
1194 1194 if prefix is None:
1195 1195 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1196 1196 repo_name=safe_str(self.repository.name),
1197 1197 short_id=self.short_id)
1198 1198 elif not isinstance(prefix, str):
1199 1199 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1200 1200 elif prefix.startswith('/'):
1201 1201 raise VCSError("Prefix cannot start with leading slash")
1202 1202 elif prefix.strip() == '':
1203 1203 raise VCSError("Prefix cannot be empty")
1204 1204 return prefix
1205 1205
1206 1206 @LazyProperty
1207 1207 def root(self):
1208 1208 """
1209 1209 Returns ``RootNode`` object for this commit.
1210 1210 """
1211 1211 return self.get_node('')
1212 1212
1213 1213 def next(self, branch=None):
1214 1214 """
1215 1215 Returns next commit from current; if branch is given it will return
1216 1216 the next commit belonging to that branch
1217 1217
1218 1218 :param branch: show commits within the given named branch
1219 1219 """
1220 1220 indexes = xrange(self.idx + 1, self.repository.count())
1221 1221 return self._find_next(indexes, branch)
1222 1222
1223 1223 def prev(self, branch=None):
1224 1224 """
1225 1225 Returns previous commit from current; if branch is given it will
1226 1226 return the previous commit belonging to that branch
1227 1227
1228 1228 :param branch: show commit within the given named branch
1229 1229 """
1230 1230 indexes = xrange(self.idx - 1, -1, -1)
1231 1231 return self._find_next(indexes, branch)
1232 1232
1233 1233 def _find_next(self, indexes, branch=None):
1234 1234 if branch and self.branch != branch:
1235 1235 raise VCSError('Branch option used on commit not belonging '
1236 1236 'to that branch')
1237 1237
1238 1238 for next_idx in indexes:
1239 1239 commit = self.repository.get_commit(commit_idx=next_idx)
1240 1240 if branch and branch != commit.branch:
1241 1241 continue
1242 1242 return commit
1243 1243 raise CommitDoesNotExistError
1244 1244
1245 1245 def diff(self, ignore_whitespace=True, context=3):
1246 1246 """
1247 1247 Returns a `Diff` object representing the change made by this commit.
1248 1248 """
1249 1249 parent = self.first_parent
1250 1250 diff = self.repository.get_diff(
1251 1251 parent, self,
1252 1252 ignore_whitespace=ignore_whitespace,
1253 1253 context=context)
1254 1254 return diff
1255 1255
1256 1256 @LazyProperty
1257 1257 def added(self):
1258 1258 """
1259 1259 Returns list of added ``FileNode`` objects.
1260 1260 """
1261 1261 raise NotImplementedError
1262 1262
1263 1263 @LazyProperty
1264 1264 def changed(self):
1265 1265 """
1266 1266 Returns list of modified ``FileNode`` objects.
1267 1267 """
1268 1268 raise NotImplementedError
1269 1269
1270 1270 @LazyProperty
1271 1271 def removed(self):
1272 1272 """
1273 1273 Returns list of removed ``FileNode`` objects.
1274 1274 """
1275 1275 raise NotImplementedError
1276 1276
1277 1277 @LazyProperty
1278 1278 def size(self):
1279 1279 """
1280 1280 Returns total number of bytes from contents of all filenodes.
1281 1281 """
1282 1282 return sum((node.size for node in self.get_filenodes_generator()))
1283 1283
1284 1284 def walk(self, topurl=''):
1285 1285 """
1286 1286 Similar to os.walk method. Instead of a filesystem it walks through
1287 1287 commit starting at given ``topurl``. Returns generator of tuples
1288 1288 (topnode, dirnodes, filenodes).
1289 1289 """
1290 1290 topnode = self.get_node(topurl)
1291 1291 if not topnode.is_dir():
1292 1292 return
1293 1293 yield (topnode, topnode.dirs, topnode.files)
1294 1294 for dirnode in topnode.dirs:
1295 1295 for tup in self.walk(dirnode.path):
1296 1296 yield tup
1297 1297
1298 1298 def get_filenodes_generator(self):
1299 1299 """
1300 1300 Returns generator that yields *all* file nodes.
1301 1301 """
1302 1302 for topnode, dirs, files in self.walk():
1303 1303 for node in files:
1304 1304 yield node
1305 1305
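
A short sketch of walking a commit's tree, mirroring `os.walk`:

    for topnode, dirs, files in commit.walk(''):
        for node in files:
            print(node.path)
    # get_filenodes_generator() flattens the same traversal to file nodes
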
1306 1306 #
1307 1307 # Utilities for sub classes to support consistent behavior
1308 1308 #
1309 1309
1310 1310 def no_node_at_path(self, path):
1311 1311 return NodeDoesNotExistError(
1312 1312 u"There is no file nor directory at the given path: "
1313 1313 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1314 1314
1315 1315 def _fix_path(self, path):
1316 1316 """
1317 1317 Paths are stored without trailing slash so we need to get rid of it if
1318 1318 needed.
1319 1319 """
1320 1320 return path.rstrip('/')
1321 1321
1322 1322 #
1323 1323 # Deprecated API based on changesets
1324 1324 #
1325 1325
1326 1326 @property
1327 1327 def revision(self):
1328 1328 warnings.warn("Use idx instead", DeprecationWarning)
1329 1329 return self.idx
1330 1330
1331 1331 @revision.setter
1332 1332 def revision(self, value):
1333 1333 warnings.warn("Use idx instead", DeprecationWarning)
1334 1334 self.idx = value
1335 1335
1336 1336 def get_file_changeset(self, path):
1337 1337 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1338 1338 return self.get_path_commit(path)
1339 1339
1340 1340
1341 1341 class BaseChangesetClass(type):
1342 1342
1343 1343 def __instancecheck__(self, instance):
1344 1344 return isinstance(instance, BaseCommit)
1345 1345
1346 1346
1347 1347 class BaseChangeset(BaseCommit):
1348 1348
1349 1349 __metaclass__ = BaseChangesetClass
1350 1350
1351 1351 def __new__(cls, *args, **kwargs):
1352 1352 warnings.warn(
1353 1353 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1354 1354 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1355 1355
1356 1356
1357 1357 class BaseInMemoryCommit(object):
1358 1358 """
1359 1359 Represents differences between repository's state (most recent head) and
1360 1360 changes made *in place*.
1361 1361
1362 1362 **Attributes**
1363 1363
1364 1364 ``repository``
1365 1365 repository object for this in-memory-commit
1366 1366
1367 1367 ``added``
1368 1368 list of ``FileNode`` objects marked as *added*
1369 1369
1370 1370 ``changed``
1371 1371 list of ``FileNode`` objects marked as *changed*
1372 1372
1373 1373 ``removed``
1374 1374 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1375 1375 *removed*
1376 1376
1377 1377 ``parents``
1378 1378 list of :class:`BaseCommit` instances representing parents of
1379 1379 in-memory commit. Should always be 2-element sequence.
1380 1380
1381 1381 """
1382 1382
1383 1383 def __init__(self, repository):
1384 1384 self.repository = repository
1385 1385 self.added = []
1386 1386 self.changed = []
1387 1387 self.removed = []
1388 1388 self.parents = []
1389 1389
1390 1390 def add(self, *filenodes):
1391 1391 """
1392 1392 Marks given ``FileNode`` objects as *to be committed*.
1393 1393
1394 1394 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1395 1395 latest commit
1396 1396 :raises ``NodeAlreadyAddedError``: if node with same path is already
1397 1397 marked as *added*
1398 1398 """
1399 1399 # Check if not already marked as *added* first
1400 1400 for node in filenodes:
1401 1401 if node.path in (n.path for n in self.added):
1402 1402 raise NodeAlreadyAddedError(
1403 1403 "Such FileNode %s is already marked for addition"
1404 1404 % node.path)
1405 1405 for node in filenodes:
1406 1406 self.added.append(node)
1407 1407
1408 1408 def change(self, *filenodes):
1409 1409 """
1410 1410 Marks given ``FileNode`` objects to be *changed* in next commit.
1411 1411
1412 1412 :raises ``EmptyRepositoryError``: if there are no commits yet
1413 1413 :raises ``NodeAlreadyExistsError``: if node with same path is already
1414 1414 marked to be *changed*
1415 1415 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1416 1416 marked to be *removed*
1417 1417 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1418 1418 commit
1419 1419 :raises ``NodeNotChangedError``: if node hasn't really been changed
1420 1420 """
1421 1421 for node in filenodes:
1422 1422 if node.path in (n.path for n in self.removed):
1423 1423 raise NodeAlreadyRemovedError(
1424 1424 "Node at %s is already marked as removed" % node.path)
1425 1425 try:
1426 1426 self.repository.get_commit()
1427 1427 except EmptyRepositoryError:
1428 1428 raise EmptyRepositoryError(
1429 1429 "Nothing to change - try to *add* new nodes rather than "
1430 1430 "changing them")
1431 1431 for node in filenodes:
1432 1432 if node.path in (n.path for n in self.changed):
1433 1433 raise NodeAlreadyChangedError(
1434 1434 "Node at '%s' is already marked as changed" % node.path)
1435 1435 self.changed.append(node)
1436 1436
1437 1437 def remove(self, *filenodes):
1438 1438 """
1439 1439 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1440 1440 *removed* in next commit.
1441 1441
1442 1442 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1443 1443 be *removed*
1444 1444 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1445 1445 be *changed*
1446 1446 """
1447 1447 for node in filenodes:
1448 1448 if node.path in (n.path for n in self.removed):
1449 1449 raise NodeAlreadyRemovedError(
1450 1450 "Node is already marked to for removal at %s" % node.path)
1451 1451 if node.path in (n.path for n in self.changed):
1452 1452 raise NodeAlreadyChangedError(
1453 1453 "Node is already marked to be changed at %s" % node.path)
1454 1454 # We only mark node as *removed* - real removal is done by
1455 1455 # commit method
1456 1456 self.removed.append(node)
1457 1457
1458 1458 def reset(self):
1459 1459 """
1460 1460 Resets this instance to initial state (cleans ``added``, ``changed``
1461 1461 and ``removed`` lists).
1462 1462 """
1463 1463 self.added = []
1464 1464 self.changed = []
1465 1465 self.removed = []
1466 1466 self.parents = []
1467 1467
1468 1468 def get_ipaths(self):
1469 1469 """
1470 1470 Returns generator of paths from nodes marked as added, changed or
1471 1471 removed.
1472 1472 """
1473 1473 for node in itertools.chain(self.added, self.changed, self.removed):
1474 1474 yield node.path
1475 1475
1476 1476 def get_paths(self):
1477 1477 """
1478 1478 Returns list of paths from nodes marked as added, changed or removed.
1479 1479 """
1480 1480 return list(self.get_ipaths())
1481 1481
1482 1482 def check_integrity(self, parents=None):
1483 1483 """
1484 1484 Checks in-memory commit's integrity. Also, sets parents if not
1485 1485 already set.
1486 1486
1487 1487 :raises CommitError: if any error occurs (i.e.
1488 1488 ``NodeDoesNotExistError``).
1489 1489 """
1490 1490 if not self.parents:
1491 1491 parents = parents or []
1492 1492 if len(parents) == 0:
1493 1493 try:
1494 1494 parents = [self.repository.get_commit(), None]
1495 1495 except EmptyRepositoryError:
1496 1496 parents = [None, None]
1497 1497 elif len(parents) == 1:
1498 1498 parents += [None]
1499 1499 self.parents = parents
1500 1500
1501 1501 # Local parents, only if not None
1502 1502 parents = [p for p in self.parents if p]
1503 1503
1504 1504 # Check nodes marked as added
1505 1505 for p in parents:
1506 1506 for node in self.added:
1507 1507 try:
1508 1508 p.get_node(node.path)
1509 1509 except NodeDoesNotExistError:
1510 1510 pass
1511 1511 else:
1512 1512 raise NodeAlreadyExistsError(
1513 1513 "Node `%s` already exists at %s" % (node.path, p))
1514 1514
1515 1515 # Check nodes marked as changed
1516 1516 missing = set(self.changed)
1517 1517 not_changed = set(self.changed)
1518 1518 if self.changed and not parents:
1519 1519 raise NodeDoesNotExistError(str(self.changed[0].path))
1520 1520 for p in parents:
1521 1521 for node in self.changed:
1522 1522 try:
1523 1523 old = p.get_node(node.path)
1524 1524 missing.remove(node)
1525 1525 # if content actually changed, remove node from not_changed
1526 1526 if old.content != node.content:
1527 1527 not_changed.remove(node)
1528 1528 except NodeDoesNotExistError:
1529 1529 pass
1530 1530 if self.changed and missing:
1531 1531 raise NodeDoesNotExistError(
1532 1532 "Node `%s` marked as modified but missing in parents: %s"
1533 1533 % (node.path, parents))
1534 1534
1535 1535 if self.changed and not_changed:
1536 1536 raise NodeNotChangedError(
1537 1537 "Node `%s` wasn't actually changed (parents: %s)"
1538 1538 % (not_changed.pop().path, parents))
1539 1539
1540 1540 # Check nodes marked as removed
1541 1541 if self.removed and not parents:
1542 1542 raise NodeDoesNotExistError(
1543 1543 "Cannot remove node at %s as there "
1544 1544 "were no parents specified" % self.removed[0].path)
1545 1545 really_removed = set()
1546 1546 for p in parents:
1547 1547 for node in self.removed:
1548 1548 try:
1549 1549 p.get_node(node.path)
1550 1550 really_removed.add(node)
1551 1551 except CommitError:
1552 1552 pass
1553 1553 not_removed = set(self.removed) - really_removed
1554 1554 if not_removed:
1555 1555 # TODO: johbo: This code branch does not seem to be covered
1556 1556 raise NodeDoesNotExistError(
1557 1557 "Cannot remove node at %s from "
1558 1558 "following parents: %s" % (not_removed, parents))
1559 1559
1560 1560 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1561 1561 """
1562 1562 Performs in-memory commit (doesn't check workdir in any way) and
1563 1563 returns newly created :class:`BaseCommit`. Updates repository's
1564 1564 attribute `commits`.
1565 1565
1566 1566 .. note::
1567 1567
1568 1568 While overriding this method each backend should call
1569 1569 ``self.check_integrity(parents)`` first.
1570 1570
1571 1571 :param message: message of the commit
1572 1572 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1573 1573 :param parents: single parent or sequence of parents from which commit
1574 1574 would be derived
1575 1575 :param date: ``datetime.datetime`` instance. Defaults to
1576 1576 ``datetime.datetime.now()``.
1577 1577 :param branch: branch name, as string. If none given, default backend's
1578 1578 branch would be used.
1579 1579
1580 1580 :raises ``CommitError``: if any error occurs while committing
1581 1581 """
1582 1582 raise NotImplementedError
1583 1583
1584 1584
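
A hedged sketch of the in-memory commit flow on a concrete backend; `FileNode` is assumed to live in `rhodecode.lib.vcs.nodes`, and the path, content and author are illustrative:

    from rhodecode.lib.vcs.nodes import FileNode

    imc = repo.in_memory_commit
    imc.add(FileNode('docs/readme.rst', content='hello'))
    new_commit = imc.commit(
        message=u'Add readme',
        author=u'Joe Doe <joe.doe@example.com>')
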
1585 1585 class BaseInMemoryChangesetClass(type):
1586 1586
1587 1587 def __instancecheck__(self, instance):
1588 1588 return isinstance(instance, BaseInMemoryCommit)
1589 1589
1590 1590
1591 1591 class BaseInMemoryChangeset(BaseInMemoryCommit):
1592 1592
1593 1593 __metaclass__ = BaseInMemoryChangesetClass
1594 1594
1595 1595 def __new__(cls, *args, **kwargs):
1596 1596 warnings.warn(
1597 1597 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1598 1598 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1599 1599
1600 1600
1601 1601 class EmptyCommit(BaseCommit):
1602 1602 """
1603 1603 A dummy empty commit. It's possible to pass a hash when creating
1604 1604 an EmptyCommit.
1605 1605 """
1606 1606
1607 1607 def __init__(
1608 1608 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1609 1609 message='', author='', date=None):
1610 1610 self._empty_commit_id = commit_id
1611 1611 # TODO: johbo: Solve idx parameter, default value does not make
1612 1612 # too much sense
1613 1613 self.idx = idx
1614 1614 self.message = message
1615 1615 self.author = author
1616 1616 self.date = date or datetime.datetime.fromtimestamp(0)
1617 1617 self.repository = repo
1618 1618 self.alias = alias
1619 1619
1620 1620 @LazyProperty
1621 1621 def raw_id(self):
1622 1622 """
1623 1623 Returns raw string identifying this commit, useful for web
1624 1624 representation.
1625 1625 """
1626 1626
1627 1627 return self._empty_commit_id
1628 1628
1629 1629 @LazyProperty
1630 1630 def branch(self):
1631 1631 if self.alias:
1632 1632 from rhodecode.lib.vcs.backends import get_backend
1633 1633 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1634 1634
1635 1635 @LazyProperty
1636 1636 def short_id(self):
1637 1637 return self.raw_id[:12]
1638 1638
1639 1639 @LazyProperty
1640 1640 def id(self):
1641 1641 return self.raw_id
1642 1642
1643 1643 def get_path_commit(self, path):
1644 1644 return self
1645 1645
1646 1646 def get_file_content(self, path):
1647 1647 return u''
1648 1648
1649 1649 def get_file_content_streamed(self, path):
1650 1650 yield self.get_file_content()
1651 1651
1652 1652 def get_file_size(self, path):
1653 1653 return 0
1654 1654
1655 1655
1656 1656 class EmptyChangesetClass(type):
1657 1657
1658 1658 def __instancecheck__(self, instance):
1659 1659 return isinstance(instance, EmptyCommit)
1660 1660
1661 1661
1662 1662 class EmptyChangeset(EmptyCommit):
1663 1663
1664 1664 __metaclass__ = EmptyChangesetClass
1665 1665
1666 1666 def __new__(cls, *args, **kwargs):
1667 1667 warnings.warn(
1668 1668 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1669 1669 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1670 1670
1671 1671 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1672 1672 alias=None, revision=-1, message='', author='', date=None):
1673 1673 if requested_revision is not None:
1674 1674 warnings.warn(
1675 1675 "Parameter requested_revision not supported anymore",
1676 1676 DeprecationWarning)
1677 1677 super(EmptyChangeset, self).__init__(
1678 1678 commit_id=cs, repo=repo, alias=alias, idx=revision,
1679 1679 message=message, author=author, date=date)
1680 1680
1681 1681 @property
1682 1682 def revision(self):
1683 1683 warnings.warn("Use idx instead", DeprecationWarning)
1684 1684 return self.idx
1685 1685
1686 1686 @revision.setter
1687 1687 def revision(self, value):
1688 1688 warnings.warn("Use idx instead", DeprecationWarning)
1689 1689 self.idx = value
1690 1690
1691 1691
1692 1692 class EmptyRepository(BaseRepository):
1693 1693 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1694 1694 pass
1695 1695
1696 1696 def get_diff(self, *args, **kwargs):
1697 1697 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1698 1698 return GitDiff('')
1699 1699
1700 1700
1701 1701 class CollectionGenerator(object):
1702 1702
1703 1703 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1704 1704 self.repo = repo
1705 1705 self.commit_ids = commit_ids
1706 1706 # TODO: (oliver) this isn't currently hooked up
1707 1707 self.collection_size = None
1708 1708 self.pre_load = pre_load
1709 1709 self.translate_tag = translate_tag
1710 1710
1711 1711 def __len__(self):
1712 1712 if self.collection_size is not None:
1713 1713 return self.collection_size
1714 1714 return len(self.commit_ids)
1715 1715
1716 1716 def __iter__(self):
1717 1717 for commit_id in self.commit_ids:
1718 1718 # TODO: johbo: Mercurial passes in commit indices or commit ids
1719 1719 yield self._commit_factory(commit_id)
1720 1720
1721 1721 def _commit_factory(self, commit_id):
1722 1722 """
1723 1723 Allows backends to override the way commits are generated.
1724 1724 """
1725 1725 return self.repo.get_commit(
1726 1726 commit_id=commit_id, pre_load=self.pre_load,
1727 1727 translate_tag=self.translate_tag)
1728 1728
1729 1729 def __getslice__(self, i, j):
1730 1730 """
1731 1731 Returns a new CollectionGenerator over the sliced commit ids.
1732 1732 """
1733 1733 commit_ids = self.commit_ids[i:j]
1734 1734 return self.__class__(
1735 1735 self.repo, commit_ids, pre_load=self.pre_load,
1736 1736 translate_tag=self.translate_tag)
1737 1737
1738 1738 def __repr__(self):
1739 1739 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1740 1740
1741 1741
1742 1742 class Config(object):
1743 1743 """
1744 1744 Represents the configuration for a repository.
1745 1745
1746 1746 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1747 1747 standard library. It implements only the needed subset.
1748 1748 """
1749 1749
1750 1750 def __init__(self):
1751 1751 self._values = {}
1752 1752
1753 1753 def copy(self):
1754 1754 clone = Config()
1755 1755 for section, values in self._values.items():
1756 1756 clone._values[section] = values.copy()
1757 1757 return clone
1758 1758
1759 1759 def __repr__(self):
1760 1760 return '<Config(%s sections) at %s>' % (
1761 1761 len(self._values), hex(id(self)))
1762 1762
1763 1763 def items(self, section):
1764 1764 return self._values.get(section, {}).iteritems()
1765 1765
1766 1766 def get(self, section, option):
1767 1767 return self._values.get(section, {}).get(option)
1768 1768
1769 1769 def set(self, section, option, value):
1770 1770 section_values = self._values.setdefault(section, {})
1771 1771 section_values[option] = value
1772 1772
1773 1773 def clear_section(self, section):
1774 1774 self._values[section] = {}
1775 1775
1776 1776 def serialize(self):
1777 1777 """
1778 1778 Creates a list of three tuples (section, key, value) representing
1779 1779 this config object.
1780 1780 """
1781 1781 items = []
1782 1782 for section in self._values:
1783 1783 for option, value in self._values[section].items():
1784 1784 items.append(
1785 1785 (safe_str(section), safe_str(option), safe_str(value)))
1786 1786 return items
1787 1787
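A short usage sketch of the Config object; it only exercises the methods defined above. Note that section order in serialize() follows dict iteration and is therefore not guaranteed.

config = Config()
config.set('core', 'bare', 'false')
config.set('user', 'name', 'Joe Doe')
clone = config.copy()                       # sections are copied, not shared
assert clone.get('user', 'name') == 'Joe Doe'
config.clear_section('user')
assert config.get('user', 'name') is None
assert ('core', 'bare', 'false') in config.serialize()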
1788 1788
1789 1789 class Diff(object):
1790 1790 """
1791 1791 Represents a diff result from a repository backend.
1792 1792
1793 1793 Subclasses have to provide a backend specific value for
1794 1794 :attr:`_header_re` and :attr:`_meta_re`.
1795 1795 """
1796 1796 _meta_re = None
1797 1797 _header_re = None
1798 1798
1799 1799 def __init__(self, raw_diff):
1800 1800 self.raw = raw_diff
1801 1801
1802 1802 def chunks(self):
1803 1803 """
1804 1804 Split the diff into chunks of separate "diff --git a/file b/file" sections.
1805 1805 To keep diffs consistent we must prepend with \n, and make sure
1806 1806 we can detect the last chunk, as it also has a special rule.
1807 1807 """
1808 1808
1809 1809 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1810 1810 header = diff_parts[0]
1811 1811
1812 1812 if self._meta_re:
1813 1813 match = self._meta_re.match(header)
1814 1814
1815 1815 chunks = diff_parts[1:]
1816 1816 total_chunks = len(chunks)
1817 1817
1818 1818 return (
1819 1819 DiffChunk(chunk, self, cur_chunk == total_chunks)
1820 1820 for cur_chunk, chunk in enumerate(chunks, start=1))
1821 1821
1822 1822
1823 1823 class DiffChunk(object):
1824 1824
1825 1825 def __init__(self, chunk, diff, last_chunk):
1826 1826 self._diff = diff
1827 1827
1828 1828 # since we split by \ndiff --git that part is lost from original diff
1829 1829 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1830 1830 if not last_chunk:
1831 1831 chunk += '\n'
1832 1832
1833 1833 match = self._diff._header_re.match(chunk)
1834 1834 self.header = match.groupdict()
1835 1835 self.diff = chunk[match.end():]
1836 1836 self.raw = chunk
1837 1837
1838 1838
1839 1839 class BasePathPermissionChecker(object):
1840 1840
1841 1841 @staticmethod
1842 1842 def create_from_patterns(includes, excludes):
1843 1843 if includes and '*' in includes and not excludes:
1844 1844 return AllPathPermissionChecker()
1845 1845 elif excludes and '*' in excludes:
1846 1846 return NonePathPermissionChecker()
1847 1847 else:
1848 1848 return PatternPathPermissionChecker(includes, excludes)
1849 1849
1850 1850 @property
1851 1851 def has_full_access(self):
1852 1852 raise NotImplementedError()
1853 1853
1854 1854 def has_access(self, path):
1855 1855 raise NotImplementedError()
1856 1856
1857 1857
1858 1858 class AllPathPermissionChecker(BasePathPermissionChecker):
1859 1859
1860 1860 @property
1861 1861 def has_full_access(self):
1862 1862 return True
1863 1863
1864 1864 def has_access(self, path):
1865 1865 return True
1866 1866
1867 1867
1868 1868 class NonePathPermissionChecker(BasePathPermissionChecker):
1869 1869
1870 1870 @property
1871 1871 def has_full_access(self):
1872 1872 return False
1873 1873
1874 1874 def has_access(self, path):
1875 1875 return False
1876 1876
1877 1877
1878 1878 class PatternPathPermissionChecker(BasePathPermissionChecker):
1879 1879
1880 1880 def __init__(self, includes, excludes):
1881 1881 self.includes = includes
1882 1882 self.excludes = excludes
1883 1883 self.includes_re = [] if not includes else [
1884 1884 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1885 1885 self.excludes_re = [] if not excludes else [
1886 1886 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1887 1887
1888 1888 @property
1889 1889 def has_full_access(self):
1890 1890 return '*' in self.includes and not self.excludes
1891 1891
1892 1892 def has_access(self, path):
1893 1893 for regex in self.excludes_re:
1894 1894 if regex.match(path):
1895 1895 return False
1896 1896 for regex in self.includes_re:
1897 1897 if regex.match(path):
1898 1898 return True
1899 1899 return False
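To make the checker semantics concrete, here is a small example using only the classes above; patterns are fnmatch-style, and excludes take precedence over includes.

checker = BasePathPermissionChecker.create_from_patterns(
    includes=['docs/*', 'src/*.py'], excludes=['src/secret.py'])
assert checker.has_access('docs/index.rst')      # matches 'docs/*'
assert not checker.has_access('src/secret.py')   # explicitly excluded
assert not checker.has_full_access               # includes lack a bare '*'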
@@ -1,1004 +1,1017 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, path as vcspath
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference)
40 40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 CommitDoesNotExistError, EmptyRepositoryError,
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46 46
47 47
48 48 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class GitRepository(BaseRepository):
54 54 """
55 55 Git repository backend.
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'master'
58 58
59 59 contact = BaseRepository.DEFAULT_CONTACT
60 60
61 61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 62 do_workspace_checkout=False, with_wire=None, bare=False):
63 63
64 64 self.path = safe_str(os.path.abspath(repo_path))
65 65 self.config = config if config else self.get_default_config()
66 66 self.with_wire = with_wire or {"cache": False} # default should not use cache
67 67
68 68 self._init_repo(create, src_url, do_workspace_checkout, bare)
69 69
70 70 # caches
71 71 self._commit_ids = {}
72 72
73 73 @LazyProperty
74 74 def _remote(self):
75 75 repo_id = self.path
76 76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77 77
78 78 @LazyProperty
79 79 def bare(self):
80 80 return self._remote.bare()
81 81
82 82 @LazyProperty
83 83 def head(self):
84 84 return self._remote.head()
85 85
86 86 @CachedProperty
87 87 def commit_ids(self):
88 88 """
89 89 Returns list of commit ids, in ascending order. Being lazy
90 90 attribute allows external tools to inject commit ids from cache.
91 91 """
92 92 commit_ids = self._get_all_commit_ids()
93 93 self._rebuild_cache(commit_ids)
94 94 return commit_ids
95 95
96 96 def _rebuild_cache(self, commit_ids):
97 97 self._commit_ids = dict((commit_id, index)
98 98 for index, commit_id in enumerate(commit_ids))
99 99
100 100 def run_git_command(self, cmd, **opts):
101 101 """
102 102 Runs given ``cmd`` as git command and returns tuple
103 103 (stdout, stderr).
104 104
105 105 :param cmd: git command to be executed
106 106 :param opts: env options to pass into Subprocess command
107 107 """
108 108 if not isinstance(cmd, list):
109 109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110 110
111 111 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 112 out, err = self._remote.run_git_command(cmd, **opts)
113 113 if err and not skip_stderr_log:
114 114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 115 return out, err
116 116
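A hedged usage sketch of run_git_command (the `repo` instance is hypothetical); note that `cmd` must be a list of arguments, never a shell string, and `skip_stderr_log` is consumed here rather than passed to the subprocess.

stdout, stderr = repo.run_git_command(
    ['rev-parse', '--abbrev-ref', 'HEAD'], skip_stderr_log=True)
print(stdout.strip())   # e.g. 'master'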
117 117 @staticmethod
118 118 def check_url(url, config):
119 119 """
120 120 Checks the given url and tries to verify that it's a valid
121 121 link. Sometimes it may happen that git issues a basic
122 122 auth request, which can cause the whole API to hang when used from python
123 123 or other external calls.
124 124
125 125 On failures it raises urllib2.HTTPError; the exception is also thrown
126 126 when the return code is not 200.
127 127 """
128 128 # check first if it's a local path, and not an url
129 129 if os.path.isdir(url) or url.startswith('file:'):
130 130 return True
131 131
132 132 if '+' in url.split('://', 1)[0]:
133 133 url = url.split('+', 1)[1]
134 134
135 135 # Request the _remote to verify the url
136 136 return connection.Git.check_url(url, config.serialize())
137 137
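The scheme handling above can be illustrated standalone; a 'git+' style prefix is stripped before the remote check (the url value here is just an example):

url = 'git+https://example.com/repo.git'
if '+' in url.split('://', 1)[0]:
    url = url.split('+', 1)[1]
assert url == 'https://example.com/repo.git'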
138 138 @staticmethod
139 139 def is_valid_repository(path):
140 140 if os.path.isdir(os.path.join(path, '.git')):
141 141 return True
142 142 # check case of bare repository
143 143 try:
144 144 GitRepository(path)
145 145 return True
146 146 except VCSError:
147 147 pass
148 148 return False
149 149
150 150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
151 151 bare=False):
152 152 if create and os.path.exists(self.path):
153 153 raise RepositoryError(
154 154 "Cannot create repository at %s, location already exist"
155 155 % self.path)
156 156
157 157 if bare and do_workspace_checkout:
158 158 raise RepositoryError("Cannot update a bare repository")
159 159 try:
160 160
161 161 if src_url:
162 162 # check URL before any actions
163 163 GitRepository.check_url(src_url, self.config)
164 164
165 165 if create:
166 166 os.makedirs(self.path, mode=0o755)
167 167
168 168 if bare:
169 169 self._remote.init_bare()
170 170 else:
171 171 self._remote.init()
172 172
173 173 if src_url and bare:
174 174 # a bare repository only allows a fetch; a checkout is not allowed
175 175 self.fetch(src_url, commit_ids=None)
176 176 elif src_url:
177 177 self.pull(src_url, commit_ids=None,
178 178 update_after=do_workspace_checkout)
179 179
180 180 else:
181 181 if not self._remote.assert_correct_path():
182 182 raise RepositoryError(
183 183 'Path "%s" does not contain a Git repository' %
184 184 (self.path,))
185 185
186 186 # TODO: johbo: check if we have to translate the OSError here
187 187 except OSError as err:
188 188 raise RepositoryError(err)
189 189
190 190 def _get_all_commit_ids(self):
191 191 return self._remote.get_all_commit_ids()
192 192
193 193 def _get_commit_ids(self, filters=None):
194 194 # we must check that this repo is not empty, since the later command
195 195 # fails if it is; it's also cheaper to ask than to handle the subprocess
196 196 # errors
197 197
198 198 head = self._remote.head(show_exc=False)
199 199
200 200 if not head:
201 201 return []
202 202
203 203 rev_filter = ['--branches', '--tags']
204 204 extra_filter = []
205 205
206 206 if filters:
207 207 if filters.get('since'):
208 208 extra_filter.append('--since=%s' % (filters['since']))
209 209 if filters.get('until'):
210 210 extra_filter.append('--until=%s' % (filters['until']))
211 211 if filters.get('branch_name'):
212 212 rev_filter = []
213 213 extra_filter.append(filters['branch_name'])
214 214 rev_filter.extend(extra_filter)
215 215
216 216 # if filters.get('start') or filters.get('end'):
217 217 # # skip is offset, max-count is limit
218 218 # if filters.get('start'):
219 219 # extra_filter += ' --skip=%s' % filters['start']
220 220 # if filters.get('end'):
221 221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
222 222
223 223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
224 224 try:
225 225 output, __ = self.run_git_command(cmd)
226 226 except RepositoryError:
227 227 # Can be raised for empty repositories
228 228 return []
229 229 return output.splitlines()
230 230
231 231 def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
232 232 def is_null(value):
233 233 return len(value) == commit_id_or_idx.count('0')
234 234
235 235 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
236 236 return self.commit_ids[-1]
237 237 commit_missing_err = "Commit {} does not exist for `{}`".format(
238 238 *map(safe_str, [commit_id_or_idx, self.name]))
239 239
240 240 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
241 241 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
242 242 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
243 243 try:
244 244 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
245 245 except Exception:
246 246 raise CommitDoesNotExistError(commit_missing_err)
247 247
248 248 elif is_bstr:
249 249 # Need to call remote to translate id for tagging scenario
250 250 try:
251 251 remote_data = self._remote.get_object(commit_id_or_idx)
252 252 commit_id_or_idx = remote_data["commit_id"]
253 253 except (CommitDoesNotExistError,):
254 254 raise CommitDoesNotExistError(commit_missing_err)
255 255
256 256 # Ensure we return full id
257 257 if not SHA_PATTERN.match(str(commit_id_or_idx)):
258 258 raise CommitDoesNotExistError(
259 259 "Given commit id %s not recognized" % commit_id_or_idx)
260 260 return commit_id_or_idx
261 261
262 262 def get_hook_location(self):
263 263 """
264 264 returns absolute path to location where hooks are stored
265 265 """
266 266 loc = os.path.join(self.path, 'hooks')
267 267 if not self.bare:
268 268 loc = os.path.join(self.path, '.git', 'hooks')
269 269 return loc
270 270
271 271 @LazyProperty
272 272 def last_change(self):
273 273 """
274 274 Returns last change made on this repository as
275 275 `datetime.datetime` object.
276 276 """
277 277 try:
278 278 return self.get_commit().date
279 279 except RepositoryError:
280 280 tzoffset = makedate()[1]
281 281 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
282 282
283 283 def _get_fs_mtime(self):
284 284 idx_loc = '' if self.bare else '.git'
285 285 # fallback to filesystem
286 286 in_path = os.path.join(self.path, idx_loc, "index")
287 287 he_path = os.path.join(self.path, idx_loc, "HEAD")
288 288 if os.path.exists(in_path):
289 289 return os.stat(in_path).st_mtime
290 290 else:
291 291 return os.stat(he_path).st_mtime
292 292
293 293 @LazyProperty
294 294 def description(self):
295 295 description = self._remote.get_description()
296 296 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
297 297
298 298 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
299 299 if self.is_empty():
300 300 return OrderedDict()
301 301
302 302 result = []
303 303 for ref, sha in self._refs.iteritems():
304 304 if ref.startswith(prefix):
305 305 ref_name = ref
306 306 if strip_prefix:
307 307 ref_name = ref[len(prefix):]
308 308 result.append((safe_unicode(ref_name), sha))
309 309
310 310 def get_name(entry):
311 311 return entry[0]
312 312
313 313 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
314 314
315 315 def _get_branches(self):
316 316 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
317 317
318 318 @CachedProperty
319 319 def branches(self):
320 320 return self._get_branches()
321 321
322 322 @CachedProperty
323 323 def branches_closed(self):
324 324 return {}
325 325
326 326 @CachedProperty
327 327 def bookmarks(self):
328 328 return {}
329 329
330 330 @CachedProperty
331 331 def branches_all(self):
332 332 all_branches = {}
333 333 all_branches.update(self.branches)
334 334 all_branches.update(self.branches_closed)
335 335 return all_branches
336 336
337 337 @CachedProperty
338 338 def tags(self):
339 339 return self._get_tags()
340 340
341 341 def _get_tags(self):
342 342 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
343 343
344 344 def tag(self, name, user, commit_id=None, message=None, date=None,
345 345 **kwargs):
346 346 # TODO: fix this method to apply annotated tags correct with message
347 347 """
348 348 Creates and returns a tag for the given ``commit_id``.
349 349
350 350 :param name: name for new tag
351 351 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
352 352 :param commit_id: commit id for which new tag would be created
353 353 :param message: message of the tag's commit
354 354 :param date: date of tag's commit
355 355
356 356 :raises TagAlreadyExistError: if tag with same name already exists
357 357 """
358 358 if name in self.tags:
359 359 raise TagAlreadyExistError("Tag %s already exists" % name)
360 360 commit = self.get_commit(commit_id=commit_id)
361 361 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
362 362
363 363 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
364 364
365 365 self._invalidate_prop_cache('tags')
366 366 self._invalidate_prop_cache('_refs')
367 367
368 368 return commit
369 369
370 370 def remove_tag(self, name, user, message=None, date=None):
371 371 """
372 372 Removes tag with the given ``name``.
373 373
374 374 :param name: name of the tag to be removed
375 375 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
376 376 :param message: message of the tag's removal commit
377 377 :param date: date of tag's removal commit
378 378
379 379 :raises TagDoesNotExistError: if tag with given name does not exist
380 380 """
381 381 if name not in self.tags:
382 382 raise TagDoesNotExistError("Tag %s does not exist" % name)
383 383
384 384 self._remote.tag_remove(name)
385 385 self._invalidate_prop_cache('tags')
386 386 self._invalidate_prop_cache('_refs')
387 387
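A hedged sketch of the tag API above on a hypothetical `repo` GitRepository instance; tag() resolves the commit, writes refs/tags/<name>, and invalidates the cached properties:

commit = repo.tag('v1.0', user='Joe Doe <joe.doe@example.com>')
assert 'v1.0' in repo.tags
repo.remove_tag('v1.0', user='Joe Doe <joe.doe@example.com>')
assert 'v1.0' not in repo.tags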
388 388 def _get_refs(self):
389 389 return self._remote.get_refs()
390 390
391 391 @CachedProperty
392 392 def _refs(self):
393 393 return self._get_refs()
394 394
395 395 @property
396 396 def _ref_tree(self):
397 397 node = tree = {}
398 398 for ref, sha in self._refs.iteritems():
399 399 path = ref.split('/')
400 400 for bit in path[:-1]:
401 401 node = node.setdefault(bit, {})
402 402 node[path[-1]] = sha
403 403 node = tree
404 404 return tree
405 405
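The nesting done by _ref_tree is easier to read on a plain dict; this standalone snippet mirrors the loop above (shas are made up for illustration):

refs = {'refs/heads/master': 'a' * 40, 'refs/tags/v1.0': 'b' * 40}
node = tree = {}
for ref, sha in refs.items():
    path = ref.split('/')
    for bit in path[:-1]:
        node = node.setdefault(bit, {})
    node[path[-1]] = sha
    node = tree
assert tree == {'refs': {'heads': {'master': 'a' * 40},
                         'tags': {'v1.0': 'b' * 40}}}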
406 406 def get_remote_ref(self, ref_name):
407 407 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
408 408 try:
409 409 return self._refs[ref_key]
410 410 except Exception:
411 411 return
412 412
413 413 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
414 414 """
415 415 Returns `GitCommit` object representing commit from git repository
416 416 at the given `commit_id` or head (most recent commit) if None given.
417 417 """
418 418 if self.is_empty():
419 419 raise EmptyRepositoryError("There are no commits yet")
420 420
421 421 if commit_id is not None:
422 422 self._validate_commit_id(commit_id)
423 423 try:
424 424 # we have cached idx, use it without contacting the remote
425 425 idx = self._commit_ids[commit_id]
426 426 return GitCommit(self, commit_id, idx, pre_load=pre_load)
427 427 except KeyError:
428 428 pass
429 429
430 430 elif commit_idx is not None:
431 431 self._validate_commit_idx(commit_idx)
432 432 try:
433 433 _commit_id = self.commit_ids[commit_idx]
434 434 if commit_idx < 0:
435 435 commit_idx = self.commit_ids.index(_commit_id)
436 436 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
437 437 except IndexError:
438 438 commit_id = commit_idx
439 439 else:
440 440 commit_id = "tip"
441 441
442 442 if translate_tag:
443 443 commit_id = self._lookup_commit(commit_id)
444 444
445 445 try:
446 446 idx = self._commit_ids[commit_id]
447 447 except KeyError:
448 448 idx = -1
449 449
450 450 return GitCommit(self, commit_id, idx, pre_load=pre_load)
451 451
452 452 def get_commits(
453 453 self, start_id=None, end_id=None, start_date=None, end_date=None,
454 454 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
455 455 """
456 456 Returns generator of `GitCommit` objects from start to end (both
457 457 are inclusive), in ascending date order.
458 458
459 459 :param start_id: None, str(commit_id)
460 460 :param end_id: None, str(commit_id)
461 461 :param start_date: if specified, commits with commit date less than
462 462 ``start_date`` would be filtered out from returned set
463 463 :param end_date: if specified, commits with commit date greater than
464 464 ``end_date`` would be filtered out from returned set
465 465 :param branch_name: if specified, commits not reachable from given
466 466 branch would be filtered out from returned set
467 467 :param show_hidden: Show hidden commits such as obsolete or hidden from
468 468 Mercurial evolve
469 469 :raise BranchDoesNotExistError: If given `branch_name` does not
470 470 exist.
471 471 :raise CommitDoesNotExistError: If commits for given `start` or
472 472 `end` could not be found.
473 473
474 474 """
475 475 if self.is_empty():
476 476 raise EmptyRepositoryError("There are no commits yet")
477 477
478 478 self._validate_branch_name(branch_name)
479 479
480 480 if start_id is not None:
481 481 self._validate_commit_id(start_id)
482 482 if end_id is not None:
483 483 self._validate_commit_id(end_id)
484 484
485 485 start_raw_id = self._lookup_commit(start_id)
486 486 start_pos = self._commit_ids[start_raw_id] if start_id else None
487 487 end_raw_id = self._lookup_commit(end_id)
488 488 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
489 489
490 490 if None not in [start_id, end_id] and start_pos > end_pos:
491 491 raise RepositoryError(
492 492 "Start commit '%s' cannot be after end commit '%s'" %
493 493 (start_id, end_id))
494 494
495 495 if end_pos is not None:
496 496 end_pos += 1
497 497
498 498 filter_ = []
499 499 if branch_name:
500 500 filter_.append({'branch_name': branch_name})
501 501 if start_date and not end_date:
502 502 filter_.append({'since': start_date})
503 503 if end_date and not start_date:
504 504 filter_.append({'until': end_date})
505 505 if start_date and end_date:
506 506 filter_.append({'since': start_date})
507 507 filter_.append({'until': end_date})
508 508
509 509 # if start_pos or end_pos:
510 510 # filter_.append({'start': start_pos})
511 511 # filter_.append({'end': end_pos})
512 512
513 513 if filter_:
514 514 revfilters = {
515 515 'branch_name': branch_name,
516 516 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
517 517 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
518 518 'start': start_pos,
519 519 'end': end_pos,
520 520 }
521 521 commit_ids = self._get_commit_ids(filters=revfilters)
522 522
523 523 else:
524 524 commit_ids = self.commit_ids
525 525
526 526 if start_pos or end_pos:
527 527 commit_ids = commit_ids[start_pos: end_pos]
528 528
529 529 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
530 530 translate_tag=translate_tags)
531 531
532 532 def get_diff(
533 533 self, commit1, commit2, path='', ignore_whitespace=False,
534 534 context=3, path1=None):
535 535 """
536 536 Returns (git like) *diff*, as plain text. Shows changes introduced by
537 537 ``commit2`` since ``commit1``.
538 538
539 539 :param commit1: Entry point from which diff is shown. Can be
540 540 ``self.EMPTY_COMMIT`` - in this case, patch showing all
541 541 the changes since empty state of the repository until ``commit2``
542 542 :param commit2: Until which commit changes should be shown.
543 543 :param ignore_whitespace: If set to ``True``, would not show whitespace
544 544 changes. Defaults to ``False``.
545 545 :param context: How many lines before/after changed lines should be
546 546 shown. Defaults to ``3``.
547 547 """
548 548 self._validate_diff_commits(commit1, commit2)
549 549 if path1 is not None and path1 != path:
550 550 raise ValueError("Diff of two different paths not supported.")
551 551
552 552 if path:
553 553 file_filter = path
554 554 else:
555 555 file_filter = None
556 556
557 557 diff = self._remote.diff(
558 558 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
559 559 opt_ignorews=ignore_whitespace,
560 560 context=context)
561 561 return GitDiff(diff)
562 562
563 563 def strip(self, commit_id, branch_name):
564 564 commit = self.get_commit(commit_id=commit_id)
565 565 if commit.merge:
566 566 raise Exception('Cannot reset to merge commit')
567 567
568 568 # parent is going to be the new head now
569 569 commit = commit.parents[0]
570 570 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
571 571
572 572 # clear cached properties
573 573 self._invalidate_prop_cache('commit_ids')
574 574 self._invalidate_prop_cache('_refs')
575 575 self._invalidate_prop_cache('branches')
576 576
577 577 return len(self.commit_ids)
578 578
579 579 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
580 580 if commit_id1 == commit_id2:
581 581 return commit_id1
582 582
583 583 if self != repo2:
584 584 commits = self._remote.get_missing_revs(
585 585 commit_id1, commit_id2, repo2.path)
586 586 if commits:
587 587 commit = repo2.get_commit(commits[-1])
588 588 if commit.parents:
589 589 ancestor_id = commit.parents[0].raw_id
590 590 else:
591 591 ancestor_id = None
592 592 else:
593 593 # no commits from other repo, ancestor_id is the commit_id2
594 594 ancestor_id = commit_id2
595 595 else:
596 596 output, __ = self.run_git_command(
597 597 ['merge-base', commit_id1, commit_id2])
598 598 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
599 599
600 600 return ancestor_id
601 601
602 602 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
603 603 repo1 = self
604 604 ancestor_id = None
605 605
606 606 if commit_id1 == commit_id2:
607 607 commits = []
608 608 elif repo1 != repo2:
609 609 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
610 610 repo2.path)
611 611 commits = [
612 612 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
613 613 for commit_id in reversed(missing_ids)]
614 614 else:
615 615 output, __ = repo1.run_git_command(
616 616 ['log', '--reverse', '--pretty=format: %H', '-s',
617 617 '%s..%s' % (commit_id1, commit_id2)])
618 618 commits = [
619 619 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
620 620 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
621 621
622 622 return commits
623 623
624 624 @LazyProperty
625 625 def in_memory_commit(self):
626 626 """
627 627 Returns ``GitInMemoryCommit`` object for this repository.
628 628 """
629 629 return GitInMemoryCommit(self)
630 630
631 631 def pull(self, url, commit_ids=None, update_after=False):
632 632 """
633 633 Pull changes from external location. In GIT, pull differs
634 634 from fetch since it also does a checkout.
635 635
636 636 :param commit_ids: Optional. Can be set to a list of commit ids
637 637 which shall be pulled from the other repository.
638 638 """
639 639 refs = None
640 640 if commit_ids is not None:
641 641 remote_refs = self._remote.get_remote_refs(url)
642 642 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
643 643 self._remote.pull(url, refs=refs, update_after=update_after)
644 644 self._remote.invalidate_vcs_cache()
645 645
646 646 def fetch(self, url, commit_ids=None):
647 647 """
648 648 Fetch all git objects from external location.
649 649 """
650 650 self._remote.sync_fetch(url, refs=commit_ids)
651 651 self._remote.invalidate_vcs_cache()
652 652
653 653 def push(self, url):
654 654 refs = None
655 655 self._remote.sync_push(url, refs=refs)
656 656
657 657 def set_refs(self, ref_name, commit_id):
658 658 self._remote.set_refs(ref_name, commit_id)
659 659 self._invalidate_prop_cache('_refs')
660 660
661 661 def remove_ref(self, ref_name):
662 662 self._remote.remove_ref(ref_name)
663 663 self._invalidate_prop_cache('_refs')
664 664
665 665 def _update_server_info(self):
666 666 """
667 667 runs git's update-server-info command in this repo instance
668 668 """
669 669 self._remote.update_server_info()
670 670
671 671 def _current_branch(self):
672 672 """
673 673 Return the name of the current branch.
674 674
675 675 It only works for non-bare repositories (i.e. repositories with a
676 676 working copy)
677 677 """
678 678 if self.bare:
679 679 raise RepositoryError('Bare git repos do not have active branches')
680 680
681 681 if self.is_empty():
682 682 return None
683 683
684 684 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
685 685 return stdout.strip()
686 686
687 687 def _checkout(self, branch_name, create=False, force=False):
688 688 """
689 689 Checkout a branch in the working directory.
690 690
691 691 It tries to create the branch if create is True, failing if the branch
692 692 already exists.
693 693
694 694 It only works for non-bare repositories (i.e. repositories with a
695 695 working copy)
696 696 """
697 697 if self.bare:
698 698 raise RepositoryError('Cannot checkout branches in a bare git repo')
699 699
700 700 cmd = ['checkout']
701 701 if force:
702 702 cmd.append('-f')
703 703 if create:
704 704 cmd.append('-b')
705 705 cmd.append(branch_name)
706 706 self.run_git_command(cmd, fail_on_stderr=False)
707 707
708 708 def _create_branch(self, branch_name, commit_id):
709 709 """
710 710 creates a branch in a GIT repo
711 711 """
712 712 self._remote.create_branch(branch_name, commit_id)
713 713
714 714 def _identify(self):
715 715 """
716 716 Return the current state of the working directory.
717 717 """
718 718 if self.bare:
719 719 raise RepositoryError('Bare git repos do not have active branches')
720 720
721 721 if self.is_empty():
722 722 return None
723 723
724 724 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
725 725 return stdout.strip()
726 726
727 727 def _local_clone(self, clone_path, branch_name, source_branch=None):
728 728 """
729 729 Create a local clone of the current repo.
730 730 """
731 731 # N.B.(skreft): the --branch option is required as otherwise the shallow
732 732 # clone will only fetch the active branch.
733 733 cmd = ['clone', '--branch', branch_name,
734 734 self.path, os.path.abspath(clone_path)]
735 735
736 736 self.run_git_command(cmd, fail_on_stderr=False)
737 737
738 738 # if we get the different source branch, make sure we also fetch it for
739 739 # merge conditions
740 740 if source_branch and source_branch != branch_name:
741 741 # check if the ref exists.
742 742 shadow_repo = GitRepository(os.path.abspath(clone_path))
743 743 if shadow_repo.get_remote_ref(source_branch):
744 744 cmd = ['fetch', self.path, source_branch]
745 745 self.run_git_command(cmd, fail_on_stderr=False)
746 746
747 747 def _local_fetch(self, repository_path, branch_name, use_origin=False):
748 748 """
749 749 Fetch a branch from a local repository.
750 750 """
751 751 repository_path = os.path.abspath(repository_path)
752 752 if repository_path == self.path:
753 753 raise ValueError('Cannot fetch from the same repository')
754 754
755 755 if use_origin:
756 756 branch_name = '+{branch}:refs/heads/{branch}'.format(
757 757 branch=branch_name)
758 758
759 759 cmd = ['fetch', '--no-tags', '--update-head-ok',
760 760 repository_path, branch_name]
761 761 self.run_git_command(cmd, fail_on_stderr=False)
762 762
763 763 def _local_reset(self, branch_name):
764 764 branch_name = '{}'.format(branch_name)
765 765 cmd = ['reset', '--hard', branch_name, '--']
766 766 self.run_git_command(cmd, fail_on_stderr=False)
767 767
768 768 def _last_fetch_heads(self):
769 769 """
770 770 Return the last fetched heads that need merging.
771 771
772 772 The algorithm is defined at
773 773 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
774 774 """
775 775 if not self.bare:
776 776 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
777 777 else:
778 778 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
779 779
780 780 heads = []
781 781 with open(fetch_heads_path) as f:
782 782 for line in f:
783 783 if ' not-for-merge ' in line:
784 784 continue
785 785 line = re.sub('\t.*', '', line, flags=re.DOTALL)
786 786 heads.append(line)
787 787
788 788 return heads
789 789
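The FETCH_HEAD parsing above, demonstrated on hypothetical file content shaped the way this code expects it (one head line, and one line carrying the ' not-for-merge ' marker, which gets skipped):

import re

sample = ("1111aaaa\tbranch 'master' of /src/repo\n"
          "2222bbbb\t not-for-merge \tbranch 'dev' of /src/repo\n")
heads = []
for line in sample.splitlines(True):
    if ' not-for-merge ' in line:
        continue
    heads.append(re.sub('\t.*', '', line, flags=re.DOTALL))
assert heads == ['1111aaaa']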
790 790 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
791 791 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
792 792
793 793 def _local_pull(self, repository_path, branch_name, ff_only=True):
794 794 """
795 795 Pull a branch from a local repository.
796 796 """
797 797 if self.bare:
798 798 raise RepositoryError('Cannot pull into a bare git repository')
799 799 # N.B.(skreft): The --ff-only option is to make sure this is a
800 800 # fast-forward (i.e., we are only pulling new changes and there are no
801 801 # conflicts with our current branch)
802 802 # Additionally, that option needs to go before --no-tags, otherwise git
803 803 # pull complains about it being an unknown flag.
804 804 cmd = ['pull']
805 805 if ff_only:
806 806 cmd.append('--ff-only')
807 807 cmd.extend(['--no-tags', repository_path, branch_name])
808 808 self.run_git_command(cmd, fail_on_stderr=False)
809 809
810 810 def _local_merge(self, merge_message, user_name, user_email, heads):
811 811 """
812 812 Merge the given head into the checked out branch.
813 813
814 814 It will force a merge commit.
815 815
816 816 Currently it raises an error if the repo is empty, as it is not possible
817 817 to create a merge commit in an empty repo.
818 818
819 819 :param merge_message: The message to use for the merge commit.
820 820 :param heads: the heads to merge.
821 821 """
822 822 if self.bare:
823 823 raise RepositoryError('Cannot merge into a bare git repository')
824 824
825 825 if not heads:
826 826 return
827 827
828 828 if self.is_empty():
829 # TODO(skreft): do somehting more robust in this case.
829 # TODO(skreft): do something more robust in this case.
830 830 raise RepositoryError(
831 831 'Do not know how to merge into empty repositories yet')
832 unresolved = None
832 833
833 834 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
834 835 # commit message. We also specify the user who is doing the merge.
835 836 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
836 837 '-c', 'user.email=%s' % safe_str(user_email),
837 838 'merge', '--no-ff', '-m', safe_str(merge_message)]
838 839 cmd.extend(heads)
839 840 try:
840 841 output = self.run_git_command(cmd, fail_on_stderr=False)
841 842 except RepositoryError:
843 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
844 fail_on_stderr=False)[0].splitlines()
845 # NOTE(marcink): we add U notation for consistency with the HG backend output
846 unresolved = ['U {}'.format(f) for f in files]
847
842 848 # Cleanup any merge leftovers
843 849 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
844 raise
850
851 if unresolved:
852 raise UnresolvedFilesInRepo(unresolved)
853 else:
854 raise
845 855
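With the change above, a conflicted merge now surfaces the conflicting paths instead of a bare RepositoryError. A hedged sketch of consuming it (shadow_repo and the merged head id are hypothetical); each entry carries the 'U ' prefix for consistency with the HG backend:

try:
    shadow_repo._local_merge(
        u'merge message', u'Joe Doe', u'joe.doe@example.com',
        ['f' * 40])  # hypothetical head to merge
except UnresolvedFilesInRepo as e:
    conflicted = e.args[0]   # e.g. ['U setup.py', 'U docs/index.rst']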
846 856 def _local_push(
847 857 self, source_branch, repository_path, target_branch,
848 858 enable_hooks=False, rc_scm_data=None):
849 859 """
850 860 Push the source_branch to the given repository and target_branch.
851 861
852 862 Currently, if the target_branch is not master and the target repo is
853 863 empty, the push will work, but then GitRepository won't be able to find
854 864 the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
855 865 pointing to master, which does not exist).
856 866
857 867 It does not run the hooks in the target repo.
858 868 """
859 869 # TODO(skreft): deal with the case in which the target repo is empty,
860 870 # and the target_branch is not master.
861 871 target_repo = GitRepository(repository_path)
862 872 if (not target_repo.bare and
863 873 target_repo._current_branch() == target_branch):
864 874 # Git prevents pushing to the checked out branch, so simulate it by
865 875 # pulling into the target repository.
866 876 target_repo._local_pull(self.path, source_branch)
867 877 else:
868 878 cmd = ['push', os.path.abspath(repository_path),
869 879 '%s:%s' % (source_branch, target_branch)]
870 880 gitenv = {}
871 881 if rc_scm_data:
872 882 gitenv.update({'RC_SCM_DATA': rc_scm_data})
873 883
874 884 if not enable_hooks:
875 885 gitenv['RC_SKIP_HOOKS'] = '1'
876 886 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
877 887
878 888 def _get_new_pr_branch(self, source_branch, target_branch):
879 889 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
880 890 pr_branches = []
881 891 for branch in self.branches:
882 892 if branch.startswith(prefix):
883 893 pr_branches.append(int(branch[len(prefix):]))
884 894
885 895 if not pr_branches:
886 896 branch_id = 0
887 897 else:
888 898 branch_id = max(pr_branches) + 1
889 899
890 900 return '%s%d' % (prefix, branch_id)
891 901
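The PR branch naming above can be mirrored standalone; given existing branches with the same prefix, the next numeric suffix is used, otherwise numbering starts at 0:

prefix = 'pr_%s-%s_' % ('feature', 'master')           # 'pr_feature-master_'
branches = ['pr_feature-master_0', 'pr_feature-master_3', 'fix-1']
pr_branches = [int(b[len(prefix):]) for b in branches if b.startswith(prefix)]
branch_id = max(pr_branches) + 1 if pr_branches else 0
assert '%s%d' % (prefix, branch_id) == 'pr_feature-master_4'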
892 902 def _maybe_prepare_merge_workspace(
893 903 self, repo_id, workspace_id, target_ref, source_ref):
894 904 shadow_repository_path = self._get_shadow_repository_path(
895 905 self.path, repo_id, workspace_id)
896 906 if not os.path.exists(shadow_repository_path):
897 907 self._local_clone(
898 908 shadow_repository_path, target_ref.name, source_ref.name)
899 909 log.debug('Prepared %s shadow repository in %s',
900 910 self.alias, shadow_repository_path)
901 911
902 912 return shadow_repository_path
903 913
904 914 def _merge_repo(self, repo_id, workspace_id, target_ref,
905 915 source_repo, source_ref, merge_message,
906 916 merger_name, merger_email, dry_run=False,
907 917 use_rebase=False, close_branch=False):
908 918
909 919 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
910 920 'rebase' if use_rebase else 'merge', dry_run)
911 921 if target_ref.commit_id != self.branches[target_ref.name]:
912 922 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
913 923 target_ref.commit_id, self.branches[target_ref.name])
914 924 return MergeResponse(
915 925 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
916 926 metadata={'target_ref': target_ref})
917 927
918 928 shadow_repository_path = self._maybe_prepare_merge_workspace(
919 929 repo_id, workspace_id, target_ref, source_ref)
920 930 shadow_repo = self.get_shadow_instance(shadow_repository_path)
921 931
922 932 # checkout source, if it's different. Otherwise we could not
923 933 # fetch proper commits for merge testing
924 934 if source_ref.name != target_ref.name:
925 935 if shadow_repo.get_remote_ref(source_ref.name):
926 936 shadow_repo._checkout(source_ref.name, force=True)
927 937
928 938 # checkout target, and fetch changes
929 939 shadow_repo._checkout(target_ref.name, force=True)
930 940
931 941 # fetch/reset pull the target, in case it is changed
932 942 # this handles even force changes
933 943 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
934 944 shadow_repo._local_reset(target_ref.name)
935 945
936 946 # Need to reload repo to invalidate the cache, or otherwise we cannot
937 947 # retrieve the last target commit.
938 948 shadow_repo = self.get_shadow_instance(shadow_repository_path)
939 949 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
940 950 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
941 951 target_ref, target_ref.commit_id,
942 952 shadow_repo.branches[target_ref.name])
943 953 return MergeResponse(
944 954 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
945 955 metadata={'target_ref': target_ref})
946 956
947 957 # calculate new branch
948 958 pr_branch = shadow_repo._get_new_pr_branch(
949 959 source_ref.name, target_ref.name)
950 960 log.debug('using pull-request merge branch: `%s`', pr_branch)
951 961 # checkout to temp branch, and fetch changes
952 962 shadow_repo._checkout(pr_branch, create=True)
953 963 try:
954 964 shadow_repo._local_fetch(source_repo.path, source_ref.name)
955 965 except RepositoryError:
956 966 log.exception('Failure when doing local fetch on '
957 967 'shadow repo: %s', shadow_repo)
958 968 return MergeResponse(
959 969 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
960 970 metadata={'source_ref': source_ref})
961 971
962 972 merge_ref = None
963 973 merge_failure_reason = MergeFailureReason.NONE
964 974 metadata = {}
965 975 try:
966 976 shadow_repo._local_merge(merge_message, merger_name, merger_email,
967 977 [source_ref.commit_id])
968 978 merge_possible = True
969 979
970 980 # Need to invalidate the cache, or otherwise we
971 981 # cannot retrieve the merge commit.
972 982 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
973 983 merge_commit_id = shadow_repo.branches[pr_branch]
974 984
975 985 # Set a reference pointing to the merge commit. This reference may
976 986 # be used to easily identify the last successful merge commit in
977 987 # the shadow repository.
978 988 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
979 989 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
980 except RepositoryError:
990 except RepositoryError as e:
981 991 log.exception('Failure when doing local merge on git shadow repo')
992 if isinstance(e, UnresolvedFilesInRepo):
993 metadata['unresolved_files'] = 'file: ' + (', file: '.join(e.args[0]))
994
982 995 merge_possible = False
983 996 merge_failure_reason = MergeFailureReason.MERGE_FAILED
984 997
985 998 if merge_possible and not dry_run:
986 999 try:
987 1000 shadow_repo._local_push(
988 1001 pr_branch, self.path, target_ref.name, enable_hooks=True,
989 1002 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
990 1003 merge_succeeded = True
991 1004 except RepositoryError:
992 1005 log.exception(
993 1006 'Failure when doing local push from the shadow '
994 1007 'repository to the target repository at %s.', self.path)
995 1008 merge_succeeded = False
996 1009 merge_failure_reason = MergeFailureReason.PUSH_FAILED
997 1010 metadata['target'] = 'git shadow repo'
998 1011 metadata['merge_commit'] = pr_branch
999 1012 else:
1000 1013 merge_succeeded = False
1001 1014
1002 1015 return MergeResponse(
1003 1016 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1004 1017 metadata=metadata)
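The net effect of this change on the merge response: on a conflicted merge, the metadata now includes an 'unresolved_files' entry in the 'file: ...' format built above. A small sketch of that formatting (file names are examples):

unresolved = ['U setup.py', 'U README.rst']        # as raised by _local_merge
metadata = {'unresolved_files': 'file: ' + (', file: '.join(unresolved))}
assert metadata['unresolved_files'] == 'file: U setup.py, file: U README.rst'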
@@ -1,952 +1,972 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, exceptions
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 46 from rhodecode.lib.vcs.compat import configparser
47 47
48 48 hexlify = binascii.hexlify
49 49 nullid = "\0" * 20
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
60 60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 61 do_workspace_checkout=False, with_wire=None, bare=False):
62 62 """
63 63 Raises RepositoryError if repository could not be found at the given
64 64 ``repo_path``.
65 65
66 66 :param repo_path: local path of the repository
67 67 :param config: config object containing the repo configuration
68 68 :param create=False: if set to True, would try to create repository if
69 69 it does not exist rather than raising exception
70 70 :param src_url=None: would try to clone repository from given location
71 71 :param do_workspace_checkout=False: sets update of working copy after
72 72 making a clone
73 73 :param bare: not used, compatible with other VCS
74 74 """
75 75
76 76 self.path = safe_str(os.path.abspath(repo_path))
77 77 # mercurial since 4.4.X requires certain configuration to be present;
78 78 # because we sometimes init the repos with a custom config, we need to
79 79 # meet these special requirements
80 80 self.config = config if config else self.get_default_config(
81 81 default=[('extensions', 'largefiles', '1')])
82 82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83 83
84 84 self._init_repo(create, src_url, do_workspace_checkout)
85 85
86 86 # caches
87 87 self._commit_ids = {}
88 88
89 89 @LazyProperty
90 90 def _remote(self):
91 91 repo_id = self.path
92 92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93 93
94 94 @CachedProperty
95 95 def commit_ids(self):
96 96 """
97 97 Returns list of commit ids, in ascending order. Being lazy
98 98 attribute allows external tools to inject shas from cache.
99 99 """
100 100 commit_ids = self._get_all_commit_ids()
101 101 self._rebuild_cache(commit_ids)
102 102 return commit_ids
103 103
104 104 def _rebuild_cache(self, commit_ids):
105 105 self._commit_ids = dict((commit_id, index)
106 106 for index, commit_id in enumerate(commit_ids))
107 107
108 108 @CachedProperty
109 109 def branches(self):
110 110 return self._get_branches()
111 111
112 112 @CachedProperty
113 113 def branches_closed(self):
114 114 return self._get_branches(active=False, closed=True)
115 115
116 116 @CachedProperty
117 117 def branches_all(self):
118 118 all_branches = {}
119 119 all_branches.update(self.branches)
120 120 all_branches.update(self.branches_closed)
121 121 return all_branches
122 122
123 123 def _get_branches(self, active=True, closed=False):
124 124 """
125 125 Gets branches for this repository.
126 126 Returns only active (not closed) branches by default.
127 127
128 128 :param active: also return active branches
129 129 :param closed: also return closed branches
130 130
131 131 """
132 132 if self.is_empty():
133 133 return {}
134 134
135 135 def get_name(ctx):
136 136 return ctx[0]
137 137
138 138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 139 self._remote.branches(active, closed).items()]
140 140
141 141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142 142
143 143 @CachedProperty
144 144 def tags(self):
145 145 """
146 146 Gets tags for this repository
147 147 """
148 148 return self._get_tags()
149 149
150 150 def _get_tags(self):
151 151 if self.is_empty():
152 152 return {}
153 153
154 154 def get_name(ctx):
155 155 return ctx[0]
156 156
157 157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 158 self._remote.tags().items()]
159 159
160 160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161 161
162 162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 163 """
164 164 Creates and returns a tag for the given ``commit_id``.
165 165
166 166 :param name: name for new tag
167 167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 168 :param commit_id: commit id for which new tag would be created
169 169 :param message: message of the tag's commit
170 170 :param date: date of tag's commit
171 171
172 172 :raises TagAlreadyExistError: if tag with same name already exists
173 173 """
174 174 if name in self.tags:
175 175 raise TagAlreadyExistError("Tag %s already exists" % name)
176 176
177 177 commit = self.get_commit(commit_id=commit_id)
178 178 local = kwargs.setdefault('local', False)
179 179
180 180 if message is None:
181 181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182 182
183 183 date, tz = date_to_timestamp_plus_offset(date)
184 184
185 185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 186 self._remote.invalidate_vcs_cache()
187 187
188 188 # Reinitialize tags
189 189 self._invalidate_prop_cache('tags')
190 190 tag_id = self.tags[name]
191 191
192 192 return self.get_commit(commit_id=tag_id)
193 193
194 194 def remove_tag(self, name, user, message=None, date=None):
195 195 """
196 196 Removes tag with the given `name`.
197 197
198 198 :param name: name of the tag to be removed
199 199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 200 :param message: message of the tag's removal commit
201 201 :param date: date of tag's removal commit
202 202
203 203 :raises TagDoesNotExistError: if tag with given name does not exist
204 204 """
205 205 if name not in self.tags:
206 206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207 207
208 208 if message is None:
209 209 message = "Removed tag %s" % name
210 210 local = False
211 211
212 212 date, tz = date_to_timestamp_plus_offset(date)
213 213
214 214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 215 self._remote.invalidate_vcs_cache()
216 216 self._invalidate_prop_cache('tags')
217 217
218 218 @LazyProperty
219 219 def bookmarks(self):
220 220 """
221 221 Gets bookmarks for this repository
222 222 """
223 223 return self._get_bookmarks()
224 224
225 225 def _get_bookmarks(self):
226 226 if self.is_empty():
227 227 return {}
228 228
229 229 def get_name(ctx):
230 230 return ctx[0]
231 231
232 232 _bookmarks = [
233 233 (safe_unicode(n), hexlify(h)) for n, h in
234 234 self._remote.bookmarks().items()]
235 235
236 236 return OrderedDict(sorted(_bookmarks, key=get_name))
237 237
238 238 def _get_all_commit_ids(self):
239 239 return self._remote.get_all_commit_ids('visible')
240 240
241 241 def get_diff(
242 242 self, commit1, commit2, path='', ignore_whitespace=False,
243 243 context=3, path1=None):
244 244 """
245 245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 246 `commit2` since `commit1`.
247 247
248 248 :param commit1: Entry point from which diff is shown. Can be
249 249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 250 the changes since empty state of the repository until `commit2`
251 251 :param commit2: Until which commit changes should be shown.
252 252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 253 changes. Defaults to ``False``.
254 254 :param context: How many lines before/after changed lines should be
255 255 shown. Defaults to ``3``.
256 256 """
257 257 self._validate_diff_commits(commit1, commit2)
258 258 if path1 is not None and path1 != path:
259 259 raise ValueError("Diff of two different paths not supported.")
260 260
261 261 if path:
262 262 file_filter = [self.path, path]
263 263 else:
264 264 file_filter = None
265 265
266 266 diff = self._remote.diff(
267 267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 268 opt_git=True, opt_ignorews=ignore_whitespace,
269 269 context=context)
270 270 return MercurialDiff(diff)
271 271
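A sketch of the diff API above, assuming ``repo`` is an initialized ``MercurialRepository`` and the path exists in both commits:

    old = repo.get_commit(commit_idx=0)
    new = repo.get_commit()  # no arguments resolves to 'tip'
    diff = repo.get_diff(old, new, path='README.rst',
                         ignore_whitespace=True, context=5)
    # the returned MercurialDiff wraps the raw git-style diff text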
272 272 def strip(self, commit_id, branch=None):
273 273 self._remote.strip(commit_id, update=False, backup="none")
274 274
275 275 self._remote.invalidate_vcs_cache()
276 276 # clear cache
277 277 self._invalidate_prop_cache('commit_ids')
278 278
279 279 return len(self.commit_ids)
280 280
281 281 def verify(self):
282 282 verify = self._remote.verify()
283 283
284 284 self._remote.invalidate_vcs_cache()
285 285 return verify
286 286
287 287 def hg_update_cache(self):
288 288 update_cache = self._remote.hg_update_cache()
289 289
290 290 self._remote.invalidate_vcs_cache()
291 291 return update_cache
292 292
293 293 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
294 294 if commit_id1 == commit_id2:
295 295 return commit_id1
296 296
297 297 ancestors = self._remote.revs_from_revspec(
298 298 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
299 299 other_path=repo2.path)
300 300 return repo2[ancestors[0]].raw_id if ancestors else None
301 301
302 302 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
303 303 if commit_id1 == commit_id2:
304 304 commits = []
305 305 else:
306 306 if merge:
307 307 indexes = self._remote.revs_from_revspec(
308 308 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
309 309 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
310 310 else:
311 311 indexes = self._remote.revs_from_revspec(
312 312 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
313 313 commit_id1, other_path=repo2.path)
314 314
315 315 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
316 316 for idx in indexes]
317 317
318 318 return commits
319 319
320 320 @staticmethod
321 321 def check_url(url, config):
322 322 """
323 323 Checks the given url and tries to verify that it is a valid
324 324 link. Sometimes it may happen that mercurial issues a basic
325 325 auth request, which can cause the whole API to hang when used from
326 326 python or other external calls.
327 327
328 328 On failure it raises urllib2.HTTPError; the exception is also
329 329 raised when the return code is not 200
330 330 """
331 331 # check first if it's not a local url
332 332 if os.path.isdir(url) or url.startswith('file:'):
333 333 return True
334 334
335 335 # Request the _remote to verify the url
336 336 return connection.Hg.check_url(url, config.serialize())
337 337
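A small sketch of guarding a clone with ``check_url``; the ``config`` object is assumed to be the repository's vcs config:

    import urllib2

    url = 'https://code.example.com/hg/example'  # hypothetical remote
    try:
        MercurialRepository.check_url(url, config)
    except urllib2.HTTPError:
        log.warning('url `%s` failed verification', url)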
338 338 @staticmethod
339 339 def is_valid_repository(path):
340 340 return os.path.isdir(os.path.join(path, '.hg'))
341 341
342 342 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
343 343 """
344 344 Checks for a mercurial repository in the given path. If there
345 345 is no repository in that path it raises an exception, unless the
346 346 `create` parameter is set to True - in that case the repository
347 347 is created.
348 348
349 349 If `src_url` is given, tries to clone the repository from that
350 350 location. Additionally the working copy is updated according to
351 351 the `do_workspace_checkout` flag.
352 352 """
353 353 if create and os.path.exists(self.path):
354 354 raise RepositoryError(
355 355 "Cannot create repository at %s, location already exist"
356 356 % self.path)
357 357
358 358 if src_url:
359 359 url = str(self._get_url(src_url))
360 360 MercurialRepository.check_url(url, self.config)
361 361
362 362 self._remote.clone(url, self.path, do_workspace_checkout)
363 363
364 364 # Don't try to create if we've already cloned repo
365 365 create = False
366 366
367 367 if create:
368 368 os.makedirs(self.path, mode=0o755)
369 369 self._remote.localrepository(create)
370 370
371 371 @LazyProperty
372 372 def in_memory_commit(self):
373 373 return MercurialInMemoryCommit(self)
374 374
375 375 @LazyProperty
376 376 def description(self):
377 377 description = self._remote.get_config_value(
378 378 'web', 'description', untrusted=True)
379 379 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
380 380
381 381 @LazyProperty
382 382 def contact(self):
383 383 contact = (
384 384 self._remote.get_config_value("web", "contact") or
385 385 self._remote.get_config_value("ui", "username"))
386 386 return safe_unicode(contact or self.DEFAULT_CONTACT)
387 387
388 388 @LazyProperty
389 389 def last_change(self):
390 390 """
391 391 Returns last change made on this repository as
392 392 `datetime.datetime` object.
393 393 """
394 394 try:
395 395 return self.get_commit().date
396 396 except RepositoryError:
397 397 tzoffset = makedate()[1]
398 398 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
399 399
400 400 def _get_fs_mtime(self):
401 401 # fallback to filesystem
402 402 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
403 403 st_path = os.path.join(self.path, '.hg', "store")
404 404 if os.path.exists(cl_path):
405 405 return os.stat(cl_path).st_mtime
406 406 else:
407 407 return os.stat(st_path).st_mtime
408 408
409 409 def _get_url(self, url):
410 410 """
411 411 Returns normalized url. If no scheme is given, falls back
412 412 to the filesystem
413 413 (``file:///``) scheme.
414 414 """
415 415 url = url.encode('utf8')
416 416 if url != 'default' and '://' not in url:
417 417 url = "file:" + urllib.pathname2url(url)
418 418 return url
419 419
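To illustrate the normalization, a rough sketch of inputs and expected outputs (paths are hypothetical):

    repo._get_url('/srv/repos/example')       # -> 'file:/srv/repos/example'
    repo._get_url('http://example.com/repo')  # unchanged, scheme present
    repo._get_url('default')                  # the 'default' alias is kept as-is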
420 420 def get_hook_location(self):
421 421 """
422 422 returns absolute path to location where hooks are stored
423 423 """
424 424 return os.path.join(self.path, '.hg', '.hgrc')
425 425
426 426 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
427 427 """
428 428 Returns ``MercurialCommit`` object representing repository's
429 429 commit at the given `commit_id` or `commit_idx`.
430 430 """
431 431 if self.is_empty():
432 432 raise EmptyRepositoryError("There are no commits yet")
433 433
434 434 if commit_id is not None:
435 435 self._validate_commit_id(commit_id)
436 436 try:
437 437 # we have cached idx, use it without contacting the remote
438 438 idx = self._commit_ids[commit_id]
439 439 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
440 440 except KeyError:
441 441 pass
442 442
443 443 elif commit_idx is not None:
444 444 self._validate_commit_idx(commit_idx)
445 445 try:
446 446 _commit_id = self.commit_ids[commit_idx]
447 447 if commit_idx < 0:
448 448 commit_idx = self.commit_ids.index(_commit_id)
449 449
450 450 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
451 451 except IndexError:
452 452 commit_id = commit_idx
453 453 else:
454 454 commit_id = "tip"
455 455
456 456 if isinstance(commit_id, unicode):
457 457 commit_id = safe_str(commit_id)
458 458
459 459 try:
460 460 raw_id, idx = self._remote.lookup(commit_id, both=True)
461 461 except CommitDoesNotExistError:
462 462 msg = "Commit {} does not exist for `{}`".format(
463 463 *map(safe_str, [commit_id, self.name]))
464 464 raise CommitDoesNotExistError(msg)
465 465
466 466 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
467 467
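A sketch of the three lookup modes above, assuming a non-empty ``repo``:

    tip = repo.get_commit()                       # no args: resolves 'tip'
    first = repo.get_commit(commit_idx=0)         # by index into commit_ids
    same = repo.get_commit(commit_id=tip.raw_id)  # by full commit id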
468 468 def get_commits(
469 469 self, start_id=None, end_id=None, start_date=None, end_date=None,
470 470 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
471 471 """
472 472 Returns generator of ``MercurialCommit`` objects from start to end
473 473 (both are inclusive)
474 474
475 475 :param start_id: None, str(commit_id)
476 476 :param end_id: None, str(commit_id)
477 477 :param start_date: if specified, commits with commit date less than
478 478 ``start_date`` would be filtered out from returned set
479 479 :param end_date: if specified, commits with commit date greater than
480 480 ``end_date`` would be filtered out from returned set
481 481 :param branch_name: if specified, commits not reachable from given
482 482 branch would be filtered out from returned set
483 483 :param show_hidden: Show hidden commits such as obsolete or hidden from
484 484 Mercurial evolve
485 485 :raise BranchDoesNotExistError: If given ``branch_name`` does not
486 486 exist.
487 487 :raise CommitDoesNotExistError: If commit for given ``start`` or
488 488 ``end`` could not be found.
489 489 """
490 490 # actually we should check now if it's not an empty repo
491 491 if self.is_empty():
492 492 raise EmptyRepositoryError("There are no commits yet")
493 493 self._validate_branch_name(branch_name)
494 494
495 495 branch_ancestors = False
496 496 if start_id is not None:
497 497 self._validate_commit_id(start_id)
498 498 c_start = self.get_commit(commit_id=start_id)
499 499 start_pos = self._commit_ids[c_start.raw_id]
500 500 else:
501 501 start_pos = None
502 502
503 503 if end_id is not None:
504 504 self._validate_commit_id(end_id)
505 505 c_end = self.get_commit(commit_id=end_id)
506 506 end_pos = max(0, self._commit_ids[c_end.raw_id])
507 507 else:
508 508 end_pos = None
509 509
510 510 if None not in [start_id, end_id] and start_pos > end_pos:
511 511 raise RepositoryError(
512 512 "Start commit '%s' cannot be after end commit '%s'" %
513 513 (start_id, end_id))
514 514
515 515 if end_pos is not None:
516 516 end_pos += 1
517 517
518 518 commit_filter = []
519 519
520 520 if branch_name and not branch_ancestors:
521 521 commit_filter.append('branch("%s")' % (branch_name,))
522 522 elif branch_name and branch_ancestors:
523 523 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
524 524
525 525 if start_date and not end_date:
526 526 commit_filter.append('date(">%s")' % (start_date,))
527 527 if end_date and not start_date:
528 528 commit_filter.append('date("<%s")' % (end_date,))
529 529 if start_date and end_date:
530 530 commit_filter.append(
531 531 'date(">%s") and date("<%s")' % (start_date, end_date))
532 532
533 533 if not show_hidden:
534 534 commit_filter.append('not obsolete()')
535 535 commit_filter.append('not hidden()')
536 536
537 537 # TODO: johbo: Figure out a simpler way for this solution
538 538 collection_generator = CollectionGenerator
539 539 if commit_filter:
540 540 commit_filter = ' and '.join(map(safe_str, commit_filter))
541 541 revisions = self._remote.rev_range([commit_filter])
542 542 collection_generator = MercurialIndexBasedCollectionGenerator
543 543 else:
544 544 revisions = self.commit_ids
545 545
546 546 if start_pos or end_pos:
547 547 revisions = revisions[start_pos:end_pos]
548 548
549 549 return collection_generator(self, revisions, pre_load=pre_load)
550 550
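A usage sketch combining the branch and date filters described above (branch name and dates are illustrative):

    import datetime

    commits = repo.get_commits(
        branch_name='default',
        start_date=datetime.datetime(2019, 1, 1),
        end_date=datetime.datetime(2019, 6, 30))
    for commit in commits:  # lazily built by the collection generator
        log.debug('%s: %s', commit.short_id, commit.message)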
551 551 def pull(self, url, commit_ids=None):
552 552 """
553 553 Pull changes from external location.
554 554
555 555 :param commit_ids: Optional. Can be set to a list of commit ids
556 556 which shall be pulled from the other repository.
557 557 """
558 558 url = self._get_url(url)
559 559 self._remote.pull(url, commit_ids=commit_ids)
560 560 self._remote.invalidate_vcs_cache()
561 561
562 562 def fetch(self, url, commit_ids=None):
563 563 """
564 564 Backward compatibility with GIT fetch==pull
565 565 """
566 566 return self.pull(url, commit_ids=commit_ids)
567 567
568 568 def push(self, url):
569 569 url = self._get_url(url)
570 570 self._remote.sync_push(url)
571 571
572 572 def _local_clone(self, clone_path):
573 573 """
574 574 Create a local clone of the current repo.
575 575 """
576 576 self._remote.clone(self.path, clone_path, update_after_clone=True,
577 577 hooks=False)
578 578
579 579 def _update(self, revision, clean=False):
580 580 """
581 581 Update the working copy to the specified revision.
582 582 """
583 583 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
584 584 self._remote.update(revision, clean=clean)
585 585
586 586 def _identify(self):
587 587 """
588 588 Return the current state of the working directory.
589 589 """
590 590 return self._remote.identify().strip().rstrip('+')
591 591
592 592 def _heads(self, branch=None):
593 593 """
594 594 Return the commit ids of the repository heads.
595 595 """
596 596 return self._remote.heads(branch=branch).strip().split(' ')
597 597
598 598 def _ancestor(self, revision1, revision2):
599 599 """
600 600 Return the common ancestor of the two revisions.
601 601 """
602 602 return self._remote.ancestor(revision1, revision2)
603 603
604 604 def _local_push(
605 605 self, revision, repository_path, push_branches=False,
606 606 enable_hooks=False):
607 607 """
608 608 Push the given revision to the specified repository.
609 609
610 610 :param push_branches: allow creating branches in the target repo.
611 611 """
612 612 self._remote.push(
613 613 [revision], repository_path, hooks=enable_hooks,
614 614 push_branches=push_branches)
615 615
616 616 def _local_merge(self, target_ref, merge_message, user_name, user_email,
617 617 source_ref, use_rebase=False, dry_run=False):
618 618 """
619 619 Merge the given source_ref into the checked out target_ref revision.
620 620
621 621 Returns the commit id of the merge and a boolean indicating if the
622 622 commit needs to be pushed.
623 623 """
624 624 self._update(target_ref.commit_id, clean=True)
625 625
626 626 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
627 627 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
628 628
629 629 if ancestor == source_ref.commit_id:
630 630 # Nothing to do, the changes were already integrated
631 631 return target_ref.commit_id, False
632 632
633 633 elif ancestor == target_ref.commit_id and is_the_same_branch:
634 634 # In this case we should force a commit message
635 635 return source_ref.commit_id, True
636 636
637 unresolved = None
637 638 if use_rebase:
638 639 try:
639 640 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
640 641 target_ref.commit_id)
641 642 self.bookmark(bookmark_name, revision=source_ref.commit_id)
642 643 self._remote.rebase(
643 644 source=source_ref.commit_id, dest=target_ref.commit_id)
644 645 self._remote.invalidate_vcs_cache()
645 646 self._update(bookmark_name, clean=True)
646 647 return self._identify(), True
647 except RepositoryError:
648 except RepositoryError as e:
648 649 # The rebase-abort may raise another exception which 'hides'
649 650 # the original one, therefore we log it here.
650 651 log.exception('Error while rebasing shadow repo during merge.')
652 if 'unresolved conflicts' in e.message:
653 unresolved = self._remote.get_unresolved_files()
654 log.debug('unresolved files: %s', unresolved)
651 655
652 656 # Cleanup any rebase leftovers
653 657 self._remote.invalidate_vcs_cache()
654 658 self._remote.rebase(abort=True)
655 659 self._remote.invalidate_vcs_cache()
656 660 self._remote.update(clean=True)
657 raise
661 if unresolved:
662 raise UnresolvedFilesInRepo(unresolved)
663 else:
664 raise
658 665 else:
659 666 try:
660 667 self._remote.merge(source_ref.commit_id)
661 668 self._remote.invalidate_vcs_cache()
662 669 self._remote.commit(
663 670 message=safe_str(merge_message),
664 671 username=safe_str('%s <%s>' % (user_name, user_email)))
665 672 self._remote.invalidate_vcs_cache()
666 673 return self._identify(), True
667 except RepositoryError:
674 except RepositoryError as e:
675 # The merge-abort may raise another exception which 'hides'
676 # the original one, therefore we log it here.
677 log.exception('Error while merging shadow repo during merge.')
678 if 'unresolved merge conflicts' in e.message:
679 unresolved = self._remote.get_unresolved_files()
680 log.debug('unresolved files: %s', unresolved)
681
668 682 # Cleanup any merge leftovers
669 683 self._remote.update(clean=True)
670 raise
684 if unresolved:
685 raise UnresolvedFilesInRepo(unresolved)
686 else:
687 raise
671 688
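The conflict handling above is the core of this change: when a rebase or merge stops on conflicts, the unresolved file paths are read from the shadow repo before the abort/cleanup runs, then re-raised as ``UnresolvedFilesInRepo``. A condensed caller-side sketch (refs and identity are illustrative):

    try:
        shadow_repo._local_merge(
            target_ref, 'merge message', 'Joe Doe', 'joe.doe@example.com',
            source_ref, use_rebase=False)
    except UnresolvedFilesInRepo as e:
        conflicted = e.args[0]  # the list from get_unresolved_files()
    except RepositoryError:
        # any other merge failure; UnresolvedFilesInRepo is a subclass of
        # RepositoryError, so it has to be caught first
        raise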
672 689 def _local_close(self, target_ref, user_name, user_email,
673 690 source_ref, close_message=''):
674 691 """
675 692 Close the branch of the given source_ref
676 693
677 694 Returns the commit id of the close and a boolean indicating if the
678 695 commit needs to be pushed.
679 696 """
680 697 self._update(source_ref.commit_id)
681 698 message = close_message or "Closing branch: `{}`".format(source_ref.name)
682 699 try:
683 700 self._remote.commit(
684 701 message=safe_str(message),
685 702 username=safe_str('%s <%s>' % (user_name, user_email)),
686 703 close_branch=True)
687 704 self._remote.invalidate_vcs_cache()
688 705 return self._identify(), True
689 706 except RepositoryError:
690 707 # Cleanup any commit leftovers
691 708 self._remote.update(clean=True)
692 709 raise
693 710
694 711 def _is_the_same_branch(self, target_ref, source_ref):
695 712 return (
696 713 self._get_branch_name(target_ref) ==
697 714 self._get_branch_name(source_ref))
698 715
699 716 def _get_branch_name(self, ref):
700 717 if ref.type == 'branch':
701 718 return ref.name
702 719 return self._remote.ctx_branch(ref.commit_id)
703 720
704 721 def _maybe_prepare_merge_workspace(
705 722 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
706 723 shadow_repository_path = self._get_shadow_repository_path(
707 724 self.path, repo_id, workspace_id)
708 725 if not os.path.exists(shadow_repository_path):
709 726 self._local_clone(shadow_repository_path)
710 727 log.debug(
711 728 'Prepared shadow repository in %s', shadow_repository_path)
712 729
713 730 return shadow_repository_path
714 731
715 732 def _merge_repo(self, repo_id, workspace_id, target_ref,
716 733 source_repo, source_ref, merge_message,
717 734 merger_name, merger_email, dry_run=False,
718 735 use_rebase=False, close_branch=False):
719 736
720 737 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
721 738 'rebase' if use_rebase else 'merge', dry_run)
722 739 if target_ref.commit_id not in self._heads():
723 740 return MergeResponse(
724 741 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
725 742 metadata={'target_ref': target_ref})
726 743
727 744 try:
728 745 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
729 746 heads = ',\n'.join(self._heads(target_ref.name))
730 747 metadata = {
731 748 'target_ref': target_ref,
732 749 'source_ref': source_ref,
733 750 'heads': heads
734 751 }
735 752 return MergeResponse(
736 753 False, False, None,
737 754 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
738 755 metadata=metadata)
739 756 except CommitDoesNotExistError:
740 757 log.exception('Failure when looking up branch heads on hg target')
741 758 return MergeResponse(
742 759 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
743 760 metadata={'target_ref': target_ref})
744 761
745 762 shadow_repository_path = self._maybe_prepare_merge_workspace(
746 763 repo_id, workspace_id, target_ref, source_ref)
747 764 shadow_repo = self.get_shadow_instance(shadow_repository_path)
748 765
749 766 log.debug('Pulling in target reference %s', target_ref)
750 767 self._validate_pull_reference(target_ref)
751 768 shadow_repo._local_pull(self.path, target_ref)
752 769
753 770 try:
754 771 log.debug('Pulling in source reference %s', source_ref)
755 772 source_repo._validate_pull_reference(source_ref)
756 773 shadow_repo._local_pull(source_repo.path, source_ref)
757 774 except CommitDoesNotExistError:
758 775 log.exception('Failure when doing local pull on hg shadow repo')
759 776 return MergeResponse(
760 777 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
761 778 metadata={'source_ref': source_ref})
762 779
763 780 merge_ref = None
764 781 merge_commit_id = None
765 782 close_commit_id = None
766 783 merge_failure_reason = MergeFailureReason.NONE
767 784 metadata = {}
768 785
769 786 # enforce that close branch should be used only in case we source from
770 787 # an actual Branch
771 788 close_branch = close_branch and source_ref.type == 'branch'
772 789
773 790 # don't allow to close branch if source and target are the same
774 791 close_branch = close_branch and source_ref.name != target_ref.name
775 792
776 793 needs_push_on_close = False
777 794 if close_branch and not use_rebase and not dry_run:
778 795 try:
779 796 close_commit_id, needs_push_on_close = shadow_repo._local_close(
780 797 target_ref, merger_name, merger_email, source_ref)
781 798 merge_possible = True
782 799 except RepositoryError:
783 800 log.exception('Failure when doing close branch on '
784 801 'shadow repo: %s', shadow_repo)
785 802 merge_possible = False
786 803 merge_failure_reason = MergeFailureReason.MERGE_FAILED
787 804 else:
788 805 merge_possible = True
789 806
790 807 needs_push = False
791 808 if merge_possible:
792 809 try:
793 810 merge_commit_id, needs_push = shadow_repo._local_merge(
794 811 target_ref, merge_message, merger_name, merger_email,
795 812 source_ref, use_rebase=use_rebase, dry_run=dry_run)
796 813 merge_possible = True
797 814
798 815 # read the state of the close action; it may have
799 816 # required a push
800 817 needs_push = needs_push or needs_push_on_close
801 818
802 819 # Set a bookmark pointing to the merge commit. This bookmark
803 820 # may be used to easily identify the last successful merge
804 821 # commit in the shadow repository.
805 822 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
806 823 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
807 824 except SubrepoMergeError:
808 825 log.exception(
809 826 'Subrepo merge error during local merge on hg shadow repo.')
810 827 merge_possible = False
811 828 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
812 829 needs_push = False
813 except RepositoryError:
830 except RepositoryError as e:
814 831 log.exception('Failure when doing local merge on hg shadow repo')
832 if isinstance(e, UnresolvedFilesInRepo):
833 metadata['unresolved_files'] = 'file: ' + (', file: '.join(e.args[0]))
834
815 835 merge_possible = False
816 836 merge_failure_reason = MergeFailureReason.MERGE_FAILED
817 837 needs_push = False
818 838
819 839 if merge_possible and not dry_run:
820 840 if needs_push:
821 841 # In case the target is a bookmark, update it, so after pushing
822 842 # the bookmark is also updated in the target.
823 843 if target_ref.type == 'book':
824 844 shadow_repo.bookmark(
825 845 target_ref.name, revision=merge_commit_id)
826 846 try:
827 847 shadow_repo_with_hooks = self.get_shadow_instance(
828 848 shadow_repository_path,
829 849 enable_hooks=True)
830 850 # This is the actual merge action, we push from shadow
831 851 # into origin.
832 852 # Note: the push_branches option will push any new branch
833 853 # defined in the source repository to the target. This may
834 854 # be dangerous as branches are permanent in Mercurial.
835 855 # This feature was requested in issue #441.
836 856 shadow_repo_with_hooks._local_push(
837 857 merge_commit_id, self.path, push_branches=True,
838 858 enable_hooks=True)
839 859
840 860 # maybe we also need to push the close_commit_id
841 861 if close_commit_id:
842 862 shadow_repo_with_hooks._local_push(
843 863 close_commit_id, self.path, push_branches=True,
844 864 enable_hooks=True)
845 865 merge_succeeded = True
846 866 except RepositoryError:
847 867 log.exception(
848 868 'Failure when doing local push from the shadow '
849 869 'repository to the target repository at %s.', self.path)
850 870 merge_succeeded = False
851 871 merge_failure_reason = MergeFailureReason.PUSH_FAILED
852 872 metadata['target'] = 'hg shadow repo'
853 873 metadata['merge_commit'] = merge_commit_id
854 874 else:
855 875 merge_succeeded = True
856 876 else:
857 877 merge_succeeded = False
858 878
859 879 return MergeResponse(
860 880 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
861 881 metadata=metadata)
862 882
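Downstream code can surface the conflicted paths through the merge response metadata; a sketch of what a caller sees after a conflicting dry run (``resp.possible`` mirrors the first ``MergeResponse`` argument, other values are illustrative):

    resp = target_repo._merge_repo(
        repo_id, workspace_id, target_ref, source_repo, source_ref,
        'merge message', 'Joe Doe', 'joe.doe@example.com', dry_run=True)
    if not resp.possible:
        # e.g. 'file: foo.txt, file: bar.txt' when conflicts were detected
        log.debug(resp.metadata.get('unresolved_files'))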
863 883 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
864 884 config = self.config.copy()
865 885 if not enable_hooks:
866 886 config.clear_section('hooks')
867 887 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
868 888
869 889 def _validate_pull_reference(self, reference):
870 890 if not (reference.name in self.bookmarks or
871 891 reference.name in self.branches or
872 892 self.get_commit(reference.commit_id)):
873 893 raise CommitDoesNotExistError(
874 894 'Unknown branch, bookmark or commit id')
875 895
876 896 def _local_pull(self, repository_path, reference):
877 897 """
878 898 Fetch a branch, bookmark or commit from a local repository.
879 899 """
880 900 repository_path = os.path.abspath(repository_path)
881 901 if repository_path == self.path:
882 902 raise ValueError('Cannot pull from the same repository')
883 903
884 904 reference_type_to_option_name = {
885 905 'book': 'bookmark',
886 906 'branch': 'branch',
887 907 }
888 908 option_name = reference_type_to_option_name.get(
889 909 reference.type, 'revision')
890 910
891 911 if option_name == 'revision':
892 912 ref = reference.commit_id
893 913 else:
894 914 ref = reference.name
895 915
896 916 options = {option_name: [ref]}
897 917 self._remote.pull_cmd(repository_path, hooks=False, **options)
898 918 self._remote.invalidate_vcs_cache()
899 919
900 920 def bookmark(self, bookmark, revision=None):
901 921 if isinstance(bookmark, unicode):
902 922 bookmark = safe_str(bookmark)
903 923 self._remote.bookmark(bookmark, revision=revision)
904 924 self._remote.invalidate_vcs_cache()
905 925
906 926 def get_path_permissions(self, username):
907 927 hgacl_file = os.path.join(self.path, '.hg/hgacl')
908 928
909 929 def read_patterns(suffix):
910 930 svalue = None
911 931 for section, option in [
912 932 ('narrowacl', username + suffix),
913 933 ('narrowacl', 'default' + suffix),
914 934 ('narrowhgacl', username + suffix),
915 935 ('narrowhgacl', 'default' + suffix)
916 936 ]:
917 937 try:
918 938 svalue = hgacl.get(section, option)
919 939 break # stop at the first value we find
920 940 except configparser.NoOptionError:
921 941 pass
922 942 if not svalue:
923 943 return None
924 944 result = ['/']
925 945 for pattern in svalue.split():
926 946 result.append(pattern)
927 947 if '*' not in pattern and '?' not in pattern:
928 948 result.append(pattern + '/*')
929 949 return result
930 950
931 951 if os.path.exists(hgacl_file):
932 952 try:
933 953 hgacl = configparser.RawConfigParser()
934 954 hgacl.read(hgacl_file)
935 955
936 956 includes = read_patterns('.includes')
937 957 excludes = read_patterns('.excludes')
938 958 return BasePathPermissionChecker.create_from_patterns(
939 959 includes, excludes)
940 960 except BaseException as e:
941 961 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
942 962 hgacl_file, self.name, e)
943 963 raise exceptions.RepositoryRequirementError(msg)
944 964 else:
945 965 return None
946 966
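For reference, a minimal ``.hg/hgacl`` file in the format read above might look like this (username and patterns are illustrative):

    [narrowacl]
    joe.includes = docs/* src/api
    joe.excludes = src/api/private
    default.includes = docs/*

Patterns without wildcards (like ``src/api``) get an extra ``src/api/*`` entry from ``read_patterns`` so whole directories match.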
947 967
948 968 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
949 969
950 970 def _commit_factory(self, commit_id):
951 971 return self.repo.get_commit(
952 972 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,226 +1,230 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Custom vcs exceptions module.
23 23 """
24 24 import logging
25 25 import functools
26 26 import urllib2
27 27 import rhodecode
28 28 from pyramid import compat
29 29
30 30 log = logging.getLogger(__name__)
31 31
32 32
33 33 class VCSCommunicationError(Exception):
34 34 pass
35 35
36 36
37 37 class HttpVCSCommunicationError(VCSCommunicationError):
38 38 pass
39 39
40 40
41 41 class VCSError(Exception):
42 42 pass
43 43
44 44
45 45 class RepositoryError(VCSError):
46 46 pass
47 47
48 48
49 49 class RepositoryRequirementError(RepositoryError):
50 50 pass
51 51
52 52
53 class UnresolvedFilesInRepo(RepositoryError):
54 pass
55
56
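The new exception carries the list of conflicted paths as its first argument, matching how the hg backend raises it; a minimal sketch:

    try:
        raise UnresolvedFilesInRepo(['src/app.py', 'README.rst'])
    except UnresolvedFilesInRepo as e:
        conflicted_files = e.args[0]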
53 57 class VCSBackendNotSupportedError(VCSError):
54 58 """
55 59 Exception raised when VCSServer does not support requested backend
56 60 """
57 61
58 62
59 63 class EmptyRepositoryError(RepositoryError):
60 64 pass
61 65
62 66
63 67 class TagAlreadyExistError(RepositoryError):
64 68 pass
65 69
66 70
67 71 class TagDoesNotExistError(RepositoryError):
68 72 pass
69 73
70 74
71 75 class BranchAlreadyExistError(RepositoryError):
72 76 pass
73 77
74 78
75 79 class BranchDoesNotExistError(RepositoryError):
76 80 pass
77 81
78 82
79 83 class CommitError(RepositoryError):
80 84 """
81 85 Exceptions related to an existing commit
82 86 """
83 87
84 88
85 89 class CommitDoesNotExistError(CommitError):
86 90 pass
87 91
88 92
89 93 class CommittingError(RepositoryError):
90 94 """
91 95 Exceptions happening while creating a new commit
92 96 """
93 97
94 98
95 99 class NothingChangedError(CommittingError):
96 100 pass
97 101
98 102
99 103 class NodeError(VCSError):
100 104 pass
101 105
102 106
103 107 class RemovedFileNodeError(NodeError):
104 108 pass
105 109
106 110
107 111 class NodeAlreadyExistsError(CommittingError):
108 112 pass
109 113
110 114
111 115 class NodeAlreadyChangedError(CommittingError):
112 116 pass
113 117
114 118
115 119 class NodeDoesNotExistError(CommittingError):
116 120 pass
117 121
118 122
119 123 class NodeNotChangedError(CommittingError):
120 124 pass
121 125
122 126
123 127 class NodeAlreadyAddedError(CommittingError):
124 128 pass
125 129
126 130
127 131 class NodeAlreadyRemovedError(CommittingError):
128 132 pass
129 133
130 134
131 135 class SubrepoMergeError(RepositoryError):
132 136 """
133 137 This happens if we try to merge a repository which contains subrepos and
134 138 the subrepos cannot be merged. The subrepos themselves are not merged,
135 139 but their references in the root repo are merged.
136 140 """
137 141
138 142
139 143 class ImproperArchiveTypeError(VCSError):
140 144 pass
141 145
142 146
143 147 class CommandError(VCSError):
144 148 pass
145 149
146 150
147 151 class UnhandledException(VCSError):
148 152 """
149 153 Signals that something unexpected went wrong.
150 154
151 155 This usually means we have a programming error on the side of the VCSServer
152 156 and should inspect the logfile of the VCSServer to find more details.
153 157 """
154 158
155 159
156 160 _EXCEPTION_MAP = {
157 161 'abort': RepositoryError,
158 162 'archive': ImproperArchiveTypeError,
159 163 'error': RepositoryError,
160 164 'lookup': CommitDoesNotExistError,
161 165 'repo_locked': RepositoryError,
162 166 'requirement': RepositoryRequirementError,
163 167 'unhandled': UnhandledException,
164 168 # TODO: johbo: Define our own exception for this and stop abusing
165 169 # urllib's exception class.
166 170 'url_error': urllib2.URLError,
167 171 'subrepo_merge_error': SubrepoMergeError,
168 172 }
169 173
170 174
171 175 def map_vcs_exceptions(func):
172 176 """
173 177 Utility to decorate functions so that plain exceptions are translated.
174 178
175 179 The translation is based on `exc_map` which maps a `str` indicating
176 180 the error type into an exception class representing this error inside
177 181 of the vcs layer.
178 182 """
179 183
180 184 @functools.wraps(func)
181 185 def wrapper(*args, **kwargs):
182 186 try:
183 187 return func(*args, **kwargs)
184 188 except Exception as e:
185 189 from rhodecode.lib.utils2 import str2bool
186 190 debug = str2bool(rhodecode.CONFIG.get('debug'))
187 191
188 192 # The error middleware adds information if it finds
189 193 # __traceback_info__ in a frame object. This way the remote
190 194 # traceback information is made available in error reports.
191 195 remote_tb = getattr(e, '_vcs_server_traceback', None)
192 196 org_remote_tb = getattr(e, '_vcs_server_org_exc_tb', '')
193 197 __traceback_info__ = None
194 198 if remote_tb:
195 199 if isinstance(remote_tb, compat.string_types):
196 200 remote_tb = [remote_tb]
197 201 __traceback_info__ = (
198 202 'Found VCSServer remote traceback information:\n'
199 203 '{}\n'
200 204 '+++ BEG SOURCE EXCEPTION +++\n\n'
201 205 '{}\n'
202 206 '+++ END SOURCE EXCEPTION +++\n'
203 207 ''.format('\n'.join(remote_tb), org_remote_tb)
204 208 )
205 209
206 210 # Avoid that remote_tb also appears in the frame
207 211 del remote_tb
208 212
209 213 # Special vcs errors had an attribute "_vcs_kind" which is used
210 214 # to translate them to the proper exception class in the vcs
211 215 # client layer.
212 216 kind = getattr(e, '_vcs_kind', None)
213 217
214 218 if kind:
215 219 if any(e.args):
216 220 args = e.args
217 221 else:
218 222 args = [__traceback_info__ or 'unhandledException']
219 223 if debug or __traceback_info__ and kind not in ['unhandled', 'lookup']:
220 224 # for other than unhandled errors also log the traceback
221 225 # can be useful for debugging
222 226 log.error(__traceback_info__)
223 227 raise _EXCEPTION_MAP[kind](*args)
224 228 else:
225 229 raise
226 230 return wrapper
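A minimal sketch of the decorator in use; ``RemoteLookupError`` is a hypothetical stand-in for an error coming back from the VCSServer tagged with ``_vcs_kind``:

    class RemoteLookupError(Exception):
        _vcs_kind = 'lookup'

    @map_vcs_exceptions
    def lookup(commit_id):
        raise RemoteLookupError(commit_id)

    # lookup('deadbeef') now raises CommitDoesNotExistError via _EXCEPTION_MAP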
@@ -1,1759 +1,1760 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid import compat
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode import events
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 # Data structure to hold the response data when updating commits during a pull
67 67 # request update.
68 68 UpdateResponse = collections.namedtuple('UpdateResponse', [
69 69 'executed', 'reason', 'new', 'old', 'changes',
70 70 'source_changed', 'target_changed'])
71 71
72 72
73 73 class PullRequestModel(BaseModel):
74 74
75 75 cls = PullRequest
76 76
77 77 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
78 78
79 79 UPDATE_STATUS_MESSAGES = {
80 80 UpdateFailureReason.NONE: lazy_ugettext(
81 81 'Pull request update successful.'),
82 82 UpdateFailureReason.UNKNOWN: lazy_ugettext(
83 83 'Pull request update failed because of an unknown error.'),
84 84 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
85 85 'No update needed because the source and target have not changed.'),
86 86 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
87 87 'Pull request cannot be updated because the reference type is '
88 88 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
89 89 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
90 90 'This pull request cannot be updated because the target '
91 91 'reference is missing.'),
92 92 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
93 93 'This pull request cannot be updated because the source '
94 94 'reference is missing.'),
95 95 }
96 96 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
97 97 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98 98
99 99 def __get_pull_request(self, pull_request):
100 100 return self._get_instance((
101 101 PullRequest, PullRequestVersion), pull_request)
102 102
103 103 def _check_perms(self, perms, pull_request, user, api=False):
104 104 if not api:
105 105 return h.HasRepoPermissionAny(*perms)(
106 106 user=user, repo_name=pull_request.target_repo.repo_name)
107 107 else:
108 108 return h.HasRepoPermissionAnyApi(*perms)(
109 109 user=user, repo_name=pull_request.target_repo.repo_name)
110 110
111 111 def check_user_read(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_merge(self, pull_request, user, api=False):
116 116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 117 return self._check_perms(_perms, pull_request, user, api)
118 118
119 119 def check_user_update(self, pull_request, user, api=False):
120 120 owner = user.user_id == pull_request.user_id
121 121 return self.check_user_merge(pull_request, user, api) or owner
122 122
123 123 def check_user_delete(self, pull_request, user):
124 124 owner = user.user_id == pull_request.user_id
125 125 _perms = ('repository.admin',)
126 126 return self._check_perms(_perms, pull_request, user) or owner
127 127
128 128 def check_user_change_status(self, pull_request, user, api=False):
129 129 reviewer = user.user_id in [x.user_id for x in
130 130 pull_request.reviewers]
131 131 return self.check_user_update(pull_request, user, api) or reviewer
132 132
133 133 def check_user_comment(self, pull_request, user):
134 134 owner = user.user_id == pull_request.user_id
135 135 return self.check_user_read(pull_request, user) or owner
136 136
137 137 def get(self, pull_request):
138 138 return self.__get_pull_request(pull_request)
139 139
140 140 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
141 141 statuses=None, opened_by=None, order_by=None,
142 142 order_dir='desc', only_created=False):
143 143 repo = None
144 144 if repo_name:
145 145 repo = self._get_repo(repo_name)
146 146
147 147 q = PullRequest.query()
148 148
149 149 if search_q:
150 150 like_expression = u'%{}%'.format(safe_unicode(search_q))
151 151 q = q.filter(or_(
152 152 cast(PullRequest.pull_request_id, String).ilike(like_expression),
153 153 PullRequest.title.ilike(like_expression),
154 154 PullRequest.description.ilike(like_expression),
155 155 ))
156 156
157 157 # source or target
158 158 if repo and source:
159 159 q = q.filter(PullRequest.source_repo == repo)
160 160 elif repo:
161 161 q = q.filter(PullRequest.target_repo == repo)
162 162
163 163 # closed,opened
164 164 if statuses:
165 165 q = q.filter(PullRequest.status.in_(statuses))
166 166
167 167 # opened by filter
168 168 if opened_by:
169 169 q = q.filter(PullRequest.user_id.in_(opened_by))
170 170
171 171 # only get those that are in "created" state
172 172 if only_created:
173 173 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
174 174
175 175 if order_by:
176 176 order_map = {
177 177 'name_raw': PullRequest.pull_request_id,
178 178 'id': PullRequest.pull_request_id,
179 179 'title': PullRequest.title,
180 180 'updated_on_raw': PullRequest.updated_on,
181 181 'target_repo': PullRequest.target_repo_id
182 182 }
183 183 if order_dir == 'asc':
184 184 q = q.order_by(order_map[order_by].asc())
185 185 else:
186 186 q = q.order_by(order_map[order_by].desc())
187 187
188 188 return q
189 189
190 190 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
191 191 opened_by=None):
192 192 """
193 193 Count the number of pull requests for a specific repository.
194 194
195 195 :param repo_name: target or source repo
196 196 :param search_q: filter by text
197 197 :param source: boolean flag to specify if repo_name refers to source
198 198 :param statuses: list of pull request statuses
199 199 :param opened_by: author user of the pull request
200 200 :returns: int number of pull requests
201 201 """
202 202 q = self._prepare_get_all_query(
203 203 repo_name, search_q=search_q, source=source, statuses=statuses,
204 204 opened_by=opened_by)
205 205
206 206 return q.count()
207 207
208 208 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
209 209 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
210 210 """
211 211 Get all pull requests for a specific repository.
212 212
213 213 :param repo_name: target or source repo
214 214 :param search_q: filter by text
215 215 :param source: boolean flag to specify if repo_name refers to source
216 216 :param statuses: list of pull request statuses
217 217 :param opened_by: author user of the pull request
218 218 :param offset: pagination offset
219 219 :param length: length of returned list
220 220 :param order_by: order of the returned list
221 221 :param order_dir: 'asc' or 'desc' ordering direction
222 222 :returns: list of pull requests
223 223 """
224 224 q = self._prepare_get_all_query(
225 225 repo_name, search_q=search_q, source=source, statuses=statuses,
226 226 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
227 227
228 228 if length:
229 229 pull_requests = q.limit(length).offset(offset).all()
230 230 else:
231 231 pull_requests = q.all()
232 232
233 233 return pull_requests
234 234
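A sketch of a typical listing call (repo name, status value and paging are illustrative):

    model = PullRequestModel()
    prs = model.get_all(
        'example/repo', statuses=['new'], order_by='updated_on_raw',
        order_dir='desc', offset=0, length=20)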
235 235 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
236 236 opened_by=None):
237 237 """
238 238 Count the number of pull requests for a specific repository that are
239 239 awaiting review.
240 240
241 241 :param repo_name: target or source repo
242 242 :param search_q: filter by text
243 243 :param source: boolean flag to specify if repo_name refers to source
244 244 :param statuses: list of pull request statuses
245 245 :param opened_by: author user of the pull request
246 246 :returns: int number of pull requests
247 247 """
248 248 pull_requests = self.get_awaiting_review(
249 249 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
250 250
251 251 return len(pull_requests)
252 252
253 253 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
254 254 opened_by=None, offset=0, length=None,
255 255 order_by=None, order_dir='desc'):
256 256 """
257 257 Get all pull requests for a specific repository that are awaiting
258 258 review.
259 259
260 260 :param repo_name: target or source repo
261 261 :param search_q: filter by text
262 262 :param source: boolean flag to specify if repo_name refers to source
263 263 :param statuses: list of pull request statuses
264 264 :param opened_by: author user of the pull request
265 265 :param offset: pagination offset
266 266 :param length: length of returned list
267 267 :param order_by: order of the returned list
268 268 :param order_dir: 'asc' or 'desc' ordering direction
269 269 :returns: list of pull requests
270 270 """
271 271 pull_requests = self.get_all(
272 272 repo_name, search_q=search_q, source=source, statuses=statuses,
273 273 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
274 274
275 275 _filtered_pull_requests = []
276 276 for pr in pull_requests:
277 277 status = pr.calculated_review_status()
278 278 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
279 279 ChangesetStatus.STATUS_UNDER_REVIEW]:
280 280 _filtered_pull_requests.append(pr)
281 281 if length:
282 282 return _filtered_pull_requests[offset:offset+length]
283 283 else:
284 284 return _filtered_pull_requests
285 285
286 286 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
287 287 opened_by=None, user_id=None):
288 288 """
289 289 Count the number of pull requests for a specific repository that are
290 290 awaiting review from a specific user.
291 291
292 292 :param repo_name: target or source repo
293 293 :param search_q: filter by text
294 294 :param source: boolean flag to specify if repo_name refers to source
295 295 :param statuses: list of pull request statuses
296 296 :param opened_by: author user of the pull request
297 297 :param user_id: reviewer user of the pull request
298 298 :returns: int number of pull requests
299 299 """
300 300 pull_requests = self.get_awaiting_my_review(
301 301 repo_name, search_q=search_q, source=source, statuses=statuses,
302 302 opened_by=opened_by, user_id=user_id)
303 303
304 304 return len(pull_requests)
305 305
306 306 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
307 307 opened_by=None, user_id=None, offset=0,
308 308 length=None, order_by=None, order_dir='desc'):
309 309 """
310 310 Get all pull requests for a specific repository that are awaiting
311 311 review from a specific user.
312 312
313 313 :param repo_name: target or source repo
314 314 :param search_q: filter by text
315 315 :param source: boolean flag to specify if repo_name refers to source
316 316 :param statuses: list of pull request statuses
317 317 :param opened_by: author user of the pull request
318 318 :param user_id: reviewer user of the pull request
319 319 :param offset: pagination offset
320 320 :param length: length of returned list
321 321 :param order_by: order of the returned list
322 322 :param order_dir: 'asc' or 'desc' ordering direction
323 323 :returns: list of pull requests
324 324 """
325 325 pull_requests = self.get_all(
326 326 repo_name, search_q=search_q, source=source, statuses=statuses,
327 327 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
328 328
329 329 _my = PullRequestModel().get_not_reviewed(user_id)
330 330 my_participation = []
331 331 for pr in pull_requests:
332 332 if pr in _my:
333 333 my_participation.append(pr)
334 334 _filtered_pull_requests = my_participation
335 335 if length:
336 336 return _filtered_pull_requests[offset:offset+length]
337 337 else:
338 338 return _filtered_pull_requests
339 339
340 340 def get_not_reviewed(self, user_id):
341 341 return [
342 342 x.pull_request for x in PullRequestReviewers.query().filter(
343 343 PullRequestReviewers.user_id == user_id).all()
344 344 ]
345 345
346 346 def _prepare_participating_query(self, user_id=None, statuses=None,
347 347 order_by=None, order_dir='desc'):
348 348 q = PullRequest.query()
349 349 if user_id:
350 350 reviewers_subquery = Session().query(
351 351 PullRequestReviewers.pull_request_id).filter(
352 352 PullRequestReviewers.user_id == user_id).subquery()
353 353 user_filter = or_(
354 354 PullRequest.user_id == user_id,
355 355 PullRequest.pull_request_id.in_(reviewers_subquery)
356 356 )
357 357 q = PullRequest.query().filter(user_filter)
358 358
359 359 # closed,opened
360 360 if statuses:
361 361 q = q.filter(PullRequest.status.in_(statuses))
362 362
363 363 if order_by:
364 364 order_map = {
365 365 'name_raw': PullRequest.pull_request_id,
366 366 'title': PullRequest.title,
367 367 'updated_on_raw': PullRequest.updated_on,
368 368 'target_repo': PullRequest.target_repo_id
369 369 }
370 370 if order_dir == 'asc':
371 371 q = q.order_by(order_map[order_by].asc())
372 372 else:
373 373 q = q.order_by(order_map[order_by].desc())
374 374
375 375 return q
376 376
377 377 def count_im_participating_in(self, user_id=None, statuses=None):
378 378 q = self._prepare_participating_query(user_id, statuses=statuses)
379 379 return q.count()
380 380
381 381 def get_im_participating_in(
382 382 self, user_id=None, statuses=None, offset=0,
383 383 length=None, order_by=None, order_dir='desc'):
384 384 """
385 385 Get all pull requests that I'm participating in, or have opened
386 386 """
387 387
388 388 q = self._prepare_participating_query(
389 389 user_id, statuses=statuses, order_by=order_by,
390 390 order_dir=order_dir)
391 391
392 392 if length:
393 393 pull_requests = q.limit(length).offset(offset).all()
394 394 else:
395 395 pull_requests = q.all()
396 396
397 397 return pull_requests
398 398
399 399 def get_versions(self, pull_request):
400 400 """
401 401 returns versions of the pull request sorted by version ID ascending
402 402 """
403 403 return PullRequestVersion.query()\
404 404 .filter(PullRequestVersion.pull_request == pull_request)\
405 405 .order_by(PullRequestVersion.pull_request_version_id.asc())\
406 406 .all()
407 407
408 408 def get_pr_version(self, pull_request_id, version=None):
409 409 at_version = None
410 410
411 411 if version and version == 'latest':
412 412 pull_request_ver = PullRequest.get(pull_request_id)
413 413 pull_request_obj = pull_request_ver
414 414 _org_pull_request_obj = pull_request_obj
415 415 at_version = 'latest'
416 416 elif version:
417 417 pull_request_ver = PullRequestVersion.get_or_404(version)
418 418 pull_request_obj = pull_request_ver
419 419 _org_pull_request_obj = pull_request_ver.pull_request
420 420 at_version = pull_request_ver.pull_request_version_id
421 421 else:
422 422 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
423 423 pull_request_id)
424 424
425 425 pull_request_display_obj = PullRequest.get_pr_display_object(
426 426 pull_request_obj, _org_pull_request_obj)
427 427
428 428 return _org_pull_request_obj, pull_request_obj, \
429 429 pull_request_display_obj, at_version
430 430
431 431 def create(self, created_by, source_repo, source_ref, target_repo,
432 432 target_ref, revisions, reviewers, title, description=None,
433 433 description_renderer=None,
434 434 reviewer_data=None, translator=None, auth_user=None):
435 435 translator = translator or get_current_request().translate
436 436
437 437 created_by_user = self._get_user(created_by)
438 438 auth_user = auth_user or created_by_user.AuthUser()
439 439 source_repo = self._get_repo(source_repo)
440 440 target_repo = self._get_repo(target_repo)
441 441
442 442 pull_request = PullRequest()
443 443 pull_request.source_repo = source_repo
444 444 pull_request.source_ref = source_ref
445 445 pull_request.target_repo = target_repo
446 446 pull_request.target_ref = target_ref
447 447 pull_request.revisions = revisions
448 448 pull_request.title = title
449 449 pull_request.description = description
450 450 pull_request.description_renderer = description_renderer
451 451 pull_request.author = created_by_user
452 452 pull_request.reviewer_data = reviewer_data
453 453 pull_request.pull_request_state = pull_request.STATE_CREATING
454 454 Session().add(pull_request)
455 455 Session().flush()
456 456
457 457 reviewer_ids = set()
458 458 # members / reviewers
459 459 for reviewer_object in reviewers:
460 460 user_id, reasons, mandatory, rules = reviewer_object
461 461 user = self._get_user(user_id)
462 462
463 463 # skip duplicates
464 464 if user.user_id in reviewer_ids:
465 465 continue
466 466
467 467 reviewer_ids.add(user.user_id)
468 468
469 469 reviewer = PullRequestReviewers()
470 470 reviewer.user = user
471 471 reviewer.pull_request = pull_request
472 472 reviewer.reasons = reasons
473 473 reviewer.mandatory = mandatory
474 474
475 475 # NOTE(marcink): pick only first rule for now
476 476 rule_id = list(rules)[0] if rules else None
477 477 rule = RepoReviewRule.get(rule_id) if rule_id else None
478 478 if rule:
479 479 review_group = rule.user_group_vote_rule(user_id)
480 480 # we check if this particular reviewer is a member of a voting group
481 481 if review_group:
482 482 # NOTE(marcink):
483 483 # the user can be a member of more than one group, but we pick
484 484 # the first one, same as the default reviewers algo
485 485 review_group = review_group[0]
486 486
487 487 rule_data = {
488 488 'rule_name':
489 489 rule.review_rule_name,
490 490 'rule_user_group_entry_id':
491 491 review_group.repo_review_rule_users_group_id,
492 492 'rule_user_group_name':
493 493 review_group.users_group.users_group_name,
494 494 'rule_user_group_members':
495 495 [x.user.username for x in review_group.users_group.members],
496 496 'rule_user_group_members_id':
497 497 [x.user.user_id for x in review_group.users_group.members],
498 498 }
499 499 # e.g {'vote_rule': -1, 'mandatory': True}
500 500 rule_data.update(review_group.rule_data())
501 501
502 502 reviewer.rule_data = rule_data
503 503
504 504 Session().add(reviewer)
505 505 Session().flush()
506 506
507 507 # Set approval status to "Under Review" for all commits which are
508 508 # part of this pull request.
509 509 ChangesetStatusModel().set_status(
510 510 repo=target_repo,
511 511 status=ChangesetStatus.STATUS_UNDER_REVIEW,
512 512 user=created_by_user,
513 513 pull_request=pull_request
514 514 )
515 515 # we commit early at this point. This has to do with the fact
516 516 # that the queries above do some row locking. Because of that
517 517 # we need to commit and finish the transaction before the validate call
518 518 # below, which for large repos could take long and cause long row locks
519 519 Session().commit()
520 520
521 521 # prepare workspace, and run initial merge simulation. Set state during that
522 522 # operation
523 523 pull_request = PullRequest.get(pull_request.pull_request_id)
524 524
525 525 # set as merging, for merge simulation, and if finished to created so we mark
526 526 # simulation is working fine
527 527 with pull_request.set_state(PullRequest.STATE_MERGING,
528 528 final_state=PullRequest.STATE_CREATED) as state_obj:
529 529 MergeCheck.validate(
530 530 pull_request, auth_user=auth_user, translator=translator)
531 531
532 532 self.notify_reviewers(pull_request, reviewer_ids)
533 533 self.trigger_pull_request_hook(
534 534 pull_request, created_by_user, 'create')
535 535
536 536 creation_data = pull_request.get_api_data(with_merge_state=False)
537 537 self._log_audit_action(
538 538 'repo.pull_request.create', {'data': creation_data},
539 539 auth_user, pull_request)
540 540
541 541 return pull_request
542 542
543 543 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
544 544 pull_request = self.__get_pull_request(pull_request)
545 545 target_scm = pull_request.target_repo.scm_instance()
546 546 if action == 'create':
547 547 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
548 548 elif action == 'merge':
549 549 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
550 550 elif action == 'close':
551 551 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
552 552 elif action == 'review_status_change':
553 553 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
554 554 elif action == 'update':
555 555 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
556 556 elif action == 'comment':
557 557 # dummy hook for comments; we want this function to handle all cases
558 558 def trigger_hook(*args, **kwargs):
559 559 pass
560 560 comment = data['comment']
561 561 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
562 562 else:
563 563 return
564 564
565 565 trigger_hook(
566 566 username=user.username,
567 567 repo_name=pull_request.target_repo.repo_name,
568 568 repo_alias=target_scm.alias,
569 569 pull_request=pull_request,
570 570 data=data)
571 571
572 572 def _get_commit_ids(self, pull_request):
573 573 """
574 574 Return the commit ids of the merged pull request.
575 575
576 576 This method does not yet deal correctly with the lack of autoupdates
577 577 nor with implicit target updates.
578 578 For example: if a commit in the source repo is already in the target it
579 579 will be reported anyway.
580 580 """
581 581 merge_rev = pull_request.merge_rev
582 582 if merge_rev is None:
583 583 raise ValueError('This pull request was not merged yet')
584 584
585 585 commit_ids = list(pull_request.revisions)
586 586 if merge_rev not in commit_ids:
587 587 commit_ids.append(merge_rev)
588 588
589 589 return commit_ids
590 590
591 591 def merge_repo(self, pull_request, user, extras):
592 592 log.debug("Merging pull request %s", pull_request.pull_request_id)
593 593 extras['user_agent'] = 'internal-merge'
594 594 merge_state = self._merge_pull_request(pull_request, user, extras)
595 595 if merge_state.executed:
596 596 log.debug("Merge was successful, updating the pull request comments.")
597 597 self._comment_and_close_pr(pull_request, user, merge_state)
598 598
599 599 self._log_audit_action(
600 600 'repo.pull_request.merge',
601 601 {'merge_state': merge_state.__dict__},
602 602 user, pull_request)
603 603
604 604 else:
605 605 log.warn("Merge failed, not updating the pull request.")
606 606 return merge_state
607 607
608 608 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
609 609 target_vcs = pull_request.target_repo.scm_instance()
610 610 source_vcs = pull_request.source_repo.scm_instance()
611 611
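# build the merge commit message from the PR id/title and the repo/ref
# names; e.g. with a default template this might render to something like
# 'Merge pull request !7 from repo-a branch-x' (hypothetical rendering)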
612 612 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
613 613 pr_id=pull_request.pull_request_id,
614 614 pr_title=pull_request.title,
615 615 source_repo=source_vcs.name,
616 616 source_ref_name=pull_request.source_ref_parts.name,
617 617 target_repo=target_vcs.name,
618 618 target_ref_name=pull_request.target_ref_parts.name,
619 619 )
620 620
621 621 workspace_id = self._workspace_id(pull_request)
622 622 repo_id = pull_request.target_repo.repo_id
623 623 use_rebase = self._use_rebase_for_merging(pull_request)
624 624 close_branch = self._close_branch_before_merging(pull_request)
625 625
626 626 target_ref = self._refresh_reference(
627 627 pull_request.target_ref_parts, target_vcs)
628 628
629 629 callback_daemon, extras = prepare_callback_daemon(
630 630 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
631 631 host=vcs_settings.HOOKS_HOST,
632 632 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
633 633
634 634 with callback_daemon:
635 635 # TODO: johbo: Implement a clean way to run a config_override
636 636 # for a single call.
637 637 target_vcs.config.set(
638 638 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
639 639
640 640 user_name = user.short_contact
641 641 merge_state = target_vcs.merge(
642 642 repo_id, workspace_id, target_ref, source_vcs,
643 643 pull_request.source_ref_parts,
644 644 user_name=user_name, user_email=user.email,
645 645 message=message, use_rebase=use_rebase,
646 646 close_branch=close_branch)
647 647 return merge_state
648 648
649 649 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
650 650 pull_request.merge_rev = merge_state.merge_ref.commit_id
651 651 pull_request.updated_on = datetime.datetime.now()
652 652 close_msg = close_msg or 'Pull request merged and closed'
653 653
654 654 CommentsModel().create(
655 655 text=safe_unicode(close_msg),
656 656 repo=pull_request.target_repo.repo_id,
657 657 user=user.user_id,
658 658 pull_request=pull_request.pull_request_id,
659 659 f_path=None,
660 660 line_no=None,
661 661 closing_pr=True
662 662 )
663 663
664 664 Session().add(pull_request)
665 665 Session().flush()
666 666 # TODO: paris: replace invalidation with a less radical solution
667 667 ScmModel().mark_for_invalidation(
668 668 pull_request.target_repo.repo_name)
669 669 self.trigger_pull_request_hook(pull_request, user, 'merge')
670 670
671 671 def has_valid_update_type(self, pull_request):
672 672 source_ref_type = pull_request.source_ref_parts.type
673 673 return source_ref_type in self.REF_TYPES
674 674
675 675 def update_commits(self, pull_request):
676 676 """
677 677 Get the updated list of commits for the pull request
678 678 and return the new pull request version and the list
679 679 of commits processed by this update action
680 680 """
681 681 pull_request = self.__get_pull_request(pull_request)
682 682 source_ref_type = pull_request.source_ref_parts.type
683 683 source_ref_name = pull_request.source_ref_parts.name
684 684 source_ref_id = pull_request.source_ref_parts.commit_id
685 685
686 686 target_ref_type = pull_request.target_ref_parts.type
687 687 target_ref_name = pull_request.target_ref_parts.name
688 688 target_ref_id = pull_request.target_ref_parts.commit_id
689 689
690 690 if not self.has_valid_update_type(pull_request):
691 691 log.debug("Skipping update of pull request %s due to ref type: %s",
692 692 pull_request, source_ref_type)
693 693 return UpdateResponse(
694 694 executed=False,
695 695 reason=UpdateFailureReason.WRONG_REF_TYPE,
696 696 old=pull_request, new=None, changes=None,
697 697 source_changed=False, target_changed=False)
698 698
699 699 # source repo
700 700 source_repo = pull_request.source_repo.scm_instance()
701 701
702 702 try:
703 703 source_commit = source_repo.get_commit(commit_id=source_ref_name)
704 704 except CommitDoesNotExistError:
705 705 return UpdateResponse(
706 706 executed=False,
707 707 reason=UpdateFailureReason.MISSING_SOURCE_REF,
708 708 old=pull_request, new=None, changes=None,
709 709 source_changed=False, target_changed=False)
710 710
711 711 source_changed = source_ref_id != source_commit.raw_id
712 712
713 713 # target repo
714 714 target_repo = pull_request.target_repo.scm_instance()
715 715
716 716 try:
717 717 target_commit = target_repo.get_commit(commit_id=target_ref_name)
718 718 except CommitDoesNotExistError:
719 719 return UpdateResponse(
720 720 executed=False,
721 721 reason=UpdateFailureReason.MISSING_TARGET_REF,
722 722 old=pull_request, new=None, changes=None,
723 723 source_changed=False, target_changed=False)
724 724 target_changed = target_ref_id != target_commit.raw_id
725 725
726 726 if not (source_changed or target_changed):
727 727 log.debug("Nothing changed in pull request %s", pull_request)
728 728 return UpdateResponse(
729 729 executed=False,
730 730 reason=UpdateFailureReason.NO_CHANGE,
731 731 old=pull_request, new=None, changes=None,
732 732 source_changed=source_changed, target_changed=target_changed)
733 733
734 734 change_in_found = 'target repo' if target_changed else 'source repo'
735 735 log.debug('Updating pull request because of change in %s detected',
736 736 change_in_found)
737 737
738 738 # Finally there is a need for an update; in case of a source change
739 739 # we create a new version, else just an update
740 740 if source_changed:
741 741 pull_request_version = self._create_version_from_snapshot(pull_request)
742 742 self._link_comments_to_version(pull_request_version)
743 743 else:
744 744 try:
745 745 ver = pull_request.versions[-1]
746 746 except IndexError:
747 747 ver = None
748 748
749 749 pull_request.pull_request_version_id = \
750 750 ver.pull_request_version_id if ver else None
751 751 pull_request_version = pull_request
752 752
753 753 try:
754 754 if target_ref_type in self.REF_TYPES:
755 755 target_commit = target_repo.get_commit(target_ref_name)
756 756 else:
757 757 target_commit = target_repo.get_commit(target_ref_id)
758 758 except CommitDoesNotExistError:
759 759 return UpdateResponse(
760 760 executed=False,
761 761 reason=UpdateFailureReason.MISSING_TARGET_REF,
762 762 old=pull_request, new=None, changes=None,
763 763 source_changed=source_changed, target_changed=target_changed)
764 764
765 765 # re-compute commit ids
766 766 old_commit_ids = pull_request.revisions
767 767 pre_load = ["author", "date", "message", "branch"]
768 768 commit_ranges = target_repo.compare(
769 769 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
770 770 pre_load=pre_load)
771 771
772 772 ancestor = source_repo.get_common_ancestor(
773 773 source_commit.raw_id, target_commit.raw_id, target_repo)
774 774
775 775 pull_request.source_ref = '%s:%s:%s' % (
776 776 source_ref_type, source_ref_name, source_commit.raw_id)
777 777 pull_request.target_ref = '%s:%s:%s' % (
778 778 target_ref_type, target_ref_name, ancestor)
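# e.g. both refs are stored as 'type:name:commit_id', such as
# 'branch:default:<commit-id>' (hypothetical values)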
779 779
780 780 pull_request.revisions = [
781 781 commit.raw_id for commit in reversed(commit_ranges)]
782 782 pull_request.updated_on = datetime.datetime.now()
783 783 Session().add(pull_request)
784 784 new_commit_ids = pull_request.revisions
785 785
786 786 old_diff_data, new_diff_data = self._generate_update_diffs(
787 787 pull_request, pull_request_version)
788 788
789 789 # calculate commit and file changes
790 790 changes = self._calculate_commit_id_changes(
791 791 old_commit_ids, new_commit_ids)
792 792 file_changes = self._calculate_file_changes(
793 793 old_diff_data, new_diff_data)
794 794
795 795 # set comments as outdated if DIFFS changed
796 796 CommentsModel().outdate_comments(
797 797 pull_request, old_diff_data=old_diff_data,
798 798 new_diff_data=new_diff_data)
799 799
800 800 commit_changes = (changes.added or changes.removed)
801 801 file_node_changes = (
802 802 file_changes.added or file_changes.modified or file_changes.removed)
803 803 pr_has_changes = commit_changes or file_node_changes
804 804
805 805 # Add an automatic comment to the pull request, in case
806 806 # anything has changed
807 807 if pr_has_changes:
808 808 update_comment = CommentsModel().create(
809 809 text=self._render_update_message(changes, file_changes),
810 810 repo=pull_request.target_repo,
811 811 user=pull_request.author,
812 812 pull_request=pull_request,
813 813 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
814 814
815 815 # Update status to "Under Review" for added commits
816 816 for commit_id in changes.added:
817 817 ChangesetStatusModel().set_status(
818 818 repo=pull_request.source_repo,
819 819 status=ChangesetStatus.STATUS_UNDER_REVIEW,
820 820 comment=update_comment,
821 821 user=pull_request.author,
822 822 pull_request=pull_request,
823 823 revision=commit_id)
824 824
825 825 log.debug(
826 826 'Updated pull request %s, added_ids: %s, common_ids: %s, '
827 827 'removed_ids: %s', pull_request.pull_request_id,
828 828 changes.added, changes.common, changes.removed)
829 829 log.debug(
830 830 'Updated pull request with the following file changes: %s',
831 831 file_changes)
832 832
833 833 log.info(
834 834 "Updated pull request %s from commit %s to commit %s, "
835 835 "stored new version %s of this pull request.",
836 836 pull_request.pull_request_id, source_ref_id,
837 837 pull_request.source_ref_parts.commit_id,
838 838 pull_request_version.pull_request_version_id)
839 839 Session().commit()
840 840 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
841 841
842 842 return UpdateResponse(
843 843 executed=True, reason=UpdateFailureReason.NONE,
844 844 old=pull_request, new=pull_request_version, changes=changes,
845 845 source_changed=source_changed, target_changed=target_changed)
846 846
847 847 def _create_version_from_snapshot(self, pull_request):
848 848 version = PullRequestVersion()
849 849 version.title = pull_request.title
850 850 version.description = pull_request.description
851 851 version.status = pull_request.status
852 852 version.pull_request_state = pull_request.pull_request_state
853 853 version.created_on = datetime.datetime.now()
854 854 version.updated_on = pull_request.updated_on
855 855 version.user_id = pull_request.user_id
856 856 version.source_repo = pull_request.source_repo
857 857 version.source_ref = pull_request.source_ref
858 858 version.target_repo = pull_request.target_repo
859 859 version.target_ref = pull_request.target_ref
860 860
861 861 version._last_merge_source_rev = pull_request._last_merge_source_rev
862 862 version._last_merge_target_rev = pull_request._last_merge_target_rev
863 863 version.last_merge_status = pull_request.last_merge_status
864 864 version.shadow_merge_ref = pull_request.shadow_merge_ref
865 865 version.merge_rev = pull_request.merge_rev
866 866 version.reviewer_data = pull_request.reviewer_data
867 867
868 868 version.revisions = pull_request.revisions
869 869 version.pull_request = pull_request
870 870 Session().add(version)
871 871 Session().flush()
872 872
873 873 return version
874 874
875 875 def _generate_update_diffs(self, pull_request, pull_request_version):
876 876
877 877 diff_context = (
878 878 self.DIFF_CONTEXT +
879 879 CommentsModel.needed_extra_diff_context())
880 880 hide_whitespace_changes = False
881 881 source_repo = pull_request_version.source_repo
882 882 source_ref_id = pull_request_version.source_ref_parts.commit_id
883 883 target_ref_id = pull_request_version.target_ref_parts.commit_id
884 884 old_diff = self._get_diff_from_pr_or_version(
885 885 source_repo, source_ref_id, target_ref_id,
886 886 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
887 887
888 888 source_repo = pull_request.source_repo
889 889 source_ref_id = pull_request.source_ref_parts.commit_id
890 890 target_ref_id = pull_request.target_ref_parts.commit_id
891 891
892 892 new_diff = self._get_diff_from_pr_or_version(
893 893 source_repo, source_ref_id, target_ref_id,
894 894 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
895 895
896 896 old_diff_data = diffs.DiffProcessor(old_diff)
897 897 old_diff_data.prepare()
898 898 new_diff_data = diffs.DiffProcessor(new_diff)
899 899 new_diff_data.prepare()
900 900
901 901 return old_diff_data, new_diff_data
902 902
903 903 def _link_comments_to_version(self, pull_request_version):
904 904 """
905 905 Link all unlinked comments of this pull request to the given version.
906 906
907 907 :param pull_request_version: The `PullRequestVersion` to which
908 908 the comments shall be linked.
909 909
910 910 """
911 911 pull_request = pull_request_version.pull_request
912 912 comments = ChangesetComment.query()\
913 913 .filter(
914 914 # TODO: johbo: Should we query for the repo at all here?
915 915 # Pending decision on how comments of PRs are to be related
916 916 # to either the source repo, the target repo or no repo at all.
917 917 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
918 918 ChangesetComment.pull_request == pull_request,
919 919 ChangesetComment.pull_request_version == None)\
920 920 .order_by(ChangesetComment.comment_id.asc())
921 921
922 922 # TODO: johbo: Find out why this breaks if it is done in a bulk
923 923 # operation.
924 924 for comment in comments:
925 925 comment.pull_request_version_id = (
926 926 pull_request_version.pull_request_version_id)
927 927 Session().add(comment)
928 928
929 929 def _calculate_commit_id_changes(self, old_ids, new_ids):
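# e.g. old_ids=['a', 'b'], new_ids=['b', 'c'] gives
# added=['c'], common=['b'], removed=['a'], total=['b', 'c']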
930 930 added = [x for x in new_ids if x not in old_ids]
931 931 common = [x for x in new_ids if x in old_ids]
932 932 removed = [x for x in old_ids if x not in new_ids]
933 933 total = new_ids
934 934 return ChangeTuple(added, common, removed, total)
935 935
936 936 def _calculate_file_changes(self, old_diff_data, new_diff_data):
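# compares per-file md5 hashes of the raw diffs between two versions;
# e.g. (hypothetical files) old has f1, f2 and new has a changed f2 plus f3:
# added=['f3'], modified=['f2'], removed=['f1']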
937 937
938 938 old_files = OrderedDict()
939 939 for diff_data in old_diff_data.parsed_diff:
940 940 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
941 941
942 942 added_files = []
943 943 modified_files = []
944 944 removed_files = []
945 945 for diff_data in new_diff_data.parsed_diff:
946 946 new_filename = diff_data['filename']
947 947 new_hash = md5_safe(diff_data['raw_diff'])
948 948
949 949 old_hash = old_files.get(new_filename)
950 950 if not old_hash:
951 951 # file is not present in old diff, means it's added
952 952 added_files.append(new_filename)
953 953 else:
954 954 if new_hash != old_hash:
955 955 modified_files.append(new_filename)
956 956 # now remove a file from old, since we have seen it already
957 957 del old_files[new_filename]
958 958
959 959 # removed files are those present in old but not in NEW;
960 960 # since we remove old files that appear in the new diff, any
961 961 # left-overs are the removed files
962 962 removed_files.extend(old_files.keys())
963 963
964 964 return FileChangeTuple(added_files, modified_files, removed_files)
965 965
966 966 def _render_update_message(self, changes, file_changes):
967 967 """
968 968 Render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
969 969 so it always looks the same regardless of which default
970 970 renderer the system is using.
971 971
972 972 :param changes: changes named tuple
973 973 :param file_changes: file changes named tuple
974 974
975 975 """
976 976 new_status = ChangesetStatus.get_status_lbl(
977 977 ChangesetStatus.STATUS_UNDER_REVIEW)
978 978
979 979 changed_files = (
980 980 file_changes.added + file_changes.modified + file_changes.removed)
981 981
982 982 params = {
983 983 'under_review_label': new_status,
984 984 'added_commits': changes.added,
985 985 'removed_commits': changes.removed,
986 986 'changed_files': changed_files,
987 987 'added_files': file_changes.added,
988 988 'modified_files': file_changes.modified,
989 989 'removed_files': file_changes.removed,
990 990 }
991 991 renderer = RstTemplateRenderer()
992 992 return renderer.render('pull_request_update.mako', **params)
993 993
994 994 def edit(self, pull_request, title, description, description_renderer, user):
995 995 pull_request = self.__get_pull_request(pull_request)
996 996 old_data = pull_request.get_api_data(with_merge_state=False)
997 997 if pull_request.is_closed():
998 998 raise ValueError('This pull request is closed')
999 999 if title:
1000 1000 pull_request.title = title
1001 1001 pull_request.description = description
1002 1002 pull_request.updated_on = datetime.datetime.now()
1003 1003 pull_request.description_renderer = description_renderer
1004 1004 Session().add(pull_request)
1005 1005 self._log_audit_action(
1006 1006 'repo.pull_request.edit', {'old_data': old_data},
1007 1007 user, pull_request)
1008 1008
1009 1009 def update_reviewers(self, pull_request, reviewer_data, user):
1010 1010 """
1011 1011 Update the reviewers in the pull request
1012 1012
1013 1013 :param pull_request: the pr to update
1014 1014 :param reviewer_data: list of tuples
1015 1015 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1016 1016 """
1017 1017 pull_request = self.__get_pull_request(pull_request)
1018 1018 if pull_request.is_closed():
1019 1019 raise ValueError('This pull request is closed')
1020 1020
1021 1021 reviewers = {}
1022 1022 for user_id, reasons, mandatory, rules in reviewer_data:
1023 1023 if isinstance(user_id, (int, compat.string_types)):
1024 1024 user_id = self._get_user(user_id).user_id
1025 1025 reviewers[user_id] = {
1026 1026 'reasons': reasons, 'mandatory': mandatory}
1027 1027
1028 1028 reviewers_ids = set(reviewers.keys())
1029 1029 current_reviewers = PullRequestReviewers.query()\
1030 1030 .filter(PullRequestReviewers.pull_request ==
1031 1031 pull_request).all()
1032 1032 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1033 1033
1034 1034 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1035 1035 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
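# e.g. current={1, 2} and new={2, 3} gives ids_to_add={3}, ids_to_remove={1}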
1036 1036
1037 1037 log.debug("Adding %s reviewers", ids_to_add)
1038 1038 log.debug("Removing %s reviewers", ids_to_remove)
1039 1039 changed = False
1040 1040 added_audit_reviewers = []
1041 1041 removed_audit_reviewers = []
1042 1042
1043 1043 for uid in ids_to_add:
1044 1044 changed = True
1045 1045 _usr = self._get_user(uid)
1046 1046 reviewer = PullRequestReviewers()
1047 1047 reviewer.user = _usr
1048 1048 reviewer.pull_request = pull_request
1049 1049 reviewer.reasons = reviewers[uid]['reasons']
1050 1050 # NOTE(marcink): mandatory shouldn't be changed now
1051 1051 # reviewer.mandatory = reviewers[uid]['mandatory']
1052 1052 Session().add(reviewer)
1053 1053 added_audit_reviewers.append(reviewer.get_dict())
1054 1054
1055 1055 for uid in ids_to_remove:
1056 1056 changed = True
1057 1057 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1058 1058 # that handles and fixes cases where the same reviewer was added twice.
1059 1059 # this CAN happen due to the lack of DB checks
1060 1060 reviewers = PullRequestReviewers.query()\
1061 1061 .filter(PullRequestReviewers.user_id == uid,
1062 1062 PullRequestReviewers.pull_request == pull_request)\
1063 1063 .all()
1064 1064
1065 1065 for obj in reviewers:
1066 1066 removed_audit_reviewers.append(obj.get_dict())
1067 1067 Session().delete(obj)
1068 1068
1069 1069 if changed:
1070 1070 Session().expire_all()
1071 1071 pull_request.updated_on = datetime.datetime.now()
1072 1072 Session().add(pull_request)
1073 1073
1074 1074 # finally store audit logs
1075 1075 for user_data in added_audit_reviewers:
1076 1076 self._log_audit_action(
1077 1077 'repo.pull_request.reviewer.add', {'data': user_data},
1078 1078 user, pull_request)
1079 1079 for user_data in removed_audit_reviewers:
1080 1080 self._log_audit_action(
1081 1081 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1082 1082 user, pull_request)
1083 1083
1084 1084 self.notify_reviewers(pull_request, ids_to_add)
1085 1085 return ids_to_add, ids_to_remove
1086 1086
1087 1087 def get_url(self, pull_request, request=None, permalink=False):
1088 1088 if not request:
1089 1089 request = get_current_request()
1090 1090
1091 1091 if permalink:
1092 1092 return request.route_url(
1093 1093 'pull_requests_global',
1094 1094 pull_request_id=pull_request.pull_request_id,)
1095 1095 else:
1096 1096 return request.route_url('pullrequest_show',
1097 1097 repo_name=safe_str(pull_request.target_repo.repo_name),
1098 1098 pull_request_id=pull_request.pull_request_id,)
1099 1099
1100 1100 def get_shadow_clone_url(self, pull_request, request=None):
1101 1101 """
1102 1102 Returns a qualified URL pointing to the shadow repository. If this pull
1103 1103 request is closed there is no shadow repository and ``None`` will be
1104 1104 returned.
1105 1105 """
1106 1106 if pull_request.is_closed():
1107 1107 return None
1108 1108 else:
1109 1109 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1110 1110 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1111 1111
1112 1112 def notify_reviewers(self, pull_request, reviewers_ids):
1113 1113 # notification to reviewers
1114 1114 if not reviewers_ids:
1115 1115 return
1116 1116
1117 1117 log.debug('Notify following reviewers about pull-request %s', reviewers_ids)
1118 1118
1119 1119 pull_request_obj = pull_request
1120 1120 # the recipients of the notification are the given reviewer ids
1121 1121 recipients = reviewers_ids
1122 1122 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1123 1123
1124 1124 pr_source_repo = pull_request_obj.source_repo
1125 1125 pr_target_repo = pull_request_obj.target_repo
1126 1126
1127 1127 pr_url = h.route_url('pullrequest_show',
1128 1128 repo_name=pr_target_repo.repo_name,
1129 1129 pull_request_id=pull_request_obj.pull_request_id,)
1130 1130
1131 1131 # set some variables for email notification
1132 1132 pr_target_repo_url = h.route_url(
1133 1133 'repo_summary', repo_name=pr_target_repo.repo_name)
1134 1134
1135 1135 pr_source_repo_url = h.route_url(
1136 1136 'repo_summary', repo_name=pr_source_repo.repo_name)
1137 1137
1138 1138 # pull request specifics
1139 1139 pull_request_commits = [
1140 1140 (x.raw_id, x.message)
1141 1141 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1142 1142
1143 1143 kwargs = {
1144 1144 'user': pull_request.author,
1145 1145 'pull_request': pull_request_obj,
1146 1146 'pull_request_commits': pull_request_commits,
1147 1147
1148 1148 'pull_request_target_repo': pr_target_repo,
1149 1149 'pull_request_target_repo_url': pr_target_repo_url,
1150 1150
1151 1151 'pull_request_source_repo': pr_source_repo,
1152 1152 'pull_request_source_repo_url': pr_source_repo_url,
1153 1153
1154 1154 'pull_request_url': pr_url,
1155 1155 }
1156 1156
1157 1157 # pre-generate the subject for notification itself
1158 1158 (subject,
1159 1159 _h, _e, # we don't care about those
1160 1160 body_plaintext) = EmailNotificationModel().render_email(
1161 1161 notification_type, **kwargs)
1162 1162
1163 1163 # create notification objects, and emails
1164 1164 NotificationModel().create(
1165 1165 created_by=pull_request.author,
1166 1166 notification_subject=subject,
1167 1167 notification_body=body_plaintext,
1168 1168 notification_type=notification_type,
1169 1169 recipients=recipients,
1170 1170 email_kwargs=kwargs,
1171 1171 )
1172 1172
1173 1173 def delete(self, pull_request, user):
1174 1174 pull_request = self.__get_pull_request(pull_request)
1175 1175 old_data = pull_request.get_api_data(with_merge_state=False)
1176 1176 self._cleanup_merge_workspace(pull_request)
1177 1177 self._log_audit_action(
1178 1178 'repo.pull_request.delete', {'old_data': old_data},
1179 1179 user, pull_request)
1180 1180 Session().delete(pull_request)
1181 1181
1182 1182 def close_pull_request(self, pull_request, user):
1183 1183 pull_request = self.__get_pull_request(pull_request)
1184 1184 self._cleanup_merge_workspace(pull_request)
1185 1185 pull_request.status = PullRequest.STATUS_CLOSED
1186 1186 pull_request.updated_on = datetime.datetime.now()
1187 1187 Session().add(pull_request)
1188 1188 self.trigger_pull_request_hook(
1189 1189 pull_request, pull_request.author, 'close')
1190 1190
1191 1191 pr_data = pull_request.get_api_data(with_merge_state=False)
1192 1192 self._log_audit_action(
1193 1193 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1194 1194
1195 1195 def close_pull_request_with_comment(
1196 1196 self, pull_request, user, repo, message=None, auth_user=None):
1197 1197
1198 1198 pull_request_review_status = pull_request.calculated_review_status()
1199 1199
1200 1200 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1201 1201 # approved only if we have voting consent
1202 1202 status = ChangesetStatus.STATUS_APPROVED
1203 1203 else:
1204 1204 status = ChangesetStatus.STATUS_REJECTED
1205 1205 status_lbl = ChangesetStatus.get_status_lbl(status)
1206 1206
1207 1207 default_message = (
1208 1208 'Closing with status change {transition_icon} {status}.'
1209 1209 ).format(transition_icon='>', status=status_lbl)
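# e.g. 'Closing with status change > Approved.' when voting consent was reached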
1210 1210 text = message or default_message
1211 1211
1212 1212 # create a comment, and link it to new status
1213 1213 comment = CommentsModel().create(
1214 1214 text=text,
1215 1215 repo=repo.repo_id,
1216 1216 user=user.user_id,
1217 1217 pull_request=pull_request.pull_request_id,
1218 1218 status_change=status_lbl,
1219 1219 status_change_type=status,
1220 1220 closing_pr=True,
1221 1221 auth_user=auth_user,
1222 1222 )
1223 1223
1224 1224 # calculate old status before we change it
1225 1225 old_calculated_status = pull_request.calculated_review_status()
1226 1226 ChangesetStatusModel().set_status(
1227 1227 repo.repo_id,
1228 1228 status,
1229 1229 user.user_id,
1230 1230 comment=comment,
1231 1231 pull_request=pull_request.pull_request_id
1232 1232 )
1233 1233
1234 1234 Session().flush()
1235 1235 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1236 1236 # we now calculate the status of the pull request again, and based on
1237 1237 # that calculation trigger a status change. This can happen when a
1238 1238 # non-reviewer admin closes a PR: their vote doesn't change the
1239 1239 # status, while a reviewer's vote might change it.
1240 1240 calculated_status = pull_request.calculated_review_status()
1241 1241 if old_calculated_status != calculated_status:
1242 1242 self.trigger_pull_request_hook(
1243 1243 pull_request, user, 'review_status_change',
1244 1244 data={'status': calculated_status})
1245 1245
1246 1246 # finally close the PR
1247 1247 PullRequestModel().close_pull_request(
1248 1248 pull_request.pull_request_id, user)
1249 1249
1250 1250 return comment, status
1251 1251
1252 1252 def merge_status(self, pull_request, translator=None,
1253 1253 force_shadow_repo_refresh=False):
1254 1254 _ = translator or get_current_request().translate
1255 1255
1256 1256 if not self._is_merge_enabled(pull_request):
1257 1257 return False, _('Server-side pull request merging is disabled.')
1258 1258 if pull_request.is_closed():
1259 1259 return False, _('This pull request is closed.')
1260 1260 merge_possible, msg = self._check_repo_requirements(
1261 1261 target=pull_request.target_repo, source=pull_request.source_repo,
1262 1262 translator=_)
1263 1263 if not merge_possible:
1264 1264 return merge_possible, msg
1265 1265
1266 1266 try:
1267 1267 resp = self._try_merge(
1268 1268 pull_request,
1269 1269 force_shadow_repo_refresh=force_shadow_repo_refresh)
1270 1270 log.debug("Merge response: %s", resp)
1271 1271 status = resp.possible, resp.merge_status_message
1272 1272 except NotImplementedError:
1273 1273 status = False, _('Pull request merging is not supported.')
1274 1274
1275 1275 return status
1276 1276
1277 1277 def _check_repo_requirements(self, target, source, translator):
1278 1278 """
1279 1279 Check if `target` and `source` have compatible requirements.
1280 1280
1281 1281 Currently this is just checking for largefiles.
1282 1282 """
1283 1283 _ = translator
1284 1284 target_has_largefiles = self._has_largefiles(target)
1285 1285 source_has_largefiles = self._has_largefiles(source)
1286 1286 merge_possible = True
1287 1287 message = u''
1288 1288
1289 1289 if target_has_largefiles != source_has_largefiles:
1290 1290 merge_possible = False
1291 1291 if source_has_largefiles:
1292 1292 message = _(
1293 1293 'Target repository large files support is disabled.')
1294 1294 else:
1295 1295 message = _(
1296 1296 'Source repository large files support is disabled.')
1297 1297
1298 1298 return merge_possible, message
1299 1299
1300 1300 def _has_largefiles(self, repo):
1301 1301 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1302 1302 'extensions', 'largefiles')
1303 1303 return largefiles_ui and largefiles_ui[0].active
1304 1304
1305 1305 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1306 1306 """
1307 1307 Try to merge the pull request and return the merge status.
1308 1308 """
1309 1309 log.debug(
1310 1310 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1311 1311 pull_request.pull_request_id, force_shadow_repo_refresh)
1312 1312 target_vcs = pull_request.target_repo.scm_instance()
1313 1313 # Refresh the target reference.
1314 1314 try:
1315 1315 target_ref = self._refresh_reference(
1316 1316 pull_request.target_ref_parts, target_vcs)
1317 1317 except CommitDoesNotExistError:
1318 1318 merge_state = MergeResponse(
1319 1319 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1320 1320 metadata={'target_ref': pull_request.target_ref_parts})
1321 1321 return merge_state
1322 1322
1323 1323 target_locked = pull_request.target_repo.locked
1324 1324 if target_locked and target_locked[0]:
1325 1325 locked_by = 'user:{}'.format(target_locked[0])
1326 1326 log.debug("The target repository is locked by %s.", locked_by)
1327 1327 merge_state = MergeResponse(
1328 1328 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1329 1329 metadata={'locked_by': locked_by})
1330 1330 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1331 1331 pull_request, target_ref):
1332 1332 log.debug("Refreshing the merge status of the repository.")
1333 1333 merge_state = self._refresh_merge_state(
1334 1334 pull_request, target_vcs, target_ref)
1335 1335 else:
1336 1336 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1337 1337 metadata = {
1338 'unresolved_files': '',
1338 1339 'target_ref': pull_request.target_ref_parts,
1339 1340 'source_ref': pull_request.source_ref_parts,
1340 1341 }
1341 1342 if not possible and target_ref.type == 'branch':
1342 1343 # NOTE(marcink): case for mercurial multiple heads on branch
1343 1344 heads = target_vcs._heads(target_ref.name)
1344 1345 if len(heads) != 1:
1345 1346 heads = ',\n'.join(heads)
1346 1347 metadata.update({
1347 1348 'heads': heads
1348 1349 })
1349 1350 merge_state = MergeResponse(
1350 1351 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1351 1352
1352 1353 return merge_state
1353 1354
1354 1355 def _refresh_reference(self, reference, vcs_repository):
1355 1356 if reference.type in self.UPDATABLE_REF_TYPES:
1356 1357 name_or_id = reference.name
1357 1358 else:
1358 1359 name_or_id = reference.commit_id
1359 1360
1360 1361 refreshed_commit = vcs_repository.get_commit(name_or_id)
1361 1362 refreshed_reference = Reference(
1362 1363 reference.type, reference.name, refreshed_commit.raw_id)
1363 1364 return refreshed_reference
1364 1365
1365 1366 def _needs_merge_state_refresh(self, pull_request, target_reference):
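# a refresh is needed unless both the newest source revision and the
# target commit still match what was recorded at the last merge simulation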
1366 1367 return not(
1367 1368 pull_request.revisions and
1368 1369 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1369 1370 target_reference.commit_id == pull_request._last_merge_target_rev)
1370 1371
1371 1372 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1372 1373 workspace_id = self._workspace_id(pull_request)
1373 1374 source_vcs = pull_request.source_repo.scm_instance()
1374 1375 repo_id = pull_request.target_repo.repo_id
1375 1376 use_rebase = self._use_rebase_for_merging(pull_request)
1376 1377 close_branch = self._close_branch_before_merging(pull_request)
1377 1378 merge_state = target_vcs.merge(
1378 1379 repo_id, workspace_id,
1379 1380 target_reference, source_vcs, pull_request.source_ref_parts,
1380 1381 dry_run=True, use_rebase=use_rebase,
1381 1382 close_branch=close_branch)
1382 1383
1383 1384 # Do not store the response if there was an unknown error.
1384 1385 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1385 1386 pull_request._last_merge_source_rev = \
1386 1387 pull_request.source_ref_parts.commit_id
1387 1388 pull_request._last_merge_target_rev = target_reference.commit_id
1388 1389 pull_request.last_merge_status = merge_state.failure_reason
1389 1390 pull_request.shadow_merge_ref = merge_state.merge_ref
1390 1391 Session().add(pull_request)
1391 1392 Session().commit()
1392 1393
1393 1394 return merge_state
1394 1395
1395 1396 def _workspace_id(self, pull_request):
1396 1397 workspace_id = 'pr-%s' % pull_request.pull_request_id
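# e.g. 'pr-42' for pull request id 42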
1397 1398 return workspace_id
1398 1399
1399 1400 def generate_repo_data(self, repo, commit_id=None, branch=None,
1400 1401 bookmark=None, translator=None):
1401 1402 from rhodecode.model.repo import RepoModel
1402 1403
1403 1404 all_refs, selected_ref = \
1404 1405 self._get_repo_pullrequest_sources(
1405 1406 repo.scm_instance(), commit_id=commit_id,
1406 1407 branch=branch, bookmark=bookmark, translator=translator)
1407 1408
1408 1409 refs_select2 = []
1409 1410 for element in all_refs:
1410 1411 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1411 1412 refs_select2.append({'text': element[1], 'children': children})
1412 1413
1413 1414 return {
1414 1415 'user': {
1415 1416 'user_id': repo.user.user_id,
1416 1417 'username': repo.user.username,
1417 1418 'firstname': repo.user.first_name,
1418 1419 'lastname': repo.user.last_name,
1419 1420 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1420 1421 },
1421 1422 'name': repo.repo_name,
1422 1423 'link': RepoModel().get_url(repo),
1423 1424 'description': h.chop_at_smart(repo.description_safe, '\n'),
1424 1425 'refs': {
1425 1426 'all_refs': all_refs,
1426 1427 'selected_ref': selected_ref,
1427 1428 'select2_refs': refs_select2
1428 1429 }
1429 1430 }
1430 1431
1431 1432 def generate_pullrequest_title(self, source, source_ref, target):
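# e.g. generate_pullrequest_title('repo-a', 'feature-x', 'repo-b')
# gives u'repo-a#feature-x to repo-b' (hypothetical names)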
1432 1433 return u'{source}#{at_ref} to {target}'.format(
1433 1434 source=source,
1434 1435 at_ref=source_ref,
1435 1436 target=target,
1436 1437 )
1437 1438
1438 1439 def _cleanup_merge_workspace(self, pull_request):
1439 1440 # Merging related cleanup
1440 1441 repo_id = pull_request.target_repo.repo_id
1441 1442 target_scm = pull_request.target_repo.scm_instance()
1442 1443 workspace_id = self._workspace_id(pull_request)
1443 1444
1444 1445 try:
1445 1446 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1446 1447 except NotImplementedError:
1447 1448 pass
1448 1449
1449 1450 def _get_repo_pullrequest_sources(
1450 1451 self, repo, commit_id=None, branch=None, bookmark=None,
1451 1452 translator=None):
1452 1453 """
1453 1454 Return a structure with the repo's interesting commits, suitable for
1454 1455 the selectors in the pull request controller
1455 1456
1456 1457 :param commit_id: a commit that must appear somewhere in the list
1457 1458 and be selected by default
1458 1459 :param branch: a branch that must be in the list and selected
1459 1460 by default - even if closed
1460 1461 :param bookmark: a bookmark that must be in the list and selected
1461 1462 """
1462 1463 _ = translator or get_current_request().translate
1463 1464
1464 1465 commit_id = safe_str(commit_id) if commit_id else None
1465 1466 branch = safe_unicode(branch) if branch else None
1466 1467 bookmark = safe_unicode(bookmark) if bookmark else None
1467 1468
1468 1469 selected = None
1469 1470
1470 1471 # order matters: first source that has commit_id in it will be selected
1471 1472 sources = []
1472 1473 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1473 1474 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1474 1475
1475 1476 if commit_id:
1476 1477 ref_commit = (h.short_id(commit_id), commit_id)
1477 1478 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1478 1479
1479 1480 sources.append(
1480 1481 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1481 1482 )
1482 1483
1483 1484 groups = []
1484 1485
1485 1486 for group_key, ref_list, group_name, match in sources:
1486 1487 group_refs = []
1487 1488 for ref_name, ref_id in ref_list:
1488 1489 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1489 1490 group_refs.append((ref_key, ref_name))
1490 1491
1491 1492 if not selected:
1492 1493 if set([commit_id, match]) & set([ref_id, ref_name]):
1493 1494 selected = ref_key
1494 1495
1495 1496 if group_refs:
1496 1497 groups.append((group_refs, group_name))
1497 1498
1498 1499 if not selected:
1499 1500 ref = commit_id or branch or bookmark
1500 1501 if ref:
1501 1502 raise CommitDoesNotExistError(
1502 1503 u'No commit refs could be found matching: {}'.format(ref))
1503 1504 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1504 1505 selected = u'branch:{}:{}'.format(
1505 1506 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1506 1507 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1507 1508 )
1508 1509 elif repo.commit_ids:
1509 1510 # make the user select in this case
1510 1511 selected = None
1511 1512 else:
1512 1513 raise EmptyRepositoryError()
1513 1514 return groups, selected
1514 1515
1515 1516 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1516 1517 hide_whitespace_changes, diff_context):
1517 1518
1518 1519 return self._get_diff_from_pr_or_version(
1519 1520 source_repo, source_ref_id, target_ref_id,
1520 1521 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1521 1522
1522 1523 def _get_diff_from_pr_or_version(
1523 1524 self, source_repo, source_ref_id, target_ref_id,
1524 1525 hide_whitespace_changes, diff_context):
1525 1526
1526 1527 target_commit = source_repo.get_commit(
1527 1528 commit_id=safe_str(target_ref_id))
1528 1529 source_commit = source_repo.get_commit(
1529 1530 commit_id=safe_str(source_ref_id))
1530 1531 if isinstance(source_repo, Repository):
1531 1532 vcs_repo = source_repo.scm_instance()
1532 1533 else:
1533 1534 vcs_repo = source_repo
1534 1535
1535 1536 # TODO: johbo: In the context of an update, we cannot reach
1536 1537 # the old commit anymore with our normal mechanisms. It needs
1537 1538 # some sort of special support in the vcs layer to avoid this
1538 1539 # workaround.
1539 1540 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1540 1541 vcs_repo.alias == 'git'):
1541 1542 source_commit.raw_id = safe_str(source_ref_id)
1542 1543
1543 1544 log.debug('calculating diff between '
1544 1545 'source_ref:%s and target_ref:%s for repo `%s`',
1545 1546 source_ref_id, target_ref_id,
1546 1547 safe_unicode(vcs_repo.path))
1547 1548
1548 1549 vcs_diff = vcs_repo.get_diff(
1549 1550 commit1=target_commit, commit2=source_commit,
1550 1551 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1551 1552 return vcs_diff
1552 1553
1553 1554 def _is_merge_enabled(self, pull_request):
1554 1555 return self._get_general_setting(
1555 1556 pull_request, 'rhodecode_pr_merge_enabled')
1556 1557
1557 1558 def _use_rebase_for_merging(self, pull_request):
1558 1559 repo_type = pull_request.target_repo.repo_type
1559 1560 if repo_type == 'hg':
1560 1561 return self._get_general_setting(
1561 1562 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1562 1563 elif repo_type == 'git':
1563 1564 return self._get_general_setting(
1564 1565 pull_request, 'rhodecode_git_use_rebase_for_merging')
1565 1566
1566 1567 return False
1567 1568
1568 1569 def _close_branch_before_merging(self, pull_request):
1569 1570 repo_type = pull_request.target_repo.repo_type
1570 1571 if repo_type == 'hg':
1571 1572 return self._get_general_setting(
1572 1573 pull_request, 'rhodecode_hg_close_branch_before_merging')
1573 1574 elif repo_type == 'git':
1574 1575 return self._get_general_setting(
1575 1576 pull_request, 'rhodecode_git_close_branch_before_merging')
1576 1577
1577 1578 return False
1578 1579
1579 1580 def _get_general_setting(self, pull_request, settings_key, default=False):
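# e.g. _get_general_setting(pr, 'rhodecode_pr_merge_enabled') reads the
# target repo's general VCS settings, falling back to False when unset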
1580 1581 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1581 1582 settings = settings_model.get_general_settings()
1582 1583 return settings.get(settings_key, default)
1583 1584
1584 1585 def _log_audit_action(self, action, action_data, user, pull_request):
1585 1586 audit_logger.store(
1586 1587 action=action,
1587 1588 action_data=action_data,
1588 1589 user=user,
1589 1590 repo=pull_request.target_repo)
1590 1591
1591 1592 def get_reviewer_functions(self):
1592 1593 """
1593 1594 Fetches functions for validation and fetching default reviewers.
1594 1595 If available we use the EE package, else we fall back to the CE
1595 1596 package functions
1596 1597 """
1597 1598 try:
1598 1599 from rc_reviewers.utils import get_default_reviewers_data
1599 1600 from rc_reviewers.utils import validate_default_reviewers
1600 1601 except ImportError:
1601 1602 from rhodecode.apps.repository.utils import get_default_reviewers_data
1602 1603 from rhodecode.apps.repository.utils import validate_default_reviewers
1603 1604
1604 1605 return get_default_reviewers_data, validate_default_reviewers
1605 1606
1606 1607
1607 1608 class MergeCheck(object):
1608 1609 """
1609 1610 Performs merge checks and returns a check object which stores
1610 1611 information about merge errors and merge conditions
1611 1612 """
1612 1613 TODO_CHECK = 'todo'
1613 1614 PERM_CHECK = 'perm'
1614 1615 REVIEW_CHECK = 'review'
1615 1616 MERGE_CHECK = 'merge'
1616 1617
1617 1618 def __init__(self):
1618 1619 self.review_status = None
1619 1620 self.merge_possible = None
1620 1621 self.merge_msg = ''
1621 1622 self.failed = None
1622 1623 self.errors = []
1623 1624 self.error_details = OrderedDict()
1624 1625
1625 1626 def push_error(self, error_type, message, error_key, details):
1626 1627 self.failed = True
1627 1628 self.errors.append([error_type, message])
1628 1629 self.error_details[error_key] = dict(
1629 1630 details=details,
1630 1631 error_type=error_type,
1631 1632 message=message
1632 1633 )
1633 1634
1634 1635 @classmethod
1635 1636 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1636 1637 force_shadow_repo_refresh=False):
1637 1638 _ = translator
1638 1639 merge_check = cls()
1639 1640
1640 1641 # permissions to merge
1641 1642 user_allowed_to_merge = PullRequestModel().check_user_merge(
1642 1643 pull_request, auth_user)
1643 1644 if not user_allowed_to_merge:
1644 1645 log.debug("MergeCheck: cannot merge, approval is pending.")
1645 1646
1646 1647 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1647 1648 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1648 1649 if fail_early:
1649 1650 return merge_check
1650 1651
1651 1652 # permission to merge into the target branch
1652 1653 target_commit_id = pull_request.target_ref_parts.commit_id
1653 1654 if pull_request.target_ref_parts.type == 'branch':
1654 1655 branch_name = pull_request.target_ref_parts.name
1655 1656 else:
1656 1657 # for mercurial we can always figure out the branch from the commit
1657 1658 # in case of bookmark
1658 1659 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1659 1660 branch_name = target_commit.branch
1660 1661
1661 1662 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1662 1663 pull_request.target_repo.repo_name, branch_name)
1663 1664 if branch_perm and branch_perm == 'branch.none':
1664 1665 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1665 1666 branch_name, rule)
1666 1667 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1667 1668 if fail_early:
1668 1669 return merge_check
1669 1670
1670 1671 # review status, must be always present
1671 1672 review_status = pull_request.calculated_review_status()
1672 1673 merge_check.review_status = review_status
1673 1674
1674 1675 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1675 1676 if not status_approved:
1676 1677 log.debug("MergeCheck: cannot merge, approval is pending.")
1677 1678
1678 1679 msg = _('Pull request reviewer approval is pending.')
1679 1680
1680 1681 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1681 1682
1682 1683 if fail_early:
1683 1684 return merge_check
1684 1685
1685 1686 # left over TODOs
1686 1687 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1687 1688 if todos:
1688 1689 log.debug("MergeCheck: cannot merge, {} "
1689 1690 "unresolved TODOs left.".format(len(todos)))
1690 1691
1691 1692 if len(todos) == 1:
1692 1693 msg = _('Cannot merge, {} TODO still not resolved.').format(
1693 1694 len(todos))
1694 1695 else:
1695 1696 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1696 1697 len(todos))
1697 1698
1698 1699 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1699 1700
1700 1701 if fail_early:
1701 1702 return merge_check
1702 1703
1703 1704 # merge possible, here is the filesystem simulation + shadow repo
1704 1705 merge_status, msg = PullRequestModel().merge_status(
1705 1706 pull_request, translator=translator,
1706 1707 force_shadow_repo_refresh=force_shadow_repo_refresh)
1707 1708 merge_check.merge_possible = merge_status
1708 1709 merge_check.merge_msg = msg
1709 1710 if not merge_status:
1710 1711 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1711 1712 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1712 1713
1713 1714 if fail_early:
1714 1715 return merge_check
1715 1716
1716 1717 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1717 1718 return merge_check
1718 1719
1719 1720 @classmethod
1720 1721 def get_merge_conditions(cls, pull_request, translator):
1721 1722 _ = translator
1722 1723 merge_details = {}
1723 1724
1724 1725 model = PullRequestModel()
1725 1726 use_rebase = model._use_rebase_for_merging(pull_request)
1726 1727
1727 1728 if use_rebase:
1728 1729 merge_details['merge_strategy'] = dict(
1729 1730 details={},
1730 1731 message=_('Merge strategy: rebase')
1731 1732 )
1732 1733 else:
1733 1734 merge_details['merge_strategy'] = dict(
1734 1735 details={},
1735 1736 message=_('Merge strategy: explicit merge commit')
1736 1737 )
1737 1738
1738 1739 close_branch = model._close_branch_before_merging(pull_request)
1739 1740 if close_branch:
1740 1741 repo_type = pull_request.target_repo.repo_type
1741 1742 close_msg = ''
1742 1743 if repo_type == 'hg':
1743 1744 close_msg = _('Source branch will be closed after merge.')
1744 1745 elif repo_type == 'git':
1745 1746 close_msg = _('Source branch will be deleted after merge.')
1746 1747
1747 1748 merge_details['close_branch'] = dict(
1748 1749 details={},
1749 1750 message=close_msg
1750 1751 )
1751 1752
1752 1753 return merge_details
1753 1754
1754 1755
1755 1756 ChangeTuple = collections.namedtuple(
1756 1757 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1757 1758
1758 1759 FileChangeTuple = collections.namedtuple(
1759 1760 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,964 +1,966 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import textwrap
24 24
25 25 import rhodecode
26 26 from rhodecode.lib.utils2 import safe_unicode
27 27 from rhodecode.lib.vcs.backends import get_backend
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 MergeResponse, MergeFailureReason, Reference)
30 30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 31 from rhodecode.lib.vcs.nodes import FileNode
32 32 from rhodecode.model.comment import CommentsModel
33 33 from rhodecode.model.db import PullRequest, Session
34 34 from rhodecode.model.pull_request import PullRequestModel
35 35 from rhodecode.model.user import UserModel
36 36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37 37
38 38
39 39 pytestmark = [
40 40 pytest.mark.backends("git", "hg"),
41 41 ]
42 42
43 43
44 44 @pytest.mark.usefixtures('config_stub')
45 45 class TestPullRequestModel(object):
46 46
47 47 @pytest.fixture()
48 48 def pull_request(self, request, backend, pr_util):
49 49 """
50 50 A pull request combined with multiples patches.
51 51 """
52 52 BackendClass = get_backend(backend.alias)
53 53 merge_resp = MergeResponse(
54 54 False, False, None, MergeFailureReason.UNKNOWN,
55 55 metadata={'exception': 'MockError'})
56 56 self.merge_patcher = mock.patch.object(
57 57 BackendClass, 'merge', return_value=merge_resp)
58 58 self.workspace_remove_patcher = mock.patch.object(
59 59 BackendClass, 'cleanup_merge_workspace')
60 60
61 61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
62 62 self.merge_mock = self.merge_patcher.start()
63 63 self.comment_patcher = mock.patch(
64 64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
65 65 self.comment_patcher.start()
66 66 self.notification_patcher = mock.patch(
67 67 'rhodecode.model.notification.NotificationModel.create')
68 68 self.notification_patcher.start()
69 69 self.helper_patcher = mock.patch(
70 70 'rhodecode.lib.helpers.route_path')
71 71 self.helper_patcher.start()
72 72
73 73 self.hook_patcher = mock.patch.object(PullRequestModel,
74 74 'trigger_pull_request_hook')
75 75 self.hook_mock = self.hook_patcher.start()
76 76
77 77 self.invalidation_patcher = mock.patch(
78 78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
79 79 self.invalidation_mock = self.invalidation_patcher.start()
80 80
81 81 self.pull_request = pr_util.create_pull_request(
82 82 mergeable=True, name_suffix=u'Δ…Δ‡')
83 83 self.source_commit = self.pull_request.source_ref_parts.commit_id
84 84 self.target_commit = self.pull_request.target_ref_parts.commit_id
85 85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
86 86 self.repo_id = self.pull_request.target_repo.repo_id
87 87
88 88 @request.addfinalizer
89 89 def cleanup_pull_request():
90 90 calls = [mock.call(
91 91 self.pull_request, self.pull_request.author, 'create')]
92 92 self.hook_mock.assert_has_calls(calls)
93 93
94 94 self.workspace_remove_patcher.stop()
95 95 self.merge_patcher.stop()
96 96 self.comment_patcher.stop()
97 97 self.notification_patcher.stop()
98 98 self.helper_patcher.stop()
99 99 self.hook_patcher.stop()
100 100 self.invalidation_patcher.stop()
101 101
102 102 return self.pull_request
103 103
104 104 def test_get_all(self, pull_request):
105 105 prs = PullRequestModel().get_all(pull_request.target_repo)
106 106 assert isinstance(prs, list)
107 107 assert len(prs) == 1
108 108
109 109 def test_count_all(self, pull_request):
110 110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
111 111 assert pr_count == 1
112 112
113 113 def test_get_awaiting_review(self, pull_request):
114 114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
115 115 assert isinstance(prs, list)
116 116 assert len(prs) == 1
117 117
118 118 def test_count_awaiting_review(self, pull_request):
119 119 pr_count = PullRequestModel().count_awaiting_review(
120 120 pull_request.target_repo)
121 121 assert pr_count == 1
122 122
123 123 def test_get_awaiting_my_review(self, pull_request):
124 124 PullRequestModel().update_reviewers(
125 125 pull_request, [(pull_request.author, ['author'], False, [])],
126 126 pull_request.author)
127 127 Session().commit()
128 128
129 129 prs = PullRequestModel().get_awaiting_my_review(
130 130 pull_request.target_repo, user_id=pull_request.author.user_id)
131 131 assert isinstance(prs, list)
132 132 assert len(prs) == 1
133 133
134 134 def test_count_awaiting_my_review(self, pull_request):
135 135 PullRequestModel().update_reviewers(
136 136 pull_request, [(pull_request.author, ['author'], False, [])],
137 137 pull_request.author)
138 138 Session().commit()
139 139
140 140 pr_count = PullRequestModel().count_awaiting_my_review(
141 141 pull_request.target_repo, user_id=pull_request.author.user_id)
142 142 assert pr_count == 1
143 143
144 144 def test_delete_calls_cleanup_merge(self, pull_request):
145 145 repo_id = pull_request.target_repo.repo_id
146 146 PullRequestModel().delete(pull_request, pull_request.author)
147 147 Session().commit()
148 148
149 149 self.workspace_remove_mock.assert_called_once_with(
150 150 repo_id, self.workspace_id)
151 151
152 152 def test_close_calls_cleanup_and_hook(self, pull_request):
153 153 PullRequestModel().close_pull_request(
154 154 pull_request, pull_request.author)
155 155 Session().commit()
156 156
157 157 repo_id = pull_request.target_repo.repo_id
158 158
159 159 self.workspace_remove_mock.assert_called_once_with(
160 160 repo_id, self.workspace_id)
161 161 self.hook_mock.assert_called_with(
162 162 self.pull_request, self.pull_request.author, 'close')
163 163
164 164 def test_merge_status(self, pull_request):
165 165 self.merge_mock.return_value = MergeResponse(
166 166 True, False, None, MergeFailureReason.NONE)
167 167
168 168 assert pull_request._last_merge_source_rev is None
169 169 assert pull_request._last_merge_target_rev is None
170 170 assert pull_request.last_merge_status is None
171 171
172 172 status, msg = PullRequestModel().merge_status(pull_request)
173 173 assert status is True
174 174 assert msg == 'This pull request can be automatically merged.'
175 175 self.merge_mock.assert_called_with(
176 176 self.repo_id, self.workspace_id,
177 177 pull_request.target_ref_parts,
178 178 pull_request.source_repo.scm_instance(),
179 179 pull_request.source_ref_parts, dry_run=True,
180 180 use_rebase=False, close_branch=False)
181 181
182 182 assert pull_request._last_merge_source_rev == self.source_commit
183 183 assert pull_request._last_merge_target_rev == self.target_commit
184 184 assert pull_request.last_merge_status is MergeFailureReason.NONE
185 185
186 186 self.merge_mock.reset_mock()
187 187 status, msg = PullRequestModel().merge_status(pull_request)
188 188 assert status is True
189 189 assert msg == 'This pull request can be automatically merged.'
190 190 assert self.merge_mock.called is False
191 191
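Note on the second merge_status call above: it returns the cached result, so the merge simulation mock is never invoked again. A minimal sketch of that caching check, assuming hypothetical helper names rather than the actual PullRequestModel internals:

def cached_merge_status(pull_request, run_merge_simulation):
    # Re-use the stored result while both refs are unchanged; the
    # attribute names mirror the assertions in the tests above.
    source_rev = pull_request.source_ref_parts.commit_id
    target_rev = pull_request.target_ref_parts.commit_id
    if (pull_request._last_merge_source_rev == source_rev and
            pull_request._last_merge_target_rev == target_rev):
        return pull_request.last_merge_status  # no new dry-run needed
    pull_request.last_merge_status = run_merge_simulation()
    pull_request._last_merge_source_rev = source_rev
    pull_request._last_merge_target_rev = target_rev
    return pull_request.last_merge_status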
192 192 def test_merge_status_known_failure(self, pull_request):
193 193 self.merge_mock.return_value = MergeResponse(
194 False, False, None, MergeFailureReason.MERGE_FAILED)
194 False, False, None, MergeFailureReason.MERGE_FAILED,
195 metadata={'unresolved_files': 'file1'})
195 196
196 197 assert pull_request._last_merge_source_rev is None
197 198 assert pull_request._last_merge_target_rev is None
198 199 assert pull_request.last_merge_status is None
199 200
200 201 status, msg = PullRequestModel().merge_status(pull_request)
201 202 assert status is False
202 assert msg == 'This pull request cannot be merged because of merge conflicts.'
203 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
203 204 self.merge_mock.assert_called_with(
204 205 self.repo_id, self.workspace_id,
205 206 pull_request.target_ref_parts,
206 207 pull_request.source_repo.scm_instance(),
207 208 pull_request.source_ref_parts, dry_run=True,
208 209 use_rebase=False, close_branch=False)
209 210
210 211 assert pull_request._last_merge_source_rev == self.source_commit
211 212 assert pull_request._last_merge_target_rev == self.target_commit
212 assert (
213 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
213 assert pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED
214 214
215 215 self.merge_mock.reset_mock()
216 216 status, msg = PullRequestModel().merge_status(pull_request)
217 217 assert status is False
218 assert msg == 'This pull request cannot be merged because of merge conflicts.'
218 assert msg == 'This pull request cannot be merged because of merge conflicts. '
219 219 assert self.merge_mock.called is False
220 220
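The trailing space in the last assertion above is deliberate: on the first check the unresolved_files metadata from the merge simulation is appended to the conflict message, while the cached re-check has no metadata to append. A hedged sketch of how such a message could be assembled (illustrative, not the actual MergeResponse code):

def conflict_message(metadata):
    # The unresolved files reported by the dry-run merge are appended;
    # a cached re-check carries no metadata, leaving an empty suffix.
    unresolved = metadata.get('unresolved_files', '')
    return 'This pull request cannot be merged because of merge conflicts. %s' % unresolved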
221 221 def test_merge_status_unknown_failure(self, pull_request):
222 222 self.merge_mock.return_value = MergeResponse(
223 223 False, False, None, MergeFailureReason.UNKNOWN,
224 224 metadata={'exception': 'MockError'})
225 225
226 226 assert pull_request._last_merge_source_rev is None
227 227 assert pull_request._last_merge_target_rev is None
228 228 assert pull_request.last_merge_status is None
229 229
230 230 status, msg = PullRequestModel().merge_status(pull_request)
231 231 assert status is False
232 232 assert msg == (
233 233 'This pull request cannot be merged because of an unhandled exception. '
234 234 'MockError')
235 235 self.merge_mock.assert_called_with(
236 236 self.repo_id, self.workspace_id,
237 237 pull_request.target_ref_parts,
238 238 pull_request.source_repo.scm_instance(),
239 239 pull_request.source_ref_parts, dry_run=True,
240 240 use_rebase=False, close_branch=False)
241 241
242 242 assert pull_request._last_merge_source_rev is None
243 243 assert pull_request._last_merge_target_rev is None
244 244 assert pull_request.last_merge_status is None
245 245
246 246 self.merge_mock.reset_mock()
247 247 status, msg = PullRequestModel().merge_status(pull_request)
248 248 assert status is False
249 249 assert msg == (
250 250 'This pull request cannot be merged because of an unhandled exception. '
251 251 'MockError')
252 252 assert self.merge_mock.called is True
253 253
254 254 def test_merge_status_when_target_is_locked(self, pull_request):
255 255 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
256 256 status, msg = PullRequestModel().merge_status(pull_request)
257 257 assert status is False
258 258 assert msg == (
259 259 'This pull request cannot be merged because the target repository '
260 260 'is locked by user:1.')
261 261
262 262 def test_merge_status_requirements_check_target(self, pull_request):
263 263
264 264 def has_largefiles(self, repo):
265 265 return repo == pull_request.source_repo
266 266
267 267 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
268 268 with patcher:
269 269 status, msg = PullRequestModel().merge_status(pull_request)
270 270
271 271 assert status is False
272 272 assert msg == 'Target repository large files support is disabled.'
273 273
274 274 def test_merge_status_requirements_check_source(self, pull_request):
275 275
276 276 def has_largefiles(self, repo):
277 277 return repo == pull_request.target_repo
278 278
279 279 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
280 280 with patcher:
281 281 status, msg = PullRequestModel().merge_status(pull_request)
282 282
283 283 assert status is False
284 284 assert msg == 'Source repository large files support is disabled.'
285 285
286 286 def test_merge(self, pull_request, merge_extras):
287 287 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
288 288 merge_ref = Reference(
289 289 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
290 290 self.merge_mock.return_value = MergeResponse(
291 291 True, True, merge_ref, MergeFailureReason.NONE)
292 292
293 293 merge_extras['repository'] = pull_request.target_repo.repo_name
294 294 PullRequestModel().merge_repo(
295 295 pull_request, pull_request.author, extras=merge_extras)
296 296 Session().commit()
297 297
298 298 message = (
299 299 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
300 300 u'\n\n {pr_title}'.format(
301 301 pr_id=pull_request.pull_request_id,
302 302 source_repo=safe_unicode(
303 303 pull_request.source_repo.scm_instance().name),
304 304 source_ref_name=pull_request.source_ref_parts.name,
305 305 pr_title=safe_unicode(pull_request.title)
306 306 )
307 307 )
308 308 self.merge_mock.assert_called_with(
309 309 self.repo_id, self.workspace_id,
310 310 pull_request.target_ref_parts,
311 311 pull_request.source_repo.scm_instance(),
312 312 pull_request.source_ref_parts,
313 313 user_name=user.short_contact, user_email=user.email, message=message,
314 314 use_rebase=False, close_branch=False
315 315 )
316 316 self.invalidation_mock.assert_called_once_with(
317 317 pull_request.target_repo.repo_name)
318 318
319 319 self.hook_mock.assert_called_with(
320 320 self.pull_request, self.pull_request.author, 'merge')
321 321
322 322 pull_request = PullRequest.get(pull_request.pull_request_id)
323 323 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
324 324
325 325 def test_merge_with_status_lock(self, pull_request, merge_extras):
326 326 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
327 327 merge_ref = Reference(
328 328 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
329 329 self.merge_mock.return_value = MergeResponse(
330 330 True, True, merge_ref, MergeFailureReason.NONE)
331 331
332 332 merge_extras['repository'] = pull_request.target_repo.repo_name
333 333
334 334 with pull_request.set_state(PullRequest.STATE_UPDATING):
335 335 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
336 336 PullRequestModel().merge_repo(
337 337 pull_request, pull_request.author, extras=merge_extras)
338 338 Session().commit()
339 339
340 340 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
341 341
342 342 message = (
343 343 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
344 344 u'\n\n {pr_title}'.format(
345 345 pr_id=pull_request.pull_request_id,
346 346 source_repo=safe_unicode(
347 347 pull_request.source_repo.scm_instance().name),
348 348 source_ref_name=pull_request.source_ref_parts.name,
349 349 pr_title=safe_unicode(pull_request.title)
350 350 )
351 351 )
352 352 self.merge_mock.assert_called_with(
353 353 self.repo_id, self.workspace_id,
354 354 pull_request.target_ref_parts,
355 355 pull_request.source_repo.scm_instance(),
356 356 pull_request.source_ref_parts,
357 357 user_name=user.short_contact, user_email=user.email, message=message,
358 358 use_rebase=False, close_branch=False
359 359 )
360 360 self.invalidation_mock.assert_called_once_with(
361 361 pull_request.target_repo.repo_name)
362 362
363 363 self.hook_mock.assert_called_with(
364 364 self.pull_request, self.pull_request.author, 'merge')
365 365
366 366 pull_request = PullRequest.get(pull_request.pull_request_id)
367 367 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
368 368
369 369 def test_merge_failed(self, pull_request, merge_extras):
370 370 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
371 371 merge_ref = Reference(
372 372 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
373 373 self.merge_mock.return_value = MergeResponse(
374 374 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
375 375
376 376 merge_extras['repository'] = pull_request.target_repo.repo_name
377 377 PullRequestModel().merge_repo(
378 378 pull_request, pull_request.author, extras=merge_extras)
379 379 Session().commit()
380 380
381 381 message = (
382 382 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
383 383 u'\n\n {pr_title}'.format(
384 384 pr_id=pull_request.pull_request_id,
385 385 source_repo=safe_unicode(
386 386 pull_request.source_repo.scm_instance().name),
387 387 source_ref_name=pull_request.source_ref_parts.name,
388 388 pr_title=safe_unicode(pull_request.title)
389 389 )
390 390 )
391 391 self.merge_mock.assert_called_with(
392 392 self.repo_id, self.workspace_id,
393 393 pull_request.target_ref_parts,
394 394 pull_request.source_repo.scm_instance(),
395 395 pull_request.source_ref_parts,
396 396 user_name=user.short_contact, user_email=user.email, message=message,
397 397 use_rebase=False, close_branch=False
398 398 )
399 399
400 400 pull_request = PullRequest.get(pull_request.pull_request_id)
401 401 assert self.invalidation_mock.called is False
402 402 assert pull_request.merge_rev is None
403 403
404 404 def test_get_commit_ids(self, pull_request):
405 405 # The PR has not been merged yet, so expect an exception
406 406 with pytest.raises(ValueError):
407 407 PullRequestModel()._get_commit_ids(pull_request)
408 408
409 409 # Merge revision is in the revisions list
410 410 pull_request.merge_rev = pull_request.revisions[0]
411 411 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
412 412 assert commit_ids == pull_request.revisions
413 413
414 414 # Merge revision is not in the revisions list
415 415 pull_request.merge_rev = 'f000' * 10
416 416 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
417 417 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
418 418
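The three assertions above pin down the expected behaviour of _get_commit_ids; a minimal sketch of logic satisfying them (not the model's actual implementation):

def get_commit_ids(pull_request):
    # Unmerged PRs are an error; a merge commit that is not one of the
    # PR's own revisions is appended to the list.
    if pull_request.merge_rev is None:
        raise ValueError('This pull request was not merged yet')
    if pull_request.merge_rev in pull_request.revisions:
        return pull_request.revisions
    return pull_request.revisions + [pull_request.merge_rev]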
419 419 def test_get_diff_from_pr_version(self, pull_request):
420 420 source_repo = pull_request.source_repo
421 421 source_ref_id = pull_request.source_ref_parts.commit_id
422 422 target_ref_id = pull_request.target_ref_parts.commit_id
423 423 diff = PullRequestModel()._get_diff_from_pr_or_version(
424 424 source_repo, source_ref_id, target_ref_id,
425 425 hide_whitespace_changes=False, diff_context=6)
426 426 assert 'file_1' in diff.raw
427 427
428 428 def test_generate_title_returns_unicode(self):
429 429 title = PullRequestModel().generate_pullrequest_title(
430 430 source='source-dummy',
431 431 source_ref='source-ref-dummy',
432 432 target='target-dummy',
433 433 )
434 434 assert type(title) == unicode
435 435
436 436
437 437 @pytest.mark.usefixtures('config_stub')
438 438 class TestIntegrationMerge(object):
439 439 @pytest.mark.parametrize('extra_config', (
440 440 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
441 441 ))
442 442 def test_merge_triggers_push_hooks(
443 443 self, pr_util, user_admin, capture_rcextensions, merge_extras,
444 444 extra_config):
445 445
446 446 pull_request = pr_util.create_pull_request(
447 447 approved=True, mergeable=True)
448 448 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
449 449 merge_extras['repository'] = pull_request.target_repo.repo_name
450 450 Session().commit()
451 451
452 452 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
453 453 merge_state = PullRequestModel().merge_repo(
454 454 pull_request, user_admin, extras=merge_extras)
455 455 Session().commit()
456 456
457 457 assert merge_state.executed
458 458 assert '_pre_push_hook' in capture_rcextensions
459 459 assert '_push_hook' in capture_rcextensions
460 460
461 461 def test_merge_can_be_rejected_by_pre_push_hook(
462 462 self, pr_util, user_admin, capture_rcextensions, merge_extras):
463 463 pull_request = pr_util.create_pull_request(
464 464 approved=True, mergeable=True)
465 465 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
466 466 merge_extras['repository'] = pull_request.target_repo.repo_name
467 467 Session().commit()
468 468
469 469 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
470 470 pre_pull.side_effect = RepositoryError("Disallow push!")
471 471 merge_status = PullRequestModel().merge_repo(
472 472 pull_request, user_admin, extras=merge_extras)
473 473 Session().commit()
474 474
475 475 assert not merge_status.executed
476 476 assert 'pre_push' not in capture_rcextensions
477 477 assert 'post_push' not in capture_rcextensions
478 478
479 479 def test_merge_fails_if_target_is_locked(
480 480 self, pr_util, user_regular, merge_extras):
481 481 pull_request = pr_util.create_pull_request(
482 482 approved=True, mergeable=True)
483 483 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
484 484 pull_request.target_repo.locked = locked_by
485 485 # TODO: johbo: Check if this can work based on the database, currently
486 486 # all data is pre-computed, that's why just updating the DB is not
487 487 # enough.
488 488 merge_extras['locked_by'] = locked_by
489 489 merge_extras['repository'] = pull_request.target_repo.repo_name
490 490 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
491 491 Session().commit()
492 492 merge_status = PullRequestModel().merge_repo(
493 493 pull_request, user_regular, extras=merge_extras)
494 494 Session().commit()
495 495
496 496 assert not merge_status.executed
497 497
498 498
499 499 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
500 500 (False, 1, 0),
501 501 (True, 0, 1),
502 502 ])
503 503 def test_outdated_comments(
504 504 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
505 505 pull_request = pr_util.create_pull_request()
506 506 pr_util.create_inline_comment(file_path='not_in_updated_diff')
507 507
508 508 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
509 509 pr_util.add_one_commit()
510 510 assert_inline_comments(
511 511 pull_request, visible=inlines_count, outdated=outdated_count)
512 512 outdated_comment_mock.assert_called_with(pull_request)
513 513
514 514
515 515 @pytest.mark.parametrize('mr_type, expected_msg', [
516 516 (MergeFailureReason.NONE,
517 517 'This pull request can be automatically merged.'),
518 518 (MergeFailureReason.UNKNOWN,
519 519 'This pull request cannot be merged because of an unhandled exception. CRASH'),
520 520 (MergeFailureReason.MERGE_FAILED,
521 'This pull request cannot be merged because of merge conflicts.'),
521 'This pull request cannot be merged because of merge conflicts. CONFLICT_FILE'),
522 522 (MergeFailureReason.PUSH_FAILED,
523 523 'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
524 524 (MergeFailureReason.TARGET_IS_NOT_HEAD,
525 525 'This pull request cannot be merged because the target `ref_name` is not a head.'),
526 526 (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
527 527 'This pull request cannot be merged because the source contains more branches than the target.'),
528 528 (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
529 529 'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
530 530 (MergeFailureReason.TARGET_IS_LOCKED,
531 531 'This pull request cannot be merged because the target repository is locked by user:123.'),
532 532 (MergeFailureReason.MISSING_TARGET_REF,
533 533 'This pull request cannot be merged because the target reference `ref_name` is missing.'),
534 534 (MergeFailureReason.MISSING_SOURCE_REF,
535 535 'This pull request cannot be merged because the source reference `ref_name` is missing.'),
536 536 (MergeFailureReason.SUBREPO_MERGE_FAILED,
537 537 'This pull request cannot be merged because of conflicts related to sub repositories.'),
538 538
539 539 ])
540 540 def test_merge_response_message(mr_type, expected_msg):
541 541 merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
542 542 metadata = {
543 'unresolved_files': 'CONFLICT_FILE',
543 544 'exception': "CRASH",
544 545 'target': 'some-repo',
545 546 'merge_commit': 'merge_commit',
546 547 'target_ref': merge_ref,
547 548 'source_ref': merge_ref,
548 549 'heads': ','.join(['a', 'b', 'c']),
549 'locked_by': 'user:123'}
550 'locked_by': 'user:123'
551 }
550 552
551 553 merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
552 554 assert merge_response.merge_status_message == expected_msg
553 555
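The parametrized cases above suggest one message template per failure reason, with placeholders filled from the response metadata. A partial, illustrative mapping inferred from the expected messages (the real MergeResponse keeps its own table):

MERGE_STATUS_MESSAGES = {
    MergeFailureReason.NONE:
        u'This pull request can be automatically merged.',
    MergeFailureReason.UNKNOWN:
        u'This pull request cannot be merged because of an unhandled exception. {exception}',
    MergeFailureReason.MERGE_FAILED:
        u'This pull request cannot be merged because of merge conflicts. {unresolved_files}',
    MergeFailureReason.TARGET_IS_LOCKED:
        u'This pull request cannot be merged because the target repository is locked by {locked_by}.',
}

def merge_status_message(reason, metadata):
    # str.format() picks only the placeholders each template mentions
    return MERGE_STATUS_MESSAGES[reason].format(**metadata)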
554 556
555 557 @pytest.fixture()
556 558 def merge_extras(user_regular):
557 559 """
558 560 Context for the vcs operation when running a merge.
559 561 """
560 562 extras = {
561 563 'ip': '127.0.0.1',
562 564 'username': user_regular.username,
563 565 'user_id': user_regular.user_id,
564 566 'action': 'push',
565 567 'repository': 'fake_target_repo_name',
566 568 'scm': 'git',
567 569 'config': 'fake_config_ini_path',
568 570 'repo_store': '',
569 571 'make_lock': None,
570 572 'locked_by': [None, None, None],
571 573 'server_url': 'http://test.example.com:5000',
572 574 'hooks': ['push', 'pull'],
573 575 'is_shadow_repo': False,
574 576 }
575 577 return extras
576 578
577 579
578 580 @pytest.mark.usefixtures('config_stub')
579 581 class TestUpdateCommentHandling(object):
580 582
581 583 @pytest.fixture(autouse=True, scope='class')
582 584 def enable_outdated_comments(self, request, baseapp):
583 585 config_patch = mock.patch.dict(
584 586 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
585 587 config_patch.start()
586 588
587 589 @request.addfinalizer
588 590 def cleanup():
589 591 config_patch.stop()
590 592
591 593 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
592 594 commits = [
593 595 {'message': 'a'},
594 596 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
595 597 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
596 598 ]
597 599 pull_request = pr_util.create_pull_request(
598 600 commits=commits, target_head='a', source_head='b', revisions=['b'])
599 601 pr_util.create_inline_comment(file_path='file_b')
600 602 pr_util.add_one_commit(head='c')
601 603
602 604 assert_inline_comments(pull_request, visible=1, outdated=0)
603 605
604 606 def test_comment_stays_unflagged_on_change_above(self, pr_util):
605 607 original_content = ''.join(
606 608 ['line {}\n'.format(x) for x in range(1, 11)])
607 609 updated_content = 'new_line_at_top\n' + original_content
608 610 commits = [
609 611 {'message': 'a'},
610 612 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
611 613 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
612 614 ]
613 615 pull_request = pr_util.create_pull_request(
614 616 commits=commits, target_head='a', source_head='b', revisions=['b'])
615 617
616 618 with outdated_comments_patcher():
617 619 comment = pr_util.create_inline_comment(
618 620 line_no=u'n8', file_path='file_b')
619 621 pr_util.add_one_commit(head='c')
620 622
621 623 assert_inline_comments(pull_request, visible=1, outdated=0)
622 624 assert comment.line_no == u'n9'
623 625
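The u'n8' to u'n9' move relies on the line_no convention used by inline comments: an 'n' prefix marks the new side of the diff, 'o' the old side. A purely illustrative sketch of shifting a new-side comment when lines are inserted above it (hypothetical helper, not RhodeCode's renumbering code):

def shift_comment_line(line_no, inserted_at, count=1):
    # line_no is e.g. u'n8'; only new-side comments at or below the
    # insertion point move down.
    side, num = line_no[0], int(line_no[1:])
    if side == u'n' and num >= inserted_at:
        num += count
    return u'%s%d' % (side, num)

Here shift_comment_line(u'n8', inserted_at=1) returns u'n9', matching the assertion above.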
624 626 def test_comment_stays_unflagged_on_change_below(self, pr_util):
625 627 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
626 628 updated_content = original_content + 'new_line_at_end\n'
627 629 commits = [
628 630 {'message': 'a'},
629 631 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
630 632 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
631 633 ]
632 634 pull_request = pr_util.create_pull_request(
633 635 commits=commits, target_head='a', source_head='b', revisions=['b'])
634 636 pr_util.create_inline_comment(file_path='file_b')
635 637 pr_util.add_one_commit(head='c')
636 638
637 639 assert_inline_comments(pull_request, visible=1, outdated=0)
638 640
639 641 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
640 642 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
641 643 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
642 644 change_lines = list(base_lines)
643 645 change_lines.insert(6, 'line 6a added\n')
644 646
645 647 # Changes on the last line of sight
646 648 update_lines = list(change_lines)
647 649 update_lines[0] = 'line 1 changed\n'
648 650 update_lines[-1] = 'line 12 changed\n'
649 651
650 652 def file_b(lines):
651 653 return FileNode('file_b', ''.join(lines))
652 654
653 655 commits = [
654 656 {'message': 'a', 'added': [file_b(base_lines)]},
655 657 {'message': 'b', 'changed': [file_b(change_lines)]},
656 658 {'message': 'c', 'changed': [file_b(update_lines)]},
657 659 ]
658 660
659 661 pull_request = pr_util.create_pull_request(
660 662 commits=commits, target_head='a', source_head='b', revisions=['b'])
661 663 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
662 664
663 665 with outdated_comments_patcher():
664 666 pr_util.add_one_commit(head='c')
665 667 assert_inline_comments(pull_request, visible=0, outdated=1)
666 668
667 669 @pytest.mark.parametrize("change, content", [
668 670 ('changed', 'changed\n'),
669 671 ('removed', ''),
670 672 ], ids=['changed', 'removed'])
671 673 def test_comment_flagged_on_change(self, pr_util, change, content):
672 674 commits = [
673 675 {'message': 'a'},
674 676 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
675 677 {'message': 'c', change: [FileNode('file_b', content)]},
676 678 ]
677 679 pull_request = pr_util.create_pull_request(
678 680 commits=commits, target_head='a', source_head='b', revisions=['b'])
679 681 pr_util.create_inline_comment(file_path='file_b')
680 682
681 683 with outdated_comments_patcher():
682 684 pr_util.add_one_commit(head='c')
683 685 assert_inline_comments(pull_request, visible=0, outdated=1)
684 686
685 687
686 688 @pytest.mark.usefixtures('config_stub')
687 689 class TestUpdateChangedFiles(object):
688 690
689 691 def test_no_changes_on_unchanged_diff(self, pr_util):
690 692 commits = [
691 693 {'message': 'a'},
692 694 {'message': 'b',
693 695 'added': [FileNode('file_b', 'test_content b\n')]},
694 696 {'message': 'c',
695 697 'added': [FileNode('file_c', 'test_content c\n')]},
696 698 ]
697 699 # open a PR from a to b, adding file_b
698 700 pull_request = pr_util.create_pull_request(
699 701 commits=commits, target_head='a', source_head='b', revisions=['b'],
700 702 name_suffix='per-file-review')
701 703
702 704 # modify PR adding new file file_c
703 705 pr_util.add_one_commit(head='c')
704 706
705 707 assert_pr_file_changes(
706 708 pull_request,
707 709 added=['file_c'],
708 710 modified=[],
709 711 removed=[])
710 712
711 713 def test_modify_and_undo_modification_diff(self, pr_util):
712 714 commits = [
713 715 {'message': 'a'},
714 716 {'message': 'b',
715 717 'added': [FileNode('file_b', 'test_content b\n')]},
716 718 {'message': 'c',
717 719 'changed': [FileNode('file_b', 'test_content b modified\n')]},
718 720 {'message': 'd',
719 721 'changed': [FileNode('file_b', 'test_content b\n')]},
720 722 ]
721 723 # open a PR from a to b, adding file_b
722 724 pull_request = pr_util.create_pull_request(
723 725 commits=commits, target_head='a', source_head='b', revisions=['b'],
724 726 name_suffix='per-file-review')
725 727
726 728 # modify PR modifying file file_b
727 729 pr_util.add_one_commit(head='c')
728 730
729 731 assert_pr_file_changes(
730 732 pull_request,
731 733 added=[],
732 734 modified=['file_b'],
733 735 removed=[])
734 736
737 739 # move the head again to d, which rolls back the change,
736 738 # meaning we should indicate no changes
737 739 pr_util.add_one_commit(head='d')
738 740
739 741 assert_pr_file_changes(
740 742 pull_request,
741 743 added=[],
742 744 modified=[],
743 745 removed=[])
744 746
745 747 def test_updated_all_files_in_pr(self, pr_util):
746 748 commits = [
747 749 {'message': 'a'},
748 750 {'message': 'b', 'added': [
749 751 FileNode('file_a', 'test_content a\n'),
750 752 FileNode('file_b', 'test_content b\n'),
751 753 FileNode('file_c', 'test_content c\n')]},
752 754 {'message': 'c', 'changed': [
753 755 FileNode('file_a', 'test_content a changed\n'),
754 756 FileNode('file_b', 'test_content b changed\n'),
755 757 FileNode('file_c', 'test_content c changed\n')]},
756 758 ]
757 759 # open a PR from a to b, changing 3 files
758 760 pull_request = pr_util.create_pull_request(
759 761 commits=commits, target_head='a', source_head='b', revisions=['b'],
760 762 name_suffix='per-file-review')
761 763
762 764 pr_util.add_one_commit(head='c')
763 765
764 766 assert_pr_file_changes(
765 767 pull_request,
766 768 added=[],
767 769 modified=['file_a', 'file_b', 'file_c'],
768 770 removed=[])
769 771
770 772 def test_updated_and_removed_all_files_in_pr(self, pr_util):
771 773 commits = [
772 774 {'message': 'a'},
773 775 {'message': 'b', 'added': [
774 776 FileNode('file_a', 'test_content a\n'),
775 777 FileNode('file_b', 'test_content b\n'),
776 778 FileNode('file_c', 'test_content c\n')]},
777 779 {'message': 'c', 'removed': [
778 780 FileNode('file_a', 'test_content a changed\n'),
779 781 FileNode('file_b', 'test_content b changed\n'),
780 782 FileNode('file_c', 'test_content c changed\n')]},
781 783 ]
782 784 # open a PR from a to b, removing 3 files
783 785 pull_request = pr_util.create_pull_request(
784 786 commits=commits, target_head='a', source_head='b', revisions=['b'],
785 787 name_suffix='per-file-review')
786 788
787 789 pr_util.add_one_commit(head='c')
788 790
789 791 assert_pr_file_changes(
790 792 pull_request,
791 793 added=[],
792 794 modified=[],
793 795 removed=['file_a', 'file_b', 'file_c'])
794 796
795 797
796 798 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
797 799 model = PullRequestModel()
798 800 pull_request = pr_util.create_pull_request()
799 801 pr_util.update_source_repository()
800 802
801 803 model.update_commits(pull_request)
802 804
803 805 # Expect that it has a version entry now
804 806 assert len(model.get_versions(pull_request)) == 1
805 807
806 808
807 809 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
808 810 pull_request = pr_util.create_pull_request()
809 811 model = PullRequestModel()
810 812 model.update_commits(pull_request)
811 813
812 814 # Expect that it still has no versions
813 815 assert len(model.get_versions(pull_request)) == 0
814 816
815 817
816 818 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
817 819 model = PullRequestModel()
818 820 pull_request = pr_util.create_pull_request()
819 821 comment = pr_util.create_comment()
820 822 pr_util.update_source_repository()
821 823
822 824 model.update_commits(pull_request)
823 825
824 826 # Expect that the comment is linked to the pr version now
825 827 assert comment.pull_request_version == model.get_versions(pull_request)[0]
826 828
827 829
828 830 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
829 831 model = PullRequestModel()
830 832 pull_request = pr_util.create_pull_request()
831 833 pr_util.update_source_repository()
832 834 pr_util.update_source_repository()
833 835
834 836 model.update_commits(pull_request)
835 837
836 838 # Expect to find a new comment about the change
837 839 expected_message = textwrap.dedent(
838 840 """\
839 841 Pull request updated. Auto status change to |under_review|
840 842
841 843 .. role:: added
842 844 .. role:: removed
843 845 .. parsed-literal::
844 846
845 847 Changed commits:
846 848 * :added:`1 added`
847 849 * :removed:`0 removed`
848 850
849 851 Changed files:
850 852 * `A file_2 <#a_c--92ed3b5f07b4>`_
851 853
852 854 .. |under_review| replace:: *"Under Review"*"""
853 855 )
854 856 pull_request_comments = sorted(
855 857 pull_request.comments, key=lambda c: c.modified_at)
856 858 update_comment = pull_request_comments[-1]
857 859 assert update_comment.text == expected_message
858 860
859 861
860 862 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
861 863 pull_request = pr_util.create_pull_request()
862 864
863 865 # Avoiding default values
864 866 pull_request.status = PullRequest.STATUS_CLOSED
865 867 pull_request._last_merge_source_rev = "0" * 40
866 868 pull_request._last_merge_target_rev = "1" * 40
867 869 pull_request.last_merge_status = 1
868 870 pull_request.merge_rev = "2" * 40
869 871
870 872 # Remember automatic values
871 873 created_on = pull_request.created_on
872 874 updated_on = pull_request.updated_on
873 875
874 876 # Create a new version of the pull request
875 877 version = PullRequestModel()._create_version_from_snapshot(pull_request)
876 878
877 879 # Check attributes
878 880 assert version.title == pr_util.create_parameters['title']
879 881 assert version.description == pr_util.create_parameters['description']
880 882 assert version.status == PullRequest.STATUS_CLOSED
881 883
882 884 # versions get updated created_on
883 885 assert version.created_on != created_on
884 886
885 887 assert version.updated_on == updated_on
886 888 assert version.user_id == pull_request.user_id
887 889 assert version.revisions == pr_util.create_parameters['revisions']
888 890 assert version.source_repo == pr_util.source_repository
889 891 assert version.source_ref == pr_util.create_parameters['source_ref']
890 892 assert version.target_repo == pr_util.target_repository
891 893 assert version.target_ref == pr_util.create_parameters['target_ref']
892 894 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
893 895 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
894 896 assert version.last_merge_status == pull_request.last_merge_status
895 897 assert version.merge_rev == pull_request.merge_rev
896 898 assert version.pull_request == pull_request
897 899
898 900
899 901 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
900 902 version1 = pr_util.create_version_of_pull_request()
901 903 comment_linked = pr_util.create_comment(linked_to=version1)
902 904 comment_unlinked = pr_util.create_comment()
903 905 version2 = pr_util.create_version_of_pull_request()
904 906
905 907 PullRequestModel()._link_comments_to_version(version2)
906 908 Session().commit()
907 909
908 910 # Expect that only the new comment is linked to version2
909 911 assert (
910 912 comment_unlinked.pull_request_version_id ==
911 913 version2.pull_request_version_id)
912 914 assert (
913 915 comment_linked.pull_request_version_id ==
914 916 version1.pull_request_version_id)
915 917 assert (
916 918 comment_unlinked.pull_request_version_id !=
917 919 comment_linked.pull_request_version_id)
918 920
919 921
920 922 def test_calculate_commits():
921 923 old_ids = [1, 2, 3]
922 924 new_ids = [1, 3, 4, 5]
923 925 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
924 926 assert change.added == [4, 5]
925 927 assert change.common == [1, 3]
926 928 assert change.removed == [2]
927 929 assert change.total == [1, 3, 4, 5]
928 930
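A sketch of an order-preserving comparison that satisfies test_calculate_commits above (illustrative; not PullRequestModel's actual helper):

import collections

ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

def calculate_commit_id_changes(old_ids, new_ids):
    added = [c for c in new_ids if c not in old_ids]      # [4, 5]
    common = [c for c in old_ids if c in new_ids]         # [1, 3]
    removed = [c for c in old_ids if c not in new_ids]    # [2]
    return ChangeTuple(added, common, removed, common + added)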
929 931
930 932 def assert_inline_comments(pull_request, visible=None, outdated=None):
931 933 if visible is not None:
932 934 inline_comments = CommentsModel().get_inline_comments(
933 935 pull_request.target_repo.repo_id, pull_request=pull_request)
934 936 inline_cnt = CommentsModel().get_inline_comments_count(
935 937 inline_comments)
936 938 assert inline_cnt == visible
937 939 if outdated is not None:
938 940 outdated_comments = CommentsModel().get_outdated_comments(
939 941 pull_request.target_repo.repo_id, pull_request)
940 942 assert len(outdated_comments) == outdated
941 943
942 944
943 945 def assert_pr_file_changes(
944 946 pull_request, added=None, modified=None, removed=None):
945 947 pr_versions = PullRequestModel().get_versions(pull_request)
946 948 # always use the first version, i.e. the original PR, to calculate changes
947 949 pull_request_version = pr_versions[0]
948 950 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
949 951 pull_request, pull_request_version)
950 952 file_changes = PullRequestModel()._calculate_file_changes(
951 953 old_diff_data, new_diff_data)
952 954
953 955 assert added == file_changes.added, \
954 956 'expected added:%s vs value:%s' % (added, file_changes.added)
955 957 assert modified == file_changes.modified, \
956 958 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
957 959 assert removed == file_changes.removed, \
958 960 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
959 961
960 962
961 963 def outdated_comments_patcher(use_outdated=True):
962 964 return mock.patch.object(
963 965 CommentsModel, 'use_outdated_comments',
964 966 return_value=use_outdated)