chore(code-cleanups): small fixes for readability
super-admin
r5197:4a692945 default
@@ -1,1984 +1,1983 @@
1 1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Base module for all VCS systems
21 21 """
22 22 import os
23 23 import re
24 24 import time
25 25 import shutil
26 26 import datetime
27 27 import fnmatch
28 28 import itertools
29 29 import logging
30 30 import dataclasses
31 31 import warnings
32 32
33 33 from zope.cachedescriptors.property import Lazy as LazyProperty
34 34
35 35
36 36 import rhodecode
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib.utils2 import safe_str, CachedProperty
39 39 from rhodecode.lib.vcs.utils import author_name, author_email
40 40 from rhodecode.lib.vcs.conf import settings
41 41 from rhodecode.lib.vcs.exceptions import (
42 42 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
43 43 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
44 44 NodeDoesNotExistError, NodeNotChangedError, VCSError,
45 45 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
46 46 RepositoryError)
47 47
48 48
49 49 log = logging.getLogger(__name__)
50 50
51 51
52 52 FILEMODE_DEFAULT = 0o100644
53 53 FILEMODE_EXECUTABLE = 0o100755
54 54 EMPTY_COMMIT_ID = '0' * 40
55 55
56 56
57 57 @dataclasses.dataclass
58 58 class Reference:
59 59 type: str
60 60 name: str
61 61 commit_id: str
62 62
63 63 def __iter__(self):
64 64 yield self.type
65 65 yield self.name
66 66 yield self.commit_id
67 67
68 68 @property
69 69 def branch(self):
70 70 if self.type == 'branch':
71 71 return self.name
72 72
73 73 @property
74 74 def bookmark(self):
75 75 if self.type == 'book':
76 76 return self.name
77 77
78 78 @property
79 79 def to_str(self):
80 80 return reference_to_unicode(self)
81 81
82 82 def asdict(self):
83 83 return dict(
84 84 type=self.type,
85 85 name=self.name,
86 86 commit_id=self.commit_id
87 87 )
88 88
89 89
90 90 def unicode_to_reference(raw: str):
91 91 """
92 92 Convert a string to a reference object.
93 93 If the string evaluates to False, return None.
94 94 """
95 95 if raw:
96 96 refs = raw.split(':')
97 97 return Reference(*refs)
98 98 else:
99 99 return None
100 100
101 101
102 102 def reference_to_unicode(ref: Reference):
103 103 """
104 104 Convert a reference object to unicode.
105 105 If reference is None it returns None.
106 106 """
107 107 if ref:
108 108 return ':'.join(ref)
109 109 else:
110 110 return None
111 111
112 112
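# A minimal usage sketch (hypothetical values, not part of this module): the
# two helpers above round-trip a reference through its `type:name:commit_id`
# string form, which is how references are persisted.
#
#   ref = Reference('branch', 'master', 'f' * 40)
#   text = reference_to_unicode(ref)        # 'branch:master:ffff...'
#   assert unicode_to_reference(text) == ref
#   assert unicode_to_reference('') is None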
113 113 class MergeFailureReason(object):
114 114 """
115 115 Enumeration with all the reasons why the server side merge could fail.
116 116
117 117 DO NOT change the numeric values of the reasons, as they may be stored in the
118 118 database.
119 119
120 120 Changing the name of a reason is acceptable and encouraged to deprecate old
121 121 reasons.
122 122 """
123 123
124 124 # Everything went well.
125 125 NONE = 0
126 126
127 127 # An unexpected exception was raised. Check the logs for more details.
128 128 UNKNOWN = 1
129 129
130 130 # The merge was not successful, there are conflicts.
131 131 MERGE_FAILED = 2
132 132
133 133 # The merge succeeded but we could not push it to the target repository.
134 134 PUSH_FAILED = 3
135 135
136 136 # The specified target is not a head in the target repository.
137 137 TARGET_IS_NOT_HEAD = 4
138 138
139 139 # The source repository contains more branches than the target. Pushing
140 140 # the merge will create additional branches in the target.
141 141 HG_SOURCE_HAS_MORE_BRANCHES = 5
142 142
143 143 # The target reference has multiple heads, which makes it impossible to
144 144 # correctly identify the target location. This can only happen for mercurial
145 145 # branches.
146 146 HG_TARGET_HAS_MULTIPLE_HEADS = 6
147 147
148 148 # The target repository is locked
149 149 TARGET_IS_LOCKED = 7
150 150
151 151 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
152 152 # An involved commit could not be found.
153 153 _DEPRECATED_MISSING_COMMIT = 8
154 154
155 155 # The target repo reference is missing.
156 156 MISSING_TARGET_REF = 9
157 157
158 158 # The source repo reference is missing.
159 159 MISSING_SOURCE_REF = 10
160 160
161 161 # The merge was not successful, there are conflicts related to sub
162 162 # repositories.
163 163 SUBREPO_MERGE_FAILED = 11
164 164
165 165
166 166 class UpdateFailureReason(object):
167 167 """
168 168 Enumeration with all the reasons why the pull request update could fail.
169 169
170 170 DO NOT change the numeric values of the reasons, as they may be stored in the
171 171 database.
172 172
173 173 Changing the name of a reason is acceptable and encouraged to deprecate old
174 174 reasons.
175 175 """
176 176
177 177 # Everything went well.
178 178 NONE = 0
179 179
180 180 # An unexpected exception was raised. Check the logs for more details.
181 181 UNKNOWN = 1
182 182
183 183 # The pull request is up to date.
184 184 NO_CHANGE = 2
185 185
186 186 # The pull request has a reference type that is not supported for update.
187 187 WRONG_REF_TYPE = 3
188 188
189 189 # Update failed because the target reference is missing.
190 190 MISSING_TARGET_REF = 4
191 191
192 192 # Update failed because the source reference is missing.
193 193 MISSING_SOURCE_REF = 5
194 194
195 195
196 196 class MergeResponse(object):
197 197
198 198 # uses .format(**metadata) for variables
199 199 MERGE_STATUS_MESSAGES = {
200 200 MergeFailureReason.NONE: lazy_ugettext(
201 201 'This pull request can be automatically merged.'),
202 202 MergeFailureReason.UNKNOWN: lazy_ugettext(
203 203 'This pull request cannot be merged because of an unhandled exception. '
204 204 '{exception}'),
205 205 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
206 206 'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
207 207 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
208 208 'This pull request could not be merged because push to '
209 209 'target:`{target}@{merge_commit}` failed.'),
210 210 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
211 211 'This pull request cannot be merged because the target '
212 212 '`{target_ref.name}` is not a head.'),
213 213 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
214 214 'This pull request cannot be merged because the source contains '
215 215 'more branches than the target.'),
216 216 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
217 217 'This pull request cannot be merged because the target `{target_ref.name}` '
218 218 'has multiple heads: `{heads}`.'),
219 219 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
220 220 'This pull request cannot be merged because the target repository is '
221 221 'locked by {locked_by}.'),
222 222
223 223 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
224 224 'This pull request cannot be merged because the target '
225 225 'reference `{target_ref.name}` is missing.'),
226 226 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
227 227 'This pull request cannot be merged because the source '
228 228 'reference `{source_ref.name}` is missing.'),
229 229 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
230 230 'This pull request cannot be merged because of conflicts related '
231 231 'to sub repositories.'),
232 232
233 233 # Deprecations
234 234 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
235 235 'This pull request cannot be merged because the target or the '
236 236 'source reference is missing.'),
237 237
238 238 }
239 239
240 240 def __init__(self, possible, executed, merge_ref: Reference, failure_reason, metadata=None):
241 241 self.possible = possible
242 242 self.executed = executed
243 243 self.merge_ref = merge_ref
244 244 self.failure_reason = failure_reason
245 245 self.metadata = metadata or {}
246 246
247 247 def __repr__(self):
248 248 return f'<MergeResponse:{self.label} {self.failure_reason}>'
249 249
250 250 def __eq__(self, other):
251 251 same_instance = isinstance(other, self.__class__)
252 252 return same_instance \
253 253 and self.possible == other.possible \
254 254 and self.executed == other.executed \
255 255 and self.failure_reason == other.failure_reason
256 256
257 257 @property
258 258 def label(self):
259 259 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
260 260 not k.startswith('_'))
261 261 return label_dict.get(self.failure_reason)
262 262
263 263 @property
264 264 def merge_status_message(self):
265 265 """
266 266 Return a human friendly error message for the given merge status code.
267 267 """
268 268 msg = safe_str(self.MERGE_STATUS_MESSAGES[self.failure_reason])
269 269
270 270 try:
271 271 return msg.format(**self.metadata)
272 272 except Exception:
273 273 log.exception('Failed to format %s message', self)
274 274 return msg
275 275
276 276 def asdict(self):
277 277 data = {}
278 278 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
279 279 'merge_status_message']:
280 280 data[k] = getattr(self, k)
281 281 return data
282 282
283 283
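# Usage sketch (hypothetical metadata): `merge_status_message` formats the
# template registered for `failure_reason` with `metadata`, so callers should
# pass the keys that template expects (here `{unresolved_files}`).
#
#   resp = MergeResponse(
#       False, False, None, MergeFailureReason.MERGE_FAILED,
#       metadata={'unresolved_files': 'setup.py'})
#   assert resp.label == 'MERGE_FAILED'
#   print(resp.merge_status_message)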
284 284 class TargetRefMissing(ValueError):
285 285 pass
286 286
287 287
288 288 class SourceRefMissing(ValueError):
289 289 pass
290 290
291 291
292 292 class BaseRepository(object):
293 293 """
294 294 Base Repository for final backends
295 295
296 296 .. attribute:: DEFAULT_BRANCH_NAME
297 297
298 298 name of the default branch (e.g. "trunk" for svn, "master" for git, etc.)
299 299
300 300 .. attribute:: commit_ids
301 301
302 302 list of all available commit ids, in ascending order
303 303
304 304 .. attribute:: path
305 305
306 306 absolute path to the repository
307 307
308 308 .. attribute:: bookmarks
309 309
310 310 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
311 311 there are no bookmarks or the backend implementation does not support
312 312 bookmarks.
313 313
314 314 .. attribute:: tags
315 315
316 316 Mapping from name to :term:`Commit ID` of the tag.
317 317
318 318 """
319 319
320 320 DEFAULT_BRANCH_NAME = None
321 321 DEFAULT_CONTACT = "Unknown"
322 322 DEFAULT_DESCRIPTION = "unknown"
323 323 EMPTY_COMMIT_ID = '0' * 40
324 324 COMMIT_ID_PAT = re.compile(r'[0-9a-fA-F]{40}')
325 325
326 326 path = None
327 327
328 328 _is_empty = None
329 329 _commit_ids = {}
330 330
331 331 def __init__(self, repo_path, config=None, create=False, **kwargs):
332 332 """
333 333 Initializes repository. Raises RepositoryError if the repository could
334 334 not be found at the given ``repo_path``, or if a directory at ``repo_path``
335 335 already exists and ``create`` is set to True.
336 336
337 337 :param repo_path: local path of the repository
338 338 :param config: repository configuration
339 339 :param create=False: if set to True, would try to create repository.
340 340 :param src_url=None: if set, should be proper url from which repository
341 341 would be cloned; requires ``create`` parameter to be set to True -
342 342 raises RepositoryError if src_url is set and create evaluates to
343 343 False
344 344 """
345 345 raise NotImplementedError
346 346
347 347 def __repr__(self):
348 348 return f'<{self.__class__.__name__} at {self.path}>'
349 349
350 350 def __len__(self):
351 351 return self.count()
352 352
353 353 def __eq__(self, other):
354 354 same_instance = isinstance(other, self.__class__)
355 355 return same_instance and other.path == self.path
356 356
357 357 def __ne__(self, other):
358 358 return not self.__eq__(other)
359 359
360 360 def get_create_shadow_cache_pr_path(self, db_repo):
361 361 path = db_repo.cached_diffs_dir
362 362 if not os.path.exists(path):
363 363 os.makedirs(path, 0o755)
364 364 return path
365 365
366 366 @classmethod
367 367 def get_default_config(cls, default=None):
368 368 config = Config()
369 369 if default and isinstance(default, list):
370 370 for section, key, val in default:
371 371 config.set(section, key, val)
372 372 return config
373 373
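# Usage sketch for `get_default_config` above (hypothetical section/key/value):
# `default` is a list of (section, key, value) tuples applied on top of a
# fresh `Config` instance.
#
#   config = BaseRepository.get_default_config(
#       default=[('ui', 'username', 'RhodeCode')])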
374 374 @LazyProperty
375 375 def _remote(self):
376 376 raise NotImplementedError
377 377
378 378 def _heads(self, branch=None):
379 379 return []
380 380
381 381 @LazyProperty
382 382 def EMPTY_COMMIT(self):
383 383 return EmptyCommit(self.EMPTY_COMMIT_ID)
384 384
385 385 @LazyProperty
386 386 def alias(self):
387 387 for k, v in settings.BACKENDS.items():
388 388 if v.split('.')[-1] == str(self.__class__.__name__):
389 389 return k
390 390
391 391 @LazyProperty
392 392 def name(self):
393 393 return safe_str(os.path.basename(self.path))
394 394
395 395 @LazyProperty
396 396 def description(self):
397 397 raise NotImplementedError
398 398
399 399 def refs(self):
400 400 """
401 401 returns a `dict` with branches, bookmarks, tags, and closed_branches
402 402 for this repository
403 403 """
404 404 return dict(
405 405 branches=self.branches,
406 406 branches_closed=self.branches_closed,
407 407 tags=self.tags,
408 408 bookmarks=self.bookmarks
409 409 )
410 410
411 411 @LazyProperty
412 412 def branches(self):
413 413 """
414 414 A `dict` which maps branch names to commit ids.
415 415 """
416 416 raise NotImplementedError
417 417
418 418 @LazyProperty
419 419 def branches_closed(self):
420 420 """
421 421 A `dict` which maps closed branch names to commit ids.
422 422 """
423 423 raise NotImplementedError
424 424
425 425 @LazyProperty
426 426 def bookmarks(self):
427 427 """
428 428 A `dict` which maps bookmark names to commit ids.
429 429 """
430 430 raise NotImplementedError
431 431
432 432 @LazyProperty
433 433 def tags(self):
434 434 """
435 435 A `dict` which maps tag names to commit ids.
436 436 """
437 437 raise NotImplementedError
438 438
439 439 @LazyProperty
440 440 def size(self):
441 441 """
442 442 Returns combined size in bytes for all repository files
443 443 """
444 444 tip = self.get_commit()
445 445 return tip.size
446 446
447 447 def size_at_commit(self, commit_id):
448 448 commit = self.get_commit(commit_id)
449 449 return commit.size
450 450
451 451 def _check_for_empty(self):
452 452 no_commits = len(self._commit_ids) == 0
453 453 if no_commits:
454 454 # check on remote to be sure
455 455 return self._remote.is_empty()
456 456 else:
457 457 return False
458 458
459 459 def is_empty(self):
460 460 if rhodecode.is_test:
461 461 return self._check_for_empty()
462 462
463 463 if self._is_empty is None:
464 464 # cache empty for production, but not tests
465 465 self._is_empty = self._check_for_empty()
466 466
467 467 return self._is_empty
468 468
469 469 @staticmethod
470 470 def check_url(url, config):
471 471 """
472 472 Check the given url and try to verify that it's a valid
473 473 link.
474 474 """
475 475 raise NotImplementedError
476 476
477 477 @staticmethod
478 478 def is_valid_repository(path):
479 479 """
480 480 Check if given `path` contains a valid repository of this backend
481 481 """
482 482 raise NotImplementedError
483 483
484 484 # ==========================================================================
485 485 # COMMITS
486 486 # ==========================================================================
487 487
488 488 @CachedProperty
489 489 def commit_ids(self):
490 490 raise NotImplementedError
491 491
492 492 def append_commit_id(self, commit_id):
493 493 if commit_id not in self.commit_ids:
494 494 self._rebuild_cache(self.commit_ids + [commit_id])
495 495
496 496 # clear cache
497 497 self._invalidate_prop_cache('commit_ids')
498 498 self._is_empty = False
499 499
500 500 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
501 501 translate_tag=None, maybe_unreachable=False, reference_obj=None):
502 502 """
503 503 Returns an instance of the `BaseCommit` class. If `commit_id` and `commit_idx`
504 504 are both None, the most recent commit is returned.
505 505
506 506 :param pre_load: Optional. List of commit attributes to load.
507 507
508 508 :raises ``EmptyRepositoryError``: if there are no commits
509 509 """
510 510 raise NotImplementedError
511 511
512 512 def __iter__(self):
513 513 for commit_id in self.commit_ids:
514 514 yield self.get_commit(commit_id=commit_id)
515 515
516 516 def get_commits(
517 517 self, start_id=None, end_id=None, start_date=None, end_date=None,
518 518 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
519 519 """
520 520 Returns an iterator of `BaseCommit` objects from start to end.
521 521 This behaves like list slicing: the end is not
522 522 inclusive.
523 523
524 524 :param start_id: None or str, must be a valid commit id
525 525 :param end_id: None or str, must be a valid commit id
526 526 :param start_date:
527 527 :param end_date:
528 528 :param branch_name:
529 529 :param show_hidden:
530 530 :param pre_load:
531 531 :param translate_tags:
532 532 """
533 533 raise NotImplementedError
534 534
535 535 def __getitem__(self, key):
536 536 """
537 537 Allows index based access to the commit objects of this repository.
538 538 """
539 539 pre_load = ["author", "branch", "date", "message", "parents"]
540 540 if isinstance(key, slice):
541 541 return self._get_range(key, pre_load)
542 542 return self.get_commit(commit_idx=key, pre_load=pre_load)
543 543
544 544 def _get_range(self, slice_obj, pre_load):
545 545 for commit_id in self.commit_ids.__getitem__(slice_obj):
546 546 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
547 547
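# Usage sketch (assuming a concrete backend instance bound to `repo`): index
# and slice access defined above delegate to `get_commit` and `_get_range`,
# with `commit_ids` in ascending order.
#
#   oldest = repo[0]                  # first commit
#   first_five = list(repo[0:5])      # lazily loaded slice
#   total = len(repo)                 # same as repo.count()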
548 548 def count(self):
549 549 return len(self.commit_ids)
550 550
551 551 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
552 552 """
553 553 Creates and returns a tag for the given ``commit_id``.
554 554
555 555 :param name: name for new tag
556 556 :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
557 557 :param commit_id: commit id for which new tag would be created
558 558 :param message: message of the tag's commit
559 559 :param date: date of tag's commit
560 560
561 561 :raises TagAlreadyExistError: if tag with same name already exists
562 562 """
563 563 raise NotImplementedError
564 564
565 565 def remove_tag(self, name, user, message=None, date=None):
566 566 """
567 567 Removes tag with the given ``name``.
568 568
569 569 :param name: name of the tag to be removed
570 570 :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
571 571 :param message: message of the tag's removal commit
572 572 :param date: date of tag's removal commit
573 573
574 574 :raises TagDoesNotExistError: if tag with given name does not exist
575 575 """
576 576 raise NotImplementedError
577 577
578 578 def get_diff(
579 579 self, commit1, commit2, path=None, ignore_whitespace=False,
580 580 context=3, path1=None):
581 581 """
582 582 Returns (git like) *diff*, as plain text. Shows changes introduced by
583 583 `commit2` since `commit1`.
584 584
585 585 :param commit1: Entry point from which diff is shown. Can be
586 586 ``self.EMPTY_COMMIT`` - in this case, patch showing all
587 587 the changes since empty state of the repository until `commit2`
588 588 :param commit2: Until which commit changes should be shown.
589 589 :param path: Can be set to a path of a file to create a diff of that
590 590 file. If `path1` is also set, this value is only associated to
591 591 `commit2`.
592 592 :param ignore_whitespace: If set to ``True``, would not show whitespace
593 593 changes. Defaults to ``False``.
594 594 :param context: How many lines before/after changed lines should be
595 595 shown. Defaults to ``3``.
596 596 :param path1: Can be set to a path to associate with `commit1`. This
597 597 parameter works only for backends which support diff generation for
598 598 different paths. Other backends will raise a `ValueError` if `path1`
599 599 is set and has a different value than `path`.
600 600 :param file_path: filter this diff by given path pattern
601 601 """
602 602 raise NotImplementedError
603 603
604 604 def strip(self, commit_id, branch=None):
605 605 """
606 606 Strip given commit_id from the repository
607 607 """
608 608 raise NotImplementedError
609 609
610 610 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
611 611 """
612 612 Return the latest common ancestor commit, if one exists, for this repo's
613 613 `commit_id1` vs `commit_id2` from `repo2`.
614 614
615 615 :param commit_id1: Commit id from this repository to use as a
616 616 target for the comparison.
617 617 :param commit_id2: Source commit id to use for comparison.
618 618 :param repo2: Source repository to use for comparison.
619 619 """
620 620 raise NotImplementedError
621 621
622 622 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
623 623 """
624 624 Compare this repository's revision `commit_id1` with `commit_id2`.
625 625
626 626 Returns a tuple(commits, ancestor) that would be merged from
627 627 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
628 628 will be returned as ancestor.
629 629
630 630 :param commit_id1: Commit id from this repository to use as a
631 631 target for the comparison.
632 632 :param commit_id2: Source commit id to use for comparison.
633 633 :param repo2: Source repository to use for comparison.
634 634 :param merge: If set to ``True`` will do a merge compare which also
635 635 returns the common ancestor.
636 636 :param pre_load: Optional. List of commit attributes to load.
637 637 """
638 638 raise NotImplementedError
639 639
640 640 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
641 641 user_name='', user_email='', message='', dry_run=False,
642 642 use_rebase=False, close_branch=False):
643 643 """
644 644 Merge the revisions specified in `source_ref` from `source_repo`
645 645 onto the `target_ref` of this repository.
646 646
647 647 `source_ref` and `target_ref` are named tuples with the following
648 648 fields `type`, `name` and `commit_id`.
649 649
650 650 Returns a MergeResponse object with the following fields:
651 651 'possible', 'executed', 'merge_ref',
652 652 'failure_reason'.
653 653
654 654 :param repo_id: `repo_id` target repo id.
655 655 :param workspace_id: `workspace_id` unique identifier.
656 656 :param target_ref: `target_ref` points to the commit on top of which
657 657 the `source_ref` should be merged.
658 658 :param source_repo: The repository that contains the commits to be
659 659 merged.
660 660 :param source_ref: `source_ref` points to the topmost commit from
661 661 the `source_repo` which should be merged.
662 662 :param user_name: Merge commit `user_name`.
663 663 :param user_email: Merge commit `user_email`.
664 664 :param message: Merge commit `message`.
665 665 :param dry_run: If `True` the merge will not take place.
666 666 :param use_rebase: If `True` commits from the source will be rebased
667 667 on top of the target instead of being merged.
668 668 :param close_branch: If `True` the branch will be closed before merging it
669 669 """
670 670 if dry_run:
671 671 message = message or settings.MERGE_DRY_RUN_MESSAGE
672 672 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
673 673 user_name = user_name or settings.MERGE_DRY_RUN_USER
674 674 else:
675 675 if not user_name:
676 676 raise ValueError('user_name cannot be empty')
677 677 if not user_email:
678 678 raise ValueError('user_email cannot be empty')
679 679 if not message:
680 680 raise ValueError('message cannot be empty')
681 681
682 682 try:
683 683 return self._merge_repo(
684 684 repo_id, workspace_id, target_ref, source_repo,
685 685 source_ref, message, user_name, user_email, dry_run=dry_run,
686 686 use_rebase=use_rebase, close_branch=close_branch)
687 687 except RepositoryError as exc:
688 688 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
689 689 return MergeResponse(
690 690 False, False, None, MergeFailureReason.UNKNOWN,
691 691 metadata={'exception': str(exc)})
692 692
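# Usage sketch (hypothetical refs and repos): a dry-run merge needs no user
# metadata, since the MERGE_DRY_RUN_* defaults above are substituted.
#
#   resp = repo.merge(
#       repo_id=1, workspace_id='pr-1',
#       target_ref=Reference('branch', 'master', 'a' * 40),
#       source_repo=other_repo,
#       source_ref=Reference('branch', 'feature', 'b' * 40),
#       dry_run=True)
#   if not resp.possible:
#       print(resp.merge_status_message)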
693 693 def _merge_repo(self, repo_id, workspace_id, target_ref,
694 694 source_repo, source_ref, merge_message,
695 695 merger_name, merger_email, dry_run=False,
696 696 use_rebase=False, close_branch=False):
697 697 """Internal implementation of merge."""
698 698 raise NotImplementedError
699 699
700 700 def _maybe_prepare_merge_workspace(
701 701 self, repo_id, workspace_id, target_ref, source_ref):
702 702 """
703 703 Create the merge workspace.
704 704
705 705 :param workspace_id: `workspace_id` unique identifier.
706 706 """
707 707 raise NotImplementedError
708 708
709 709 @classmethod
710 710 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
711 711 """
712 712 Legacy version that was used before. We still need it for
713 713 backward compatibility.
714 714 """
715 715 return os.path.join(
716 716 os.path.dirname(repo_path),
717 717 f'.__shadow_{os.path.basename(repo_path)}_{workspace_id}')
718 718
719 719 @classmethod
720 720 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
721 721 # The name of the shadow repository must start with '.', so it is
722 722 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
723 723 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
724 724 if os.path.exists(legacy_repository_path):
725 725 return legacy_repository_path
726 726 else:
727 727 return os.path.join(
728 728 os.path.dirname(repo_path),
729 729 f'.__shadow_repo_{repo_id}_{workspace_id}')
730 730
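# Path schema sketch for the classmethod above (hypothetical values):
#
#   path = BaseRepository._get_shadow_repository_path(
#       '/repos/project', repo_id=42, workspace_id='pr-7')
#   # -> '/repos/.__shadow_repo_42_pr-7', unless the legacy
#   #    '/repos/.__shadow_project_pr-7' already exists on disk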
731 731 def cleanup_merge_workspace(self, repo_id, workspace_id):
732 732 """
733 733 Remove merge workspace.
734 734
735 735 This function MUST not fail in case there is no workspace associated with
736 736 the given `workspace_id`.
737 737
738 738 :param workspace_id: `workspace_id` unique identifier.
739 739 """
740 740 shadow_repository_path = self._get_shadow_repository_path(
741 741 self.path, repo_id, workspace_id)
742 742 shadow_repository_path_del = '{}.{}.delete'.format(
743 743 shadow_repository_path, time.time())
744 744
745 745 # move the shadow repo, so it never conflicts with the one used.
746 746 # we use this method because shutil.rmtree had some edge case problems
747 747 # removing symlinked repositories
748 748 if not os.path.isdir(shadow_repository_path):
749 749 return
750 750
751 751 shutil.move(shadow_repository_path, shadow_repository_path_del)
752 752 try:
753 753 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
754 754 except Exception:
755 755 log.exception('Failed to gracefully remove shadow repo under %s',
756 756 shadow_repository_path_del)
757 757 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
758 758
759 759 # ========== #
760 760 # COMMIT API #
761 761 # ========== #
762 762
763 763 @LazyProperty
764 764 def in_memory_commit(self):
765 765 """
766 766 Returns :class:`InMemoryCommit` object for this repository.
767 767 """
768 768 raise NotImplementedError
769 769
770 770 # ======================== #
771 771 # UTILITIES FOR SUBCLASSES #
772 772 # ======================== #
773 773
774 774 def _validate_diff_commits(self, commit1, commit2):
775 775 """
776 776 Validates that the given commits are related to this repository.
777 777
778 778 Intended as a utility for subclasses to have a consistent validation
779 779 of input parameters in methods like :meth:`get_diff`.
780 780 """
781 781 self._validate_commit(commit1)
782 782 self._validate_commit(commit2)
783 783 if (isinstance(commit1, EmptyCommit) and
784 784 isinstance(commit2, EmptyCommit)):
785 785 raise ValueError("Cannot compare two empty commits")
786 786
787 787 def _validate_commit(self, commit):
788 788 if not isinstance(commit, BaseCommit):
789 789 raise TypeError(
790 790 "%s is not of type BaseCommit" % repr(commit))
791 791 if commit.repository != self and not isinstance(commit, EmptyCommit):
792 792 raise ValueError(
793 793 "Commit %s must be a valid commit from this repository %s, "
794 794 "related to this repository instead %s." %
795 795 (commit, self, commit.repository))
796 796
797 797 def _validate_commit_id(self, commit_id):
798 798 if not isinstance(commit_id, str):
799 799 raise TypeError(f"commit_id must be a string value got {type(commit_id)} instead")
800 800
801 801 def _validate_commit_idx(self, commit_idx):
802 802 if not isinstance(commit_idx, int):
803 803 raise TypeError(f"commit_idx must be a numeric value, got {type(commit_idx)}")
804 804
805 805 def _validate_branch_name(self, branch_name):
806 806 if branch_name and branch_name not in self.branches_all:
807 807 msg = (f"Branch {branch_name} not found in {self}")
808 808 raise BranchDoesNotExistError(msg)
809 809
810 810 #
811 811 # Supporting deprecated API parts
812 812 # TODO: johbo: consider to move this into a mixin
813 813 #
814 814
815 815 @property
816 816 def EMPTY_CHANGESET(self):
817 817 warnings.warn(
818 818 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
819 819 return self.EMPTY_COMMIT_ID
820 820
821 821 @property
822 822 def revisions(self):
823 823 warnings.warn("Use commits attribute instead", DeprecationWarning)
824 824 return self.commit_ids
825 825
826 826 @revisions.setter
827 827 def revisions(self, value):
828 828 warnings.warn("Use commits attribute instead", DeprecationWarning)
829 829 self.commit_ids = value
830 830
831 831 def get_changeset(self, revision=None, pre_load=None):
832 832 warnings.warn("Use get_commit instead", DeprecationWarning)
833 833 commit_id = None
834 834 commit_idx = None
835 835 if isinstance(revision, str):
836 836 commit_id = revision
837 837 else:
838 838 commit_idx = revision
839 839 return self.get_commit(
840 840 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
841 841
842 842 def get_changesets(
843 843 self, start=None, end=None, start_date=None, end_date=None,
844 844 branch_name=None, pre_load=None):
845 845 warnings.warn("Use get_commits instead", DeprecationWarning)
846 846 start_id = self._revision_to_commit(start)
847 847 end_id = self._revision_to_commit(end)
848 848 return self.get_commits(
849 849 start_id=start_id, end_id=end_id, start_date=start_date,
850 850 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
851 851
852 852 def _revision_to_commit(self, revision):
853 853 """
854 854 Translates a revision to a commit_id
855 855
856 856 Helps to support the old changeset-based API, which allows using
857 857 commit ids and commit indices interchangeably.
858 858 """
859 859 if revision is None:
860 860 return revision
861 861
862 862 if isinstance(revision, str):
863 863 commit_id = revision
864 864 else:
865 865 commit_id = self.commit_ids[revision]
866 866 return commit_id
867 867
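# Usage sketch for the deprecated translation above (hypothetical `repo`):
# both spellings resolve to a commit id.
#
#   repo._revision_to_commit(0)             # index -> commit id
#   repo._revision_to_commit('a' * 40)      # commit id -> returned as-is
#   repo._revision_to_commit(None)          # None -> None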
868 868 @property
869 869 def in_memory_changeset(self):
870 870 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
871 871 return self.in_memory_commit
872 872
873 873 def get_path_permissions(self, username):
874 874 """
875 875 Returns a path permission checker or None if not supported
876 876
877 877 :param username: session user name
878 878 :return: an instance of BasePathPermissionChecker or None
879 879 """
880 880 return None
881 881
882 882 def install_hooks(self, force=False):
883 883 return self._remote.install_hooks(force)
884 884
885 885 def get_hooks_info(self):
886 886 return self._remote.get_hooks_info()
887 887
888 888 def vcsserver_invalidate_cache(self, delete=False):
889 889 return self._remote.vcsserver_invalidate_cache(delete)
890 890
891 891
892 892 class BaseCommit(object):
893 893 """
894 894 Each backend should implement its commit representation.
895 895
896 896 **Attributes**
897 897
898 898 ``repository``
899 899 repository object within which commit exists
900 900
901 901 ``id``
902 902 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
903 903 just ``tip``.
904 904
905 905 ``raw_id``
906 906 raw commit representation (i.e. full 40 length sha for git
907 907 backend)
908 908
909 909 ``short_id``
910 910 shortened (if applicable) version of ``raw_id``; a simple
911 911 shortcut for ``raw_id[:12]`` for git/mercurial backends, or the same
912 912 as ``raw_id`` for subversion
913 913
914 914 ``idx``
915 915 commit index
916 916
917 917 ``files``
918 918 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
919 919
920 920 ``dirs``
921 921 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
922 922
923 923 ``nodes``
924 924 combined list of ``Node`` objects
925 925
926 926 ``author``
927 927 author of the commit, as unicode
928 928
929 929 ``message``
930 930 message of the commit, as unicode
931 931
932 932 ``parents``
933 933 list of parent commits
934 934
935 935 """
936 936 repository = None
937 937 branch = None
938 938
939 939 """
940 940 Depending on the backend this should be set to the branch name of the
941 941 commit. Backends not supporting branches on commits should leave this
942 942 value as ``None``.
943 943 """
944 944
945 945 _ARCHIVE_PREFIX_TEMPLATE = '{repo_name}-{short_id}'
946 946 """
947 947 This template is used to generate a default prefix for repository archives
948 948 if no prefix has been specified.
949 949 """
950 950
951 951 def __repr__(self):
952 952 return self.__str__()
953 953
954 954 def __str__(self):
955 955 return f'<{self.__class__.__name__} at {self.idx}:{self.short_id}>'
956 956
957 957 def __eq__(self, other):
958 958 same_instance = isinstance(other, self.__class__)
959 959 return same_instance and self.raw_id == other.raw_id
960 960
961 961 def __json__(self):
962 962 parents = []
963 963 try:
964 964 for parent in self.parents:
965 965 parents.append({'raw_id': parent.raw_id})
966 966 except NotImplementedError:
967 967 # empty commit doesn't have parents implemented
968 968 pass
969 969
970 970 return {
971 971 'short_id': self.short_id,
972 972 'raw_id': self.raw_id,
973 973 'revision': self.idx,
974 974 'message': self.message,
975 975 'date': self.date,
976 976 'author': self.author,
977 977 'parents': parents,
978 978 'branch': self.branch
979 979 }
980 980
981 981 def __getstate__(self):
982 982 d = self.__dict__.copy()
983 983 d.pop('_remote', None)
984 984 d.pop('repository', None)
985 985 return d
986 986
987 987 def get_remote(self):
988 988 return self._remote
989 989
990 990 def serialize(self):
991 991 return self.__json__()
992 992
993 993 def _get_refs(self):
994 994 return {
995 995 'branches': [self.branch] if self.branch else [],
996 996 'bookmarks': getattr(self, 'bookmarks', []),
997 997 'tags': self.tags
998 998 }
999 999
1000 1000 @LazyProperty
1001 1001 def last(self):
1002 1002 """
1003 1003 ``True`` if this is the last commit in the repository, ``False``
1004 1004 otherwise; trying to access this attribute while there are no
1005 1005 commits raises `EmptyRepositoryError`
1006 1006 """
1007 1007 if self.repository is None:
1008 1008 raise CommitError("Cannot check if it's most recent commit")
1009 1009 return self.raw_id == self.repository.commit_ids[-1]
1010 1010
1011 1011 @LazyProperty
1012 1012 def parents(self):
1013 1013 """
1014 1014 Returns list of parent commits.
1015 1015 """
1016 1016 raise NotImplementedError
1017 1017
1018 1018 @LazyProperty
1019 1019 def first_parent(self):
1020 1020 """
1021 1021 Returns the first parent commit, or an ``EmptyCommit`` if there are no parents.
1022 1022 """
1023 1023 return self.parents[0] if self.parents else EmptyCommit()
1024 1024
1025 1025 @property
1026 1026 def merge(self):
1027 1027 """
1028 1028 Returns ``True`` if the commit is a merge (has more than one parent).
1029 1029 """
1030 1030 return len(self.parents) > 1
1031 1031
1032 1032 @LazyProperty
1033 1033 def children(self):
1034 1034 """
1035 1035 Returns list of child commits.
1036 1036 """
1037 1037 raise NotImplementedError
1038 1038
1039 1039 @LazyProperty
1040 1040 def id(self):
1041 1041 """
1042 1042 Returns string identifying this commit.
1043 1043 """
1044 1044 raise NotImplementedError
1045 1045
1046 1046 @LazyProperty
1047 1047 def raw_id(self):
1048 1048 """
1049 1049 Returns raw string identifying this commit.
1050 1050 """
1051 1051 raise NotImplementedError
1052 1052
1053 1053 @LazyProperty
1054 1054 def short_id(self):
1055 1055 """
1056 1056 Returns shortened version of ``raw_id`` attribute, as string,
1057 1057 identifying this commit, useful for presentation to users.
1058 1058 """
1059 1059 raise NotImplementedError
1060 1060
1061 1061 @LazyProperty
1062 1062 def idx(self):
1063 1063 """
1064 1064 Returns integer identifying this commit.
1065 1065 """
1066 1066 raise NotImplementedError
1067 1067
1068 1068 @LazyProperty
1069 1069 def committer(self):
1070 1070 """
1071 1071 Returns committer for this commit
1072 1072 """
1073 1073 raise NotImplementedError
1074 1074
1075 1075 @LazyProperty
1076 1076 def committer_name(self):
1077 1077 """
1078 1078 Returns committer name for this commit
1079 1079 """
1080 1080
1081 1081 return author_name(self.committer)
1082 1082
1083 1083 @LazyProperty
1084 1084 def committer_email(self):
1085 1085 """
1086 1086 Returns committer email address for this commit
1087 1087 """
1088 1088
1089 1089 return author_email(self.committer)
1090 1090
1091 1091 @LazyProperty
1092 1092 def author(self):
1093 1093 """
1094 1094 Returns author for this commit
1095 1095 """
1096 1096
1097 1097 raise NotImplementedError
1098 1098
1099 1099 @LazyProperty
1100 1100 def author_name(self):
1101 1101 """
1102 1102 Returns author name for this commit
1103 1103 """
1104 1104
1105 1105 return author_name(self.author)
1106 1106
1107 1107 @LazyProperty
1108 1108 def author_email(self):
1109 1109 """
1110 1110 Returns author email address for this commit
1111 1111 """
1112 1112
1113 1113 return author_email(self.author)
1114 1114
1115 1115 def get_file_mode(self, path: bytes):
1116 1116 """
1117 1117 Returns stat mode of the file at `path`.
1118 1118 """
1119 1119 raise NotImplementedError
1120 1120
1121 1121 def is_link(self, path):
1122 1122 """
1123 1123 Returns ``True`` if given `path` is a symlink
1124 1124 """
1125 1125 raise NotImplementedError
1126 1126
1127 1127 def is_node_binary(self, path):
1128 1128 """
1129 1129 Returns ``True`` if the given path is a binary file
1130 1130 """
1131 1131 raise NotImplementedError
1132 1132
1133 1133 def node_md5_hash(self, path):
1134 1134 """
1135 1135 Returns md5 hash of the node's data
1136 1136 """
1137 1137 raise NotImplementedError
1138 1138
1139 1139 def get_file_content(self, path) -> bytes:
1140 1140 """
1141 1141 Returns content of the file at the given `path`.
1142 1142 """
1143 1143 raise NotImplementedError
1144 1144
1145 1145 def get_file_content_streamed(self, path):
1146 1146 """
1147 1147 returns a streaming response from vcsserver with file content
1148 1148 """
1149 1149 raise NotImplementedError
1150 1150
1151 1151 def get_file_size(self, path):
1152 1152 """
1153 1153 Returns size of the file at the given `path`.
1154 1154 """
1155 1155 raise NotImplementedError
1156 1156
1157 1157 def get_path_commit(self, path, pre_load=None):
1158 1158 """
1159 1159 Returns last commit of the file at the given `path`.
1160 1160
1161 1161 :param pre_load: Optional. List of commit attributes to load.
1162 1162 """
1163 1163 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1164 1164 if not commits:
1165 1165 raise RepositoryError(
1166 1166 'Failed to fetch history for path {}. '
1167 1167 'Please check if such path exists in your repository'.format(
1168 1168 path))
1169 1169 return commits[0]
1170 1170
1171 1171 def get_path_history(self, path, limit=None, pre_load=None):
1172 1172 """
1173 1173 Returns history of file as reversed list of :class:`BaseCommit`
1174 1174 objects for which file at given `path` has been modified.
1175 1175
1176 1176 :param limit: Optional. Allows to limit the size of the returned
1177 1177 history. This is intended as a hint to the underlying backend, so
1178 1178 that it can apply optimizations depending on the limit.
1179 1179 :param pre_load: Optional. List of commit attributes to load.
1180 1180 """
1181 1181 raise NotImplementedError
1182 1182
1183 1183 def get_file_annotate(self, path, pre_load=None):
1184 1184 """
1185 1185 Returns a generator of four-element tuples of
1186 1186 lineno, sha, commit lazy loader and line
1187 1187
1188 1188 :param pre_load: Optional. List of commit attributes to load.
1189 1189 """
1190 1190 raise NotImplementedError
1191 1191
1192 1192 def get_nodes(self, path, pre_load=None):
1193 1193 """
1194 1194 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1195 1195 state of commit at the given ``path``.
1196 1196
1197 1197 :raises ``CommitError``: if node at the given ``path`` is not
1198 1198 instance of ``DirNode``
1199 1199 """
1200 1200 raise NotImplementedError
1201 1201
1202 1202 def get_node(self, path):
1203 1203 """
1204 1204 Returns ``Node`` object from the given ``path``.
1205 1205
1206 1206 :raises ``NodeDoesNotExistError``: if there is no node at the given
1207 1207 ``path``
1208 1208 """
1209 1209 raise NotImplementedError
1210 1210
1211 1211 def get_largefile_node(self, path):
1212 1212 """
1213 1213 Returns the path to a largefile from Mercurial/Git-lfs storage,
1214 1214 or None if it's not a largefile node.
1215 1215 """
1216 1216 return None
1217 1217
1218 1218 def archive_repo(self, archive_name_key, kind='tgz', subrepos=None,
1219 1219 archive_dir_name=None, write_metadata=False, mtime=None,
1220 1220 archive_at_path='/', cache_config=None):
1221 1221 """
1222 1222 Creates an archive containing the contents of the repository.
1223 1223
1224 1224 :param archive_name_key: unique key under which this archive should be generated
1225 1225 :param kind: one of the following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1226 1226 :param archive_dir_name: name of root directory in archive.
1227 1227 Default is repository name and commit's short_id joined with dash:
1228 1228 ``"{repo_name}-{short_id}"``.
1229 1229 :param write_metadata: write a metadata file into archive.
1230 1230 :param mtime: custom modification time for archive creation, defaults
1231 1231 to the commit date if not given.
1232 1232 :param archive_at_path: pack files at this path (default '/')
1233 1233 :param cache_config: config spec to send to vcsserver to configure the backend to store files
1234 1234
1235 1235 :raise VCSError: If prefix has a problem.
1236 1236 """
1237 1237 cache_config = cache_config or {}
1238 1238 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1239 1239 if kind not in allowed_kinds:
1240 1240 raise ImproperArchiveTypeError(
1241 1241 f'Archive kind ({kind}) not supported use one of {allowed_kinds}')
1242 1242
1243 1243 archive_dir_name = self._validate_archive_prefix(archive_dir_name)
1244 1244 mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
1245 1245 commit_id = self.raw_id
1246 1246
1247 1247 return self.repository._remote.archive_repo(
1248 1248 archive_name_key, kind, mtime, archive_at_path,
1249 1249 archive_dir_name, commit_id, cache_config)
1250 1250
1251 1251 def _validate_archive_prefix(self, archive_dir_name):
1252 1252 if archive_dir_name is None:
1253 1253 archive_dir_name = self._ARCHIVE_PREFIX_TEMPLATE.format(
1254 1254 repo_name=safe_str(self.repository.name),
1255 1255 short_id=self.short_id)
1256 1256 elif not isinstance(archive_dir_name, str):
1257 1257 raise ValueError(f"archive_dir_name is not str object but: {type(archive_dir_name)}")
1258 1258 elif archive_dir_name.startswith('/'):
1259 1259 raise VCSError("Prefix cannot start with leading slash")
1260 1260 elif archive_dir_name.strip() == '':
1261 1261 raise VCSError("Prefix cannot be empty")
1262 1262 elif not archive_dir_name.isascii():
1263 1263 raise VCSError("Prefix cannot contain non ascii characters")
1264 1264 return archive_dir_name
1265 1265
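# Usage sketch (hypothetical key and commit): create a tarball of this
# commit's tree, letting the default '{repo_name}-{short_id}' prefix apply.
#
#   commit.archive_repo('cache-key-123', kind='tgz',
#                       archive_at_path='/', cache_config={})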
1266 1266 @LazyProperty
1267 1267 def root(self):
1268 1268 """
1269 1269 Returns ``RootNode`` object for this commit.
1270 1270 """
1271 1271 return self.get_node('')
1272 1272
1273 1273 def next(self, branch=None):
1274 1274 """
1275 1275 Returns next commit from current; if branch is given it will return
1276 1276 the next commit belonging to this branch
1277 1277
1278 1278 :param branch: show commits within the given named branch
1279 1279 """
1280 1280 indexes = range(self.idx + 1, self.repository.count())
1281 1281 return self._find_next(indexes, branch)
1282 1282
1283 1283 def prev(self, branch=None):
1284 1284 """
1285 1285 Returns previous commit from current; if branch is given it will
1286 1286 return the previous commit belonging to this branch
1287 1287
1288 1288 :param branch: show commit within the given named branch
1289 1289 """
1290 1290 indexes = range(self.idx - 1, -1, -1)
1291 1291 return self._find_next(indexes, branch)
1292 1292
1293 1293 def _find_next(self, indexes, branch=None):
1294 1294 if branch and self.branch != branch:
1295 1295 raise VCSError('Branch option used on commit not belonging '
1296 1296 'to that branch')
1297 1297
1298 1298 for next_idx in indexes:
1299 1299 commit = self.repository.get_commit(commit_idx=next_idx)
1300 1300 if branch and branch != commit.branch:
1301 1301 continue
1302 1302 return commit
1303 1303 raise CommitDoesNotExistError
1304 1304
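# Usage sketch for the helpers above: walking along a named branch;
# `CommitDoesNotExistError` is raised once either end is reached.
#
#   newer = commit.next(branch=commit.branch)
#   older = commit.prev(branch=commit.branch)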
1305 1305 def diff(self, ignore_whitespace=True, context=3):
1306 1306 """
1307 1307 Returns a `Diff` object representing the change made by this commit.
1308 1308 """
1309 1309 parent = self.first_parent
1310 1310 diff = self.repository.get_diff(
1311 1311 parent, self,
1312 1312 ignore_whitespace=ignore_whitespace,
1313 1313 context=context)
1314 1314 return diff
1315 1315
1316 1316 @LazyProperty
1317 1317 def added(self):
1318 1318 """
1319 1319 Returns list of added ``FileNode`` objects.
1320 1320 """
1321 1321 raise NotImplementedError
1322 1322
1323 1323 @LazyProperty
1324 1324 def changed(self):
1325 1325 """
1326 1326 Returns list of modified ``FileNode`` objects.
1327 1327 """
1328 1328 raise NotImplementedError
1329 1329
1330 1330 @LazyProperty
1331 1331 def removed(self):
1332 1332 """
1333 1333 Returns list of removed ``FileNode`` objects.
1334 1334 """
1335 1335 raise NotImplementedError
1336 1336
1337 1337 @LazyProperty
1338 1338 def size(self):
1339 1339 """
1340 1340 Returns total number of bytes from contents of all filenodes.
1341 1341 """
1342 1342 return sum(node.size for node in self.get_filenodes_generator())
1343 1343
1344 1344 def walk(self, topurl=''):
1345 1345 """
1346 1346 Similar to the os.walk method. Instead of a filesystem, it walks through
1347 1347 commit starting at given ``topurl``. Returns generator of tuples
1348 1348 (top_node, dirnodes, filenodes).
1349 1349 """
1350 1350 from rhodecode.lib.vcs.nodes import DirNode
1351 1351
1352 1352 if isinstance(topurl, DirNode):
1353 1353 top_node = topurl
1354 1354 else:
1355 1355 top_node = self.get_node(topurl)
1356 1356
1357 1357 has_default_pre_load = False
1358 1358 if isinstance(top_node, DirNode):
1359 1359 # used to inject as we walk same defaults as given top_node
1360 1360 default_pre_load = top_node.default_pre_load
1361 1361 has_default_pre_load = True
1362 1362
1363 1363 if not top_node.is_dir():
1364 1364 return
1365 1365 yield top_node, top_node.dirs, top_node.files
1366 1366 for dir_node in top_node.dirs:
1367 1367 if has_default_pre_load:
1368 1368 dir_node.default_pre_load = default_pre_load
1369 1369 yield from self.walk(dir_node)
1370 1370
1371 1371 def get_filenodes_generator(self):
1372 1372 """
1373 1373 Returns generator that yields *all* file nodes.
1374 1374 """
1375 1375 for topnode, dirs, files in self.walk():
1376 1376 yield from files
1377 1377
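# Usage sketch: collect all file paths of a commit via `walk`, which yields
# (top_node, dirnodes, filenodes) tuples much like `os.walk`.
#
#   paths = [f.path
#            for _top, _dirs, files in commit.walk('')
#            for f in files]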
1378 1378 #
1379 1379 # Utilities for sub classes to support consistent behavior
1380 1380 #
1381 1381
1382 1382 def no_node_at_path(self, path):
1383 1383 return NodeDoesNotExistError(
1384 1384 f"There is no file nor directory at the given path: "
1385 1385 f"`{safe_str(path)}` at commit {self.short_id}")
1386 1386
1387 1387 def _fix_path(self, path: str) -> str:
1388 1388 """
1389 1389 Paths are stored without a trailing slash, so we need to get rid of it if
1390 1390 needed.
1391 1391 """
1392 1392 return safe_str(path).rstrip('/')
1393 1393
1394 1394 #
1395 1395 # Deprecated API based on changesets
1396 1396 #
1397 1397
1398 1398 @property
1399 1399 def revision(self):
1400 1400 warnings.warn("Use idx instead", DeprecationWarning)
1401 1401 return self.idx
1402 1402
1403 1403 @revision.setter
1404 1404 def revision(self, value):
1405 1405 warnings.warn("Use idx instead", DeprecationWarning)
1406 1406 self.idx = value
1407 1407
1408 1408 def get_file_changeset(self, path):
1409 1409 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1410 1410 return self.get_path_commit(path)
1411 1411
1412 1412
1413 1413 class BaseChangesetClass(type):
1414 1414
1415 1415 def __instancecheck__(self, instance):
1416 1416 return isinstance(instance, BaseCommit)
1417 1417
1418 1418
1419 1419 class BaseChangeset(BaseCommit, metaclass=BaseChangesetClass):
1420 1420
1421 1421 def __new__(cls, *args, **kwargs):
1422 1422 warnings.warn(
1423 1423 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1424 1424 return super().__new__(cls, *args, **kwargs)
1425 1425
1426 1426
1427 1427 class BaseInMemoryCommit(object):
1428 1428 """
1429 1429 Represents differences between repository's state (most recent head) and
1430 1430 changes made *in place*.
1431 1431
1432 1432 **Attributes**
1433 1433
1434 1434 ``repository``
1435 1435 repository object for this in-memory-commit
1436 1436
1437 1437 ``added``
1438 1438 list of ``FileNode`` objects marked as *added*
1439 1439
1440 1440 ``changed``
1441 1441 list of ``FileNode`` objects marked as *changed*
1442 1442
1443 1443 ``removed``
1444 1444 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1445 1445 *removed*
1446 1446
1447 1447 ``parents``
1448 1448 list of :class:`BaseCommit` instances representing parents of
1449 1449 in-memory commit. Should always be 2-element sequence.
1450 1450
1451 1451 """
1452 1452
1453 1453 def __init__(self, repository):
1454 1454 self.repository = repository
1455 1455 self.added = []
1456 1456 self.changed = []
1457 1457 self.removed = []
1458 1458 self.parents = []
1459 1459
1460 1460 def add(self, *filenodes):
1461 1461 """
1462 1462 Marks given ``FileNode`` objects as *to be committed*.
1463 1463
1464 1464 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1465 1465 latest commit
1466 1466 :raises ``NodeAlreadyAddedError``: if node with same path is already
1467 1467 marked as *added*
1468 1468 """
1469 1469 # Check if not already marked as *added* first
1470 1470 for node in filenodes:
1471 1471 if node.path in (n.path for n in self.added):
1472 1472 raise NodeAlreadyAddedError(
1473 1473 "Such FileNode %s is already marked for addition"
1474 1474 % node.path)
1475 1475 for node in filenodes:
1476 1476 self.added.append(node)
1477 1477
1478 1478 def change(self, *filenodes):
1479 1479 """
1480 1480 Marks given ``FileNode`` objects to be *changed* in next commit.
1481 1481
1482 1482 :raises ``EmptyRepositoryError``: if there are no commits yet
1483 1483 :raises ``NodeAlreadyChangedError``: if node with same path is already
1484 1484 marked to be *changed*
1485 1485 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1486 1486 marked to be *removed*
1487 1487 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1488 1488 commit
1489 1489 :raises ``NodeNotChangedError``: if node hasn't really been changed
1490 1490 """
1491 1491 for node in filenodes:
1492 1492 if node.path in (n.path for n in self.removed):
1493 1493 raise NodeAlreadyRemovedError(
1494 1494 "Node at %s is already marked as removed" % node.path)
1495 1495 try:
1496 1496 self.repository.get_commit()
1497 1497 except EmptyRepositoryError:
1498 1498 raise EmptyRepositoryError(
1499 1499 "Nothing to change - try to *add* new nodes rather than "
1500 1500 "changing them")
1501 1501 for node in filenodes:
1502 1502 if node.path in (n.path for n in self.changed):
1503 1503 raise NodeAlreadyChangedError(
1504 1504 "Node at '%s' is already marked as changed" % node.path)
1505 1505 self.changed.append(node)
1506 1506
1507 1507 def remove(self, *filenodes):
1508 1508 """
1509 1509 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1510 1510 *removed* in next commit.
1511 1511
1512 1512 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1513 1513 be *removed*
1514 1514 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1515 1515 be *changed*
1516 1516 """
1517 1517 for node in filenodes:
1518 1518 if node.path in (n.path for n in self.removed):
1519 1519 raise NodeAlreadyRemovedError(
1520 1520 "Node is already marked to for removal at %s" % node.path)
1521 1521 if node.path in (n.path for n in self.changed):
1522 1522 raise NodeAlreadyChangedError(
1523 1523 "Node is already marked to be changed at %s" % node.path)
1524 1524 # We only mark node as *removed* - real removal is done by
1525 1525 # commit method
1526 1526 self.removed.append(node)
1527 1527
1528 1528 def reset(self):
1529 1529 """
1530 1530 Resets this instance to initial state (cleans ``added``, ``changed``
1531 1531 and ``removed`` lists).
1532 1532 """
1533 1533 self.added = []
1534 1534 self.changed = []
1535 1535 self.removed = []
1536 1536 self.parents = []
1537 1537
1538 1538 def get_ipaths(self):
1539 1539 """
1540 1540 Returns generator of paths from nodes marked as added, changed or
1541 1541 removed.
1542 1542 """
1543 1543 for node in itertools.chain(self.added, self.changed, self.removed):
1544 1544 yield node.path
1545 1545
1546 1546 def get_paths(self):
1547 1547 """
1548 1548 Returns list of paths from nodes marked as added, changed or removed.
1549 1549 """
1550 1550 return list(self.get_ipaths())
1551 1551
1552 1552 def check_integrity(self, parents=None):
1553 1553 """
1554 1554 Checks in-memory commit's integrity. Also, sets parents if not
1555 1555 already set.
1556 1556
1557 1557 :raises CommitError: if any error occurs (i.e.
1558 1558 ``NodeDoesNotExistError``).
1559 1559 """
1560 1560 if not self.parents:
1561 1561 parents = parents or []
1562 1562 if len(parents) == 0:
1563 1563 try:
1564 1564 parents = [self.repository.get_commit(), None]
1565 1565 except EmptyRepositoryError:
1566 1566 parents = [None, None]
1567 1567 elif len(parents) == 1:
1568 1568 parents += [None]
1569 1569 self.parents = parents
1570 1570
1571 1571 # Local parents, only if not None
1572 1572 parents = [p for p in self.parents if p]
1573 1573
1574 1574 # Check nodes marked as added
1575 1575 for p in parents:
1576 1576 for node in self.added:
1577 1577 try:
1578 1578 p.get_node(node.path)
1579 1579 except NodeDoesNotExistError:
1580 1580 pass
1581 1581 else:
1582 1582 raise NodeAlreadyExistsError(
1583 1583 f"Node `{node.path}` already exists at {p}")
1584 1584
1585 1585 # Check nodes marked as changed
1586 1586 missing = set(self.changed)
1587 1587 not_changed = set(self.changed)
1588 1588 if self.changed and not parents:
1589 1589 raise NodeDoesNotExistError(str(self.changed[0].path))
1590 1590 for p in parents:
1591 1591 for node in self.changed:
1592 1592 try:
1593 1593 old = p.get_node(node.path)
1594 1594 missing.remove(node)
1595 1595 # if content actually changed, remove node from not_changed
1596 1596 if old.content != node.content:
1597 1597 not_changed.remove(node)
1598 1598 except NodeDoesNotExistError:
1599 1599 pass
1600 1600 if self.changed and missing:
1601 1601 raise NodeDoesNotExistError(
1602 "Node `%s` marked as modified but missing in parents: %s"
1603 % (node.path, parents))
1602 f"Node `{node.path}` marked as modified but missing in parents: {parents}")
1604 1603
1605 1604 if self.changed and not_changed:
1606 1605 raise NodeNotChangedError(
1607 1606 "Node `%s` wasn't actually changed (parents: %s)"
1608 1607 % (not_changed.pop().path, parents))
1609 1608
1610 1609 # Check nodes marked as removed
1611 1610 if self.removed and not parents:
1612 1611 raise NodeDoesNotExistError(
1613 1612 "Cannot remove node at %s as there "
1614 1613 "were no parents specified" % self.removed[0].path)
1615 1614 really_removed = set()
1616 1615 for p in parents:
1617 1616 for node in self.removed:
1618 1617 try:
1619 1618 p.get_node(node.path)
1620 1619 really_removed.add(node)
1621 1620 except CommitError:
1622 1621 pass
1623 1622 not_removed = set(self.removed) - really_removed
1624 1623 if not_removed:
1625 1624 # TODO: johbo: This code branch does not seem to be covered
1626 1625 raise NodeDoesNotExistError(
1627 1626 "Cannot remove node at %s from "
1628 1627 "following parents: %s" % (not_removed, parents))
1629 1628
1630 1629 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1631 1630 """
1632 1631 Performs in-memory commit (doesn't check workdir in any way) and
1633 1632 returns newly created :class:`BaseCommit`. Updates repository's
1634 1633 attribute `commits`.
1635 1634
1636 1635 .. note::
1637 1636
1638 1637 When overriding this method, each backend should call
1639 1638 ``self.check_integrity(parents)`` first.
1640 1639
1641 1640 :param message: message of the commit
1642 1641 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1643 1642 :param parents: single parent or sequence of parents from which commit
1644 1643 would be derived
1645 1644 :param date: ``datetime.datetime`` instance. Defaults to
1646 1645 ``datetime.datetime.now()``.
1647 1646 :param branch: branch name, as string. If none given, the backend's
1648 1647 default branch will be used.
1649 1648
1650 1649 :raises ``CommitError``: if any error occurs while committing
1651 1650 """
1652 1651 raise NotImplementedError
1653 1652
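# A minimal usage sketch of the in-memory commit flow (illustrative only;
# assumes a concrete backend `repo` that exposes `in_memory_commit`, as the
# Git and Mercurial backends do):
#
#   >>> imc = repo.in_memory_commit
#   >>> imc.add(FileNode(b'docs/index.rst', content=b'hello'))
#   >>> new_commit = imc.commit(
#   ...     message='Add docs index',
#   ...     author='Joe Doe <joe.doe@example.com>')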
1654 1653
1655 1654 class BaseInMemoryChangesetClass(type):
1656 1655
1657 1656 def __instancecheck__(self, instance):
1658 1657 return isinstance(instance, BaseInMemoryCommit)
1659 1658
1660 1659
1661 1660 class BaseInMemoryChangeset(BaseInMemoryCommit, metaclass=BaseInMemoryChangesetClass):
1662 1661
1663 1662 def __new__(cls, *args, **kwargs):
1664 1663 warnings.warn(
1665 1664 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1666 1665 return super().__new__(cls, *args, **kwargs)
1667 1666
1668 1667
1669 1668 class EmptyCommit(BaseCommit):
1670 1669 """
1671 1670 A dummy empty commit. It's possible to pass a hash when creating
1672 1671 an EmptyCommit.
1673 1672 """
1674 1673
1675 1674 def __init__(
1676 1675 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1677 1676 message='', author='', date=None):
1678 1677 self._empty_commit_id = commit_id
1679 1678 # TODO: johbo: Solve idx parameter, default value does not make
1680 1679 # too much sense
1681 1680 self.idx = idx
1682 1681 self.message = message
1683 1682 self.author = author
1684 1683 self.date = date or datetime.datetime.fromtimestamp(0)
1685 1684 self.repository = repo
1686 1685 self.alias = alias
1687 1686
1688 1687 @LazyProperty
1689 1688 def raw_id(self):
1690 1689 """
1691 1690 Returns raw string identifying this commit, useful for web
1692 1691 representation.
1693 1692 """
1694 1693
1695 1694 return self._empty_commit_id
1696 1695
1697 1696 @LazyProperty
1698 1697 def branch(self):
1699 1698 if self.alias:
1700 1699 from rhodecode.lib.vcs.backends import get_backend
1701 1700 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1702 1701
1703 1702 @LazyProperty
1704 1703 def short_id(self):
1705 1704 return self.raw_id[:12]
1706 1705
1707 1706 @LazyProperty
1708 1707 def id(self):
1709 1708 return self.raw_id
1710 1709
1711 1710 def get_path_commit(self, path, pre_load=None):
1712 1711 return self
1713 1712
1714 1713 def get_file_content(self, path) -> bytes:
1715 1714 return b''
1716 1715
1717 1716 def get_file_content_streamed(self, path):
1718 1717 yield self.get_file_content(path)
1719 1718
1720 1719 def get_file_size(self, path):
1721 1720 return 0
1722 1721
1723 1722
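# Illustrative behavior of EmptyCommit, following the defaults above:
#
#   >>> empty = EmptyCommit()
#   >>> empty.raw_id
#   '0000000000000000000000000000000000000000'
#   >>> empty.short_id
#   '000000000000'
#   >>> empty.get_file_content('any/path')
#   b''
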
1724 1723 class EmptyChangesetClass(type):
1725 1724
1726 1725 def __instancecheck__(self, instance):
1727 1726 return isinstance(instance, EmptyCommit)
1728 1727
1729 1728
1730 1729 class EmptyChangeset(EmptyCommit, metaclass=EmptyChangesetClass):
1731 1730
1732 1731 def __new__(cls, *args, **kwargs):
1733 1732 warnings.warn(
1734 1733 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1735 1734 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1736 1735
1737 1736 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1738 1737 alias=None, revision=-1, message='', author='', date=None):
1739 1738 if requested_revision is not None:
1740 1739 warnings.warn(
1741 1740 "Parameter requested_revision not supported anymore",
1742 1741 DeprecationWarning)
1743 1742 super().__init__(
1744 1743 commit_id=cs, repo=repo, alias=alias, idx=revision,
1745 1744 message=message, author=author, date=date)
1746 1745
1747 1746 @property
1748 1747 def revision(self):
1749 1748 warnings.warn("Use idx instead", DeprecationWarning)
1750 1749 return self.idx
1751 1750
1752 1751 @revision.setter
1753 1752 def revision(self, value):
1754 1753 warnings.warn("Use idx instead", DeprecationWarning)
1755 1754 self.idx = value
1756 1755
1757 1756
1758 1757 class EmptyRepository(BaseRepository):
1759 1758 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1760 1759 pass
1761 1760
1762 1761 def get_diff(self, *args, **kwargs):
1763 1762 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1764 1763 return GitDiff(b'')
1765 1764
1766 1765
1767 1766 class CollectionGenerator(object):
1768 1767
1769 1768 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1770 1769 self.repo = repo
1771 1770 self.commit_ids = commit_ids
1772 1771 self.collection_size = collection_size
1773 1772 self.pre_load = pre_load
1774 1773 self.translate_tag = translate_tag
1775 1774
1776 1775 def __len__(self):
1777 1776 if self.collection_size is not None:
1778 1777 return self.collection_size
1779 1778 return self.commit_ids.__len__()
1780 1779
1781 1780 def __iter__(self):
1782 1781 for commit_id in self.commit_ids:
1783 1782 # TODO: johbo: Mercurial passes in commit indices or commit ids
1784 1783 yield self._commit_factory(commit_id)
1785 1784
1786 1785 def _commit_factory(self, commit_id):
1787 1786 """
1788 1787 Allows backends to override the way commits are generated.
1789 1788 """
1790 1789 return self.repo.get_commit(
1791 1790 commit_id=commit_id, pre_load=self.pre_load,
1792 1791 translate_tag=self.translate_tag)
1793 1792
1794 1793 def __getitem__(self, key):
1795 1794 """Return either a single element by index, or a sliced collection."""
1796 1795
1797 1796 if isinstance(key, slice):
1798 1797 commit_ids = self.commit_ids[key.start:key.stop]
1799 1798
1800 1799 else:
1801 1800 # single item
1802 1801 commit_ids = self.commit_ids[key]
1803 1802
1804 1803 return self.__class__(
1805 1804 self.repo, commit_ids, pre_load=self.pre_load,
1806 1805 translate_tag=self.translate_tag)
1807 1806
1808 1807 def __repr__(self):
1809 1808 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1810 1809
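# Slicing sketch (illustrative): indexing or slicing returns a new, still
# lazy CollectionGenerator over the selected commit ids; commits are only
# materialized while iterating:
#
#   >>> commits = CollectionGenerator(repo, commit_ids)
#   >>> for commit in commits[:5]:
#   ...     print(commit.short_id)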
1811 1810
1812 1811 class Config(object):
1813 1812 """
1814 1813 Represents the configuration for a repository.
1815 1814
1816 1815 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1817 1816 standard library. It implements only the needed subset.
1818 1817 """
1819 1818
1820 1819 def __init__(self):
1821 1820 self._values = {}
1822 1821
1823 1822 def copy(self):
1824 1823 clone = Config()
1825 1824 for section, values in self._values.items():
1826 1825 clone._values[section] = values.copy()
1827 1826 return clone
1828 1827
1829 1828 def __repr__(self):
1830 1829 return '<Config({} sections) at {}>'.format(
1831 1830 len(self._values), hex(id(self)))
1832 1831
1833 1832 def items(self, section):
1834 1833 return self._values.get(section, {}).items()
1835 1834
1836 1835 def get(self, section, option):
1837 1836 return self._values.get(section, {}).get(option)
1838 1837
1839 1838 def set(self, section, option, value):
1840 1839 section_values = self._values.setdefault(section, {})
1841 1840 section_values[option] = value
1842 1841
1843 1842 def clear_section(self, section):
1844 1843 self._values[section] = {}
1845 1844
1846 1845 def serialize(self):
1847 1846 """
1848 1847 Creates a list of three tuples (section, key, value) representing
1849 1848 this config object.
1850 1849 """
1851 1850 items = []
1852 1851 for section in self._values:
1853 1852 for option, value in self._values[section].items():
1854 1853 items.append(
1855 1854 (safe_str(section), safe_str(option), safe_str(value)))
1856 1855 return items
1857 1856
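# A minimal usage sketch of the Config subset (illustrative values):
#
#   >>> config = Config()
#   >>> config.set('ui', 'username', 'Joe Doe <joe.doe@example.com>')
#   >>> config.get('ui', 'username')
#   'Joe Doe <joe.doe@example.com>'
#   >>> config.serialize()
#   [('ui', 'username', 'Joe Doe <joe.doe@example.com>')]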
1858 1857
1859 1858 class Diff(object):
1860 1859 """
1861 1860 Represents a diff result from a repository backend.
1862 1861
1863 1862 Subclasses have to provide a backend specific value for
1864 1863 :attr:`_header_re` and :attr:`_meta_re`.
1865 1864 """
1866 1865 _meta_re = None
1867 1866 _header_re: re.Pattern = re.compile(br"")
1868 1867
1869 1868 def __init__(self, raw_diff: bytes):
1870 1869 if not isinstance(raw_diff, bytes):
1871 1870 raise Exception(f'raw_diff must be bytes - got {type(raw_diff)}')
1872 1871
1873 1872 self.raw = memoryview(raw_diff)
1874 1873
1875 1874 def get_header_re(self):
1876 1875 return self._header_re
1877 1876
1878 1877 def chunks(self):
1879 1878 """
1880 1879 split the diff into chunks of separate "diff --git a/file b/file" sections;
1881 1880 to make the chunks consistent we must prepend the raw diff with \n, and make
1882 1881 sure we can detect the last chunk, as it also has a special rule
1883 1882 """
1884 1883
1885 1884 diff_parts = (b'\n' + bytes(self.raw)).split(b'\ndiff --git')
1886 1885
1887 1886 chunks = diff_parts[1:]
1888 1887 total_chunks = len(chunks)
1889 1888
1890 1889 def diff_iter(_chunks):
1891 1890 for cur_chunk, chunk in enumerate(_chunks, start=1):
1892 1891 yield DiffChunk(chunk, self, cur_chunk == total_chunks)
1893 1892 return diff_iter(chunks)
1894 1893
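# Sketch of the splitting rule above (illustrative bytes, not a real diff):
#
#   >>> raw = b'diff --git a/f b/f\nx\ndiff --git a/g b/g\ny'
#   >>> (b'\n' + raw).split(b'\ndiff --git')
#   [b'', b' a/f b/f\nx', b' a/g b/g\ny']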
1895 1894
1896 1895 class DiffChunk(object):
1897 1896
1898 1897 def __init__(self, chunk: bytes, diff_obj: Diff, is_last_chunk: bool):
1899 1898 self.diff_obj = diff_obj
1900 1899
1901 1900 # since we split by \ndiff --git, that part is lost from the original diff;
1902 1901 # we need to re-apply it at the end, EXCEPT when it's the last chunk
1903 1902 if not is_last_chunk:
1904 1903 chunk += b'\n'
1905 1904 header_re = self.diff_obj.get_header_re()
1906 1905 match = header_re.match(chunk)
1907 1906 self.header = match.groupdict()
1908 1907 self.diff = chunk[match.end():]
1909 1908 self.raw = chunk
1910 1909
1911 1910 @property
1912 1911 def header_as_str(self):
1913 1912 if self.header:
1914 1913 def safe_str_on_bytes(val):
1915 1914 if isinstance(val, bytes):
1916 1915 return safe_str(val)
1917 1916 return val
1918 1917 return {safe_str(k): safe_str_on_bytes(v) for k, v in self.header.items()}
1919 1918
1920 1919 def __repr__(self):
1921 1920 return f'DiffChunk({self.header_as_str})'
1922 1921
1923 1922
1924 1923 class BasePathPermissionChecker(object):
1925 1924
1926 1925 @staticmethod
1927 1926 def create_from_patterns(includes, excludes):
1928 1927 if includes and '*' in includes and not excludes:
1929 1928 return AllPathPermissionChecker()
1930 1929 elif excludes and '*' in excludes:
1931 1930 return NonePathPermissionChecker()
1932 1931 else:
1933 1932 return PatternPathPermissionChecker(includes, excludes)
1934 1933
1935 1934 @property
1936 1935 def has_full_access(self):
1937 1936 raise NotImplementedError()
1938 1937
1939 1938 def has_access(self, path):
1940 1939 raise NotImplementedError()
1941 1940
1942 1941
1943 1942 class AllPathPermissionChecker(BasePathPermissionChecker):
1944 1943
1945 1944 @property
1946 1945 def has_full_access(self):
1947 1946 return True
1948 1947
1949 1948 def has_access(self, path):
1950 1949 return True
1951 1950
1952 1951
1953 1952 class NonePathPermissionChecker(BasePathPermissionChecker):
1954 1953
1955 1954 @property
1956 1955 def has_full_access(self):
1957 1956 return False
1958 1957
1959 1958 def has_access(self, path):
1960 1959 return False
1961 1960
1962 1961
1963 1962 class PatternPathPermissionChecker(BasePathPermissionChecker):
1964 1963
1965 1964 def __init__(self, includes, excludes):
1966 1965 self.includes = includes
1967 1966 self.excludes = excludes
1968 1967 self.includes_re = [] if not includes else [
1969 1968 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1970 1969 self.excludes_re = [] if not excludes else [
1971 1970 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1972 1971
1973 1972 @property
1974 1973 def has_full_access(self):
1975 1974 return '*' in self.includes and not self.excludes
1976 1975
1977 1976 def has_access(self, path):
1978 1977 for regex in self.excludes_re:
1979 1978 if regex.match(path):
1980 1979 return False
1981 1980 for regex in self.includes_re:
1982 1981 if regex.match(path):
1983 1982 return True
1984 1983 return False
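
# A minimal sketch of the pattern dispatch above (fnmatch-style globs):
#
#   >>> checker = BasePathPermissionChecker.create_from_patterns(
#   ...     includes=['docs/*'], excludes=['docs/secret/*'])
#   >>> checker.has_access('docs/index.rst')
#   True
#   >>> checker.has_access('docs/secret/key.txt')
#   False
#   >>> BasePathPermissionChecker.create_from_patterns(['*'], None).has_full_access
#   True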
@@ -1,2392 +1,2390 b''
1 1 # Copyright (C) 2012-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 """
21 21 pull request model for RhodeCode
22 22 """
23 23
24 24 import logging
25 25 import os
26 26
27 27 import datetime
28 28 import urllib.request
29 29 import urllib.parse
30 30 import urllib.error
31 31 import collections
32 32
33 33 import dataclasses
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode.lib.vcs.nodes import FileNode
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from collections import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.ext_json import sjson as json
43 43 from rhodecode.lib.markup_renderer import (
44 44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 45 from rhodecode.lib.hash_utils import md5_safe
46 46 from rhodecode.lib.str_utils import safe_str
47 47 from rhodecode.lib.utils2 import AttributeDict, get_current_rhodecode_user
48 48 from rhodecode.lib.vcs.backends.base import (
49 49 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
50 50 TargetRefMissing, SourceRefMissing)
51 51 from rhodecode.lib.vcs.conf import settings as vcs_settings
52 52 from rhodecode.lib.vcs.exceptions import (
53 53 CommitDoesNotExistError, EmptyRepositoryError)
54 54 from rhodecode.model import BaseModel
55 55 from rhodecode.model.changeset_status import ChangesetStatusModel
56 56 from rhodecode.model.comment import CommentsModel
57 57 from rhodecode.model.db import (
58 58 aliased, null, lazyload, and_, or_, select, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
59 59 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
60 60 from rhodecode.model.meta import Session
61 61 from rhodecode.model.notification import NotificationModel, \
62 62 EmailNotificationModel
63 63 from rhodecode.model.scm import ScmModel
64 64 from rhodecode.model.settings import VcsSettingsModel
65 65
66 66
67 67 log = logging.getLogger(__name__)
68 68
69 69
70 70 # Data structure to hold the response data when updating commits during a pull
71 71 # request update.
72 72 class UpdateResponse(object):
73 73
74 74 def __init__(self, executed, reason, new, old, common_ancestor_id,
75 75 commit_changes, source_changed, target_changed):
76 76
77 77 self.executed = executed
78 78 self.reason = reason
79 79 self.new = new
80 80 self.old = old
81 81 self.common_ancestor_id = common_ancestor_id
82 82 self.changes = commit_changes
83 83 self.source_changed = source_changed
84 84 self.target_changed = target_changed
85 85
86 86
87 87 def get_diff_info(
88 88 source_repo, source_ref, target_repo, target_ref, get_authors=False,
89 89 get_commit_authors=True):
90 90 """
91 91 Calculates detailed diff information for use in previewing the creation of a pull request.
92 92 This is also used for the default reviewers logic.
93 93 """
94 94
95 95 source_scm = source_repo.scm_instance()
96 96 target_scm = target_repo.scm_instance()
97 97
98 98 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
99 99 if not ancestor_id:
100 100 raise ValueError(
101 101 'cannot calculate diff info without a common ancestor. '
102 102 'Make sure both repositories are related, and have a common forking commit.')
103 103
104 104 # the case here is that we want a simple diff without incoming commits,
105 105 # previewing what will be merged based only on commits in the source.
106 106 log.debug('Using ancestor %s as source_ref instead of %s',
107 107 ancestor_id, source_ref)
108 108
109 109 # source of changes now is the common ancestor
110 110 source_commit = source_scm.get_commit(commit_id=ancestor_id)
111 111 # the target commit becomes the source ref, as it is the last commit;
112 112 # for diff generation this logic gives the proper diff
113 113 target_commit = source_scm.get_commit(commit_id=source_ref)
114 114
115 115 vcs_diff = \
116 116 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
117 117 ignore_whitespace=False, context=3)
118 118
119 119 diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff',
120 120 diff_limit=0, file_limit=0, show_full_diff=True)
121 121
122 122 _parsed = diff_processor.prepare()
123 123
124 124 all_files = []
125 125 all_files_changes = []
126 126 changed_lines = {}
127 127 stats = [0, 0]
128 128 for f in _parsed:
129 129 all_files.append(f['filename'])
130 130 all_files_changes.append({
131 131 'filename': f['filename'],
132 132 'stats': f['stats']
133 133 })
134 134 stats[0] += f['stats']['added']
135 135 stats[1] += f['stats']['deleted']
136 136
137 137 changed_lines[f['filename']] = []
138 138 if len(f['chunks']) < 2:
139 139 continue
140 140 # the first chunk is "context" information
141 141 for chunks in f['chunks'][1:]:
142 142 for chunk in chunks['lines']:
143 143 if chunk['action'] not in ('del', 'mod'):
144 144 continue
145 145 changed_lines[f['filename']].append(chunk['old_lineno'])
146 146
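# illustrative shape at this point: stats == [total_added, total_deleted]
# and changed_lines == {'setup.py': [10, 11, 42]} - the old line numbers of
# 'del'/'mod' lines per file (values shown are made up)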
147 147 commit_authors = []
148 148 user_counts = {}
149 149 email_counts = {}
150 150 author_counts = {}
151 151 _commit_cache = {}
152 152
153 153 commits = []
154 154 if get_commit_authors:
155 155 log.debug('Obtaining commit authors from set of commits')
156 156 _compare_data = target_scm.compare(
157 157 target_ref, source_ref, source_scm, merge=True,
158 158 pre_load=["author", "date", "message"]
159 159 )
160 160
161 161 for commit in _compare_data:
162 162 # NOTE(marcink): we serialize here so we don't produce more vcsserver calls on data returned
163 163 # from this function, which is later serialized via JSON
164 164 serialized_commit = dict(
165 165 author=commit.author,
166 166 date=commit.date,
167 167 message=commit.message,
168 168 commit_id=commit.raw_id,
169 169 raw_id=commit.raw_id
170 170 )
171 171 commits.append(serialized_commit)
172 172 user = User.get_from_cs_author(serialized_commit['author'])
173 173 if user and user not in commit_authors:
174 174 commit_authors.append(user)
175 175
176 176 # lines
177 177 if get_authors:
178 178 log.debug('Calculating authors of changed files')
179 179 target_commit = source_repo.get_commit(ancestor_id)
180 180
181 181 for fname, lines in changed_lines.items():
182 182
183 183 try:
184 184 node = target_commit.get_node(fname, pre_load=["is_binary"])
185 185 except Exception:
186 186 log.exception("Failed to load node with path %s", fname)
187 187 continue
188 188
189 189 if not isinstance(node, FileNode):
190 190 continue
191 191
192 192 # NOTE(marcink): for binary node we don't do annotation, just use last author
193 193 if node.is_binary:
194 194 author = node.last_commit.author
195 195 email = node.last_commit.author_email
196 196
197 197 user = User.get_from_cs_author(author)
198 198 if user:
199 199 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
200 200 author_counts[author] = author_counts.get(author, 0) + 1
201 201 email_counts[email] = email_counts.get(email, 0) + 1
202 202
203 203 continue
204 204
205 205 for annotation in node.annotate:
206 206 line_no, commit_id, get_commit_func, line_text = annotation
207 207 if line_no in lines:
208 208 if commit_id not in _commit_cache:
209 209 _commit_cache[commit_id] = get_commit_func()
210 210 commit = _commit_cache[commit_id]
211 211 author = commit.author
212 212 email = commit.author_email
213 213 user = User.get_from_cs_author(author)
214 214 if user:
215 215 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
216 216 author_counts[author] = author_counts.get(author, 0) + 1
217 217 email_counts[email] = email_counts.get(email, 0) + 1
218 218
219 219 log.debug('Default reviewers processing finished')
220 220
221 221 return {
222 222 'commits': commits,
223 223 'files': all_files_changes,
224 224 'stats': stats,
225 225 'ancestor': ancestor_id,
226 226 # original authors of modified files
227 227 'original_authors': {
228 228 'users': user_counts,
229 229 'authors': author_counts,
230 230 'emails': email_counts,
231 231 },
232 232 'commit_authors': commit_authors
233 233 }
234 234
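# A minimal usage sketch (illustrative; source_repo/target_repo are Repository
# db objects and the refs must be resolvable commit ids for the backends):
#
#   >>> info = get_diff_info(source_repo, source_ref, target_repo, target_ref)
#   >>> info['stats']      # [total_added, total_deleted]
#   >>> info['ancestor']   # common-ancestor commit id used for the diff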
235 235
236 236 class PullRequestModel(BaseModel):
237 237
238 238 cls = PullRequest
239 239
240 240 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
241 241
242 242 UPDATE_STATUS_MESSAGES = {
243 243 UpdateFailureReason.NONE: lazy_ugettext(
244 244 'Pull request update successful.'),
245 245 UpdateFailureReason.UNKNOWN: lazy_ugettext(
246 246 'Pull request update failed because of an unknown error.'),
247 247 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
248 248 'No update needed because the source and target have not changed.'),
249 249 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
250 250 'Pull request cannot be updated because the reference type is '
251 251 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
252 252 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
253 253 'This pull request cannot be updated because the target '
254 254 'reference is missing.'),
255 255 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
256 256 'This pull request cannot be updated because the source '
257 257 'reference is missing.'),
258 258 }
259 259 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
260 260 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
261 261
262 262 def __get_pull_request(self, pull_request):
263 263 return self._get_instance((
264 264 PullRequest, PullRequestVersion), pull_request)
265 265
266 266 def _check_perms(self, perms, pull_request, user, api=False):
267 267 if not api:
268 268 return h.HasRepoPermissionAny(*perms)(
269 269 user=user, repo_name=pull_request.target_repo.repo_name)
270 270 else:
271 271 return h.HasRepoPermissionAnyApi(*perms)(
272 272 user=user, repo_name=pull_request.target_repo.repo_name)
273 273
274 274 def check_user_read(self, pull_request, user, api=False):
275 275 _perms = ('repository.admin', 'repository.write', 'repository.read',)
276 276 return self._check_perms(_perms, pull_request, user, api)
277 277
278 278 def check_user_merge(self, pull_request, user, api=False):
279 279 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
280 280 return self._check_perms(_perms, pull_request, user, api)
281 281
282 282 def check_user_update(self, pull_request, user, api=False):
283 283 owner = user.user_id == pull_request.user_id
284 284 return self.check_user_merge(pull_request, user, api) or owner
285 285
286 286 def check_user_delete(self, pull_request, user):
287 287 owner = user.user_id == pull_request.user_id
288 288 _perms = ('repository.admin',)
289 289 return self._check_perms(_perms, pull_request, user) or owner
290 290
291 291 def is_user_reviewer(self, pull_request, user):
292 292 return user.user_id in [
293 293 x.user_id for x in
294 294 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
295 295 if x.user
296 296 ]
297 297
298 298 def check_user_change_status(self, pull_request, user, api=False):
299 299 return self.check_user_update(pull_request, user, api) \
300 300 or self.is_user_reviewer(pull_request, user)
301 301
302 302 def check_user_comment(self, pull_request, user):
303 303 owner = user.user_id == pull_request.user_id
304 304 return self.check_user_read(pull_request, user) or owner
305 305
306 306 def get(self, pull_request):
307 307 return self.__get_pull_request(pull_request)
308 308
309 309 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
310 310 statuses=None, opened_by=None, order_by=None,
311 311 order_dir='desc', only_created=False):
312 312 repo = None
313 313 if repo_name:
314 314 repo = self._get_repo(repo_name)
315 315
316 316 q = PullRequest.query()
317 317
318 318 if search_q:
319 319 like_expression = u'%{}%'.format(safe_str(search_q))
320 320 q = q.join(User, User.user_id == PullRequest.user_id)
321 321 q = q.filter(or_(
322 322 cast(PullRequest.pull_request_id, String).ilike(like_expression),
323 323 User.username.ilike(like_expression),
324 324 PullRequest.title.ilike(like_expression),
325 325 PullRequest.description.ilike(like_expression),
326 326 ))
327 327
328 328 # source or target
329 329 if repo and source:
330 330 q = q.filter(PullRequest.source_repo == repo)
331 331 elif repo:
332 332 q = q.filter(PullRequest.target_repo == repo)
333 333
334 334 # closed,opened
335 335 if statuses:
336 336 q = q.filter(PullRequest.status.in_(statuses))
337 337
338 338 # opened by filter
339 339 if opened_by:
340 340 q = q.filter(PullRequest.user_id.in_(opened_by))
341 341
342 342 # only get those that are in "created" state
343 343 if only_created:
344 344 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
345 345
346 346 order_map = {
347 347 'name_raw': PullRequest.pull_request_id,
348 348 'id': PullRequest.pull_request_id,
349 349 'title': PullRequest.title,
350 350 'updated_on_raw': PullRequest.updated_on,
351 351 'target_repo': PullRequest.target_repo_id
352 352 }
353 353 if order_by and order_by in order_map:
354 354 if order_dir == 'asc':
355 355 q = q.order_by(order_map[order_by].asc())
356 356 else:
357 357 q = q.order_by(order_map[order_by].desc())
358 358
359 359 return q
360 360
361 361 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
362 362 opened_by=None):
363 363 """
364 364 Count the number of pull requests for a specific repository.
365 365
366 366 :param repo_name: target or source repo
367 367 :param search_q: filter by text
368 368 :param source: boolean flag to specify if repo_name refers to source
369 369 :param statuses: list of pull request statuses
370 370 :param opened_by: author user of the pull request
371 371 :returns: int number of pull requests
372 372 """
373 373 q = self._prepare_get_all_query(
374 374 repo_name, search_q=search_q, source=source, statuses=statuses,
375 375 opened_by=opened_by)
376 376
377 377 return q.count()
378 378
379 379 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
380 380 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
381 381 """
382 382 Get all pull requests for a specific repository.
383 383
384 384 :param repo_name: target or source repo
385 385 :param search_q: filter by text
386 386 :param source: boolean flag to specify if repo_name refers to source
387 387 :param statuses: list of pull request statuses
388 388 :param opened_by: author user of the pull request
389 389 :param offset: pagination offset
390 390 :param length: length of returned list
391 391 :param order_by: order of the returned list
392 392 :param order_dir: 'asc' or 'desc' ordering direction
393 393 :returns: list of pull requests
394 394 """
395 395 q = self._prepare_get_all_query(
396 396 repo_name, search_q=search_q, source=source, statuses=statuses,
397 397 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
398 398
399 399 if length:
400 400 pull_requests = q.limit(length).offset(offset).all()
401 401 else:
402 402 pull_requests = q.all()
403 403
404 404 return pull_requests
405 405
406 406 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
407 407 """
408 408 Count the number of pull requests for a specific repository that are
409 409 awaiting review.
410 410
411 411 :param repo_name: target or source repo
412 412 :param search_q: filter by text
413 413 :param statuses: list of pull request statuses
414 414 :returns: int number of pull requests
415 415 """
416 416 pull_requests = self.get_awaiting_review(
417 417 repo_name, search_q=search_q, statuses=statuses)
418 418
419 419 return len(pull_requests)
420 420
421 421 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
422 422 offset=0, length=None, order_by=None, order_dir='desc'):
423 423 """
424 424 Get all pull requests for a specific repository that are awaiting
425 425 review.
426 426
427 427 :param repo_name: target or source repo
428 428 :param search_q: filter by text
429 429 :param statuses: list of pull request statuses
430 430 :param offset: pagination offset
431 431 :param length: length of returned list
432 432 :param order_by: order of the returned list
433 433 :param order_dir: 'asc' or 'desc' ordering direction
434 434 :returns: list of pull requests
435 435 """
436 436 pull_requests = self.get_all(
437 437 repo_name, search_q=search_q, statuses=statuses,
438 438 order_by=order_by, order_dir=order_dir)
439 439
440 440 _filtered_pull_requests = []
441 441 for pr in pull_requests:
442 442 status = pr.calculated_review_status()
443 443 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
444 444 ChangesetStatus.STATUS_UNDER_REVIEW]:
445 445 _filtered_pull_requests.append(pr)
446 446 if length:
447 447 return _filtered_pull_requests[offset:offset+length]
448 448 else:
449 449 return _filtered_pull_requests
450 450
451 451 def _prepare_awaiting_my_review_review_query(
452 452 self, repo_name, user_id, search_q=None, statuses=None,
453 453 order_by=None, order_dir='desc'):
454 454
455 455 for_review_statuses = [
456 456 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
457 457 ]
458 458
459 459 pull_request_alias = aliased(PullRequest)
460 460 status_alias = aliased(ChangesetStatus)
461 461 reviewers_alias = aliased(PullRequestReviewers)
462 462 repo_alias = aliased(Repository)
463 463
464 464 last_ver_subq = Session()\
465 465 .query(func.min(ChangesetStatus.version)) \
466 466 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
467 467 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
468 468 .subquery()
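# NOTE: a hedged reading of the subquery above: ChangesetStatus versions
# start at 0 for the most recent vote, so func.min(...) selects each
# reviewer's latest status for the pull request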
469 469
470 470 q = Session().query(pull_request_alias) \
471 471 .options(lazyload(pull_request_alias.author)) \
472 472 .join(reviewers_alias,
473 473 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
474 474 .join(repo_alias,
475 475 repo_alias.repo_id == pull_request_alias.target_repo_id) \
476 476 .outerjoin(status_alias,
477 477 and_(status_alias.user_id == reviewers_alias.user_id,
478 478 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
479 479 .filter(or_(status_alias.version == null(),
480 480 status_alias.version == last_ver_subq)) \
481 481 .filter(reviewers_alias.user_id == user_id) \
482 482 .filter(repo_alias.repo_name == repo_name) \
483 483 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
484 484 .group_by(pull_request_alias)
485 485
486 486 # closed,opened
487 487 if statuses:
488 488 q = q.filter(pull_request_alias.status.in_(statuses))
489 489
490 490 if search_q:
491 491 like_expression = u'%{}%'.format(safe_str(search_q))
492 492 q = q.join(User, User.user_id == pull_request_alias.user_id)
493 493 q = q.filter(or_(
494 494 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
495 495 User.username.ilike(like_expression),
496 496 pull_request_alias.title.ilike(like_expression),
497 497 pull_request_alias.description.ilike(like_expression),
498 498 ))
499 499
500 500 order_map = {
501 501 'name_raw': pull_request_alias.pull_request_id,
502 502 'title': pull_request_alias.title,
503 503 'updated_on_raw': pull_request_alias.updated_on,
504 504 'target_repo': pull_request_alias.target_repo_id
505 505 }
506 506 if order_by and order_by in order_map:
507 507 if order_dir == 'asc':
508 508 q = q.order_by(order_map[order_by].asc())
509 509 else:
510 510 q = q.order_by(order_map[order_by].desc())
511 511
512 512 return q
513 513
514 514 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
515 515 """
516 516 Count the number of pull requests for a specific repository that are
517 517 awaiting review from a specific user.
518 518
519 519 :param repo_name: target or source repo
520 520 :param user_id: reviewer user of the pull request
521 521 :param search_q: filter by text
522 522 :param statuses: list of pull request statuses
523 523 :returns: int number of pull requests
524 524 """
525 525 q = self._prepare_awaiting_my_review_review_query(
526 526 repo_name, user_id, search_q=search_q, statuses=statuses)
527 527 return q.count()
528 528
529 529 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
530 530 offset=0, length=None, order_by=None, order_dir='desc'):
531 531 """
532 532 Get all pull requests for a specific repository that are awaiting
533 533 review from a specific user.
534 534
535 535 :param repo_name: target or source repo
536 536 :param user_id: reviewer user of the pull request
537 537 :param search_q: filter by text
538 538 :param statuses: list of pull request statuses
539 539 :param offset: pagination offset
540 540 :param length: length of returned list
541 541 :param order_by: order of the returned list
542 542 :param order_dir: 'asc' or 'desc' ordering direction
543 543 :returns: list of pull requests
544 544 """
545 545
546 546 q = self._prepare_awaiting_my_review_review_query(
547 547 repo_name, user_id, search_q=search_q, statuses=statuses,
548 548 order_by=order_by, order_dir=order_dir)
549 549
550 550 if length:
551 551 pull_requests = q.limit(length).offset(offset).all()
552 552 else:
553 553 pull_requests = q.all()
554 554
555 555 return pull_requests
556 556
557 557 def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
558 558 order_by=None, order_dir='desc'):
559 559 """
560 560 return a query of pull requests where the user is the creator, or is added as a reviewer
561 561 """
562 562 q = PullRequest.query()
563 563 if user_id:
564 564
565 565 base_query = select(PullRequestReviewers)\
566 566 .where(PullRequestReviewers.user_id == user_id)\
567 567 .with_only_columns(PullRequestReviewers.pull_request_id)
568 568
569 569 user_filter = or_(
570 570 PullRequest.user_id == user_id,
571 571 PullRequest.pull_request_id.in_(base_query)
572 572 )
573 573 q = PullRequest.query().filter(user_filter)
574 574
575 575 # closed,opened
576 576 if statuses:
577 577 q = q.filter(PullRequest.status.in_(statuses))
578 578
579 579 if query:
580 580 like_expression = u'%{}%'.format(safe_str(query))
581 581 q = q.join(User, User.user_id == PullRequest.user_id)
582 582 q = q.filter(or_(
583 583 cast(PullRequest.pull_request_id, String).ilike(like_expression),
584 584 User.username.ilike(like_expression),
585 585 PullRequest.title.ilike(like_expression),
586 586 PullRequest.description.ilike(like_expression),
587 587 ))
588 588
589 589 order_map = {
590 590 'name_raw': PullRequest.pull_request_id,
591 591 'title': PullRequest.title,
592 592 'updated_on_raw': PullRequest.updated_on,
593 593 'target_repo': PullRequest.target_repo_id
594 594 }
595 595 if order_by and order_by in order_map:
596 596 if order_dir == 'asc':
597 597 q = q.order_by(order_map[order_by].asc())
598 598 else:
599 599 q = q.order_by(order_map[order_by].desc())
600 600
601 601 return q
602 602
603 603 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
604 604 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
605 605 return q.count()
606 606
607 607 def get_im_participating_in(
608 608 self, user_id=None, statuses=None, query='', offset=0,
609 609 length=None, order_by=None, order_dir='desc'):
610 610 """
611 611 Get all pull requests that I'm participating in as a reviewer, or that I have opened
612 612 """
613 613
614 614 q = self._prepare_im_participating_query(
615 615 user_id, statuses=statuses, query=query, order_by=order_by,
616 616 order_dir=order_dir)
617 617
618 618 if length:
619 619 pull_requests = q.limit(length).offset(offset).all()
620 620 else:
621 621 pull_requests = q.all()
622 622
623 623 return pull_requests
624 624
625 625 def _prepare_participating_in_for_review_query(
626 626 self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
627 627
628 628 for_review_statuses = [
629 629 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
630 630 ]
631 631
632 632 pull_request_alias = aliased(PullRequest)
633 633 status_alias = aliased(ChangesetStatus)
634 634 reviewers_alias = aliased(PullRequestReviewers)
635 635
636 636 last_ver_subq = Session()\
637 637 .query(func.min(ChangesetStatus.version)) \
638 638 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
639 639 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
640 640 .subquery()
641 641
642 642 q = Session().query(pull_request_alias) \
643 643 .options(lazyload(pull_request_alias.author)) \
644 644 .join(reviewers_alias,
645 645 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
646 646 .outerjoin(status_alias,
647 647 and_(status_alias.user_id == reviewers_alias.user_id,
648 648 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
649 649 .filter(or_(status_alias.version == null(),
650 650 status_alias.version == last_ver_subq)) \
651 651 .filter(reviewers_alias.user_id == user_id) \
652 652 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
653 653 .group_by(pull_request_alias)
654 654
655 655 # closed,opened
656 656 if statuses:
657 657 q = q.filter(pull_request_alias.status.in_(statuses))
658 658
659 659 if query:
660 660 like_expression = u'%{}%'.format(safe_str(query))
661 661 q = q.join(User, User.user_id == pull_request_alias.user_id)
662 662 q = q.filter(or_(
663 663 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
664 664 User.username.ilike(like_expression),
665 665 pull_request_alias.title.ilike(like_expression),
666 666 pull_request_alias.description.ilike(like_expression),
667 667 ))
668 668
669 669 order_map = {
670 670 'name_raw': pull_request_alias.pull_request_id,
671 671 'title': pull_request_alias.title,
672 672 'updated_on_raw': pull_request_alias.updated_on,
673 673 'target_repo': pull_request_alias.target_repo_id
674 674 }
675 675 if order_by and order_by in order_map:
676 676 if order_dir == 'asc':
677 677 q = q.order_by(order_map[order_by].asc())
678 678 else:
679 679 q = q.order_by(order_map[order_by].desc())
680 680
681 681 return q
682 682
683 683 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
684 684 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
685 685 return q.count()
686 686
687 687 def get_im_participating_in_for_review(
688 688 self, user_id, statuses=None, query='', offset=0,
689 689 length=None, order_by=None, order_dir='desc'):
690 690 """
691 691 Get all pull requests that need user approval or rejection
692 692 """
693 693
694 694 q = self._prepare_participating_in_for_review_query(
695 695 user_id, statuses=statuses, query=query, order_by=order_by,
696 696 order_dir=order_dir)
697 697
698 698 if length:
699 699 pull_requests = q.limit(length).offset(offset).all()
700 700 else:
701 701 pull_requests = q.all()
702 702
703 703 return pull_requests
704 704
705 705 def get_versions(self, pull_request):
706 706 """
707 707 returns versions of the pull request, sorted by version ID ascending
708 708 """
709 709 return PullRequestVersion.query()\
710 710 .filter(PullRequestVersion.pull_request == pull_request)\
711 711 .order_by(PullRequestVersion.pull_request_version_id.asc())\
712 712 .all()
713 713
714 714 def get_pr_version(self, pull_request_id, version=None):
715 715 at_version = None
716 716
717 717 if version and version == 'latest':
718 718 pull_request_ver = PullRequest.get(pull_request_id)
719 719 pull_request_obj = pull_request_ver
720 720 _org_pull_request_obj = pull_request_obj
721 721 at_version = 'latest'
722 722 elif version:
723 723 pull_request_ver = PullRequestVersion.get_or_404(version)
724 724 pull_request_obj = pull_request_ver
725 725 _org_pull_request_obj = pull_request_ver.pull_request
726 726 at_version = pull_request_ver.pull_request_version_id
727 727 else:
728 728 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
729 729 pull_request_id)
730 730
731 731 pull_request_display_obj = PullRequest.get_pr_display_object(
732 732 pull_request_obj, _org_pull_request_obj)
733 733
734 734 return _org_pull_request_obj, pull_request_obj, \
735 735 pull_request_display_obj, at_version
736 736
737 737 def pr_commits_versions(self, versions):
738 738 """
739 739 Maps the pull-request commits into all known PR versions. This way we can obtain,
740 740 for each commit, the PR versions it was introduced in.
741 741 """
742 742 commit_versions = collections.defaultdict(list)
743 743 num_versions = [x.pull_request_version_id for x in versions]
744 744 for ver in versions:
745 745 for commit_id in ver.revisions:
746 746 ver_idx = ChangesetComment.get_index_from_version(
747 747 ver.pull_request_version_id, num_versions=num_versions)
748 748 commit_versions[commit_id].append(ver_idx)
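# illustrative result shape: {commit_id: [ver_idx, ...]} - for each commit,
# the PR-version display indices (from get_index_from_version) that contain it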
749 749 return commit_versions
750 750
751 751 def create(self, created_by, source_repo, source_ref, target_repo,
752 752 target_ref, revisions, reviewers, observers, title, description=None,
753 753 common_ancestor_id=None,
754 754 description_renderer=None,
755 755 reviewer_data=None, translator=None, auth_user=None):
756 756 translator = translator or get_current_request().translate
757 757
758 758 created_by_user = self._get_user(created_by)
759 759 auth_user = auth_user or created_by_user.AuthUser()
760 760 source_repo = self._get_repo(source_repo)
761 761 target_repo = self._get_repo(target_repo)
762 762
763 763 pull_request = PullRequest()
764 764 pull_request.source_repo = source_repo
765 765 pull_request.source_ref = source_ref
766 766 pull_request.target_repo = target_repo
767 767 pull_request.target_ref = target_ref
768 768 pull_request.revisions = revisions
769 769 pull_request.title = title
770 770 pull_request.description = description
771 771 pull_request.description_renderer = description_renderer
772 772 pull_request.author = created_by_user
773 773 pull_request.reviewer_data = reviewer_data
774 774 pull_request.pull_request_state = pull_request.STATE_CREATING
775 775 pull_request.common_ancestor_id = common_ancestor_id
776 776
777 777 Session().add(pull_request)
778 778 Session().flush()
779 779
780 780 reviewer_ids = set()
781 781 # members / reviewers
782 782 for reviewer_object in reviewers:
783 783 user_id, reasons, mandatory, role, rules = reviewer_object
784 784 user = self._get_user(user_id)
785 785
786 786 # skip duplicates
787 787 if user.user_id in reviewer_ids:
788 788 continue
789 789
790 790 reviewer_ids.add(user.user_id)
791 791
792 792 reviewer = PullRequestReviewers()
793 793 reviewer.user = user
794 794 reviewer.pull_request = pull_request
795 795 reviewer.reasons = reasons
796 796 reviewer.mandatory = mandatory
797 797 reviewer.role = role
798 798
799 799 # NOTE(marcink): pick only first rule for now
800 800 rule_id = list(rules)[0] if rules else None
801 801 rule = RepoReviewRule.get(rule_id) if rule_id else None
802 802 if rule:
803 803 review_group = rule.user_group_vote_rule(user_id)
804 804 # we check if this particular reviewer is a member of a voting group
805 805 if review_group:
806 806 # NOTE(marcink):
807 807 # it can be that the user is a member of more than one group, but we
808 808 # pick the first one, same as the default reviewers algo
809 809 review_group = review_group[0]
810 810
811 811 rule_data = {
812 812 'rule_name':
813 813 rule.review_rule_name,
814 814 'rule_user_group_entry_id':
815 815 review_group.repo_review_rule_users_group_id,
816 816 'rule_user_group_name':
817 817 review_group.users_group.users_group_name,
818 818 'rule_user_group_members':
819 819 [x.user.username for x in review_group.users_group.members],
820 820 'rule_user_group_members_id':
821 821 [x.user.user_id for x in review_group.users_group.members],
822 822 }
823 823 # e.g {'vote_rule': -1, 'mandatory': True}
824 824 rule_data.update(review_group.rule_data())
825 825
826 826 reviewer.rule_data = rule_data
827 827
828 828 Session().add(reviewer)
829 829 Session().flush()
830 830
831 831 for observer_object in observers:
832 832 user_id, reasons, mandatory, role, rules = observer_object
833 833 user = self._get_user(user_id)
834 834
835 835 # skip duplicates from reviewers
836 836 if user.user_id in reviewer_ids:
837 837 continue
838 838
839 839 #reviewer_ids.add(user.user_id)
840 840
841 841 observer = PullRequestReviewers()
842 842 observer.user = user
843 843 observer.pull_request = pull_request
844 844 observer.reasons = reasons
845 845 observer.mandatory = mandatory
846 846 observer.role = role
847 847
848 848 # NOTE(marcink): pick only first rule for now
849 849 rule_id = list(rules)[0] if rules else None
850 850 rule = RepoReviewRule.get(rule_id) if rule_id else None
851 851 if rule:
852 852 # TODO(marcink): do we need this for observers ??
853 853 pass
854 854
855 855 Session().add(observer)
856 856 Session().flush()
857 857
858 858 # Set approval status to "Under Review" for all commits which are
859 859 # part of this pull request.
860 860 ChangesetStatusModel().set_status(
861 861 repo=target_repo,
862 862 status=ChangesetStatus.STATUS_UNDER_REVIEW,
863 863 user=created_by_user,
864 864 pull_request=pull_request
865 865 )
866 866 # we commit early at this point. This has to do with the fact
867 867 # that the queries before this do some row-locking. Because of that
868 868 # we need to commit and finish the transaction before the validate call
869 869 # below, which for large repos could take long, resulting in long row locks
870 870 Session().commit()
871 871
872 872 # prepare the workspace and run an initial merge simulation; set the state
873 873 # during that operation
874 874 pull_request = PullRequest.get(pull_request.pull_request_id)
875 875
876 876 # set the state to 'merging' for the merge simulation and, once finished,
877 877 # to 'created', marking that the simulation works fine
878 878 with pull_request.set_state(PullRequest.STATE_MERGING,
879 879 final_state=PullRequest.STATE_CREATED) as state_obj:
880 880 MergeCheck.validate(
881 881 pull_request, auth_user=auth_user, translator=translator)
882 882
883 883 self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
884 884 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
885 885
886 886 creation_data = pull_request.get_api_data(with_merge_state=False)
887 887 self._log_audit_action(
888 888 'repo.pull_request.create', {'data': creation_data},
889 889 auth_user, pull_request)
890 890
891 891 return pull_request
892 892
893 893 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
894 894 pull_request = self.__get_pull_request(pull_request)
895 895 target_scm = pull_request.target_repo.scm_instance()
896 896 if action == 'create':
897 897 trigger_hook = hooks_utils.trigger_create_pull_request_hook
898 898 elif action == 'merge':
899 899 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
900 900 elif action == 'close':
901 901 trigger_hook = hooks_utils.trigger_close_pull_request_hook
902 902 elif action == 'review_status_change':
903 903 trigger_hook = hooks_utils.trigger_review_pull_request_hook
904 904 elif action == 'update':
905 905 trigger_hook = hooks_utils.trigger_update_pull_request_hook
906 906 elif action == 'comment':
907 907 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
908 908 elif action == 'comment_edit':
909 909 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
910 910 else:
911 911 return
912 912
913 913 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
914 914 pull_request, action, trigger_hook)
915 915 trigger_hook(
916 916 username=user.username,
917 917 repo_name=pull_request.target_repo.repo_name,
918 918 repo_type=target_scm.alias,
919 919 pull_request=pull_request,
920 920 data=data)
921 921
922 922 def _get_commit_ids(self, pull_request):
923 923 """
924 924 Return the commit ids of the merged pull request.
925 925
926 926 This method does not yet deal correctly with the lack of autoupdates,
927 927 nor with implicit target updates.
928 928 For example: if a commit in the source repo is already in the target, it
929 929 will be reported anyway.
930 930 """
931 931 merge_rev = pull_request.merge_rev
932 932 if merge_rev is None:
933 933 raise ValueError('This pull request was not merged yet')
934 934
935 935 commit_ids = list(pull_request.revisions)
936 936 if merge_rev not in commit_ids:
937 937 commit_ids.append(merge_rev)
938 938
939 939 return commit_ids
940 940
941 941 def merge_repo(self, pull_request, user, extras):
942 942 repo_type = pull_request.source_repo.repo_type
943 943 log.debug("Merging pull request %s", pull_request)
944 944
945 945 extras['user_agent'] = '{}/internal-merge'.format(repo_type)
946 946 merge_state = self._merge_pull_request(pull_request, user, extras)
947 947 if merge_state.executed:
948 948 log.debug("Merge was successful, updating the pull request comments.")
949 949 self._comment_and_close_pr(pull_request, user, merge_state)
950 950
951 951 self._log_audit_action(
952 952 'repo.pull_request.merge',
953 953 {'merge_state': merge_state.__dict__},
954 954 user, pull_request)
955 955
956 956 else:
957 957 log.warning("Merge failed, not updating the pull request.")
958 958 return merge_state
959 959
960 960 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
961 961 target_vcs = pull_request.target_repo.scm_instance()
962 962 source_vcs = pull_request.source_repo.scm_instance()
963 963
964 964 message = safe_str(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
965 965 pr_id=pull_request.pull_request_id,
966 966 pr_title=pull_request.title,
967 967 pr_desc=pull_request.description,
968 968 source_repo=source_vcs.name,
969 969 source_ref_name=pull_request.source_ref_parts.name,
970 970 target_repo=target_vcs.name,
971 971 target_ref_name=pull_request.target_ref_parts.name,
972 972 )
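# illustrative rendering: with a MERGE_MESSAGE_TMPL along the lines of
# 'Merge pull request !{pr_id} from {source_repo} {source_ref_name}', this
# would yield e.g. 'Merge pull request !42 from my-repo feature-x'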
973 973
974 974 workspace_id = self._workspace_id(pull_request)
975 975 repo_id = pull_request.target_repo.repo_id
976 976 use_rebase = self._use_rebase_for_merging(pull_request)
977 977 close_branch = self._close_branch_before_merging(pull_request)
978 978 user_name = self._user_name_for_merging(pull_request, user)
979 979
980 980 target_ref = self._refresh_reference(
981 981 pull_request.target_ref_parts, target_vcs)
982 982
983 983 callback_daemon, extras = prepare_callback_daemon(
984 984 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
985 985 host=vcs_settings.HOOKS_HOST,
986 986 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
987 987
988 988 with callback_daemon:
989 989 # TODO: johbo: Implement a clean way to run a config_override
990 990 # for a single call.
991 991 target_vcs.config.set(
992 992 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
993 993
994 994 merge_state = target_vcs.merge(
995 995 repo_id, workspace_id, target_ref, source_vcs,
996 996 pull_request.source_ref_parts,
997 997 user_name=user_name, user_email=user.email,
998 998 message=message, use_rebase=use_rebase,
999 999 close_branch=close_branch)
1000 1000
1001 1001 return merge_state
1002 1002
1003 1003 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
1004 1004 pull_request.merge_rev = merge_state.merge_ref.commit_id
1005 1005 pull_request.updated_on = datetime.datetime.now()
1006 1006 close_msg = close_msg or 'Pull request merged and closed'
1007 1007
1008 1008 CommentsModel().create(
1009 1009 text=safe_str(close_msg),
1010 1010 repo=pull_request.target_repo.repo_id,
1011 1011 user=user.user_id,
1012 1012 pull_request=pull_request.pull_request_id,
1013 1013 f_path=None,
1014 1014 line_no=None,
1015 1015 closing_pr=True
1016 1016 )
1017 1017
1018 1018 Session().add(pull_request)
1019 1019 Session().flush()
1020 1020 # TODO: paris: replace invalidation with less radical solution
1021 1021 ScmModel().mark_for_invalidation(
1022 1022 pull_request.target_repo.repo_name)
1023 1023 self.trigger_pull_request_hook(pull_request, user, 'merge')
1024 1024
1025 1025 def has_valid_update_type(self, pull_request):
1026 1026 source_ref_type = pull_request.source_ref_parts.type
1027 1027 return source_ref_type in self.REF_TYPES
1028 1028
1029 1029 def get_flow_commits(self, pull_request):
1030 1030
1031 1031 # source repo
1032 1032 source_ref_name = pull_request.source_ref_parts.name
1033 1033 source_ref_type = pull_request.source_ref_parts.type
1034 1034 source_ref_id = pull_request.source_ref_parts.commit_id
1035 1035 source_repo = pull_request.source_repo.scm_instance()
1036 1036
1037 1037 try:
1038 1038 if source_ref_type in self.REF_TYPES:
1039 1039 source_commit = source_repo.get_commit(
1040 1040 source_ref_name, reference_obj=pull_request.source_ref_parts)
1041 1041 else:
1042 1042 source_commit = source_repo.get_commit(source_ref_id)
1043 1043 except CommitDoesNotExistError:
1044 1044 raise SourceRefMissing()
1045 1045
1046 1046 # target repo
1047 1047 target_ref_name = pull_request.target_ref_parts.name
1048 1048 target_ref_type = pull_request.target_ref_parts.type
1049 1049 target_ref_id = pull_request.target_ref_parts.commit_id
1050 1050 target_repo = pull_request.target_repo.scm_instance()
1051 1051
1052 1052 try:
1053 1053 if target_ref_type in self.REF_TYPES:
1054 1054 target_commit = target_repo.get_commit(
1055 1055 target_ref_name, reference_obj=pull_request.target_ref_parts)
1056 1056 else:
1057 1057 target_commit = target_repo.get_commit(target_ref_id)
1058 1058 except CommitDoesNotExistError:
1059 1059 raise TargetRefMissing()
1060 1060
1061 1061 return source_commit, target_commit
1062 1062
1063 1063 def update_commits(self, pull_request, updating_user):
1064 1064 """
1065 1065 Get the updated list of commits for the pull request
1066 1066 and return the new pull request version and the list
1067 1067 of commits processed by this update action
1068 1068
1069 1069 updating_user is the user_object who triggered the update
1070 1070 """
1071 1071 pull_request = self.__get_pull_request(pull_request)
1072 1072 source_ref_type = pull_request.source_ref_parts.type
1073 1073 source_ref_name = pull_request.source_ref_parts.name
1074 1074 source_ref_id = pull_request.source_ref_parts.commit_id
1075 1075
1076 1076 target_ref_type = pull_request.target_ref_parts.type
1077 1077 target_ref_name = pull_request.target_ref_parts.name
1078 1078 target_ref_id = pull_request.target_ref_parts.commit_id
1079 1079
1080 1080 if not self.has_valid_update_type(pull_request):
1081 1081 log.debug("Skipping update of pull request %s due to ref type: %s",
1082 1082 pull_request, source_ref_type)
1083 1083 return UpdateResponse(
1084 1084 executed=False,
1085 1085 reason=UpdateFailureReason.WRONG_REF_TYPE,
1086 1086 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1087 1087 source_changed=False, target_changed=False)
1088 1088
1089 1089 try:
1090 1090 source_commit, target_commit = self.get_flow_commits(pull_request)
1091 1091 except SourceRefMissing:
1092 1092 return UpdateResponse(
1093 1093 executed=False,
1094 1094 reason=UpdateFailureReason.MISSING_SOURCE_REF,
1095 1095 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1096 1096 source_changed=False, target_changed=False)
1097 1097 except TargetRefMissing:
1098 1098 return UpdateResponse(
1099 1099 executed=False,
1100 1100 reason=UpdateFailureReason.MISSING_TARGET_REF,
1101 1101 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1102 1102 source_changed=False, target_changed=False)
1103 1103
1104 1104 source_changed = source_ref_id != source_commit.raw_id
1105 1105 target_changed = target_ref_id != target_commit.raw_id
1106 1106
1107 1107 if not (source_changed or target_changed):
1108 1108 log.debug("Nothing changed in pull request %s", pull_request)
1109 1109 return UpdateResponse(
1110 1110 executed=False,
1111 1111 reason=UpdateFailureReason.NO_CHANGE,
1112 1112 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1113 1113 source_changed=source_changed, target_changed=target_changed)
1114 1114
1115 1115 change_in_found = 'target repo' if target_changed else 'source repo'
1116 1116 log.debug('Updating pull request because a change in %s was detected',
1117 1117 change_in_found)
1118 1118
1119 1119 # Finally an update is needed; in case of a source change
1120 1120 # we create a new version, otherwise it is just an update
1121 1121 if source_changed:
1122 1122 pull_request_version = self._create_version_from_snapshot(pull_request)
1123 1123 self._link_comments_to_version(pull_request_version)
1124 1124 else:
1125 1125 try:
1126 1126 ver = pull_request.versions[-1]
1127 1127 except IndexError:
1128 1128 ver = None
1129 1129
1130 1130 pull_request.pull_request_version_id = \
1131 1131 ver.pull_request_version_id if ver else None
1132 1132 pull_request_version = pull_request
1133 1133
1134 1134 source_repo = pull_request.source_repo.scm_instance()
1135 1135 target_repo = pull_request.target_repo.scm_instance()
1136 1136
1137 1137 # re-compute commit ids
1138 1138 old_commit_ids = pull_request.revisions
1139 1139 pre_load = ["author", "date", "message", "branch"]
1140 1140 commit_ranges = target_repo.compare(
1141 1141 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1142 1142 pre_load=pre_load)
1143 1143
1144 1144 target_ref = target_commit.raw_id
1145 1145 source_ref = source_commit.raw_id
1146 1146 ancestor_commit_id = target_repo.get_common_ancestor(
1147 1147 target_ref, source_ref, source_repo)
1148 1148
1149 1149 if not ancestor_commit_id:
1150 1150 raise ValueError(
1151 1151 'cannot calculate diff info without a common ancestor. '
1152 1152 'Make sure both repositories are related, and have a common forking commit.')
1153 1153
1154 1154 pull_request.common_ancestor_id = ancestor_commit_id
1155 1155
1156 pull_request.source_ref = '%s:%s:%s' % (
1157 source_ref_type, source_ref_name, source_commit.raw_id)
1158 pull_request.target_ref = '%s:%s:%s' % (
1159 target_ref_type, target_ref_name, ancestor_commit_id)
1156 pull_request.source_ref = f'{source_ref_type}:{source_ref_name}:{source_commit.raw_id}'
1157 pull_request.target_ref = f'{target_ref_type}:{target_ref_name}:{ancestor_commit_id}'
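# NOTE: target_ref is pinned to the common ancestor commit rather than the
# current target tip; presumably this keeps the recorded diff base stable.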
1160 1158
1161 1159 pull_request.revisions = [
1162 1160 commit.raw_id for commit in reversed(commit_ranges)]
1163 1161 pull_request.updated_on = datetime.datetime.now()
1164 1162 Session().add(pull_request)
1165 1163 new_commit_ids = pull_request.revisions
1166 1164
1167 1165 old_diff_data, new_diff_data = self._generate_update_diffs(
1168 1166 pull_request, pull_request_version)
1169 1167
1170 1168 # calculate commit and file changes
1171 1169 commit_changes = self._calculate_commit_id_changes(
1172 1170 old_commit_ids, new_commit_ids)
1173 1171 file_changes = self._calculate_file_changes(
1174 1172 old_diff_data, new_diff_data)
1175 1173
1176 1174 # set comments as outdated if DIFFS changed
1177 1175 CommentsModel().outdate_comments(
1178 1176 pull_request, old_diff_data=old_diff_data,
1179 1177 new_diff_data=new_diff_data)
1180 1178
1181 1179 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1182 1180 file_node_changes = (
1183 1181 file_changes.added or file_changes.modified or file_changes.removed)
1184 1182 pr_has_changes = valid_commit_changes or file_node_changes
1185 1183
1186 1184 # Add an automatic comment to the pull request, in case
1187 1185 # anything has changed
1188 1186 if pr_has_changes:
1189 1187 update_comment = CommentsModel().create(
1190 1188 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1191 1189 repo=pull_request.target_repo,
1192 1190 user=pull_request.author,
1193 1191 pull_request=pull_request,
1194 1192 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1195 1193
1196 1194 # Update status to "Under Review" for added commits
1197 1195 for commit_id in commit_changes.added:
1198 1196 ChangesetStatusModel().set_status(
1199 1197 repo=pull_request.source_repo,
1200 1198 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1201 1199 comment=update_comment,
1202 1200 user=pull_request.author,
1203 1201 pull_request=pull_request,
1204 1202 revision=commit_id)
1205 1203
1206 1204 # initial DB commit, before sending out notifications
1207 1205 Session().commit()
1208 1206
1209 1207 if pr_has_changes:
1210 1208 # send update email to users
1211 1209 try:
1212 1210 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1213 1211 ancestor_commit_id=ancestor_commit_id,
1214 1212 commit_changes=commit_changes,
1215 1213 file_changes=file_changes)
1216 1214 Session().commit()
1217 1215 except Exception:
1218 1216 log.exception('Failed to send email notification to users')
1219 1217 Session().rollback()
1220 1218
1221 1219 log.debug(
1222 1220 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1223 1221 'removed_ids: %s', pull_request.pull_request_id,
1224 1222 commit_changes.added, commit_changes.common, commit_changes.removed)
1225 1223 log.debug(
1226 1224 'Updated pull request with the following file changes: %s',
1227 1225 file_changes)
1228 1226
1229 1227 log.info(
1230 1228 "Updated pull request %s from commit %s to commit %s, "
1231 1229 "stored new version %s of this pull request.",
1232 1230 pull_request.pull_request_id, source_ref_id,
1233 1231 pull_request.source_ref_parts.commit_id,
1234 1232 pull_request_version.pull_request_version_id)
1235 1233
1236 1234 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1237 1235
1238 1236 return UpdateResponse(
1239 1237 executed=True, reason=UpdateFailureReason.NONE,
1240 1238 old=pull_request, new=pull_request_version,
1241 1239 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1242 1240 source_changed=source_changed, target_changed=target_changed)
1243 1241
1244 1242 def _create_version_from_snapshot(self, pull_request):
1245 1243 version = PullRequestVersion()
1246 1244 version.title = pull_request.title
1247 1245 version.description = pull_request.description
1248 1246 version.status = pull_request.status
1249 1247 version.pull_request_state = pull_request.pull_request_state
1250 1248 version.created_on = datetime.datetime.now()
1251 1249 version.updated_on = pull_request.updated_on
1252 1250 version.user_id = pull_request.user_id
1253 1251 version.source_repo = pull_request.source_repo
1254 1252 version.source_ref = pull_request.source_ref
1255 1253 version.target_repo = pull_request.target_repo
1256 1254 version.target_ref = pull_request.target_ref
1257 1255
1258 1256 version._last_merge_source_rev = pull_request._last_merge_source_rev
1259 1257 version._last_merge_target_rev = pull_request._last_merge_target_rev
1260 1258 version.last_merge_status = pull_request.last_merge_status
1261 1259 version.last_merge_metadata = pull_request.last_merge_metadata
1262 1260 version.shadow_merge_ref = pull_request.shadow_merge_ref
1263 1261 version.merge_rev = pull_request.merge_rev
1264 1262 version.reviewer_data = pull_request.reviewer_data
1265 1263
1266 1264 version.revisions = pull_request.revisions
1267 1265 version.common_ancestor_id = pull_request.common_ancestor_id
1268 1266 version.pull_request = pull_request
1269 1267 Session().add(version)
1270 1268 Session().flush()
1271 1269
1272 1270 return version
1273 1271
1274 1272 def _generate_update_diffs(self, pull_request, pull_request_version):
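"""
Build processed diff data for the previous pull request version and for
the current pull request state, so the two can be compared.
"""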
1275 1273
1276 1274 diff_context = (
1277 1275 self.DIFF_CONTEXT +
1278 1276 CommentsModel.needed_extra_diff_context())
1279 1277 hide_whitespace_changes = False
1280 1278 source_repo = pull_request_version.source_repo
1281 1279 source_ref_id = pull_request_version.source_ref_parts.commit_id
1282 1280 target_ref_id = pull_request_version.target_ref_parts.commit_id
1283 1281 old_diff = self._get_diff_from_pr_or_version(
1284 1282 source_repo, source_ref_id, target_ref_id,
1285 1283 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1286 1284
1287 1285 source_repo = pull_request.source_repo
1288 1286 source_ref_id = pull_request.source_ref_parts.commit_id
1289 1287 target_ref_id = pull_request.target_ref_parts.commit_id
1290 1288
1291 1289 new_diff = self._get_diff_from_pr_or_version(
1292 1290 source_repo, source_ref_id, target_ref_id,
1293 1291 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1294 1292
1295 1293 # NOTE: this was using diff_format='gitdiff'
1296 1294 old_diff_data = diffs.DiffProcessor(old_diff, diff_format='newdiff')
1297 1295 old_diff_data.prepare()
1298 1296 new_diff_data = diffs.DiffProcessor(new_diff, diff_format='newdiff')
1299 1297 new_diff_data.prepare()
1300 1298
1301 1299 return old_diff_data, new_diff_data
1302 1300
1303 1301 def _link_comments_to_version(self, pull_request_version):
1304 1302 """
1305 1303 Link all unlinked comments of this pull request to the given version.
1306 1304
1307 1305 :param pull_request_version: The `PullRequestVersion` to which
1308 1306 the comments shall be linked.
1309 1307
1310 1308 """
1311 1309 pull_request = pull_request_version.pull_request
1312 1310 comments = ChangesetComment.query()\
1313 1311 .filter(
1314 1312 # TODO: johbo: Should we query for the repo at all here?
1315 1313 # Pending decision on how comments of PRs are to be related
1316 1314 # to either the source repo, the target repo or no repo at all.
1317 1315 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1318 1316 ChangesetComment.pull_request == pull_request,
1319 1317 ChangesetComment.pull_request_version == null())\
1320 1318 .order_by(ChangesetComment.comment_id.asc())
1321 1319
1322 1320 # TODO: johbo: Find out why this breaks if it is done in a bulk
1323 1321 # operation.
1324 1322 for comment in comments:
1325 1323 comment.pull_request_version_id = (
1326 1324 pull_request_version.pull_request_version_id)
1327 1325 Session().add(comment)
1328 1326
1329 1327 def _calculate_commit_id_changes(self, old_ids, new_ids):
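"""
Split commit ids into added/common/removed relative to the old set.

Illustrative sketch with hypothetical ids:
old_ids=['a', 'b'], new_ids=['b', 'c'] ->
ChangeTuple(added=['c'], common=['b'], removed=['a'], total=['b', 'c'])
"""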
1330 1328 added = [x for x in new_ids if x not in old_ids]
1331 1329 common = [x for x in new_ids if x in old_ids]
1332 1330 removed = [x for x in old_ids if x not in new_ids]
1333 1331 total = new_ids
1334 1332 return ChangeTuple(added, common, removed, total)
1335 1333
1336 1334 def _calculate_file_changes(self, old_diff_data, new_diff_data):
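"""
Classify files as added/modified/removed by comparing md5 hashes of the
per-file raw diffs; files absent from the old diff are classified via
the DEL_FILENODE operation flag of the new diff.
"""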
1337 1335
1338 1336 old_files = OrderedDict()
1339 1337 for diff_data in old_diff_data.parsed_diff:
1340 1338 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1341 1339
1342 1340 added_files = []
1343 1341 modified_files = []
1344 1342 removed_files = []
1345 1343 for diff_data in new_diff_data.parsed_diff:
1346 1344 new_filename = diff_data['filename']
1347 1345 new_hash = md5_safe(diff_data['raw_diff'])
1348 1346
1349 1347 old_hash = old_files.get(new_filename)
1350 1348 if not old_hash:
1351 1349 # file is not present in the old diff, we have to figure out the
1352 1350 # ADD/REMOVE operation from the parsed diff
1353 1351 operations_dict = diff_data['stats']['ops']
1354 1352 if diffs.DEL_FILENODE in operations_dict:
1355 1353 removed_files.append(new_filename)
1356 1354 else:
1357 1355 added_files.append(new_filename)
1358 1356 else:
1359 1357 if new_hash != old_hash:
1360 1358 modified_files.append(new_filename)
1361 1359 # now remove a file from old, since we have seen it already
1362 1360 del old_files[new_filename]
1363 1361
1364 1362 # removed files are those present in old but not in NEW; since we
1365 1363 # delete old files that are present in the new diff, any left-overs
1366 1364 # are the removed files
1367 1365 removed_files.extend(old_files.keys())
1368 1366
1369 1367 return FileChangeTuple(added_files, modified_files, removed_files)
1370 1368
1371 1369 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1372 1370 """
1373 1371 Render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1374 1372 so it always looks the same regardless of which default renderer
1375 1373 the system is using.
1376 1374
1377 1375 :param ancestor_commit_id: ancestor raw_id
1378 1376 :param changes: changes named tuple
1379 1377 :param file_changes: file changes named tuple
1380 1378
1381 1379 """
1382 1380 new_status = ChangesetStatus.get_status_lbl(
1383 1381 ChangesetStatus.STATUS_UNDER_REVIEW)
1384 1382
1385 1383 changed_files = (
1386 1384 file_changes.added + file_changes.modified + file_changes.removed)
1387 1385
1388 1386 params = {
1389 1387 'under_review_label': new_status,
1390 1388 'added_commits': changes.added,
1391 1389 'removed_commits': changes.removed,
1392 1390 'changed_files': changed_files,
1393 1391 'added_files': file_changes.added,
1394 1392 'modified_files': file_changes.modified,
1395 1393 'removed_files': file_changes.removed,
1396 1394 'ancestor_commit_id': ancestor_commit_id
1397 1395 }
1398 1396 renderer = RstTemplateRenderer()
1399 1397 return renderer.render('pull_request_update.mako', **params)
1400 1398
1401 1399 def edit(self, pull_request, title, description, description_renderer, user):
1402 1400 pull_request = self.__get_pull_request(pull_request)
1403 1401 old_data = pull_request.get_api_data(with_merge_state=False)
1404 1402 if pull_request.is_closed():
1405 1403 raise ValueError('This pull request is closed')
1406 1404 if title:
1407 1405 pull_request.title = title
1408 1406 pull_request.description = description
1409 1407 pull_request.updated_on = datetime.datetime.now()
1410 1408 pull_request.description_renderer = description_renderer
1411 1409 Session().add(pull_request)
1412 1410 self._log_audit_action(
1413 1411 'repo.pull_request.edit', {'old_data': old_data},
1414 1412 user, pull_request)
1415 1413
1416 1414 def update_reviewers(self, pull_request, reviewer_data, user):
1417 1415 """
1418 1416 Update the reviewers in the pull request
1419 1417
1420 1418 :param pull_request: the pr to update
1421 1419 :param reviewer_data: list of tuples
1422 1420 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1423 1421 :param user: current user who triggers this action
1424 1422 """
1425 1423
1426 1424 pull_request = self.__get_pull_request(pull_request)
1427 1425 if pull_request.is_closed():
1428 1426 raise ValueError('This pull request is closed')
1429 1427
1430 1428 reviewers = {}
1431 1429 for user_id, reasons, mandatory, role, rules in reviewer_data:
1432 1430 if isinstance(user_id, (int, str)):
1433 1431 user_id = self._get_user(user_id).user_id
1434 1432 reviewers[user_id] = {
1435 1433 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1436 1434
1437 1435 reviewers_ids = set(reviewers.keys())
1438 1436 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1439 1437 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1440 1438
1441 1439 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1442 1440
1443 1441 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1444 1442 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1445 1443
1446 1444 log.debug("Adding %s reviewers", ids_to_add)
1447 1445 log.debug("Removing %s reviewers", ids_to_remove)
1448 1446 changed = False
1449 1447 added_audit_reviewers = []
1450 1448 removed_audit_reviewers = []
1451 1449
1452 1450 for uid in ids_to_add:
1453 1451 changed = True
1454 1452 _usr = self._get_user(uid)
1455 1453 reviewer = PullRequestReviewers()
1456 1454 reviewer.user = _usr
1457 1455 reviewer.pull_request = pull_request
1458 1456 reviewer.reasons = reviewers[uid]['reasons']
1459 1457 # NOTE(marcink): mandatory shouldn't be changed now
1460 1458 # reviewer.mandatory = reviewers[uid]['reasons']
1461 1459 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1462 1460 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1463 1461 Session().add(reviewer)
1464 1462 added_audit_reviewers.append(reviewer.get_dict())
1465 1463
1466 1464 for uid in ids_to_remove:
1467 1465 changed = True
1468 1466 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1469 1467 # This is an edge case that handles previous state of having the same reviewer twice.
1470 1468 # this CAN happen due to the lack of DB checks
1471 1469 reviewers = PullRequestReviewers.query()\
1472 1470 .filter(PullRequestReviewers.user_id == uid,
1473 1471 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1474 1472 PullRequestReviewers.pull_request == pull_request)\
1475 1473 .all()
1476 1474
1477 1475 for obj in reviewers:
1478 1476 removed_audit_reviewers.append(obj.get_dict())
1479 1477 Session().delete(obj)
1480 1478
1481 1479 if changed:
1482 1480 Session().expire_all()
1483 1481 pull_request.updated_on = datetime.datetime.now()
1484 1482 Session().add(pull_request)
1485 1483
1486 1484 # finally store audit logs
1487 1485 for user_data in added_audit_reviewers:
1488 1486 self._log_audit_action(
1489 1487 'repo.pull_request.reviewer.add', {'data': user_data},
1490 1488 user, pull_request)
1491 1489 for user_data in removed_audit_reviewers:
1492 1490 self._log_audit_action(
1493 1491 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1494 1492 user, pull_request)
1495 1493
1496 1494 self.notify_reviewers(pull_request, ids_to_add, user)
1497 1495 return ids_to_add, ids_to_remove
1498 1496
1499 1497 def update_observers(self, pull_request, observer_data, user):
1500 1498 """
1501 1499 Update the observers in the pull request
1502 1500
1503 1501 :param pull_request: the pr to update
1504 1502 :param observer_data: list of tuples
1505 1503 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1506 1504 :param user: current user who triggers this action
1507 1505 """
1508 1506 pull_request = self.__get_pull_request(pull_request)
1509 1507 if pull_request.is_closed():
1510 1508 raise ValueError('This pull request is closed')
1511 1509
1512 1510 observers = {}
1513 1511 for user_id, reasons, mandatory, role, rules in observer_data:
1514 1512 if isinstance(user_id, (int, str)):
1515 1513 user_id = self._get_user(user_id).user_id
1516 1514 observers[user_id] = {
1517 1515 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1518 1516
1519 1517 observers_ids = set(observers.keys())
1520 1518 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1521 1519 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1522 1520
1523 1521 current_observers_ids = set([x.user.user_id for x in current_observers])
1524 1522
1525 1523 ids_to_add = observers_ids.difference(current_observers_ids)
1526 1524 ids_to_remove = current_observers_ids.difference(observers_ids)
1527 1525
1528 1526 log.debug("Adding %s observer", ids_to_add)
1529 1527 log.debug("Removing %s observer", ids_to_remove)
1530 1528 changed = False
1531 1529 added_audit_observers = []
1532 1530 removed_audit_observers = []
1533 1531
1534 1532 for uid in ids_to_add:
1535 1533 changed = True
1536 1534 _usr = self._get_user(uid)
1537 1535 observer = PullRequestReviewers()
1538 1536 observer.user = _usr
1539 1537 observer.pull_request = pull_request
1540 1538 observer.reasons = observers[uid]['reasons']
1541 1539 # NOTE(marcink): mandatory shouldn't be changed now
1542 1540 # observer.mandatory = observer[uid]['reasons']
1543 1541
1544 1542 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1545 1543 observer.role = PullRequestReviewers.ROLE_OBSERVER
1546 1544 Session().add(observer)
1547 1545 added_audit_observers.append(observer.get_dict())
1548 1546
1549 1547 for uid in ids_to_remove:
1550 1548 changed = True
1551 1549 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1552 1550 # This is an edge case that handles previous state of having the same reviewer twice.
1553 1551 # this CAN happen due to the lack of DB checks
1554 1552 observers = PullRequestReviewers.query()\
1555 1553 .filter(PullRequestReviewers.user_id == uid,
1556 1554 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1557 1555 PullRequestReviewers.pull_request == pull_request)\
1558 1556 .all()
1559 1557
1560 1558 for obj in observers:
1561 1559 removed_audit_observers.append(obj.get_dict())
1562 1560 Session().delete(obj)
1563 1561
1564 1562 if changed:
1565 1563 Session().expire_all()
1566 1564 pull_request.updated_on = datetime.datetime.now()
1567 1565 Session().add(pull_request)
1568 1566
1569 1567 # finally store audit logs
1570 1568 for user_data in added_audit_observers:
1571 1569 self._log_audit_action(
1572 1570 'repo.pull_request.observer.add', {'data': user_data},
1573 1571 user, pull_request)
1574 1572 for user_data in removed_audit_observers:
1575 1573 self._log_audit_action(
1576 1574 'repo.pull_request.observer.delete', {'old_data': user_data},
1577 1575 user, pull_request)
1578 1576
1579 1577 self.notify_observers(pull_request, ids_to_add, user)
1580 1578 return ids_to_add, ids_to_remove
1581 1579
1582 1580 def get_url(self, pull_request, request=None, permalink=False):
1583 1581 if not request:
1584 1582 request = get_current_request()
1585 1583
1586 1584 if permalink:
1587 1585 return request.route_url(
1588 1586 'pull_requests_global',
1589 1587 pull_request_id=pull_request.pull_request_id,)
1590 1588 else:
1591 1589 return request.route_url('pullrequest_show',
1592 1590 repo_name=safe_str(pull_request.target_repo.repo_name),
1593 1591 pull_request_id=pull_request.pull_request_id,)
1594 1592
1595 1593 def get_shadow_clone_url(self, pull_request, request=None):
1596 1594 """
1597 1595 Returns a qualified url pointing to the shadow repository. If this pull
1598 1596 request is closed there is no shadow repository and ``None`` will be
1599 1597 returned.
1600 1598 """
1601 1599 if pull_request.is_closed():
1602 1600 return None
1603 1601 else:
1604 1602 pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
1605 1603 return safe_str(f'{pr_url}/repository')
1606 1604
1607 1605 def _notify_reviewers(self, pull_request, user_ids, role, user):
1608 1606 # notification to reviewers/observers
1609 1607 if not user_ids:
1610 1608 return
1611 1609
1612 1610 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1613 1611
1614 1612 pull_request_obj = pull_request
1615 1613 # get the current participants of this pull request
1616 1614 recipients = user_ids
1617 1615 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1618 1616
1619 1617 pr_source_repo = pull_request_obj.source_repo
1620 1618 pr_target_repo = pull_request_obj.target_repo
1621 1619
1622 1620 pr_url = h.route_url('pullrequest_show',
1623 1621 repo_name=pr_target_repo.repo_name,
1624 1622 pull_request_id=pull_request_obj.pull_request_id,)
1625 1623
1626 1624 # set some variables for email notification
1627 1625 pr_target_repo_url = h.route_url(
1628 1626 'repo_summary', repo_name=pr_target_repo.repo_name)
1629 1627
1630 1628 pr_source_repo_url = h.route_url(
1631 1629 'repo_summary', repo_name=pr_source_repo.repo_name)
1632 1630
1633 1631 # pull request specifics
1634 1632 pull_request_commits = [
1635 1633 (x.raw_id, x.message)
1636 1634 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1637 1635
1638 1636 current_rhodecode_user = user
1639 1637 kwargs = {
1640 1638 'user': current_rhodecode_user,
1641 1639 'pull_request_author': pull_request.author,
1642 1640 'pull_request': pull_request_obj,
1643 1641 'pull_request_commits': pull_request_commits,
1644 1642
1645 1643 'pull_request_target_repo': pr_target_repo,
1646 1644 'pull_request_target_repo_url': pr_target_repo_url,
1647 1645
1648 1646 'pull_request_source_repo': pr_source_repo,
1649 1647 'pull_request_source_repo_url': pr_source_repo_url,
1650 1648
1651 1649 'pull_request_url': pr_url,
1652 1650 'thread_ids': [pr_url],
1653 1651 'user_role': role
1654 1652 }
1655 1653
1656 1654 # create notification objects, and emails
1657 1655 NotificationModel().create(
1658 1656 created_by=current_rhodecode_user,
1659 1657 notification_subject='', # Filled in based on the notification_type
1660 1658 notification_body='', # Filled in based on the notification_type
1661 1659 notification_type=notification_type,
1662 1660 recipients=recipients,
1663 1661 email_kwargs=kwargs,
1664 1662 )
1665 1663
1666 1664 def notify_reviewers(self, pull_request, reviewers_ids, user):
1667 1665 return self._notify_reviewers(pull_request, reviewers_ids,
1668 1666 PullRequestReviewers.ROLE_REVIEWER, user)
1669 1667
1670 1668 def notify_observers(self, pull_request, observers_ids, user):
1671 1669 return self._notify_reviewers(pull_request, observers_ids,
1672 1670 PullRequestReviewers.ROLE_OBSERVER, user)
1673 1671
1674 1672 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1675 1673 commit_changes, file_changes):
1676 1674
1677 1675 updating_user_id = updating_user.user_id
1678 1676 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1679 1677 # NOTE(marcink): send notification to all other users except the
1680 1678 # person who updated the PR
1681 1679 recipients = reviewers.difference(set([updating_user_id]))
1682 1680
1683 1681 log.debug('Notify following recipients about pull-request update %s', recipients)
1684 1682
1685 1683 pull_request_obj = pull_request
1686 1684
1687 1685 # send email about the update
1688 1686 changed_files = (
1689 1687 file_changes.added + file_changes.modified + file_changes.removed)
1690 1688
1691 1689 pr_source_repo = pull_request_obj.source_repo
1692 1690 pr_target_repo = pull_request_obj.target_repo
1693 1691
1694 1692 pr_url = h.route_url('pullrequest_show',
1695 1693 repo_name=pr_target_repo.repo_name,
1696 1694 pull_request_id=pull_request_obj.pull_request_id,)
1697 1695
1698 1696 # set some variables for email notification
1699 1697 pr_target_repo_url = h.route_url(
1700 1698 'repo_summary', repo_name=pr_target_repo.repo_name)
1701 1699
1702 1700 pr_source_repo_url = h.route_url(
1703 1701 'repo_summary', repo_name=pr_source_repo.repo_name)
1704 1702
1705 1703 email_kwargs = {
1706 1704 'date': datetime.datetime.now(),
1707 1705 'updating_user': updating_user,
1708 1706
1709 1707 'pull_request': pull_request_obj,
1710 1708
1711 1709 'pull_request_target_repo': pr_target_repo,
1712 1710 'pull_request_target_repo_url': pr_target_repo_url,
1713 1711
1714 1712 'pull_request_source_repo': pr_source_repo,
1715 1713 'pull_request_source_repo_url': pr_source_repo_url,
1716 1714
1717 1715 'pull_request_url': pr_url,
1718 1716
1719 1717 'ancestor_commit_id': ancestor_commit_id,
1720 1718 'added_commits': commit_changes.added,
1721 1719 'removed_commits': commit_changes.removed,
1722 1720 'changed_files': changed_files,
1723 1721 'added_files': file_changes.added,
1724 1722 'modified_files': file_changes.modified,
1725 1723 'removed_files': file_changes.removed,
1726 1724 'thread_ids': [pr_url],
1727 1725 }
1728 1726
1729 1727 # create notification objects, and emails
1730 1728 NotificationModel().create(
1731 1729 created_by=updating_user,
1732 1730 notification_subject='', # Filled in based on the notification_type
1733 1731 notification_body='', # Filled in based on the notification_type
1734 1732 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1735 1733 recipients=recipients,
1736 1734 email_kwargs=email_kwargs,
1737 1735 )
1738 1736
1739 1737 def delete(self, pull_request, user=None):
1740 1738 if not user:
1741 1739 user = getattr(get_current_rhodecode_user(), 'username', None)
1742 1740
1743 1741 pull_request = self.__get_pull_request(pull_request)
1744 1742 old_data = pull_request.get_api_data(with_merge_state=False)
1745 1743 self._cleanup_merge_workspace(pull_request)
1746 1744 self._log_audit_action(
1747 1745 'repo.pull_request.delete', {'old_data': old_data},
1748 1746 user, pull_request)
1749 1747 Session().delete(pull_request)
1750 1748
1751 1749 def close_pull_request(self, pull_request, user):
1752 1750 pull_request = self.__get_pull_request(pull_request)
1753 1751 self._cleanup_merge_workspace(pull_request)
1754 1752 pull_request.status = PullRequest.STATUS_CLOSED
1755 1753 pull_request.updated_on = datetime.datetime.now()
1756 1754 Session().add(pull_request)
1757 1755 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1758 1756
1759 1757 pr_data = pull_request.get_api_data(with_merge_state=False)
1760 1758 self._log_audit_action(
1761 1759 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1762 1760
1763 1761 def close_pull_request_with_comment(
1764 1762 self, pull_request, user, repo, message=None, auth_user=None):
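"""
Close the pull request with a status-changing comment. The final status
is approved only on voting consent, otherwise rejected; returns the
created comment and the resulting status.
"""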
1765 1763
1766 1764 pull_request_review_status = pull_request.calculated_review_status()
1767 1765
1768 1766 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1769 1767 # approved only if we have voting consent
1770 1768 status = ChangesetStatus.STATUS_APPROVED
1771 1769 else:
1772 1770 status = ChangesetStatus.STATUS_REJECTED
1773 1771 status_lbl = ChangesetStatus.get_status_lbl(status)
1774 1772
1775 1773 default_message = (
1776 1774 'Closing with status change {transition_icon} {status}.'
1777 1775 ).format(transition_icon='>', status=status_lbl)
1778 1776 text = message or default_message
1779 1777
1780 1778 # create a comment, and link it to new status
1781 1779 comment = CommentsModel().create(
1782 1780 text=text,
1783 1781 repo=repo.repo_id,
1784 1782 user=user.user_id,
1785 1783 pull_request=pull_request.pull_request_id,
1786 1784 status_change=status_lbl,
1787 1785 status_change_type=status,
1788 1786 closing_pr=True,
1789 1787 auth_user=auth_user,
1790 1788 )
1791 1789
1792 1790 # calculate old status before we change it
1793 1791 old_calculated_status = pull_request.calculated_review_status()
1794 1792 ChangesetStatusModel().set_status(
1795 1793 repo.repo_id,
1796 1794 status,
1797 1795 user.user_id,
1798 1796 comment=comment,
1799 1797 pull_request=pull_request.pull_request_id
1800 1798 )
1801 1799
1802 1800 Session().flush()
1803 1801
1804 1802 self.trigger_pull_request_hook(pull_request, user, 'comment',
1805 1803 data={'comment': comment})
1806 1804
1807 1805 # we now calculate the status of the pull request again, and based on
1808 1806 # that calculation trigger a status change. This matters when a
1809 1807 # non-reviewer admin closes a PR: their vote doesn't change the status,
1810 1808 # while a reviewer's vote might.
1811 1809 calculated_status = pull_request.calculated_review_status()
1812 1810 if old_calculated_status != calculated_status:
1813 1811 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1814 1812 data={'status': calculated_status})
1815 1813
1816 1814 # finally close the PR
1817 1815 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1818 1816
1819 1817 return comment, status
1820 1818
1821 1819 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
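"""
Return a (merge_response, merge_possible, message) tuple describing
whether this pull request can currently be merged server-side.
"""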
1822 1820 _ = translator or get_current_request().translate
1823 1821
1824 1822 if not self._is_merge_enabled(pull_request):
1825 1823 return None, False, _('Server-side pull request merging is disabled.')
1826 1824
1827 1825 if pull_request.is_closed():
1828 1826 return None, False, _('This pull request is closed.')
1829 1827
1830 1828 merge_possible, msg = self._check_repo_requirements(
1831 1829 target=pull_request.target_repo, source=pull_request.source_repo,
1832 1830 translator=_)
1833 1831 if not merge_possible:
1834 1832 return None, merge_possible, msg
1835 1833
1836 1834 try:
1837 1835 merge_response = self._try_merge(
1838 1836 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1839 1837 log.debug("Merge response: %s", merge_response)
1840 1838 return merge_response, merge_response.possible, merge_response.merge_status_message
1841 1839 except NotImplementedError:
1842 1840 return None, False, _('Pull request merging is not supported.')
1843 1841
1844 1842 def _check_repo_requirements(self, target, source, translator):
1845 1843 """
1846 1844 Check if `target` and `source` have compatible requirements.
1847 1845
1848 1846 Currently this is just checking for largefiles.
1849 1847 """
1850 1848 _ = translator
1851 1849 target_has_largefiles = self._has_largefiles(target)
1852 1850 source_has_largefiles = self._has_largefiles(source)
1853 1851 merge_possible = True
1854 1852 message = u''
1855 1853
1856 1854 if target_has_largefiles != source_has_largefiles:
1857 1855 merge_possible = False
1858 1856 if source_has_largefiles:
1859 1857 message = _(
1860 1858 'Target repository large files support is disabled.')
1861 1859 else:
1862 1860 message = _(
1863 1861 'Source repository large files support is disabled.')
1864 1862
1865 1863 return merge_possible, message
1866 1864
1867 1865 def _has_largefiles(self, repo):
1868 1866 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1869 1867 'extensions', 'largefiles')
1870 1868 return largefiles_ui and largefiles_ui[0].active
1871 1869
1872 1870 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1873 1871 """
1874 1872 Try to merge the pull request and return the merge status.
1875 1873 """
1876 1874 log.debug(
1877 1875 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1878 1876 pull_request.pull_request_id, force_shadow_repo_refresh)
1879 1877 target_vcs = pull_request.target_repo.scm_instance()
1880 1878 # Refresh the target reference.
1881 1879 try:
1882 1880 target_ref = self._refresh_reference(
1883 1881 pull_request.target_ref_parts, target_vcs)
1884 1882 except CommitDoesNotExistError:
1885 1883 merge_state = MergeResponse(
1886 1884 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1887 1885 metadata={'target_ref': pull_request.target_ref_parts})
1888 1886 return merge_state
1889 1887
1890 1888 target_locked = pull_request.target_repo.locked
1891 1889 if target_locked and target_locked[0]:
1892 1890 locked_by = 'user:{}'.format(target_locked[0])
1893 1891 log.debug("The target repository is locked by %s.", locked_by)
1894 1892 merge_state = MergeResponse(
1895 1893 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1896 1894 metadata={'locked_by': locked_by})
1897 1895 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1898 1896 pull_request, target_ref):
1899 1897 log.debug("Refreshing the merge status of the repository.")
1900 1898 merge_state = self._refresh_merge_state(
1901 1899 pull_request, target_vcs, target_ref)
1902 1900 else:
1903 1901 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1904 1902 metadata = {
1905 1903 'unresolved_files': '',
1906 1904 'target_ref': pull_request.target_ref_parts,
1907 1905 'source_ref': pull_request.source_ref_parts,
1908 1906 }
1909 1907 if pull_request.last_merge_metadata:
1910 1908 metadata.update(pull_request.last_merge_metadata_parsed)
1911 1909
1912 1910 if not possible and target_ref.type == 'branch':
1913 1911 # NOTE(marcink): case for mercurial multiple heads on branch
1914 1912 heads = target_vcs._heads(target_ref.name)
1915 1913 if len(heads) != 1:
1916 1914 heads = ',\n'.join(target_vcs._heads(target_ref.name))
1917 1915 metadata.update({
1918 1916 'heads': heads
1919 1917 })
1920 1918
1921 1919 merge_state = MergeResponse(
1922 1920 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1923 1921
1924 1922 return merge_state
1925 1923
1926 1924 def _refresh_reference(self, reference, vcs_repository):
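"""
Re-resolve the reference against the vcs repository: updatable ref types
are looked up by name, all others by their stored commit id.
"""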
1927 1925 if reference.type in self.UPDATABLE_REF_TYPES:
1928 1926 name_or_id = reference.name
1929 1927 else:
1930 1928 name_or_id = reference.commit_id
1931 1929
1932 1930 refreshed_commit = vcs_repository.get_commit(name_or_id)
1933 1931 refreshed_reference = Reference(
1934 1932 reference.type, reference.name, refreshed_commit.raw_id)
1935 1933 return refreshed_reference
1936 1934
1937 1935 def _needs_merge_state_refresh(self, pull_request, target_reference):
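# a refresh is needed when there are no stored revisions, or when the
# source head or the target reference moved since the last merge check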
1938 1936 return not (
1939 1937 pull_request.revisions and
1940 1938 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1941 1939 target_reference.commit_id == pull_request._last_merge_target_rev)
1942 1940
1943 1941 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1944 1942 workspace_id = self._workspace_id(pull_request)
1945 1943 source_vcs = pull_request.source_repo.scm_instance()
1946 1944 repo_id = pull_request.target_repo.repo_id
1947 1945 use_rebase = self._use_rebase_for_merging(pull_request)
1948 1946 close_branch = self._close_branch_before_merging(pull_request)
1949 1947 merge_state = target_vcs.merge(
1950 1948 repo_id, workspace_id,
1951 1949 target_reference, source_vcs, pull_request.source_ref_parts,
1952 1950 dry_run=True, use_rebase=use_rebase,
1953 1951 close_branch=close_branch)
1954 1952
1955 1953 # Do not store the response if there was an unknown error.
1956 1954 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1957 1955 pull_request._last_merge_source_rev = \
1958 1956 pull_request.source_ref_parts.commit_id
1959 1957 pull_request._last_merge_target_rev = target_reference.commit_id
1960 1958 pull_request.last_merge_status = merge_state.failure_reason
1961 1959 pull_request.last_merge_metadata = merge_state.metadata
1962 1960
1963 1961 pull_request.shadow_merge_ref = merge_state.merge_ref
1964 1962 Session().add(pull_request)
1965 1963 Session().commit()
1966 1964
1967 1965 return merge_state
1968 1966
1969 1967 def _workspace_id(self, pull_request):
1970 1968 workspace_id = 'pr-%s' % pull_request.pull_request_id
1971 1969 return workspace_id
1972 1970
1973 1971 def generate_repo_data(self, repo, commit_id=None, branch=None,
1974 1972 bookmark=None, translator=None):
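"""
Build a dict describing the repo (owner, name, link, description) and
its refs, including a select2-ready refs structure for the PR selectors.
"""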
1975 1973 from rhodecode.model.repo import RepoModel
1976 1974
1977 1975 all_refs, selected_ref = \
1978 1976 self._get_repo_pullrequest_sources(
1979 1977 repo.scm_instance(), commit_id=commit_id,
1980 1978 branch=branch, bookmark=bookmark, translator=translator)
1981 1979
1982 1980 refs_select2 = []
1983 1981 for element in all_refs:
1984 1982 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1985 1983 refs_select2.append({'text': element[1], 'children': children})
1986 1984
1987 1985 return {
1988 1986 'user': {
1989 1987 'user_id': repo.user.user_id,
1990 1988 'username': repo.user.username,
1991 1989 'firstname': repo.user.first_name,
1992 1990 'lastname': repo.user.last_name,
1993 1991 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1994 1992 },
1995 1993 'name': repo.repo_name,
1996 1994 'link': RepoModel().get_url(repo),
1997 1995 'description': h.chop_at_smart(repo.description_safe, '\n'),
1998 1996 'refs': {
1999 1997 'all_refs': all_refs,
2000 1998 'selected_ref': selected_ref,
2001 1999 'select2_refs': refs_select2
2002 2000 }
2003 2001 }
2004 2002
2005 2003 def generate_pullrequest_title(self, source, source_ref, target):
2006 2004 return u'{source}#{at_ref} to {target}'.format(
2007 2005 source=source,
2008 2006 at_ref=source_ref,
2009 2007 target=target,
2010 2008 )
2011 2009
2012 2010 def _cleanup_merge_workspace(self, pull_request):
2013 2011 # Merging related cleanup
2014 2012 repo_id = pull_request.target_repo.repo_id
2015 2013 target_scm = pull_request.target_repo.scm_instance()
2016 2014 workspace_id = self._workspace_id(pull_request)
2017 2015
2018 2016 try:
2019 2017 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2020 2018 except NotImplementedError:
2021 2019 pass
2022 2020
2023 2021 def _get_repo_pullrequest_sources(
2024 2022 self, repo, commit_id=None, branch=None, bookmark=None,
2025 2023 translator=None):
2026 2024 """
2027 2025 Return a structure with repo's interesting commits, suitable for
2028 2026 the selectors in pullrequest controller
2029 2027
2030 2028 :param commit_id: a commit that must be in the list somehow
2031 2029 and selected by default
2032 2030 :param branch: a branch that must be in the list and selected
2033 2031 by default - even if closed
2034 2032 :param bookmark: a bookmark that must be in the list and selected
2035 2033 """
2036 2034 _ = translator or get_current_request().translate
2037 2035
2038 2036 commit_id = safe_str(commit_id) if commit_id else None
2039 2037 branch = safe_str(branch) if branch else None
2040 2038 bookmark = safe_str(bookmark) if bookmark else None
2041 2039
2042 2040 selected = None
2043 2041
2044 2042 # order matters: first source that has commit_id in it will be selected
2045 2043 sources = []
2046 2044 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
2047 2045 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
2048 2046
2049 2047 if commit_id:
2050 2048 ref_commit = (h.short_id(commit_id), commit_id)
2051 2049 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
2052 2050
2053 2051 sources.append(
2054 2052 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
2055 2053 )
2056 2054
2057 2055 groups = []
2058 2056
2059 2057 for group_key, ref_list, group_name, match in sources:
2060 2058 group_refs = []
2061 2059 for ref_name, ref_id in ref_list:
2062 2060 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
2063 2061 group_refs.append((ref_key, ref_name))
2064 2062
2065 2063 if not selected:
2066 2064 if set([commit_id, match]) & set([ref_id, ref_name]):
2067 2065 selected = ref_key
2068 2066
2069 2067 if group_refs:
2070 2068 groups.append((group_refs, group_name))
2071 2069
2072 2070 if not selected:
2073 2071 ref = commit_id or branch or bookmark
2074 2072 if ref:
2075 2073 raise CommitDoesNotExistError(
2076 2074 u'No commit refs could be found matching: {}'.format(ref))
2077 2075 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2078 2076 selected = u'branch:{}:{}'.format(
2079 2077 safe_str(repo.DEFAULT_BRANCH_NAME),
2080 2078 safe_str(repo.branches[repo.DEFAULT_BRANCH_NAME])
2081 2079 )
2082 2080 elif repo.commit_ids:
2083 2081 # make the user select in this case
2084 2082 selected = None
2085 2083 else:
2086 2084 raise EmptyRepositoryError()
2087 2085 return groups, selected
2088 2086
2089 2087 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2090 2088 hide_whitespace_changes, diff_context):
2091 2089
2092 2090 return self._get_diff_from_pr_or_version(
2093 2091 source_repo, source_ref_id, target_ref_id,
2094 2092 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2095 2093
2096 2094 def _get_diff_from_pr_or_version(
2097 2095 self, source_repo, source_ref_id, target_ref_id,
2098 2096 hide_whitespace_changes, diff_context):
2099 2097
2100 2098 target_commit = source_repo.get_commit(
2101 2099 commit_id=safe_str(target_ref_id))
2102 2100 source_commit = source_repo.get_commit(
2103 2101 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
2104 2102 if isinstance(source_repo, Repository):
2105 2103 vcs_repo = source_repo.scm_instance()
2106 2104 else:
2107 2105 vcs_repo = source_repo
2108 2106
2109 2107 # TODO: johbo: In the context of an update, we cannot reach
2110 2108 # the old commit anymore with our normal mechanisms. It needs
2111 2109 # some sort of special support in the vcs layer to avoid this
2112 2110 # workaround.
2113 2111 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
2114 2112 vcs_repo.alias == 'git'):
2115 2113 source_commit.raw_id = safe_str(source_ref_id)
2116 2114
2117 2115 log.debug('calculating diff between '
2118 2116 'source_ref:%s and target_ref:%s for repo `%s`',
2119 2117 target_ref_id, source_ref_id,
2120 2118 safe_str(vcs_repo.path))
2121 2119
2122 2120 vcs_diff = vcs_repo.get_diff(
2123 2121 commit1=target_commit, commit2=source_commit,
2124 2122 ignore_whitespace=hide_whitespace_changes, context=diff_context)
2125 2123 return vcs_diff
2126 2124
2127 2125 def _is_merge_enabled(self, pull_request):
2128 2126 return self._get_general_setting(
2129 2127 pull_request, 'rhodecode_pr_merge_enabled')
2130 2128
2131 2129 def _use_rebase_for_merging(self, pull_request):
2132 2130 repo_type = pull_request.target_repo.repo_type
2133 2131 if repo_type == 'hg':
2134 2132 return self._get_general_setting(
2135 2133 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2136 2134 elif repo_type == 'git':
2137 2135 return self._get_general_setting(
2138 2136 pull_request, 'rhodecode_git_use_rebase_for_merging')
2139 2137
2140 2138 return False
2141 2139
2142 2140 def _user_name_for_merging(self, pull_request, user):
2143 2141 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2144 2142 if env_user_name_attr and hasattr(user, env_user_name_attr):
2145 2143 user_name_attr = env_user_name_attr
2146 2144 else:
2147 2145 user_name_attr = 'short_contact'
2148 2146
2149 2147 user_name = getattr(user, user_name_attr)
2150 2148 return user_name
2151 2149
2152 2150 def _close_branch_before_merging(self, pull_request):
2153 2151 repo_type = pull_request.target_repo.repo_type
2154 2152 if repo_type == 'hg':
2155 2153 return self._get_general_setting(
2156 2154 pull_request, 'rhodecode_hg_close_branch_before_merging')
2157 2155 elif repo_type == 'git':
2158 2156 return self._get_general_setting(
2159 2157 pull_request, 'rhodecode_git_close_branch_before_merging')
2160 2158
2161 2159 return False
2162 2160
2163 2161 def _get_general_setting(self, pull_request, settings_key, default=False):
2164 2162 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2165 2163 settings = settings_model.get_general_settings()
2166 2164 return settings.get(settings_key, default)
2167 2165
2168 2166 def _log_audit_action(self, action, action_data, user, pull_request):
2169 2167 audit_logger.store(
2170 2168 action=action,
2171 2169 action_data=action_data,
2172 2170 user=user,
2173 2171 repo=pull_request.target_repo)
2174 2172
2175 2173 def get_reviewer_functions(self):
2176 2174 """
2177 2175 Fetches functions for validation and fetching default reviewers.
2178 2176 If available we use the EE package, else we fall back to the CE
2179 2177 package functions
2180 2178 """
2181 2179 try:
2182 2180 from rc_reviewers.utils import get_default_reviewers_data
2183 2181 from rc_reviewers.utils import validate_default_reviewers
2184 2182 from rc_reviewers.utils import validate_observers
2185 2183 except ImportError:
2186 2184 from rhodecode.apps.repository.utils import get_default_reviewers_data
2187 2185 from rhodecode.apps.repository.utils import validate_default_reviewers
2188 2186 from rhodecode.apps.repository.utils import validate_observers
2189 2187
2190 2188 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2191 2189
2192 2190
2193 2191 class MergeCheck(object):
2194 2192 """
2195 2193 Performs merge checks and returns a check object which stores
2196 2194 information about merge errors and merge conditions
2197 2195 """
2198 2196 TODO_CHECK = 'todo'
2199 2197 PERM_CHECK = 'perm'
2200 2198 REVIEW_CHECK = 'review'
2201 2199 MERGE_CHECK = 'merge'
2202 2200 WIP_CHECK = 'wip'
2203 2201
2204 2202 def __init__(self):
2205 2203 self.review_status = None
2206 2204 self.merge_possible = None
2207 2205 self.merge_msg = ''
2208 2206 self.merge_response = None
2209 2207 self.failed = None
2210 2208 self.errors = []
2211 2209 self.error_details = OrderedDict()
2212 2210 self.source_commit = AttributeDict()
2213 2211 self.target_commit = AttributeDict()
2214 2212 self.reviewers_count = 0
2215 2213 self.observers_count = 0
2216 2214
2217 2215 def __repr__(self):
2218 2216 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2219 2217 self.merge_possible, self.failed, self.errors)
2220 2218
2221 2219 def push_error(self, error_type, message, error_key, details):
2222 2220 self.failed = True
2223 2221 self.errors.append([error_type, message])
2224 2222 self.error_details[error_key] = dict(
2225 2223 details=details,
2226 2224 error_type=error_type,
2227 2225 message=message
2228 2226 )
2229 2227
2230 2228 @classmethod
2231 2229 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2232 2230 force_shadow_repo_refresh=False):
2233 2231 _ = translator
2234 2232 merge_check = cls()
2235 2233
2236 2234 # title has WIP:
2237 2235 if pull_request.work_in_progress:
2238 2236 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2239 2237
2240 2238 msg = _('WIP marker in title prevents an accidental merge.')
2241 2239 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2242 2240 if fail_early:
2243 2241 return merge_check
2244 2242
2245 2243 # permissions to merge
2246 2244 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2247 2245 if not user_allowed_to_merge:
2248 2246 log.debug("MergeCheck: cannot merge, approval is pending.")
2249 2247
2250 2248 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2251 2249 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2252 2250 if fail_early:
2253 2251 return merge_check
2254 2252
2255 2253 # permission to merge into the target branch
2256 2254 target_commit_id = pull_request.target_ref_parts.commit_id
2257 2255 if pull_request.target_ref_parts.type == 'branch':
2258 2256 branch_name = pull_request.target_ref_parts.name
2259 2257 else:
2260 2258 # for mercurial we can always figure out the branch from the commit,
2261 2259 # even in the case of a bookmark
2262 2260 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2263 2261 branch_name = target_commit.branch
2264 2262
2265 2263 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2266 2264 pull_request.target_repo.repo_name, branch_name)
2267 2265 if branch_perm and branch_perm == 'branch.none':
2268 2266 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2269 2267 branch_name, rule)
2270 2268 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2271 2269 if fail_early:
2272 2270 return merge_check
2273 2271
2274 2272 # review status must always be present
2275 2273 review_status = pull_request.calculated_review_status()
2276 2274 merge_check.review_status = review_status
2277 2275 merge_check.reviewers_count = pull_request.reviewers_count
2278 2276 merge_check.observers_count = pull_request.observers_count
2279 2277
2280 2278 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2281 2279 if not status_approved and merge_check.reviewers_count:
2282 2280 log.debug("MergeCheck: cannot merge, approval is pending.")
2283 2281 msg = _('Pull request reviewer approval is pending.')
2284 2282
2285 2283 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2286 2284
2287 2285 if fail_early:
2288 2286 return merge_check
2289 2287
2290 2288 # left over TODOs
2291 2289 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2292 2290 if todos:
2293 2291 log.debug("MergeCheck: cannot merge, {} "
2294 2292 "unresolved TODOs left.".format(len(todos)))
2295 2293
2296 2294 if len(todos) == 1:
2297 2295 msg = _('Cannot merge, {} TODO still not resolved.').format(
2298 2296 len(todos))
2299 2297 else:
2300 2298 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2301 2299 len(todos))
2302 2300
2303 2301 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2304 2302
2305 2303 if fail_early:
2306 2304 return merge_check
2307 2305
2308 2306 # merge possible, here is the filesystem simulation + shadow repo
2309 2307 merge_response, merge_status, msg = PullRequestModel().merge_status(
2310 2308 pull_request, translator=translator,
2311 2309 force_shadow_repo_refresh=force_shadow_repo_refresh)
2312 2310
2313 2311 merge_check.merge_possible = merge_status
2314 2312 merge_check.merge_msg = msg
2315 2313 merge_check.merge_response = merge_response
2316 2314
2317 2315 source_ref_id = pull_request.source_ref_parts.commit_id
2318 2316 target_ref_id = pull_request.target_ref_parts.commit_id
2319 2317
2320 2318 try:
2321 2319 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2322 2320 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2323 2321 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2324 2322 merge_check.source_commit.current_raw_id = source_commit.raw_id
2325 2323 merge_check.source_commit.previous_raw_id = source_ref_id
2326 2324
2327 2325 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2328 2326 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2329 2327 merge_check.target_commit.current_raw_id = target_commit.raw_id
2330 2328 merge_check.target_commit.previous_raw_id = target_ref_id
2331 2329 except (SourceRefMissing, TargetRefMissing):
2332 2330 pass
2333 2331
2334 2332 if not merge_status:
2335 2333 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2336 2334 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2337 2335
2338 2336 if fail_early:
2339 2337 return merge_check
2340 2338
2341 2339 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2342 2340 return merge_check
2343 2341
2344 2342 @classmethod
2345 2343 def get_merge_conditions(cls, pull_request, translator):
2346 2344 _ = translator
2347 2345 merge_details = {}
2348 2346
2349 2347 model = PullRequestModel()
2350 2348 use_rebase = model._use_rebase_for_merging(pull_request)
2351 2349
2352 2350 if use_rebase:
2353 2351 merge_details['merge_strategy'] = dict(
2354 2352 details={},
2355 2353 message=_('Merge strategy: rebase')
2356 2354 )
2357 2355 else:
2358 2356 merge_details['merge_strategy'] = dict(
2359 2357 details={},
2360 2358 message=_('Merge strategy: explicit merge commit')
2361 2359 )
2362 2360
2363 2361 close_branch = model._close_branch_before_merging(pull_request)
2364 2362 if close_branch:
2365 2363 repo_type = pull_request.target_repo.repo_type
2366 2364 close_msg = ''
2367 2365 if repo_type == 'hg':
2368 2366 close_msg = _('Source branch will be closed before the merge.')
2369 2367 elif repo_type == 'git':
2370 2368 close_msg = _('Source branch will be deleted after the merge.')
2371 2369
2372 2370 merge_details['close_branch'] = dict(
2373 2371 details={},
2374 2372 message=close_msg
2375 2373 )
2376 2374
2377 2375 return merge_details
2378 2376
2379 2377
2380 2378 @dataclasses.dataclass
2381 2379 class ChangeTuple:
2382 2380 added: list
2383 2381 common: list
2384 2382 removed: list
2385 2383 total: list
2386 2384
2387 2385
2388 2386 @dataclasses.dataclass
2389 2387 class FileChangeTuple:
2390 2388 added: list
2391 2389 modified: list
2392 2390 removed: list
@@ -1,147 +1,147 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import datetime
21 21
22 22 import pytest
23 23
24 24 from rhodecode.lib.vcs.nodes import FileNode
25 25 from rhodecode.tests.vcs.conftest import BackendTestMixin
26 26
27 27
28 28 @pytest.mark.usefixtures("vcs_repository_support")
29 29 class TestBranches(BackendTestMixin):
30 30
31 31 def test_empty_repository_has_no_branches(self, vcsbackend):
32 32 empty_repo = vcsbackend.create_repo()
33 33 assert empty_repo.branches == {}
34 34
35 35 def test_branches_all(self, vcsbackend):
36 36 branch_count = {
37 37 'git': 1,
38 38 'hg': 1,
39 39 'svn': 0,
40 40 }
41 41 assert len(self.repo.branches_all) == branch_count[vcsbackend.alias]
42 42
43 43 def test_closed_branches(self):
44 44 assert len(self.repo.branches_closed) == 0
45 45
46 46 def test_simple(self, local_dt_to_utc):
47 47 tip = self.repo.get_commit()
48 48 assert tip.message == 'Changes...'
49 49 assert tip.date == local_dt_to_utc(datetime.datetime(2010, 1, 1, 21))
50 50
51 51 @pytest.mark.backends("git", "hg")
52 52 def test_new_branch(self):
53 53 # This check must not be removed to ensure the 'branches' LazyProperty
 54 54 # gets hit *before* the new 'foobar' branch is created:
55 55 assert 'foobar' not in self.repo.branches
56 56 self.imc.add(
57 57 FileNode(b'docs/index.txt', content=b'Documentation\n')
58 58 )
59 59 foobar_tip = self.imc.commit(
60 message=u'New branch: foobar',
61 author=u'joe <joe@rhodecode.com>',
60 message='New branch: foobar',
61 author='joe <joe@rhodecode.com>',
62 62 branch='foobar',
63 63 )
64 64 assert 'foobar' in self.repo.branches
65 65 assert foobar_tip.branch == 'foobar'
66 66
67 67 @pytest.mark.backends("git", "hg")
68 68 def test_new_head(self):
69 69 tip = self.repo.get_commit()
70 70 self.imc.add(
71 71 FileNode(b'docs/index.txt',
72 72 content=b'Documentation\n')
73 73 )
74 74 foobar_tip = self.imc.commit(
75 message=u'New branch: foobar',
76 author=u'joe <joe@rhodecode.com>',
75 message='New branch: foobar',
76 author='joe <joe@rhodecode.com>',
77 77 branch='foobar',
78 78 parents=[tip],
79 79 )
80 80 self.imc.change(FileNode(
81 81 b'docs/index.txt',
82 82 content=b'Documentation\nand more...\n'))
83 83 newtip = self.imc.commit(
84 84 message=u'At default branch',
85 85 author=u'joe <joe@rhodecode.com>',
86 86 branch=foobar_tip.branch,
87 87 parents=[foobar_tip],
88 88 )
89 89
90 90 newest_tip = self.imc.commit(
91 91 message=u'Merged with %s' % foobar_tip.raw_id,
92 92 author=u'joe <joe@rhodecode.com>',
93 93 branch=self.backend_class.DEFAULT_BRANCH_NAME,
94 94 parents=[newtip, foobar_tip],
95 95 )
96 96
97 97 assert newest_tip.branch == \
98 98 self.backend_class.DEFAULT_BRANCH_NAME
99 99
100 100 @pytest.mark.backends("git", "hg")
101 101 def test_branch_with_slash_in_name(self):
102 102 self.imc.add(FileNode(b'extrafile', content=b'Some data\n'))
103 103 self.imc.commit(
104 104 u'Branch with a slash!', author=u'joe <joe@rhodecode.com>',
105 105 branch='issue/123')
106 106 assert 'issue/123' in self.repo.branches
107 107
108 108 @pytest.mark.backends("git", "hg")
109 109 def test_branch_with_slash_in_name_and_similar_without(self):
110 110 self.imc.add(FileNode(b'extrafile', content=b'Some data\n'))
111 111 self.imc.commit(
112 112 u'Branch with a slash!', author=u'joe <joe@rhodecode.com>',
113 113 branch='issue/123')
114 114 self.imc.add(FileNode(b'extrafile II', content=b'Some data\n'))
115 115 self.imc.commit(
116 116 u'Branch without a slash...', author=u'joe <joe@rhodecode.com>',
117 117 branch='123')
118 118 assert 'issue/123' in self.repo.branches
119 119 assert '123' in self.repo.branches
120 120
121 121
122 122 class TestSvnBranches(object):
123 123
124 124 def test_empty_repository_has_no_tags_and_branches(self, vcsbackend_svn):
125 125 empty_repo = vcsbackend_svn.create_repo()
126 126 assert empty_repo.branches == {}
127 127 assert empty_repo.tags == {}
128 128
129 129 def test_missing_structure_has_no_tags_and_branches(self, vcsbackend_svn):
130 130 repo = vcsbackend_svn.create_repo(number_of_commits=1)
131 131 assert repo.branches == {}
132 132 assert repo.tags == {}
133 133
134 134 def test_discovers_ordered_branches(self, vcsbackend_svn):
135 135 repo = vcsbackend_svn['svn-simple-layout']
136 136 expected_branches = [
137 137 'branches/add-docs',
138 138 'branches/argparse',
139 139 'trunk',
140 140 ]
141 141 assert list(repo.branches.keys()) == expected_branches
142 142
143 143 def test_discovers_ordered_tags(self, vcsbackend_svn):
144 144 repo = vcsbackend_svn['svn-simple-layout']
145 145 expected_tags = [
146 146 'tags/v0.1', 'tags/v0.2', 'tags/v0.3', 'tags/v0.5']
147 147 assert list(repo.tags.keys()) == expected_tags