pull-requests: add merge check that detects WIP marker in title. This will prevent merges in such cases.
ergo - r4099:c12e69d0 default
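
A minimal illustrative sketch (not the actual RhodeCode implementation; the pattern and helper names below are assumptions) of what such a WIP-marker merge check could look like:

import re

# Titles like "WIP: fix login" or "[wip] refactor auth" should block the merge.
WIP_TITLE_PATTERN = re.compile(r'^(\[wip\]\s*|wip:\s*|wip\s+)', re.IGNORECASE)

def is_wip(title):
    """Return True if the pull request title carries a WIP marker."""
    return bool(WIP_TITLE_PATTERN.match(title or ''))

def wip_merge_check(pull_request):
    # Hypothetical helper returning (merge_possible, message), in the spirit
    # of the other merge checks in this model.
    if is_wip(pull_request.title):
        return False, 'WIP marker found in the pull request title, merge is blocked.'
    return True, ''
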

The requested changes are too big and content was truncated.

1 NO CONTENT: modified file (the requested commit or file is too big and content was truncated)
@@ -1,1760 +1,1770 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid import compat
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode import events
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 # Data structure to hold the response data when updating commits during a pull
67 67 # request update.
68 68 UpdateResponse = collections.namedtuple('UpdateResponse', [
69 69 'executed', 'reason', 'new', 'old', 'changes',
70 70 'source_changed', 'target_changed'])
71 71
72 72
73 73 class PullRequestModel(BaseModel):
74 74
75 75 cls = PullRequest
76 76
77 77 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
78 78
79 79 UPDATE_STATUS_MESSAGES = {
80 80 UpdateFailureReason.NONE: lazy_ugettext(
81 81 'Pull request update successful.'),
82 82 UpdateFailureReason.UNKNOWN: lazy_ugettext(
83 83 'Pull request update failed because of an unknown error.'),
84 84 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
85 85 'No update needed because the source and target have not changed.'),
86 86 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
87 87 'Pull request cannot be updated because the reference type is '
88 88 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
89 89 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
90 90 'This pull request cannot be updated because the target '
91 91 'reference is missing.'),
92 92 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
93 93 'This pull request cannot be updated because the source '
94 94 'reference is missing.'),
95 95 }
96 96 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
97 97 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98 98
99 99 def __get_pull_request(self, pull_request):
100 100 return self._get_instance((
101 101 PullRequest, PullRequestVersion), pull_request)
102 102
103 103 def _check_perms(self, perms, pull_request, user, api=False):
104 104 if not api:
105 105 return h.HasRepoPermissionAny(*perms)(
106 106 user=user, repo_name=pull_request.target_repo.repo_name)
107 107 else:
108 108 return h.HasRepoPermissionAnyApi(*perms)(
109 109 user=user, repo_name=pull_request.target_repo.repo_name)
110 110
111 111 def check_user_read(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_merge(self, pull_request, user, api=False):
116 116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 117 return self._check_perms(_perms, pull_request, user, api)
118 118
119 119 def check_user_update(self, pull_request, user, api=False):
120 120 owner = user.user_id == pull_request.user_id
121 121 return self.check_user_merge(pull_request, user, api) or owner
122 122
123 123 def check_user_delete(self, pull_request, user):
124 124 owner = user.user_id == pull_request.user_id
125 125 _perms = ('repository.admin',)
126 126 return self._check_perms(_perms, pull_request, user) or owner
127 127
128 128 def check_user_change_status(self, pull_request, user, api=False):
129 129 reviewer = user.user_id in [x.user_id for x in
130 130 pull_request.reviewers]
131 131 return self.check_user_update(pull_request, user, api) or reviewer
132 132
133 133 def check_user_comment(self, pull_request, user):
134 134 owner = user.user_id == pull_request.user_id
135 135 return self.check_user_read(pull_request, user) or owner
136 136
137 137 def get(self, pull_request):
138 138 return self.__get_pull_request(pull_request)
139 139
140 140 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
141 141 statuses=None, opened_by=None, order_by=None,
142 142 order_dir='desc', only_created=False):
143 143 repo = None
144 144 if repo_name:
145 145 repo = self._get_repo(repo_name)
146 146
147 147 q = PullRequest.query()
148 148
149 149 if search_q:
150 150 like_expression = u'%{}%'.format(safe_unicode(search_q))
151 151 q = q.filter(or_(
152 152 cast(PullRequest.pull_request_id, String).ilike(like_expression),
153 153 PullRequest.title.ilike(like_expression),
154 154 PullRequest.description.ilike(like_expression),
155 155 ))
156 156
157 157 # source or target
158 158 if repo and source:
159 159 q = q.filter(PullRequest.source_repo == repo)
160 160 elif repo:
161 161 q = q.filter(PullRequest.target_repo == repo)
162 162
163 163 # closed,opened
164 164 if statuses:
165 165 q = q.filter(PullRequest.status.in_(statuses))
166 166
167 167 # opened by filter
168 168 if opened_by:
169 169 q = q.filter(PullRequest.user_id.in_(opened_by))
170 170
171 171 # only get those that are in "created" state
172 172 if only_created:
173 173 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
174 174
175 175 if order_by:
176 176 order_map = {
177 177 'name_raw': PullRequest.pull_request_id,
178 178 'id': PullRequest.pull_request_id,
179 179 'title': PullRequest.title,
180 180 'updated_on_raw': PullRequest.updated_on,
181 181 'target_repo': PullRequest.target_repo_id
182 182 }
183 183 if order_dir == 'asc':
184 184 q = q.order_by(order_map[order_by].asc())
185 185 else:
186 186 q = q.order_by(order_map[order_by].desc())
187 187
188 188 return q
189 189
190 190 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
191 191 opened_by=None):
192 192 """
193 193 Count the number of pull requests for a specific repository.
194 194
195 195 :param repo_name: target or source repo
196 196 :param search_q: filter by text
197 197 :param source: boolean flag to specify if repo_name refers to source
198 198 :param statuses: list of pull request statuses
199 199 :param opened_by: author user of the pull request
200 200 :returns: int number of pull requests
201 201 """
202 202 q = self._prepare_get_all_query(
203 203 repo_name, search_q=search_q, source=source, statuses=statuses,
204 204 opened_by=opened_by)
205 205
206 206 return q.count()
207 207
208 208 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
209 209 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
210 210 """
211 211 Get all pull requests for a specific repository.
212 212
213 213 :param repo_name: target or source repo
214 214 :param search_q: filter by text
215 215 :param source: boolean flag to specify if repo_name refers to source
216 216 :param statuses: list of pull request statuses
217 217 :param opened_by: author user of the pull request
218 218 :param offset: pagination offset
219 219 :param length: length of returned list
220 220 :param order_by: order of the returned list
221 221 :param order_dir: 'asc' or 'desc' ordering direction
222 222 :returns: list of pull requests
223 223 """
224 224 q = self._prepare_get_all_query(
225 225 repo_name, search_q=search_q, source=source, statuses=statuses,
226 226 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
227 227
228 228 if length:
229 229 pull_requests = q.limit(length).offset(offset).all()
230 230 else:
231 231 pull_requests = q.all()
232 232
233 233 return pull_requests
234 234
235 235 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
236 236 opened_by=None):
237 237 """
238 238 Count the number of pull requests for a specific repository that are
239 239 awaiting review.
240 240
241 241 :param repo_name: target or source repo
242 242 :param search_q: filter by text
243 243 :param source: boolean flag to specify if repo_name refers to source
244 244 :param statuses: list of pull request statuses
245 245 :param opened_by: author user of the pull request
246 246 :returns: int number of pull requests
247 247 """
248 248 pull_requests = self.get_awaiting_review(
249 249 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
250 250
251 251 return len(pull_requests)
252 252
253 253 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
254 254 opened_by=None, offset=0, length=None,
255 255 order_by=None, order_dir='desc'):
256 256 """
257 257 Get all pull requests for a specific repository that are awaiting
258 258 review.
259 259
260 260 :param repo_name: target or source repo
261 261 :param search_q: filter by text
262 262 :param source: boolean flag to specify if repo_name refers to source
263 263 :param statuses: list of pull request statuses
264 264 :param opened_by: author user of the pull request
265 265 :param offset: pagination offset
266 266 :param length: length of returned list
267 267 :param order_by: order of the returned list
268 268 :param order_dir: 'asc' or 'desc' ordering direction
269 269 :returns: list of pull requests
270 270 """
271 271 pull_requests = self.get_all(
272 272 repo_name, search_q=search_q, source=source, statuses=statuses,
273 273 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
274 274
275 275 _filtered_pull_requests = []
276 276 for pr in pull_requests:
277 277 status = pr.calculated_review_status()
278 278 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
279 279 ChangesetStatus.STATUS_UNDER_REVIEW]:
280 280 _filtered_pull_requests.append(pr)
281 281 if length:
282 282 return _filtered_pull_requests[offset:offset+length]
283 283 else:
284 284 return _filtered_pull_requests
285 285
286 286 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
287 287 opened_by=None, user_id=None):
288 288 """
289 289 Count the number of pull requests for a specific repository that are
290 290 awaiting review from a specific user.
291 291
292 292 :param repo_name: target or source repo
293 293 :param search_q: filter by text
294 294 :param source: boolean flag to specify if repo_name refers to source
295 295 :param statuses: list of pull request statuses
296 296 :param opened_by: author user of the pull request
297 297 :param user_id: reviewer user of the pull request
298 298 :returns: int number of pull requests
299 299 """
300 300 pull_requests = self.get_awaiting_my_review(
301 301 repo_name, search_q=search_q, source=source, statuses=statuses,
302 302 opened_by=opened_by, user_id=user_id)
303 303
304 304 return len(pull_requests)
305 305
306 306 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
307 307 opened_by=None, user_id=None, offset=0,
308 308 length=None, order_by=None, order_dir='desc'):
309 309 """
310 310 Get all pull requests for a specific repository that are awaiting
311 311 review from a specific user.
312 312
313 313 :param repo_name: target or source repo
314 314 :param search_q: filter by text
315 315 :param source: boolean flag to specify if repo_name refers to source
316 316 :param statuses: list of pull request statuses
317 317 :param opened_by: author user of the pull request
318 318 :param user_id: reviewer user of the pull request
319 319 :param offset: pagination offset
320 320 :param length: length of returned list
321 321 :param order_by: order of the returned list
322 322 :param order_dir: 'asc' or 'desc' ordering direction
323 323 :returns: list of pull requests
324 324 """
325 325 pull_requests = self.get_all(
326 326 repo_name, search_q=search_q, source=source, statuses=statuses,
327 327 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
328 328
329 329 _my = PullRequestModel().get_not_reviewed(user_id)
330 330 my_participation = []
331 331 for pr in pull_requests:
332 332 if pr in _my:
333 333 my_participation.append(pr)
334 334 _filtered_pull_requests = my_participation
335 335 if length:
336 336 return _filtered_pull_requests[offset:offset+length]
337 337 else:
338 338 return _filtered_pull_requests
339 339
340 340 def get_not_reviewed(self, user_id):
341 341 return [
342 342 x.pull_request for x in PullRequestReviewers.query().filter(
343 343 PullRequestReviewers.user_id == user_id).all()
344 344 ]
345 345
346 346 def _prepare_participating_query(self, user_id=None, statuses=None,
347 347 order_by=None, order_dir='desc'):
348 348 q = PullRequest.query()
349 349 if user_id:
350 350 reviewers_subquery = Session().query(
351 351 PullRequestReviewers.pull_request_id).filter(
352 352 PullRequestReviewers.user_id == user_id).subquery()
353 353 user_filter = or_(
354 354 PullRequest.user_id == user_id,
355 355 PullRequest.pull_request_id.in_(reviewers_subquery)
356 356 )
357 357 q = PullRequest.query().filter(user_filter)
358 358
359 359 # closed,opened
360 360 if statuses:
361 361 q = q.filter(PullRequest.status.in_(statuses))
362 362
363 363 if order_by:
364 364 order_map = {
365 365 'name_raw': PullRequest.pull_request_id,
366 366 'title': PullRequest.title,
367 367 'updated_on_raw': PullRequest.updated_on,
368 368 'target_repo': PullRequest.target_repo_id
369 369 }
370 370 if order_dir == 'asc':
371 371 q = q.order_by(order_map[order_by].asc())
372 372 else:
373 373 q = q.order_by(order_map[order_by].desc())
374 374
375 375 return q
376 376
377 377 def count_im_participating_in(self, user_id=None, statuses=None):
378 378 q = self._prepare_participating_query(user_id, statuses=statuses)
379 379 return q.count()
380 380
381 381 def get_im_participating_in(
382 382 self, user_id=None, statuses=None, offset=0,
383 383 length=None, order_by=None, order_dir='desc'):
384 384 """
385 385 Get all pull requests that I'm participating in, or that I have opened
386 386 """
387 387
388 388 q = self._prepare_participating_query(
389 389 user_id, statuses=statuses, order_by=order_by,
390 390 order_dir=order_dir)
391 391
392 392 if length:
393 393 pull_requests = q.limit(length).offset(offset).all()
394 394 else:
395 395 pull_requests = q.all()
396 396
397 397 return pull_requests
398 398
399 399 def get_versions(self, pull_request):
400 400 """
401 401 returns versions of the pull request sorted by ID ascending
402 402 """
403 403 return PullRequestVersion.query()\
404 404 .filter(PullRequestVersion.pull_request == pull_request)\
405 405 .order_by(PullRequestVersion.pull_request_version_id.asc())\
406 406 .all()
407 407
408 408 def get_pr_version(self, pull_request_id, version=None):
409 409 at_version = None
410 410
411 411 if version and version == 'latest':
412 412 pull_request_ver = PullRequest.get(pull_request_id)
413 413 pull_request_obj = pull_request_ver
414 414 _org_pull_request_obj = pull_request_obj
415 415 at_version = 'latest'
416 416 elif version:
417 417 pull_request_ver = PullRequestVersion.get_or_404(version)
418 418 pull_request_obj = pull_request_ver
419 419 _org_pull_request_obj = pull_request_ver.pull_request
420 420 at_version = pull_request_ver.pull_request_version_id
421 421 else:
422 422 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
423 423 pull_request_id)
424 424
425 425 pull_request_display_obj = PullRequest.get_pr_display_object(
426 426 pull_request_obj, _org_pull_request_obj)
427 427
428 428 return _org_pull_request_obj, pull_request_obj, \
429 429 pull_request_display_obj, at_version
430 430
431 431 def create(self, created_by, source_repo, source_ref, target_repo,
432 432 target_ref, revisions, reviewers, title, description=None,
433 433 description_renderer=None,
434 434 reviewer_data=None, translator=None, auth_user=None):
435 435 translator = translator or get_current_request().translate
436 436
437 437 created_by_user = self._get_user(created_by)
438 438 auth_user = auth_user or created_by_user.AuthUser()
439 439 source_repo = self._get_repo(source_repo)
440 440 target_repo = self._get_repo(target_repo)
441 441
442 442 pull_request = PullRequest()
443 443 pull_request.source_repo = source_repo
444 444 pull_request.source_ref = source_ref
445 445 pull_request.target_repo = target_repo
446 446 pull_request.target_ref = target_ref
447 447 pull_request.revisions = revisions
448 448 pull_request.title = title
449 449 pull_request.description = description
450 450 pull_request.description_renderer = description_renderer
451 451 pull_request.author = created_by_user
452 452 pull_request.reviewer_data = reviewer_data
453 453 pull_request.pull_request_state = pull_request.STATE_CREATING
454 454 Session().add(pull_request)
455 455 Session().flush()
456 456
457 457 reviewer_ids = set()
458 458 # members / reviewers
459 459 for reviewer_object in reviewers:
460 460 user_id, reasons, mandatory, rules = reviewer_object
461 461 user = self._get_user(user_id)
462 462
463 463 # skip duplicates
464 464 if user.user_id in reviewer_ids:
465 465 continue
466 466
467 467 reviewer_ids.add(user.user_id)
468 468
469 469 reviewer = PullRequestReviewers()
470 470 reviewer.user = user
471 471 reviewer.pull_request = pull_request
472 472 reviewer.reasons = reasons
473 473 reviewer.mandatory = mandatory
474 474
475 475 # NOTE(marcink): pick only first rule for now
476 476 rule_id = list(rules)[0] if rules else None
477 477 rule = RepoReviewRule.get(rule_id) if rule_id else None
478 478 if rule:
479 479 review_group = rule.user_group_vote_rule(user_id)
480 480 # we check if this particular reviewer is member of a voting group
481 481 if review_group:
482 482 # NOTE(marcink):
483 483 # it can be that the user is a member of more groups, but we pick
484 484 # the first one, same as the default reviewers algo
485 485 review_group = review_group[0]
486 486
487 487 rule_data = {
488 488 'rule_name':
489 489 rule.review_rule_name,
490 490 'rule_user_group_entry_id':
491 491 review_group.repo_review_rule_users_group_id,
492 492 'rule_user_group_name':
493 493 review_group.users_group.users_group_name,
494 494 'rule_user_group_members':
495 495 [x.user.username for x in review_group.users_group.members],
496 496 'rule_user_group_members_id':
497 497 [x.user.user_id for x in review_group.users_group.members],
498 498 }
499 499 # e.g {'vote_rule': -1, 'mandatory': True}
500 500 rule_data.update(review_group.rule_data())
501 501
502 502 reviewer.rule_data = rule_data
503 503
504 504 Session().add(reviewer)
505 505 Session().flush()
506 506
507 507 # Set approval status to "Under Review" for all commits which are
508 508 # part of this pull request.
509 509 ChangesetStatusModel().set_status(
510 510 repo=target_repo,
511 511 status=ChangesetStatus.STATUS_UNDER_REVIEW,
512 512 user=created_by_user,
513 513 pull_request=pull_request
514 514 )
515 515 # we commit early at this point. This has to do with the fact that
516 516 # the queries above do some row-locking. Because of that we need to
517 517 # commit and finish the transaction before the validate call below,
518 518 # which for large repos could be long and result in long row locks
519 519 Session().commit()
520 520
521 521 # prepare workspace, and run initial merge simulation. Set state during that
522 522 # operation
523 523 pull_request = PullRequest.get(pull_request.pull_request_id)
524 524
525 525 # set state to merging for the merge simulation, and when finished set it
526 526 # to created, to mark that the simulation worked fine
527 527 with pull_request.set_state(PullRequest.STATE_MERGING,
528 528 final_state=PullRequest.STATE_CREATED) as state_obj:
529 529 MergeCheck.validate(
530 530 pull_request, auth_user=auth_user, translator=translator)
531 531
532 532 self.notify_reviewers(pull_request, reviewer_ids)
533 533 self.trigger_pull_request_hook(
534 534 pull_request, created_by_user, 'create')
535 535
536 536 creation_data = pull_request.get_api_data(with_merge_state=False)
537 537 self._log_audit_action(
538 538 'repo.pull_request.create', {'data': creation_data},
539 539 auth_user, pull_request)
540 540
541 541 return pull_request
542 542
543 543 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
544 544 pull_request = self.__get_pull_request(pull_request)
545 545 target_scm = pull_request.target_repo.scm_instance()
546 546 if action == 'create':
547 547 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
548 548 elif action == 'merge':
549 549 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
550 550 elif action == 'close':
551 551 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
552 552 elif action == 'review_status_change':
553 553 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
554 554 elif action == 'update':
555 555 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
556 556 elif action == 'comment':
557 557 # dummy hook for comments. We want this function to handle all cases
558 558 def trigger_hook(*args, **kwargs):
559 559 pass
560 560 comment = data['comment']
561 561 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
562 562 else:
563 563 return
564 564
565 565 trigger_hook(
566 566 username=user.username,
567 567 repo_name=pull_request.target_repo.repo_name,
568 568 repo_alias=target_scm.alias,
569 569 pull_request=pull_request,
570 570 data=data)
571 571
572 572 def _get_commit_ids(self, pull_request):
573 573 """
574 574 Return the commit ids of the merged pull request.
575 575
576 576 This method does not yet deal correctly with the lack of autoupdates,
577 577 nor with implicit target updates.
578 578 For example: if a commit in the source repo is already in the target it
579 579 will still be reported.
580 580 """
581 581 merge_rev = pull_request.merge_rev
582 582 if merge_rev is None:
583 583 raise ValueError('This pull request was not merged yet')
584 584
585 585 commit_ids = list(pull_request.revisions)
586 586 if merge_rev not in commit_ids:
587 587 commit_ids.append(merge_rev)
588 588
589 589 return commit_ids
590 590
591 591 def merge_repo(self, pull_request, user, extras):
592 592 log.debug("Merging pull request %s", pull_request.pull_request_id)
593 593 extras['user_agent'] = 'internal-merge'
594 594 merge_state = self._merge_pull_request(pull_request, user, extras)
595 595 if merge_state.executed:
596 596 log.debug("Merge was successful, updating the pull request comments.")
597 597 self._comment_and_close_pr(pull_request, user, merge_state)
598 598
599 599 self._log_audit_action(
600 600 'repo.pull_request.merge',
601 601 {'merge_state': merge_state.__dict__},
602 602 user, pull_request)
603 603
604 604 else:
605 605 log.warn("Merge failed, not updating the pull request.")
606 606 return merge_state
607 607
608 608 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
609 609 target_vcs = pull_request.target_repo.scm_instance()
610 610 source_vcs = pull_request.source_repo.scm_instance()
611 611
612 612 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
613 613 pr_id=pull_request.pull_request_id,
614 614 pr_title=pull_request.title,
615 615 source_repo=source_vcs.name,
616 616 source_ref_name=pull_request.source_ref_parts.name,
617 617 target_repo=target_vcs.name,
618 618 target_ref_name=pull_request.target_ref_parts.name,
619 619 )
620 620
621 621 workspace_id = self._workspace_id(pull_request)
622 622 repo_id = pull_request.target_repo.repo_id
623 623 use_rebase = self._use_rebase_for_merging(pull_request)
624 624 close_branch = self._close_branch_before_merging(pull_request)
625 625
626 626 target_ref = self._refresh_reference(
627 627 pull_request.target_ref_parts, target_vcs)
628 628
629 629 callback_daemon, extras = prepare_callback_daemon(
630 630 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
631 631 host=vcs_settings.HOOKS_HOST,
632 632 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
633 633
634 634 with callback_daemon:
635 635 # TODO: johbo: Implement a clean way to run a config_override
636 636 # for a single call.
637 637 target_vcs.config.set(
638 638 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
639 639
640 640 user_name = user.short_contact
641 641 merge_state = target_vcs.merge(
642 642 repo_id, workspace_id, target_ref, source_vcs,
643 643 pull_request.source_ref_parts,
644 644 user_name=user_name, user_email=user.email,
645 645 message=message, use_rebase=use_rebase,
646 646 close_branch=close_branch)
647 647 return merge_state
648 648
649 649 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
650 650 pull_request.merge_rev = merge_state.merge_ref.commit_id
651 651 pull_request.updated_on = datetime.datetime.now()
652 652 close_msg = close_msg or 'Pull request merged and closed'
653 653
654 654 CommentsModel().create(
655 655 text=safe_unicode(close_msg),
656 656 repo=pull_request.target_repo.repo_id,
657 657 user=user.user_id,
658 658 pull_request=pull_request.pull_request_id,
659 659 f_path=None,
660 660 line_no=None,
661 661 closing_pr=True
662 662 )
663 663
664 664 Session().add(pull_request)
665 665 Session().flush()
666 666 # TODO: paris: replace invalidation with less radical solution
667 667 ScmModel().mark_for_invalidation(
668 668 pull_request.target_repo.repo_name)
669 669 self.trigger_pull_request_hook(pull_request, user, 'merge')
670 670
671 671 def has_valid_update_type(self, pull_request):
672 672 source_ref_type = pull_request.source_ref_parts.type
673 673 return source_ref_type in self.REF_TYPES
674 674
675 675 def update_commits(self, pull_request):
676 676 """
677 677 Get the updated list of commits for the pull request
678 678 and return the new pull request version and the list
679 679 of commits processed by this update action
680 680 """
681 681 pull_request = self.__get_pull_request(pull_request)
682 682 source_ref_type = pull_request.source_ref_parts.type
683 683 source_ref_name = pull_request.source_ref_parts.name
684 684 source_ref_id = pull_request.source_ref_parts.commit_id
685 685
686 686 target_ref_type = pull_request.target_ref_parts.type
687 687 target_ref_name = pull_request.target_ref_parts.name
688 688 target_ref_id = pull_request.target_ref_parts.commit_id
689 689
690 690 if not self.has_valid_update_type(pull_request):
691 691 log.debug("Skipping update of pull request %s due to ref type: %s",
692 692 pull_request, source_ref_type)
693 693 return UpdateResponse(
694 694 executed=False,
695 695 reason=UpdateFailureReason.WRONG_REF_TYPE,
696 696 old=pull_request, new=None, changes=None,
697 697 source_changed=False, target_changed=False)
698 698
699 699 # source repo
700 700 source_repo = pull_request.source_repo.scm_instance()
701 701
702 702 try:
703 703 source_commit = source_repo.get_commit(commit_id=source_ref_name)
704 704 except CommitDoesNotExistError:
705 705 return UpdateResponse(
706 706 executed=False,
707 707 reason=UpdateFailureReason.MISSING_SOURCE_REF,
708 708 old=pull_request, new=None, changes=None,
709 709 source_changed=False, target_changed=False)
710 710
711 711 source_changed = source_ref_id != source_commit.raw_id
712 712
713 713 # target repo
714 714 target_repo = pull_request.target_repo.scm_instance()
715 715
716 716 try:
717 717 target_commit = target_repo.get_commit(commit_id=target_ref_name)
718 718 except CommitDoesNotExistError:
719 719 return UpdateResponse(
720 720 executed=False,
721 721 reason=UpdateFailureReason.MISSING_TARGET_REF,
722 722 old=pull_request, new=None, changes=None,
723 723 source_changed=False, target_changed=False)
724 724 target_changed = target_ref_id != target_commit.raw_id
725 725
726 726 if not (source_changed or target_changed):
727 727 log.debug("Nothing changed in pull request %s", pull_request)
728 728 return UpdateResponse(
729 729 executed=False,
730 730 reason=UpdateFailureReason.NO_CHANGE,
731 731 old=pull_request, new=None, changes=None,
732 732 source_changed=source_changed, target_changed=target_changed)
733 733
734 734 change_in_found = 'target repo' if target_changed else 'source repo'
735 735 log.debug('Updating pull request because of change in %s detected',
736 736 change_in_found)
737 737
738 738 # Finally there is a need for an update, in case of source change
739 739 # we create a new version, else just an update
740 740 if source_changed:
741 741 pull_request_version = self._create_version_from_snapshot(pull_request)
742 742 self._link_comments_to_version(pull_request_version)
743 743 else:
744 744 try:
745 745 ver = pull_request.versions[-1]
746 746 except IndexError:
747 747 ver = None
748 748
749 749 pull_request.pull_request_version_id = \
750 750 ver.pull_request_version_id if ver else None
751 751 pull_request_version = pull_request
752 752
753 753 try:
754 754 if target_ref_type in self.REF_TYPES:
755 755 target_commit = target_repo.get_commit(target_ref_name)
756 756 else:
757 757 target_commit = target_repo.get_commit(target_ref_id)
758 758 except CommitDoesNotExistError:
759 759 return UpdateResponse(
760 760 executed=False,
761 761 reason=UpdateFailureReason.MISSING_TARGET_REF,
762 762 old=pull_request, new=None, changes=None,
763 763 source_changed=source_changed, target_changed=target_changed)
764 764
765 765 # re-compute commit ids
766 766 old_commit_ids = pull_request.revisions
767 767 pre_load = ["author", "date", "message", "branch"]
768 768 commit_ranges = target_repo.compare(
769 769 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
770 770 pre_load=pre_load)
771 771
772 772 ancestor = source_repo.get_common_ancestor(
773 773 source_commit.raw_id, target_commit.raw_id, target_repo)
774 774
775 775 pull_request.source_ref = '%s:%s:%s' % (
776 776 source_ref_type, source_ref_name, source_commit.raw_id)
777 777 pull_request.target_ref = '%s:%s:%s' % (
778 778 target_ref_type, target_ref_name, ancestor)
779 779
780 780 pull_request.revisions = [
781 781 commit.raw_id for commit in reversed(commit_ranges)]
782 782 pull_request.updated_on = datetime.datetime.now()
783 783 Session().add(pull_request)
784 784 new_commit_ids = pull_request.revisions
785 785
786 786 old_diff_data, new_diff_data = self._generate_update_diffs(
787 787 pull_request, pull_request_version)
788 788
789 789 # calculate commit and file changes
790 790 changes = self._calculate_commit_id_changes(
791 791 old_commit_ids, new_commit_ids)
792 792 file_changes = self._calculate_file_changes(
793 793 old_diff_data, new_diff_data)
794 794
795 795 # set comments as outdated if DIFFS changed
796 796 CommentsModel().outdate_comments(
797 797 pull_request, old_diff_data=old_diff_data,
798 798 new_diff_data=new_diff_data)
799 799
800 800 commit_changes = (changes.added or changes.removed)
801 801 file_node_changes = (
802 802 file_changes.added or file_changes.modified or file_changes.removed)
803 803 pr_has_changes = commit_changes or file_node_changes
804 804
805 805 # Add an automatic comment to the pull request, in case
806 806 # anything has changed
807 807 if pr_has_changes:
808 808 update_comment = CommentsModel().create(
809 809 text=self._render_update_message(changes, file_changes),
810 810 repo=pull_request.target_repo,
811 811 user=pull_request.author,
812 812 pull_request=pull_request,
813 813 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
814 814
815 815 # Update status to "Under Review" for added commits
816 816 for commit_id in changes.added:
817 817 ChangesetStatusModel().set_status(
818 818 repo=pull_request.source_repo,
819 819 status=ChangesetStatus.STATUS_UNDER_REVIEW,
820 820 comment=update_comment,
821 821 user=pull_request.author,
822 822 pull_request=pull_request,
823 823 revision=commit_id)
824 824
825 825 log.debug(
826 826 'Updated pull request %s, added_ids: %s, common_ids: %s, '
827 827 'removed_ids: %s', pull_request.pull_request_id,
828 828 changes.added, changes.common, changes.removed)
829 829 log.debug(
830 830 'Updated pull request with the following file changes: %s',
831 831 file_changes)
832 832
833 833 log.info(
834 834 "Updated pull request %s from commit %s to commit %s, "
835 835 "stored new version %s of this pull request.",
836 836 pull_request.pull_request_id, source_ref_id,
837 837 pull_request.source_ref_parts.commit_id,
838 838 pull_request_version.pull_request_version_id)
839 839 Session().commit()
840 840 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
841 841
842 842 return UpdateResponse(
843 843 executed=True, reason=UpdateFailureReason.NONE,
844 844 old=pull_request, new=pull_request_version, changes=changes,
845 845 source_changed=source_changed, target_changed=target_changed)
846 846
847 847 def _create_version_from_snapshot(self, pull_request):
848 848 version = PullRequestVersion()
849 849 version.title = pull_request.title
850 850 version.description = pull_request.description
851 851 version.status = pull_request.status
852 852 version.pull_request_state = pull_request.pull_request_state
853 853 version.created_on = datetime.datetime.now()
854 854 version.updated_on = pull_request.updated_on
855 855 version.user_id = pull_request.user_id
856 856 version.source_repo = pull_request.source_repo
857 857 version.source_ref = pull_request.source_ref
858 858 version.target_repo = pull_request.target_repo
859 859 version.target_ref = pull_request.target_ref
860 860
861 861 version._last_merge_source_rev = pull_request._last_merge_source_rev
862 862 version._last_merge_target_rev = pull_request._last_merge_target_rev
863 863 version.last_merge_status = pull_request.last_merge_status
864 864 version.shadow_merge_ref = pull_request.shadow_merge_ref
865 865 version.merge_rev = pull_request.merge_rev
866 866 version.reviewer_data = pull_request.reviewer_data
867 867
868 868 version.revisions = pull_request.revisions
869 869 version.pull_request = pull_request
870 870 Session().add(version)
871 871 Session().flush()
872 872
873 873 return version
874 874
875 875 def _generate_update_diffs(self, pull_request, pull_request_version):
876 876
877 877 diff_context = (
878 878 self.DIFF_CONTEXT +
879 879 CommentsModel.needed_extra_diff_context())
880 880 hide_whitespace_changes = False
881 881 source_repo = pull_request_version.source_repo
882 882 source_ref_id = pull_request_version.source_ref_parts.commit_id
883 883 target_ref_id = pull_request_version.target_ref_parts.commit_id
884 884 old_diff = self._get_diff_from_pr_or_version(
885 885 source_repo, source_ref_id, target_ref_id,
886 886 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
887 887
888 888 source_repo = pull_request.source_repo
889 889 source_ref_id = pull_request.source_ref_parts.commit_id
890 890 target_ref_id = pull_request.target_ref_parts.commit_id
891 891
892 892 new_diff = self._get_diff_from_pr_or_version(
893 893 source_repo, source_ref_id, target_ref_id,
894 894 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
895 895
896 896 old_diff_data = diffs.DiffProcessor(old_diff)
897 897 old_diff_data.prepare()
898 898 new_diff_data = diffs.DiffProcessor(new_diff)
899 899 new_diff_data.prepare()
900 900
901 901 return old_diff_data, new_diff_data
902 902
903 903 def _link_comments_to_version(self, pull_request_version):
904 904 """
905 905 Link all unlinked comments of this pull request to the given version.
906 906
907 907 :param pull_request_version: The `PullRequestVersion` to which
908 908 the comments shall be linked.
909 909
910 910 """
911 911 pull_request = pull_request_version.pull_request
912 912 comments = ChangesetComment.query()\
913 913 .filter(
914 914 # TODO: johbo: Should we query for the repo at all here?
915 915 # Pending decision on how comments of PRs are to be related
916 916 # to either the source repo, the target repo or no repo at all.
917 917 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
918 918 ChangesetComment.pull_request == pull_request,
919 919 ChangesetComment.pull_request_version == None)\
920 920 .order_by(ChangesetComment.comment_id.asc())
921 921
922 922 # TODO: johbo: Find out why this breaks if it is done in a bulk
923 923 # operation.
924 924 for comment in comments:
925 925 comment.pull_request_version_id = (
926 926 pull_request_version.pull_request_version_id)
927 927 Session().add(comment)
928 928
929 929 def _calculate_commit_id_changes(self, old_ids, new_ids):
930 930 added = [x for x in new_ids if x not in old_ids]
931 931 common = [x for x in new_ids if x in old_ids]
932 932 removed = [x for x in old_ids if x not in new_ids]
933 933 total = new_ids
934 934 return ChangeTuple(added, common, removed, total)
935 935
936 936 def _calculate_file_changes(self, old_diff_data, new_diff_data):
937 937
938 938 old_files = OrderedDict()
939 939 for diff_data in old_diff_data.parsed_diff:
940 940 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
941 941
942 942 added_files = []
943 943 modified_files = []
944 944 removed_files = []
945 945 for diff_data in new_diff_data.parsed_diff:
946 946 new_filename = diff_data['filename']
947 947 new_hash = md5_safe(diff_data['raw_diff'])
948 948
949 949 old_hash = old_files.get(new_filename)
950 950 if not old_hash:
951 951 # file is not present in old diff, means it's added
952 952 added_files.append(new_filename)
953 953 else:
954 954 if new_hash != old_hash:
955 955 modified_files.append(new_filename)
956 956 # now remove a file from old, since we have seen it already
957 957 del old_files[new_filename]
958 958
959 959 # removed files are those present in old, but not in NEW; since we
960 960 # delete old files that are present in the new diff, any left-overs
961 961 # are the removed files
962 962 removed_files.extend(old_files.keys())
963 963
964 964 return FileChangeTuple(added_files, modified_files, removed_files)
965 965
966 966 def _render_update_message(self, changes, file_changes):
967 967 """
968 968 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
969 969 so it always looks the same regardless of which renderer the
970 970 system is using as the default.
971 971
972 972 :param changes: changes named tuple
973 973 :param file_changes: file changes named tuple
974 974
975 975 """
976 976 new_status = ChangesetStatus.get_status_lbl(
977 977 ChangesetStatus.STATUS_UNDER_REVIEW)
978 978
979 979 changed_files = (
980 980 file_changes.added + file_changes.modified + file_changes.removed)
981 981
982 982 params = {
983 983 'under_review_label': new_status,
984 984 'added_commits': changes.added,
985 985 'removed_commits': changes.removed,
986 986 'changed_files': changed_files,
987 987 'added_files': file_changes.added,
988 988 'modified_files': file_changes.modified,
989 989 'removed_files': file_changes.removed,
990 990 }
991 991 renderer = RstTemplateRenderer()
992 992 return renderer.render('pull_request_update.mako', **params)
993 993
994 994 def edit(self, pull_request, title, description, description_renderer, user):
995 995 pull_request = self.__get_pull_request(pull_request)
996 996 old_data = pull_request.get_api_data(with_merge_state=False)
997 997 if pull_request.is_closed():
998 998 raise ValueError('This pull request is closed')
999 999 if title:
1000 1000 pull_request.title = title
1001 1001 pull_request.description = description
1002 1002 pull_request.updated_on = datetime.datetime.now()
1003 1003 pull_request.description_renderer = description_renderer
1004 1004 Session().add(pull_request)
1005 1005 self._log_audit_action(
1006 1006 'repo.pull_request.edit', {'old_data': old_data},
1007 1007 user, pull_request)
1008 1008
1009 1009 def update_reviewers(self, pull_request, reviewer_data, user):
1010 1010 """
1011 1011 Update the reviewers in the pull request
1012 1012
1013 1013 :param pull_request: the pr to update
1014 1014 :param reviewer_data: list of tuples
1015 1015 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1016 1016 """
1017 1017 pull_request = self.__get_pull_request(pull_request)
1018 1018 if pull_request.is_closed():
1019 1019 raise ValueError('This pull request is closed')
1020 1020
1021 1021 reviewers = {}
1022 1022 for user_id, reasons, mandatory, rules in reviewer_data:
1023 1023 if isinstance(user_id, (int, compat.string_types)):
1024 1024 user_id = self._get_user(user_id).user_id
1025 1025 reviewers[user_id] = {
1026 1026 'reasons': reasons, 'mandatory': mandatory}
1027 1027
1028 1028 reviewers_ids = set(reviewers.keys())
1029 1029 current_reviewers = PullRequestReviewers.query()\
1030 1030 .filter(PullRequestReviewers.pull_request ==
1031 1031 pull_request).all()
1032 1032 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1033 1033
1034 1034 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1035 1035 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1036 1036
1037 1037 log.debug("Adding %s reviewers", ids_to_add)
1038 1038 log.debug("Removing %s reviewers", ids_to_remove)
1039 1039 changed = False
1040 1040 added_audit_reviewers = []
1041 1041 removed_audit_reviewers = []
1042 1042
1043 1043 for uid in ids_to_add:
1044 1044 changed = True
1045 1045 _usr = self._get_user(uid)
1046 1046 reviewer = PullRequestReviewers()
1047 1047 reviewer.user = _usr
1048 1048 reviewer.pull_request = pull_request
1049 1049 reviewer.reasons = reviewers[uid]['reasons']
1050 1050 # NOTE(marcink): mandatory shouldn't be changed now
1051 1051 # reviewer.mandatory = reviewers[uid]['reasons']
1052 1052 Session().add(reviewer)
1053 1053 added_audit_reviewers.append(reviewer.get_dict())
1054 1054
1055 1055 for uid in ids_to_remove:
1056 1056 changed = True
1057 1057 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This handles the
1058 1058 # edge case where the same reviewer was added twice, which CAN happen
1059 1059 # due to the lack of DB checks
1060 1060 reviewers = PullRequestReviewers.query()\
1061 1061 .filter(PullRequestReviewers.user_id == uid,
1062 1062 PullRequestReviewers.pull_request == pull_request)\
1063 1063 .all()
1064 1064
1065 1065 for obj in reviewers:
1066 1066 removed_audit_reviewers.append(obj.get_dict())
1067 1067 Session().delete(obj)
1068 1068
1069 1069 if changed:
1070 1070 Session().expire_all()
1071 1071 pull_request.updated_on = datetime.datetime.now()
1072 1072 Session().add(pull_request)
1073 1073
1074 1074 # finally store audit logs
1075 1075 for user_data in added_audit_reviewers:
1076 1076 self._log_audit_action(
1077 1077 'repo.pull_request.reviewer.add', {'data': user_data},
1078 1078 user, pull_request)
1079 1079 for user_data in removed_audit_reviewers:
1080 1080 self._log_audit_action(
1081 1081 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1082 1082 user, pull_request)
1083 1083
1084 1084 self.notify_reviewers(pull_request, ids_to_add)
1085 1085 return ids_to_add, ids_to_remove
1086 1086
1087 1087 def get_url(self, pull_request, request=None, permalink=False):
1088 1088 if not request:
1089 1089 request = get_current_request()
1090 1090
1091 1091 if permalink:
1092 1092 return request.route_url(
1093 1093 'pull_requests_global',
1094 1094 pull_request_id=pull_request.pull_request_id,)
1095 1095 else:
1096 1096 return request.route_url('pullrequest_show',
1097 1097 repo_name=safe_str(pull_request.target_repo.repo_name),
1098 1098 pull_request_id=pull_request.pull_request_id,)
1099 1099
1100 1100 def get_shadow_clone_url(self, pull_request, request=None):
1101 1101 """
1102 1102 Returns qualified url pointing to the shadow repository. If this pull
1103 1103 request is closed there is no shadow repository and ``None`` will be
1104 1104 returned.
1105 1105 """
1106 1106 if pull_request.is_closed():
1107 1107 return None
1108 1108 else:
1109 1109 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1110 1110 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1111 1111
1112 1112 def notify_reviewers(self, pull_request, reviewers_ids):
1113 1113 # notification to reviewers
1114 1114 if not reviewers_ids:
1115 1115 return
1116 1116
1117 1117 log.debug('Notify following reviewers about pull-request %s', reviewers_ids)
1118 1118
1119 1119 pull_request_obj = pull_request
1120 1120 # get the current participants of this pull request
1121 1121 recipients = reviewers_ids
1122 1122 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1123 1123
1124 1124 pr_source_repo = pull_request_obj.source_repo
1125 1125 pr_target_repo = pull_request_obj.target_repo
1126 1126
1127 1127 pr_url = h.route_url('pullrequest_show',
1128 1128 repo_name=pr_target_repo.repo_name,
1129 1129 pull_request_id=pull_request_obj.pull_request_id,)
1130 1130
1131 1131 # set some variables for email notification
1132 1132 pr_target_repo_url = h.route_url(
1133 1133 'repo_summary', repo_name=pr_target_repo.repo_name)
1134 1134
1135 1135 pr_source_repo_url = h.route_url(
1136 1136 'repo_summary', repo_name=pr_source_repo.repo_name)
1137 1137
1138 1138 # pull request specifics
1139 1139 pull_request_commits = [
1140 1140 (x.raw_id, x.message)
1141 1141 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1142 1142
1143 1143 kwargs = {
1144 1144 'user': pull_request.author,
1145 1145 'pull_request': pull_request_obj,
1146 1146 'pull_request_commits': pull_request_commits,
1147 1147
1148 1148 'pull_request_target_repo': pr_target_repo,
1149 1149 'pull_request_target_repo_url': pr_target_repo_url,
1150 1150
1151 1151 'pull_request_source_repo': pr_source_repo,
1152 1152 'pull_request_source_repo_url': pr_source_repo_url,
1153 1153
1154 1154 'pull_request_url': pr_url,
1155 1155 }
1156 1156
1157 1157 # pre-generate the subject for notification itself
1158 1158 (subject,
1159 1159 _h, _e, # we don't care about those
1160 1160 body_plaintext) = EmailNotificationModel().render_email(
1161 1161 notification_type, **kwargs)
1162 1162
1163 1163 # create notification objects, and emails
1164 1164 NotificationModel().create(
1165 1165 created_by=pull_request.author,
1166 1166 notification_subject=subject,
1167 1167 notification_body=body_plaintext,
1168 1168 notification_type=notification_type,
1169 1169 recipients=recipients,
1170 1170 email_kwargs=kwargs,
1171 1171 )
1172 1172
1173 1173 def delete(self, pull_request, user):
1174 1174 pull_request = self.__get_pull_request(pull_request)
1175 1175 old_data = pull_request.get_api_data(with_merge_state=False)
1176 1176 self._cleanup_merge_workspace(pull_request)
1177 1177 self._log_audit_action(
1178 1178 'repo.pull_request.delete', {'old_data': old_data},
1179 1179 user, pull_request)
1180 1180 Session().delete(pull_request)
1181 1181
1182 1182 def close_pull_request(self, pull_request, user):
1183 1183 pull_request = self.__get_pull_request(pull_request)
1184 1184 self._cleanup_merge_workspace(pull_request)
1185 1185 pull_request.status = PullRequest.STATUS_CLOSED
1186 1186 pull_request.updated_on = datetime.datetime.now()
1187 1187 Session().add(pull_request)
1188 1188 self.trigger_pull_request_hook(
1189 1189 pull_request, pull_request.author, 'close')
1190 1190
1191 1191 pr_data = pull_request.get_api_data(with_merge_state=False)
1192 1192 self._log_audit_action(
1193 1193 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1194 1194
1195 1195 def close_pull_request_with_comment(
1196 1196 self, pull_request, user, repo, message=None, auth_user=None):
1197 1197
1198 1198 pull_request_review_status = pull_request.calculated_review_status()
1199 1199
1200 1200 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1201 1201 # approved only if we have voting consent
1202 1202 status = ChangesetStatus.STATUS_APPROVED
1203 1203 else:
1204 1204 status = ChangesetStatus.STATUS_REJECTED
1205 1205 status_lbl = ChangesetStatus.get_status_lbl(status)
1206 1206
1207 1207 default_message = (
1208 1208 'Closing with status change {transition_icon} {status}.'
1209 1209 ).format(transition_icon='>', status=status_lbl)
1210 1210 text = message or default_message
1211 1211
1212 1212 # create a comment, and link it to new status
1213 1213 comment = CommentsModel().create(
1214 1214 text=text,
1215 1215 repo=repo.repo_id,
1216 1216 user=user.user_id,
1217 1217 pull_request=pull_request.pull_request_id,
1218 1218 status_change=status_lbl,
1219 1219 status_change_type=status,
1220 1220 closing_pr=True,
1221 1221 auth_user=auth_user,
1222 1222 )
1223 1223
1224 1224 # calculate old status before we change it
1225 1225 old_calculated_status = pull_request.calculated_review_status()
1226 1226 ChangesetStatusModel().set_status(
1227 1227 repo.repo_id,
1228 1228 status,
1229 1229 user.user_id,
1230 1230 comment=comment,
1231 1231 pull_request=pull_request.pull_request_id
1232 1232 )
1233 1233
1234 1234 Session().flush()
1235 1235 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1236 1236 # we now calculate the status of the pull request again, and based on
1237 1237 # that calculation trigger a status change. This matters in cases where
1238 1238 # a non-reviewer admin closes a pr: their vote doesn't change the
1239 1239 # status, while if they are a reviewer it might.
1240 1240 calculated_status = pull_request.calculated_review_status()
1241 1241 if old_calculated_status != calculated_status:
1242 1242 self.trigger_pull_request_hook(
1243 1243 pull_request, user, 'review_status_change',
1244 1244 data={'status': calculated_status})
1245 1245
1246 1246 # finally close the PR
1247 1247 PullRequestModel().close_pull_request(
1248 1248 pull_request.pull_request_id, user)
1249 1249
1250 1250 return comment, status
1251 1251
1252 1252 def merge_status(self, pull_request, translator=None,
1253 1253 force_shadow_repo_refresh=False):
1254 1254 _ = translator or get_current_request().translate
1255 1255
1256 1256 if not self._is_merge_enabled(pull_request):
1257 1257 return False, _('Server-side pull request merging is disabled.')
1258 1258 if pull_request.is_closed():
1259 1259 return False, _('This pull request is closed.')
1260 1260 merge_possible, msg = self._check_repo_requirements(
1261 1261 target=pull_request.target_repo, source=pull_request.source_repo,
1262 1262 translator=_)
1263 1263 if not merge_possible:
1264 1264 return merge_possible, msg
1265 1265
1266 1266 try:
1267 1267 resp = self._try_merge(
1268 1268 pull_request,
1269 1269 force_shadow_repo_refresh=force_shadow_repo_refresh)
1270 1270 log.debug("Merge response: %s", resp)
1271 1271 status = resp.possible, resp.merge_status_message
1272 1272 except NotImplementedError:
1273 1273 status = False, _('Pull request merging is not supported.')
1274 1274
1275 1275 return status
1276 1276
1277 1277 def _check_repo_requirements(self, target, source, translator):
1278 1278 """
1279 1279 Check if `target` and `source` have compatible requirements.
1280 1280
1281 1281 Currently this is just checking for largefiles.
1282 1282 """
1283 1283 _ = translator
1284 1284 target_has_largefiles = self._has_largefiles(target)
1285 1285 source_has_largefiles = self._has_largefiles(source)
1286 1286 merge_possible = True
1287 1287 message = u''
1288 1288
1289 1289 if target_has_largefiles != source_has_largefiles:
1290 1290 merge_possible = False
1291 1291 if source_has_largefiles:
1292 1292 message = _(
1293 1293 'Target repository large files support is disabled.')
1294 1294 else:
1295 1295 message = _(
1296 1296 'Source repository large files support is disabled.')
1297 1297
1298 1298 return merge_possible, message
1299 1299
1300 1300 def _has_largefiles(self, repo):
1301 1301 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1302 1302 'extensions', 'largefiles')
1303 1303 return largefiles_ui and largefiles_ui[0].active
1304 1304
1305 1305 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1306 1306 """
1307 1307 Try to merge the pull request and return the merge status.
1308 1308 """
1309 1309 log.debug(
1310 1310 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1311 1311 pull_request.pull_request_id, force_shadow_repo_refresh)
1312 1312 target_vcs = pull_request.target_repo.scm_instance()
1313 1313 # Refresh the target reference.
1314 1314 try:
1315 1315 target_ref = self._refresh_reference(
1316 1316 pull_request.target_ref_parts, target_vcs)
1317 1317 except CommitDoesNotExistError:
1318 1318 merge_state = MergeResponse(
1319 1319 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1320 1320 metadata={'target_ref': pull_request.target_ref_parts})
1321 1321 return merge_state
1322 1322
1323 1323 target_locked = pull_request.target_repo.locked
1324 1324 if target_locked and target_locked[0]:
1325 1325 locked_by = 'user:{}'.format(target_locked[0])
1326 1326 log.debug("The target repository is locked by %s.", locked_by)
1327 1327 merge_state = MergeResponse(
1328 1328 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1329 1329 metadata={'locked_by': locked_by})
1330 1330 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1331 1331 pull_request, target_ref):
1332 1332 log.debug("Refreshing the merge status of the repository.")
1333 1333 merge_state = self._refresh_merge_state(
1334 1334 pull_request, target_vcs, target_ref)
1335 1335 else:
1336 1336 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1337 1337 metadata = {
1338 1338 'unresolved_files': '',
1339 1339 'target_ref': pull_request.target_ref_parts,
1340 1340 'source_ref': pull_request.source_ref_parts,
1341 1341 }
1342 1342 if not possible and target_ref.type == 'branch':
1343 1343 # NOTE(marcink): case for mercurial multiple heads on branch
1344 1344 heads = target_vcs._heads(target_ref.name)
1345 1345 if len(heads) != 1:
1346 1346 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1347 1347 metadata.update({
1348 1348 'heads': heads
1349 1349 })
1350 1350 merge_state = MergeResponse(
1351 1351 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1352 1352
1353 1353 return merge_state
1354 1354
1355 1355 def _refresh_reference(self, reference, vcs_repository):
1356 1356 if reference.type in self.UPDATABLE_REF_TYPES:
1357 1357 name_or_id = reference.name
1358 1358 else:
1359 1359 name_or_id = reference.commit_id
1360 1360
1361 1361 refreshed_commit = vcs_repository.get_commit(name_or_id)
1362 1362 refreshed_reference = Reference(
1363 1363 reference.type, reference.name, refreshed_commit.raw_id)
1364 1364 return refreshed_reference
1365 1365
1366 1366 def _needs_merge_state_refresh(self, pull_request, target_reference):
1367 1367 return not(
1368 1368 pull_request.revisions and
1369 1369 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1370 1370 target_reference.commit_id == pull_request._last_merge_target_rev)
1371 1371
1372 1372 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1373 1373 workspace_id = self._workspace_id(pull_request)
1374 1374 source_vcs = pull_request.source_repo.scm_instance()
1375 1375 repo_id = pull_request.target_repo.repo_id
1376 1376 use_rebase = self._use_rebase_for_merging(pull_request)
1377 1377 close_branch = self._close_branch_before_merging(pull_request)
1378 1378 merge_state = target_vcs.merge(
1379 1379 repo_id, workspace_id,
1380 1380 target_reference, source_vcs, pull_request.source_ref_parts,
1381 1381 dry_run=True, use_rebase=use_rebase,
1382 1382 close_branch=close_branch)
1383 1383
1384 1384 # Do not store the response if there was an unknown error.
1385 1385 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1386 1386 pull_request._last_merge_source_rev = \
1387 1387 pull_request.source_ref_parts.commit_id
1388 1388 pull_request._last_merge_target_rev = target_reference.commit_id
1389 1389 pull_request.last_merge_status = merge_state.failure_reason
1390 1390 pull_request.shadow_merge_ref = merge_state.merge_ref
1391 1391 Session().add(pull_request)
1392 1392 Session().commit()
1393 1393
1394 1394 return merge_state
1395 1395
1396 1396 def _workspace_id(self, pull_request):
1397 1397 workspace_id = 'pr-%s' % pull_request.pull_request_id
1398 1398 return workspace_id
1399 1399
1400 1400 def generate_repo_data(self, repo, commit_id=None, branch=None,
1401 1401 bookmark=None, translator=None):
1402 1402 from rhodecode.model.repo import RepoModel
1403 1403
1404 1404 all_refs, selected_ref = \
1405 1405 self._get_repo_pullrequest_sources(
1406 1406 repo.scm_instance(), commit_id=commit_id,
1407 1407 branch=branch, bookmark=bookmark, translator=translator)
1408 1408
1409 1409 refs_select2 = []
1410 1410 for element in all_refs:
1411 1411 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1412 1412 refs_select2.append({'text': element[1], 'children': children})
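            # illustrative shape of each select2 entry built here, e.g.:
            # {'text': u'Branches',
            #  'children': [{'id': u'branch:default:<commit_id>', 'text': u'default'}]}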
1413 1413
1414 1414 return {
1415 1415 'user': {
1416 1416 'user_id': repo.user.user_id,
1417 1417 'username': repo.user.username,
1418 1418 'firstname': repo.user.first_name,
1419 1419 'lastname': repo.user.last_name,
1420 1420 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1421 1421 },
1422 1422 'name': repo.repo_name,
1423 1423 'link': RepoModel().get_url(repo),
1424 1424 'description': h.chop_at_smart(repo.description_safe, '\n'),
1425 1425 'refs': {
1426 1426 'all_refs': all_refs,
1427 1427 'selected_ref': selected_ref,
1428 1428 'select2_refs': refs_select2
1429 1429 }
1430 1430 }
1431 1431
1432 1432 def generate_pullrequest_title(self, source, source_ref, target):
1433 1433 return u'{source}#{at_ref} to {target}'.format(
1434 1434 source=source,
1435 1435 at_ref=source_ref,
1436 1436 target=target,
1437 1437 )
1438 1438
1439 1439 def _cleanup_merge_workspace(self, pull_request):
1440 1440 # Merging related cleanup
1441 1441 repo_id = pull_request.target_repo.repo_id
1442 1442 target_scm = pull_request.target_repo.scm_instance()
1443 1443 workspace_id = self._workspace_id(pull_request)
1444 1444
1445 1445 try:
1446 1446 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1447 1447 except NotImplementedError:
1448 1448 pass
1449 1449
1450 1450 def _get_repo_pullrequest_sources(
1451 1451 self, repo, commit_id=None, branch=None, bookmark=None,
1452 1452 translator=None):
1453 1453 """
1454 1454 Return a structure with the repo's interesting commits, suitable for
1455 1455 the selectors in the pull request controller
1456 1456
1457 1457 :param commit_id: a commit that must be in the list somehow
1458 1458 and selected by default
1459 1459 :param branch: a branch that must be in the list and selected
1460 1460 by default - even if closed
1461 1461 :param bookmark: a bookmark that must be in the list and selected
1462 1462 """
1463 1463 _ = translator or get_current_request().translate
1464 1464
1465 1465 commit_id = safe_str(commit_id) if commit_id else None
1466 1466 branch = safe_unicode(branch) if branch else None
1467 1467 bookmark = safe_unicode(bookmark) if bookmark else None
1468 1468
1469 1469 selected = None
1470 1470
1471 1471 # order matters: first source that has commit_id in it will be selected
1472 1472 sources = []
1473 1473 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1474 1474 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1475 1475
1476 1476 if commit_id:
1477 1477 ref_commit = (h.short_id(commit_id), commit_id)
1478 1478 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1479 1479
1480 1480 sources.append(
1481 1481 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1482 1482 )
1483 1483
1484 1484 groups = []
1485 1485
1486 1486 for group_key, ref_list, group_name, match in sources:
1487 1487 group_refs = []
1488 1488 for ref_name, ref_id in ref_list:
1489 1489 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1490 1490 group_refs.append((ref_key, ref_name))
1491 1491
1492 1492 if not selected:
1493 1493 if set([commit_id, match]) & set([ref_id, ref_name]):
1494 1494 selected = ref_key
1495 1495
1496 1496 if group_refs:
1497 1497 groups.append((group_refs, group_name))
1498 1498
1499 1499 if not selected:
1500 1500 ref = commit_id or branch or bookmark
1501 1501 if ref:
1502 1502 raise CommitDoesNotExistError(
1503 1503 u'No commit refs could be found matching: {}'.format(ref))
1504 1504 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1505 1505 selected = u'branch:{}:{}'.format(
1506 1506 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1507 1507 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1508 1508 )
1509 1509 elif repo.commit_ids:
1510 1510 # make the user select in this case
1511 1511 selected = None
1512 1512 else:
1513 1513 raise EmptyRepositoryError()
1514 1514 return groups, selected
1515 1515
1516 1516 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1517 1517 hide_whitespace_changes, diff_context):
1518 1518
1519 1519 return self._get_diff_from_pr_or_version(
1520 1520 source_repo, source_ref_id, target_ref_id,
1521 1521 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1522 1522
1523 1523 def _get_diff_from_pr_or_version(
1524 1524 self, source_repo, source_ref_id, target_ref_id,
1525 1525 hide_whitespace_changes, diff_context):
1526 1526
1527 1527 target_commit = source_repo.get_commit(
1528 1528 commit_id=safe_str(target_ref_id))
1529 1529 source_commit = source_repo.get_commit(
1530 1530 commit_id=safe_str(source_ref_id))
1531 1531 if isinstance(source_repo, Repository):
1532 1532 vcs_repo = source_repo.scm_instance()
1533 1533 else:
1534 1534 vcs_repo = source_repo
1535 1535
1536 1536 # TODO: johbo: In the context of an update, we cannot reach
1537 1537 # the old commit anymore with our normal mechanisms. It needs
1538 1538 # some sort of special support in the vcs layer to avoid this
1539 1539 # workaround.
1540 1540 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1541 1541 vcs_repo.alias == 'git'):
1542 1542 source_commit.raw_id = safe_str(source_ref_id)
1543 1543
1544 1544 log.debug('calculating diff between '
1545 1545 'source_ref:%s and target_ref:%s for repo `%s`',
1546 1546 target_ref_id, source_ref_id,
1547 1547 safe_unicode(vcs_repo.path))
1548 1548
1549 1549 vcs_diff = vcs_repo.get_diff(
1550 1550 commit1=target_commit, commit2=source_commit,
1551 1551 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1552 1552 return vcs_diff
1553 1553
1554 1554 def _is_merge_enabled(self, pull_request):
1555 1555 return self._get_general_setting(
1556 1556 pull_request, 'rhodecode_pr_merge_enabled')
1557 1557
1558 1558 def _use_rebase_for_merging(self, pull_request):
1559 1559 repo_type = pull_request.target_repo.repo_type
1560 1560 if repo_type == 'hg':
1561 1561 return self._get_general_setting(
1562 1562 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1563 1563 elif repo_type == 'git':
1564 1564 return self._get_general_setting(
1565 1565 pull_request, 'rhodecode_git_use_rebase_for_merging')
1566 1566
1567 1567 return False
1568 1568
1569 1569 def _close_branch_before_merging(self, pull_request):
1570 1570 repo_type = pull_request.target_repo.repo_type
1571 1571 if repo_type == 'hg':
1572 1572 return self._get_general_setting(
1573 1573 pull_request, 'rhodecode_hg_close_branch_before_merging')
1574 1574 elif repo_type == 'git':
1575 1575 return self._get_general_setting(
1576 1576 pull_request, 'rhodecode_git_close_branch_before_merging')
1577 1577
1578 1578 return False
1579 1579
1580 1580 def _get_general_setting(self, pull_request, settings_key, default=False):
1581 1581 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1582 1582 settings = settings_model.get_general_settings()
1583 1583 return settings.get(settings_key, default)
1584 1584
1585 1585 def _log_audit_action(self, action, action_data, user, pull_request):
1586 1586 audit_logger.store(
1587 1587 action=action,
1588 1588 action_data=action_data,
1589 1589 user=user,
1590 1590 repo=pull_request.target_repo)
1591 1591
1592 1592 def get_reviewer_functions(self):
1593 1593 """
1594 1594 Fetches functions for validation and fetching default reviewers.
1595 1595 If available we use the EE package, otherwise we fall back to the CE
1596 1596 package functions
1597 1597 """
1598 1598 try:
1599 1599 from rc_reviewers.utils import get_default_reviewers_data
1600 1600 from rc_reviewers.utils import validate_default_reviewers
1601 1601 except ImportError:
1602 1602 from rhodecode.apps.repository.utils import get_default_reviewers_data
1603 1603 from rhodecode.apps.repository.utils import validate_default_reviewers
1604 1604
1605 1605 return get_default_reviewers_data, validate_default_reviewers
1606 1606
1607 1607
1608 1608 class MergeCheck(object):
1609 1609 """
1610 1610 Performs merge checks and returns a check object which stores information
1611 1611 about merge errors and merge conditions
1612 1612 """
1613 1613 TODO_CHECK = 'todo'
1614 1614 PERM_CHECK = 'perm'
1615 1615 REVIEW_CHECK = 'review'
1616 1616 MERGE_CHECK = 'merge'
1617 WIP_CHECK = 'wip'
1617 1618
1618 1619 def __init__(self):
1619 1620 self.review_status = None
1620 1621 self.merge_possible = None
1621 1622 self.merge_msg = ''
1622 1623 self.failed = None
1623 1624 self.errors = []
1624 1625 self.error_details = OrderedDict()
1625 1626
1626 1627 def push_error(self, error_type, message, error_key, details):
1627 1628 self.failed = True
1628 1629 self.errors.append([error_type, message])
1629 1630 self.error_details[error_key] = dict(
1630 1631 details=details,
1631 1632 error_type=error_type,
1632 1633 message=message
1633 1634 )
1634 1635
1635 1636 @classmethod
1636 1637 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1637 1638 force_shadow_repo_refresh=False):
1638 1639 _ = translator
1639 1640 merge_check = cls()
1640 1641
1642 # title has WIP:
1643 if pull_request.work_in_progress:
1644 log.debug("MergeCheck: cannot merge, title has wip: marker.")
1645
1646 msg = _('WIP marker in title prevents an accidental merge.')
1647 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
1648 if fail_early:
1649 return merge_check
1650
1641 1651 # permissions to merge
1642 1652 user_allowed_to_merge = PullRequestModel().check_user_merge(
1643 1653 pull_request, auth_user)
1644 1654 if not user_allowed_to_merge:
1645 1655 log.debug("MergeCheck: cannot merge, approval is pending.")
1646 1656
1647 1657 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1648 1658 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1649 1659 if fail_early:
1650 1660 return merge_check
1651 1661
1652 1662 # permission to merge into the target branch
1653 1663 target_commit_id = pull_request.target_ref_parts.commit_id
1654 1664 if pull_request.target_ref_parts.type == 'branch':
1655 1665 branch_name = pull_request.target_ref_parts.name
1656 1666 else:
1657 1667 # for mercurial we can always figure out the branch from the commit
1658 1668 # in case of bookmark
1659 1669 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1660 1670 branch_name = target_commit.branch
1661 1671
1662 1672 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1663 1673 pull_request.target_repo.repo_name, branch_name)
1664 1674 if branch_perm and branch_perm == 'branch.none':
1665 1675 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1666 1676 branch_name, rule)
1667 1677 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1668 1678 if fail_early:
1669 1679 return merge_check
1670 1680
1671 1681 # review status, must be always present
1672 1682 review_status = pull_request.calculated_review_status()
1673 1683 merge_check.review_status = review_status
1674 1684
1675 1685 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1676 1686 if not status_approved:
1677 1687 log.debug("MergeCheck: cannot merge, approval is pending.")
1678 1688
1679 1689 msg = _('Pull request reviewer approval is pending.')
1680 1690
1681 1691 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1682 1692
1683 1693 if fail_early:
1684 1694 return merge_check
1685 1695
1686 1696 # left over TODOs
1687 1697 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1688 1698 if todos:
1689 1699 log.debug("MergeCheck: cannot merge, {} "
1690 1700 "unresolved TODOs left.".format(len(todos)))
1691 1701
1692 1702 if len(todos) == 1:
1693 1703 msg = _('Cannot merge, {} TODO still not resolved.').format(
1694 1704 len(todos))
1695 1705 else:
1696 1706 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1697 1707 len(todos))
1698 1708
1699 1709 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1700 1710
1701 1711 if fail_early:
1702 1712 return merge_check
1703 1713
1704 1714 # merge possible, here is the filesystem simulation + shadow repo
1705 1715 merge_status, msg = PullRequestModel().merge_status(
1706 1716 pull_request, translator=translator,
1707 1717 force_shadow_repo_refresh=force_shadow_repo_refresh)
1708 1718 merge_check.merge_possible = merge_status
1709 1719 merge_check.merge_msg = msg
1710 1720 if not merge_status:
1711 1721 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1712 1722 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1713 1723
1714 1724 if fail_early:
1715 1725 return merge_check
1716 1726
1717 1727 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1718 1728 return merge_check
1719 1729
1720 1730 @classmethod
1721 1731 def get_merge_conditions(cls, pull_request, translator):
1722 1732 _ = translator
1723 1733 merge_details = {}
1724 1734
1725 1735 model = PullRequestModel()
1726 1736 use_rebase = model._use_rebase_for_merging(pull_request)
1727 1737
1728 1738 if use_rebase:
1729 1739 merge_details['merge_strategy'] = dict(
1730 1740 details={},
1731 1741 message=_('Merge strategy: rebase')
1732 1742 )
1733 1743 else:
1734 1744 merge_details['merge_strategy'] = dict(
1735 1745 details={},
1736 1746 message=_('Merge strategy: explicit merge commit')
1737 1747 )
1738 1748
1739 1749 close_branch = model._close_branch_before_merging(pull_request)
1740 1750 if close_branch:
1741 1751 repo_type = pull_request.target_repo.repo_type
1742 1752 close_msg = ''
1743 1753 if repo_type == 'hg':
1744 1754 close_msg = _('Source branch will be closed after merge.')
1745 1755 elif repo_type == 'git':
1746 1756 close_msg = _('Source branch will be deleted after merge.')
1747 1757
1748 1758 merge_details['close_branch'] = dict(
1749 1759 details={},
1750 1760 message=close_msg
1751 1761 )
1752 1762
1753 1763 return merge_details
1754 1764
1755 1765
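# Hedged usage sketch (not part of this changeset; the helper name and the
# inline translator are illustrative only): a caller can run all of the checks
# above in a single pass and inspect the collected errors.
def _example_summarize_merge_checks(pull_request, auth_user):
    merge_check = MergeCheck.validate(
        pull_request, auth_user=auth_user, translator=lambda msg: msg)
    if merge_check.failed:
        for error_type, message in merge_check.errors:
            log.debug('merge check [%s]: %s', error_type, message)
    # error_details is keyed by TODO_CHECK, PERM_CHECK, REVIEW_CHECK,
    # MERGE_CHECK and the new WIP_CHECK
    return merge_check.merge_possible, merge_check.error_details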
1756 1766 ChangeTuple = collections.namedtuple(
1757 1767 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1758 1768
1759 1769 FileChangeTuple = collections.namedtuple(
1760 1770 'FileChangeTuple', ['added', 'modified', 'removed'])
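The WIP_CHECK above relies on a `work_in_progress` flag derived from the pull request title on the db model, which falls outside this truncated diff. A minimal sketch of such a title check, consistent with the cases asserted by `test_wip_title_marker` in the test file below (the exact regex and the case-insensitivity are assumptions, not the verbatim implementation):

import re

# markers accepted only at the start of the title: '[wip]', 'wip:' or 'wip '
# (case-insensitivity is assumed)
_WIP_TITLE_RE = re.compile(r'^(\[wip\]\s*|wip[:\s])', re.IGNORECASE)

def title_has_wip_marker(title):
    # a 'wip' buried later in the title (e.g. 'hello wip') must not block the merge
    return bool(_WIP_TITLE_RE.match((title or '').strip()))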
@@ -1,966 +1,980 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import textwrap
24 24
25 25 import rhodecode
26 26 from rhodecode.lib.utils2 import safe_unicode
27 27 from rhodecode.lib.vcs.backends import get_backend
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 MergeResponse, MergeFailureReason, Reference)
30 30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 31 from rhodecode.lib.vcs.nodes import FileNode
32 32 from rhodecode.model.comment import CommentsModel
33 33 from rhodecode.model.db import PullRequest, Session
34 34 from rhodecode.model.pull_request import PullRequestModel
35 35 from rhodecode.model.user import UserModel
36 36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37 37
38 38
39 39 pytestmark = [
40 40 pytest.mark.backends("git", "hg"),
41 41 ]
42 42
43 43
44 44 @pytest.mark.usefixtures('config_stub')
45 45 class TestPullRequestModel(object):
46 46
47 47 @pytest.fixture()
48 48 def pull_request(self, request, backend, pr_util):
49 49 """
50 50 A pull request combined with multiples patches.
51 51 """
52 52 BackendClass = get_backend(backend.alias)
53 53 merge_resp = MergeResponse(
54 54 False, False, None, MergeFailureReason.UNKNOWN,
55 55 metadata={'exception': 'MockError'})
56 56 self.merge_patcher = mock.patch.object(
57 57 BackendClass, 'merge', return_value=merge_resp)
58 58 self.workspace_remove_patcher = mock.patch.object(
59 59 BackendClass, 'cleanup_merge_workspace')
60 60
61 61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
62 62 self.merge_mock = self.merge_patcher.start()
63 63 self.comment_patcher = mock.patch(
64 64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
65 65 self.comment_patcher.start()
66 66 self.notification_patcher = mock.patch(
67 67 'rhodecode.model.notification.NotificationModel.create')
68 68 self.notification_patcher.start()
69 69 self.helper_patcher = mock.patch(
70 70 'rhodecode.lib.helpers.route_path')
71 71 self.helper_patcher.start()
72 72
73 73 self.hook_patcher = mock.patch.object(PullRequestModel,
74 74 'trigger_pull_request_hook')
75 75 self.hook_mock = self.hook_patcher.start()
76 76
77 77 self.invalidation_patcher = mock.patch(
78 78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
79 79 self.invalidation_mock = self.invalidation_patcher.start()
80 80
81 81 self.pull_request = pr_util.create_pull_request(
82 82 mergeable=True, name_suffix=u'ąć')
83 83 self.source_commit = self.pull_request.source_ref_parts.commit_id
84 84 self.target_commit = self.pull_request.target_ref_parts.commit_id
85 85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
86 86 self.repo_id = self.pull_request.target_repo.repo_id
87 87
88 88 @request.addfinalizer
89 89 def cleanup_pull_request():
90 90 calls = [mock.call(
91 91 self.pull_request, self.pull_request.author, 'create')]
92 92 self.hook_mock.assert_has_calls(calls)
93 93
94 94 self.workspace_remove_patcher.stop()
95 95 self.merge_patcher.stop()
96 96 self.comment_patcher.stop()
97 97 self.notification_patcher.stop()
98 98 self.helper_patcher.stop()
99 99 self.hook_patcher.stop()
100 100 self.invalidation_patcher.stop()
101 101
102 102 return self.pull_request
103 103
104 104 def test_get_all(self, pull_request):
105 105 prs = PullRequestModel().get_all(pull_request.target_repo)
106 106 assert isinstance(prs, list)
107 107 assert len(prs) == 1
108 108
109 109 def test_count_all(self, pull_request):
110 110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
111 111 assert pr_count == 1
112 112
113 113 def test_get_awaiting_review(self, pull_request):
114 114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
115 115 assert isinstance(prs, list)
116 116 assert len(prs) == 1
117 117
118 118 def test_count_awaiting_review(self, pull_request):
119 119 pr_count = PullRequestModel().count_awaiting_review(
120 120 pull_request.target_repo)
121 121 assert pr_count == 1
122 122
123 123 def test_get_awaiting_my_review(self, pull_request):
124 124 PullRequestModel().update_reviewers(
125 125 pull_request, [(pull_request.author, ['author'], False, [])],
126 126 pull_request.author)
127 127 Session().commit()
128 128
129 129 prs = PullRequestModel().get_awaiting_my_review(
130 130 pull_request.target_repo, user_id=pull_request.author.user_id)
131 131 assert isinstance(prs, list)
132 132 assert len(prs) == 1
133 133
134 134 def test_count_awaiting_my_review(self, pull_request):
135 135 PullRequestModel().update_reviewers(
136 136 pull_request, [(pull_request.author, ['author'], False, [])],
137 137 pull_request.author)
138 138 Session().commit()
139 139
140 140 pr_count = PullRequestModel().count_awaiting_my_review(
141 141 pull_request.target_repo, user_id=pull_request.author.user_id)
142 142 assert pr_count == 1
143 143
144 144 def test_delete_calls_cleanup_merge(self, pull_request):
145 145 repo_id = pull_request.target_repo.repo_id
146 146 PullRequestModel().delete(pull_request, pull_request.author)
147 147 Session().commit()
148 148
149 149 self.workspace_remove_mock.assert_called_once_with(
150 150 repo_id, self.workspace_id)
151 151
152 152 def test_close_calls_cleanup_and_hook(self, pull_request):
153 153 PullRequestModel().close_pull_request(
154 154 pull_request, pull_request.author)
155 155 Session().commit()
156 156
157 157 repo_id = pull_request.target_repo.repo_id
158 158
159 159 self.workspace_remove_mock.assert_called_once_with(
160 160 repo_id, self.workspace_id)
161 161 self.hook_mock.assert_called_with(
162 162 self.pull_request, self.pull_request.author, 'close')
163 163
164 164 def test_merge_status(self, pull_request):
165 165 self.merge_mock.return_value = MergeResponse(
166 166 True, False, None, MergeFailureReason.NONE)
167 167
168 168 assert pull_request._last_merge_source_rev is None
169 169 assert pull_request._last_merge_target_rev is None
170 170 assert pull_request.last_merge_status is None
171 171
172 172 status, msg = PullRequestModel().merge_status(pull_request)
173 173 assert status is True
174 174 assert msg == 'This pull request can be automatically merged.'
175 175 self.merge_mock.assert_called_with(
176 176 self.repo_id, self.workspace_id,
177 177 pull_request.target_ref_parts,
178 178 pull_request.source_repo.scm_instance(),
179 179 pull_request.source_ref_parts, dry_run=True,
180 180 use_rebase=False, close_branch=False)
181 181
182 182 assert pull_request._last_merge_source_rev == self.source_commit
183 183 assert pull_request._last_merge_target_rev == self.target_commit
184 184 assert pull_request.last_merge_status is MergeFailureReason.NONE
185 185
186 186 self.merge_mock.reset_mock()
187 187 status, msg = PullRequestModel().merge_status(pull_request)
188 188 assert status is True
189 189 assert msg == 'This pull request can be automatically merged.'
190 190 assert self.merge_mock.called is False
191 191
192 192 def test_merge_status_known_failure(self, pull_request):
193 193 self.merge_mock.return_value = MergeResponse(
194 194 False, False, None, MergeFailureReason.MERGE_FAILED,
195 195 metadata={'unresolved_files': 'file1'})
196 196
197 197 assert pull_request._last_merge_source_rev is None
198 198 assert pull_request._last_merge_target_rev is None
199 199 assert pull_request.last_merge_status is None
200 200
201 201 status, msg = PullRequestModel().merge_status(pull_request)
202 202 assert status is False
203 203 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
204 204 self.merge_mock.assert_called_with(
205 205 self.repo_id, self.workspace_id,
206 206 pull_request.target_ref_parts,
207 207 pull_request.source_repo.scm_instance(),
208 208 pull_request.source_ref_parts, dry_run=True,
209 209 use_rebase=False, close_branch=False)
210 210
211 211 assert pull_request._last_merge_source_rev == self.source_commit
212 212 assert pull_request._last_merge_target_rev == self.target_commit
213 213 assert pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED
214 214
215 215 self.merge_mock.reset_mock()
216 216 status, msg = PullRequestModel().merge_status(pull_request)
217 217 assert status is False
218 218 assert msg == 'This pull request cannot be merged because of merge conflicts. '
219 219 assert self.merge_mock.called is False
220 220
221 221 def test_merge_status_unknown_failure(self, pull_request):
222 222 self.merge_mock.return_value = MergeResponse(
223 223 False, False, None, MergeFailureReason.UNKNOWN,
224 224 metadata={'exception': 'MockError'})
225 225
226 226 assert pull_request._last_merge_source_rev is None
227 227 assert pull_request._last_merge_target_rev is None
228 228 assert pull_request.last_merge_status is None
229 229
230 230 status, msg = PullRequestModel().merge_status(pull_request)
231 231 assert status is False
232 232 assert msg == (
233 233 'This pull request cannot be merged because of an unhandled exception. '
234 234 'MockError')
235 235 self.merge_mock.assert_called_with(
236 236 self.repo_id, self.workspace_id,
237 237 pull_request.target_ref_parts,
238 238 pull_request.source_repo.scm_instance(),
239 239 pull_request.source_ref_parts, dry_run=True,
240 240 use_rebase=False, close_branch=False)
241 241
242 242 assert pull_request._last_merge_source_rev is None
243 243 assert pull_request._last_merge_target_rev is None
244 244 assert pull_request.last_merge_status is None
245 245
246 246 self.merge_mock.reset_mock()
247 247 status, msg = PullRequestModel().merge_status(pull_request)
248 248 assert status is False
249 249 assert msg == (
250 250 'This pull request cannot be merged because of an unhandled exception. '
251 251 'MockError')
252 252 assert self.merge_mock.called is True
253 253
254 254 def test_merge_status_when_target_is_locked(self, pull_request):
255 255 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
256 256 status, msg = PullRequestModel().merge_status(pull_request)
257 257 assert status is False
258 258 assert msg == (
259 259 'This pull request cannot be merged because the target repository '
260 260 'is locked by user:1.')
261 261
262 262 def test_merge_status_requirements_check_target(self, pull_request):
263 263
264 264 def has_largefiles(self, repo):
265 265 return repo == pull_request.source_repo
266 266
267 267 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
268 268 with patcher:
269 269 status, msg = PullRequestModel().merge_status(pull_request)
270 270
271 271 assert status is False
272 272 assert msg == 'Target repository large files support is disabled.'
273 273
274 274 def test_merge_status_requirements_check_source(self, pull_request):
275 275
276 276 def has_largefiles(self, repo):
277 277 return repo == pull_request.target_repo
278 278
279 279 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
280 280 with patcher:
281 281 status, msg = PullRequestModel().merge_status(pull_request)
282 282
283 283 assert status is False
284 284 assert msg == 'Source repository large files support is disabled.'
285 285
286 286 def test_merge(self, pull_request, merge_extras):
287 287 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
288 288 merge_ref = Reference(
289 289 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
290 290 self.merge_mock.return_value = MergeResponse(
291 291 True, True, merge_ref, MergeFailureReason.NONE)
292 292
293 293 merge_extras['repository'] = pull_request.target_repo.repo_name
294 294 PullRequestModel().merge_repo(
295 295 pull_request, pull_request.author, extras=merge_extras)
296 296 Session().commit()
297 297
298 298 message = (
299 299 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
300 300 u'\n\n {pr_title}'.format(
301 301 pr_id=pull_request.pull_request_id,
302 302 source_repo=safe_unicode(
303 303 pull_request.source_repo.scm_instance().name),
304 304 source_ref_name=pull_request.source_ref_parts.name,
305 305 pr_title=safe_unicode(pull_request.title)
306 306 )
307 307 )
308 308 self.merge_mock.assert_called_with(
309 309 self.repo_id, self.workspace_id,
310 310 pull_request.target_ref_parts,
311 311 pull_request.source_repo.scm_instance(),
312 312 pull_request.source_ref_parts,
313 313 user_name=user.short_contact, user_email=user.email, message=message,
314 314 use_rebase=False, close_branch=False
315 315 )
316 316 self.invalidation_mock.assert_called_once_with(
317 317 pull_request.target_repo.repo_name)
318 318
319 319 self.hook_mock.assert_called_with(
320 320 self.pull_request, self.pull_request.author, 'merge')
321 321
322 322 pull_request = PullRequest.get(pull_request.pull_request_id)
323 323 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
324 324
325 325 def test_merge_with_status_lock(self, pull_request, merge_extras):
326 326 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
327 327 merge_ref = Reference(
328 328 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
329 329 self.merge_mock.return_value = MergeResponse(
330 330 True, True, merge_ref, MergeFailureReason.NONE)
331 331
332 332 merge_extras['repository'] = pull_request.target_repo.repo_name
333 333
334 334 with pull_request.set_state(PullRequest.STATE_UPDATING):
335 335 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
336 336 PullRequestModel().merge_repo(
337 337 pull_request, pull_request.author, extras=merge_extras)
338 338 Session().commit()
339 339
340 340 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
341 341
342 342 message = (
343 343 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
344 344 u'\n\n {pr_title}'.format(
345 345 pr_id=pull_request.pull_request_id,
346 346 source_repo=safe_unicode(
347 347 pull_request.source_repo.scm_instance().name),
348 348 source_ref_name=pull_request.source_ref_parts.name,
349 349 pr_title=safe_unicode(pull_request.title)
350 350 )
351 351 )
352 352 self.merge_mock.assert_called_with(
353 353 self.repo_id, self.workspace_id,
354 354 pull_request.target_ref_parts,
355 355 pull_request.source_repo.scm_instance(),
356 356 pull_request.source_ref_parts,
357 357 user_name=user.short_contact, user_email=user.email, message=message,
358 358 use_rebase=False, close_branch=False
359 359 )
360 360 self.invalidation_mock.assert_called_once_with(
361 361 pull_request.target_repo.repo_name)
362 362
363 363 self.hook_mock.assert_called_with(
364 364 self.pull_request, self.pull_request.author, 'merge')
365 365
366 366 pull_request = PullRequest.get(pull_request.pull_request_id)
367 367 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
368 368
369 369 def test_merge_failed(self, pull_request, merge_extras):
370 370 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
371 371 merge_ref = Reference(
372 372 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
373 373 self.merge_mock.return_value = MergeResponse(
374 374 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
375 375
376 376 merge_extras['repository'] = pull_request.target_repo.repo_name
377 377 PullRequestModel().merge_repo(
378 378 pull_request, pull_request.author, extras=merge_extras)
379 379 Session().commit()
380 380
381 381 message = (
382 382 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
383 383 u'\n\n {pr_title}'.format(
384 384 pr_id=pull_request.pull_request_id,
385 385 source_repo=safe_unicode(
386 386 pull_request.source_repo.scm_instance().name),
387 387 source_ref_name=pull_request.source_ref_parts.name,
388 388 pr_title=safe_unicode(pull_request.title)
389 389 )
390 390 )
391 391 self.merge_mock.assert_called_with(
392 392 self.repo_id, self.workspace_id,
393 393 pull_request.target_ref_parts,
394 394 pull_request.source_repo.scm_instance(),
395 395 pull_request.source_ref_parts,
396 396 user_name=user.short_contact, user_email=user.email, message=message,
397 397 use_rebase=False, close_branch=False
398 398 )
399 399
400 400 pull_request = PullRequest.get(pull_request.pull_request_id)
401 401 assert self.invalidation_mock.called is False
402 402 assert pull_request.merge_rev is None
403 403
404 404 def test_get_commit_ids(self, pull_request):
405 # The PR has been not merget yet, so expect an exception
405 # The PR has not been merged yet, so expect an exception
406 406 with pytest.raises(ValueError):
407 407 PullRequestModel()._get_commit_ids(pull_request)
408 408
409 409 # Merge revision is in the revisions list
410 410 pull_request.merge_rev = pull_request.revisions[0]
411 411 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
412 412 assert commit_ids == pull_request.revisions
413 413
414 414 # Merge revision is not in the revisions list
415 415 pull_request.merge_rev = 'f000' * 10
416 416 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
417 417 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
418 418
419 419 def test_get_diff_from_pr_version(self, pull_request):
420 420 source_repo = pull_request.source_repo
421 421 source_ref_id = pull_request.source_ref_parts.commit_id
422 422 target_ref_id = pull_request.target_ref_parts.commit_id
423 423 diff = PullRequestModel()._get_diff_from_pr_or_version(
424 424 source_repo, source_ref_id, target_ref_id,
425 425 hide_whitespace_changes=False, diff_context=6)
426 426 assert 'file_1' in diff.raw
427 427
428 428 def test_generate_title_returns_unicode(self):
429 429 title = PullRequestModel().generate_pullrequest_title(
430 430 source='source-dummy',
431 431 source_ref='source-ref-dummy',
432 432 target='target-dummy',
433 433 )
434 434 assert type(title) == unicode
435 435
436 @pytest.mark.parametrize('title, has_wip', [
437 ('hello', False),
438 ('hello wip', False),
439 ('hello wip: xxx', False),
440 ('[wip] hello', True),
441 ('[wip] hello', True),
442 ('wip: hello', True),
443 ('wip hello', True),
444
445 ])
446 def test_wip_title_marker(self, pull_request, title, has_wip):
447 pull_request.title = title
448 assert pull_request.work_in_progress == has_wip
449
436 450
437 451 @pytest.mark.usefixtures('config_stub')
438 452 class TestIntegrationMerge(object):
439 453 @pytest.mark.parametrize('extra_config', (
440 454 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
441 455 ))
442 456 def test_merge_triggers_push_hooks(
443 457 self, pr_util, user_admin, capture_rcextensions, merge_extras,
444 458 extra_config):
445 459
446 460 pull_request = pr_util.create_pull_request(
447 461 approved=True, mergeable=True)
448 462 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
449 463 merge_extras['repository'] = pull_request.target_repo.repo_name
450 464 Session().commit()
451 465
452 466 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
453 467 merge_state = PullRequestModel().merge_repo(
454 468 pull_request, user_admin, extras=merge_extras)
455 469 Session().commit()
456 470
457 471 assert merge_state.executed
458 472 assert '_pre_push_hook' in capture_rcextensions
459 473 assert '_push_hook' in capture_rcextensions
460 474
461 475 def test_merge_can_be_rejected_by_pre_push_hook(
462 476 self, pr_util, user_admin, capture_rcextensions, merge_extras):
463 477 pull_request = pr_util.create_pull_request(
464 478 approved=True, mergeable=True)
465 479 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
466 480 merge_extras['repository'] = pull_request.target_repo.repo_name
467 481 Session().commit()
468 482
469 483 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
470 484 pre_pull.side_effect = RepositoryError("Disallow push!")
471 485 merge_status = PullRequestModel().merge_repo(
472 486 pull_request, user_admin, extras=merge_extras)
473 487 Session().commit()
474 488
475 489 assert not merge_status.executed
476 490 assert 'pre_push' not in capture_rcextensions
477 491 assert 'post_push' not in capture_rcextensions
478 492
479 493 def test_merge_fails_if_target_is_locked(
480 494 self, pr_util, user_regular, merge_extras):
481 495 pull_request = pr_util.create_pull_request(
482 496 approved=True, mergeable=True)
483 497 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
484 498 pull_request.target_repo.locked = locked_by
485 499 # TODO: johbo: Check if this can work based on the database, currently
486 500 # all data is pre-computed, that's why just updating the DB is not
487 501 # enough.
488 502 merge_extras['locked_by'] = locked_by
489 503 merge_extras['repository'] = pull_request.target_repo.repo_name
490 504 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
491 505 Session().commit()
492 506 merge_status = PullRequestModel().merge_repo(
493 507 pull_request, user_regular, extras=merge_extras)
494 508 Session().commit()
495 509
496 510 assert not merge_status.executed
497 511
498 512
499 513 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
500 514 (False, 1, 0),
501 515 (True, 0, 1),
502 516 ])
503 517 def test_outdated_comments(
504 518 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
505 519 pull_request = pr_util.create_pull_request()
506 520 pr_util.create_inline_comment(file_path='not_in_updated_diff')
507 521
508 522 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
509 523 pr_util.add_one_commit()
510 524 assert_inline_comments(
511 525 pull_request, visible=inlines_count, outdated=outdated_count)
512 526 outdated_comment_mock.assert_called_with(pull_request)
513 527
514 528
515 529 @pytest.mark.parametrize('mr_type, expected_msg', [
516 530 (MergeFailureReason.NONE,
517 531 'This pull request can be automatically merged.'),
518 532 (MergeFailureReason.UNKNOWN,
519 533 'This pull request cannot be merged because of an unhandled exception. CRASH'),
520 534 (MergeFailureReason.MERGE_FAILED,
521 535 'This pull request cannot be merged because of merge conflicts. CONFLICT_FILE'),
522 536 (MergeFailureReason.PUSH_FAILED,
523 537 'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
524 538 (MergeFailureReason.TARGET_IS_NOT_HEAD,
525 539 'This pull request cannot be merged because the target `ref_name` is not a head.'),
526 540 (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
527 541 'This pull request cannot be merged because the source contains more branches than the target.'),
528 542 (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
529 543 'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
530 544 (MergeFailureReason.TARGET_IS_LOCKED,
531 545 'This pull request cannot be merged because the target repository is locked by user:123.'),
532 546 (MergeFailureReason.MISSING_TARGET_REF,
533 547 'This pull request cannot be merged because the target reference `ref_name` is missing.'),
534 548 (MergeFailureReason.MISSING_SOURCE_REF,
535 549 'This pull request cannot be merged because the source reference `ref_name` is missing.'),
536 550 (MergeFailureReason.SUBREPO_MERGE_FAILED,
537 551 'This pull request cannot be merged because of conflicts related to sub repositories.'),
538 552
539 553 ])
540 554 def test_merge_response_message(mr_type, expected_msg):
541 555 merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
542 556 metadata = {
543 557 'unresolved_files': 'CONFLICT_FILE',
544 558 'exception': "CRASH",
545 559 'target': 'some-repo',
546 560 'merge_commit': 'merge_commit',
547 561 'target_ref': merge_ref,
548 562 'source_ref': merge_ref,
549 563 'heads': ','.join(['a', 'b', 'c']),
550 564 'locked_by': 'user:123'
551 565 }
552 566
553 567 merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
554 568 assert merge_response.merge_status_message == expected_msg
555 569
556 570
557 571 @pytest.fixture()
558 572 def merge_extras(user_regular):
559 573 """
560 574 Context for the vcs operation when running a merge.
561 575 """
562 576 extras = {
563 577 'ip': '127.0.0.1',
564 578 'username': user_regular.username,
565 579 'user_id': user_regular.user_id,
566 580 'action': 'push',
567 581 'repository': 'fake_target_repo_name',
568 582 'scm': 'git',
569 583 'config': 'fake_config_ini_path',
570 584 'repo_store': '',
571 585 'make_lock': None,
572 586 'locked_by': [None, None, None],
573 587 'server_url': 'http://test.example.com:5000',
574 588 'hooks': ['push', 'pull'],
575 589 'is_shadow_repo': False,
576 590 }
577 591 return extras
578 592
579 593
580 594 @pytest.mark.usefixtures('config_stub')
581 595 class TestUpdateCommentHandling(object):
582 596
583 597 @pytest.fixture(autouse=True, scope='class')
584 598 def enable_outdated_comments(self, request, baseapp):
585 599 config_patch = mock.patch.dict(
586 600 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
587 601 config_patch.start()
588 602
589 603 @request.addfinalizer
590 604 def cleanup():
591 605 config_patch.stop()
592 606
593 607 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
594 608 commits = [
595 609 {'message': 'a'},
596 610 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
597 611 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
598 612 ]
599 613 pull_request = pr_util.create_pull_request(
600 614 commits=commits, target_head='a', source_head='b', revisions=['b'])
601 615 pr_util.create_inline_comment(file_path='file_b')
602 616 pr_util.add_one_commit(head='c')
603 617
604 618 assert_inline_comments(pull_request, visible=1, outdated=0)
605 619
606 620 def test_comment_stays_unflagged_on_change_above(self, pr_util):
607 621 original_content = ''.join(
608 622 ['line {}\n'.format(x) for x in range(1, 11)])
609 623 updated_content = 'new_line_at_top\n' + original_content
610 624 commits = [
611 625 {'message': 'a'},
612 626 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
613 627 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
614 628 ]
615 629 pull_request = pr_util.create_pull_request(
616 630 commits=commits, target_head='a', source_head='b', revisions=['b'])
617 631
618 632 with outdated_comments_patcher():
619 633 comment = pr_util.create_inline_comment(
620 634 line_no=u'n8', file_path='file_b')
621 635 pr_util.add_one_commit(head='c')
622 636
623 637 assert_inline_comments(pull_request, visible=1, outdated=0)
624 638 assert comment.line_no == u'n9'
625 639
626 640 def test_comment_stays_unflagged_on_change_below(self, pr_util):
627 641 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
628 642 updated_content = original_content + 'new_line_at_end\n'
629 643 commits = [
630 644 {'message': 'a'},
631 645 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
632 646 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
633 647 ]
634 648 pull_request = pr_util.create_pull_request(
635 649 commits=commits, target_head='a', source_head='b', revisions=['b'])
636 650 pr_util.create_inline_comment(file_path='file_b')
637 651 pr_util.add_one_commit(head='c')
638 652
639 653 assert_inline_comments(pull_request, visible=1, outdated=0)
640 654
641 655 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
642 656 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
643 657 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
644 658 change_lines = list(base_lines)
645 659 change_lines.insert(6, 'line 6a added\n')
646 660
647 661 # Changes on the last line of sight
648 662 update_lines = list(change_lines)
649 663 update_lines[0] = 'line 1 changed\n'
650 664 update_lines[-1] = 'line 12 changed\n'
651 665
652 666 def file_b(lines):
653 667 return FileNode('file_b', ''.join(lines))
654 668
655 669 commits = [
656 670 {'message': 'a', 'added': [file_b(base_lines)]},
657 671 {'message': 'b', 'changed': [file_b(change_lines)]},
658 672 {'message': 'c', 'changed': [file_b(update_lines)]},
659 673 ]
660 674
661 675 pull_request = pr_util.create_pull_request(
662 676 commits=commits, target_head='a', source_head='b', revisions=['b'])
663 677 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
664 678
665 679 with outdated_comments_patcher():
666 680 pr_util.add_one_commit(head='c')
667 681 assert_inline_comments(pull_request, visible=0, outdated=1)
668 682
669 683 @pytest.mark.parametrize("change, content", [
670 684 ('changed', 'changed\n'),
671 685 ('removed', ''),
672 686 ], ids=['changed', 'removed'])
673 687 def test_comment_flagged_on_change(self, pr_util, change, content):
674 688 commits = [
675 689 {'message': 'a'},
676 690 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
677 691 {'message': 'c', change: [FileNode('file_b', content)]},
678 692 ]
679 693 pull_request = pr_util.create_pull_request(
680 694 commits=commits, target_head='a', source_head='b', revisions=['b'])
681 695 pr_util.create_inline_comment(file_path='file_b')
682 696
683 697 with outdated_comments_patcher():
684 698 pr_util.add_one_commit(head='c')
685 699 assert_inline_comments(pull_request, visible=0, outdated=1)
686 700
687 701
688 702 @pytest.mark.usefixtures('config_stub')
689 703 class TestUpdateChangedFiles(object):
690 704
691 705 def test_no_changes_on_unchanged_diff(self, pr_util):
692 706 commits = [
693 707 {'message': 'a'},
694 708 {'message': 'b',
695 709 'added': [FileNode('file_b', 'test_content b\n')]},
696 710 {'message': 'c',
697 711 'added': [FileNode('file_c', 'test_content c\n')]},
698 712 ]
699 713 # open a PR from a to b, adding file_b
700 714 pull_request = pr_util.create_pull_request(
701 715 commits=commits, target_head='a', source_head='b', revisions=['b'],
702 716 name_suffix='per-file-review')
703 717
704 718 # modify PR adding new file file_c
705 719 pr_util.add_one_commit(head='c')
706 720
707 721 assert_pr_file_changes(
708 722 pull_request,
709 723 added=['file_c'],
710 724 modified=[],
711 725 removed=[])
712 726
713 727 def test_modify_and_undo_modification_diff(self, pr_util):
714 728 commits = [
715 729 {'message': 'a'},
716 730 {'message': 'b',
717 731 'added': [FileNode('file_b', 'test_content b\n')]},
718 732 {'message': 'c',
719 733 'changed': [FileNode('file_b', 'test_content b modified\n')]},
720 734 {'message': 'd',
721 735 'changed': [FileNode('file_b', 'test_content b\n')]},
722 736 ]
723 737 # open a PR from a to b, adding file_b
724 738 pull_request = pr_util.create_pull_request(
725 739 commits=commits, target_head='a', source_head='b', revisions=['b'],
726 740 name_suffix='per-file-review')
727 741
728 742 # modify PR modifying file file_b
729 743 pr_util.add_one_commit(head='c')
730 744
731 745 assert_pr_file_changes(
732 746 pull_request,
733 747 added=[],
734 748 modified=['file_b'],
735 749 removed=[])
736 750
737 751 # move the head again to d, which rollbacks change,
738 752 # meaning we should indicate no changes
739 753 pr_util.add_one_commit(head='d')
740 754
741 755 assert_pr_file_changes(
742 756 pull_request,
743 757 added=[],
744 758 modified=[],
745 759 removed=[])
746 760
747 761 def test_updated_all_files_in_pr(self, pr_util):
748 762 commits = [
749 763 {'message': 'a'},
750 764 {'message': 'b', 'added': [
751 765 FileNode('file_a', 'test_content a\n'),
752 766 FileNode('file_b', 'test_content b\n'),
753 767 FileNode('file_c', 'test_content c\n')]},
754 768 {'message': 'c', 'changed': [
755 769 FileNode('file_a', 'test_content a changed\n'),
756 770 FileNode('file_b', 'test_content b changed\n'),
757 771 FileNode('file_c', 'test_content c changed\n')]},
758 772 ]
759 773 # open a PR from a to b, changing 3 files
760 774 pull_request = pr_util.create_pull_request(
761 775 commits=commits, target_head='a', source_head='b', revisions=['b'],
762 776 name_suffix='per-file-review')
763 777
764 778 pr_util.add_one_commit(head='c')
765 779
766 780 assert_pr_file_changes(
767 781 pull_request,
768 782 added=[],
769 783 modified=['file_a', 'file_b', 'file_c'],
770 784 removed=[])
771 785
772 786 def test_updated_and_removed_all_files_in_pr(self, pr_util):
773 787 commits = [
774 788 {'message': 'a'},
775 789 {'message': 'b', 'added': [
776 790 FileNode('file_a', 'test_content a\n'),
777 791 FileNode('file_b', 'test_content b\n'),
778 792 FileNode('file_c', 'test_content c\n')]},
779 793 {'message': 'c', 'removed': [
780 794 FileNode('file_a', 'test_content a changed\n'),
781 795 FileNode('file_b', 'test_content b changed\n'),
782 796 FileNode('file_c', 'test_content c changed\n')]},
783 797 ]
784 798 # open a PR from a to b, removing 3 files
785 799 pull_request = pr_util.create_pull_request(
786 800 commits=commits, target_head='a', source_head='b', revisions=['b'],
787 801 name_suffix='per-file-review')
788 802
789 803 pr_util.add_one_commit(head='c')
790 804
791 805 assert_pr_file_changes(
792 806 pull_request,
793 807 added=[],
794 808 modified=[],
795 809 removed=['file_a', 'file_b', 'file_c'])
796 810
797 811
798 812 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
799 813 model = PullRequestModel()
800 814 pull_request = pr_util.create_pull_request()
801 815 pr_util.update_source_repository()
802 816
803 817 model.update_commits(pull_request)
804 818
805 819 # Expect that it has a version entry now
806 820 assert len(model.get_versions(pull_request)) == 1
807 821
808 822
809 823 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
810 824 pull_request = pr_util.create_pull_request()
811 825 model = PullRequestModel()
812 826 model.update_commits(pull_request)
813 827
814 828 # Expect that it still has no versions
815 829 assert len(model.get_versions(pull_request)) == 0
816 830
817 831
818 832 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
819 833 model = PullRequestModel()
820 834 pull_request = pr_util.create_pull_request()
821 835 comment = pr_util.create_comment()
822 836 pr_util.update_source_repository()
823 837
824 838 model.update_commits(pull_request)
825 839
826 840 # Expect that the comment is linked to the pr version now
827 841 assert comment.pull_request_version == model.get_versions(pull_request)[0]
828 842
829 843
830 844 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
831 845 model = PullRequestModel()
832 846 pull_request = pr_util.create_pull_request()
833 847 pr_util.update_source_repository()
834 848 pr_util.update_source_repository()
835 849
836 850 model.update_commits(pull_request)
837 851
838 852 # Expect to find a new comment about the change
839 853 expected_message = textwrap.dedent(
840 854 """\
841 855 Pull request updated. Auto status change to |under_review|
842 856
843 857 .. role:: added
844 858 .. role:: removed
845 859 .. parsed-literal::
846 860
847 861 Changed commits:
848 862 * :added:`1 added`
849 863 * :removed:`0 removed`
850 864
851 865 Changed files:
852 866 * `A file_2 <#a_c--92ed3b5f07b4>`_
853 867
854 868 .. |under_review| replace:: *"Under Review"*"""
855 869 )
856 870 pull_request_comments = sorted(
857 871 pull_request.comments, key=lambda c: c.modified_at)
858 872 update_comment = pull_request_comments[-1]
859 873 assert update_comment.text == expected_message
860 874
861 875
862 876 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
863 877 pull_request = pr_util.create_pull_request()
864 878
865 879 # Avoiding default values
866 880 pull_request.status = PullRequest.STATUS_CLOSED
867 881 pull_request._last_merge_source_rev = "0" * 40
868 882 pull_request._last_merge_target_rev = "1" * 40
869 883 pull_request.last_merge_status = 1
870 884 pull_request.merge_rev = "2" * 40
871 885
872 886 # Remember automatic values
873 887 created_on = pull_request.created_on
874 888 updated_on = pull_request.updated_on
875 889
876 890 # Create a new version of the pull request
877 891 version = PullRequestModel()._create_version_from_snapshot(pull_request)
878 892
879 893 # Check attributes
880 894 assert version.title == pr_util.create_parameters['title']
881 895 assert version.description == pr_util.create_parameters['description']
882 896 assert version.status == PullRequest.STATUS_CLOSED
883 897
884 898 # versions get updated created_on
885 899 assert version.created_on != created_on
886 900
887 901 assert version.updated_on == updated_on
888 902 assert version.user_id == pull_request.user_id
889 903 assert version.revisions == pr_util.create_parameters['revisions']
890 904 assert version.source_repo == pr_util.source_repository
891 905 assert version.source_ref == pr_util.create_parameters['source_ref']
892 906 assert version.target_repo == pr_util.target_repository
893 907 assert version.target_ref == pr_util.create_parameters['target_ref']
894 908 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
895 909 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
896 910 assert version.last_merge_status == pull_request.last_merge_status
897 911 assert version.merge_rev == pull_request.merge_rev
898 912 assert version.pull_request == pull_request
899 913
900 914
901 915 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
902 916 version1 = pr_util.create_version_of_pull_request()
903 917 comment_linked = pr_util.create_comment(linked_to=version1)
904 918 comment_unlinked = pr_util.create_comment()
905 919 version2 = pr_util.create_version_of_pull_request()
906 920
907 921 PullRequestModel()._link_comments_to_version(version2)
908 922 Session().commit()
909 923
910 924 # Expect that only the new comment is linked to version2
911 925 assert (
912 926 comment_unlinked.pull_request_version_id ==
913 927 version2.pull_request_version_id)
914 928 assert (
915 929 comment_linked.pull_request_version_id ==
916 930 version1.pull_request_version_id)
917 931 assert (
918 932 comment_unlinked.pull_request_version_id !=
919 933 comment_linked.pull_request_version_id)
920 934
921 935
922 936 def test_calculate_commits():
923 937 old_ids = [1, 2, 3]
924 938 new_ids = [1, 3, 4, 5]
925 939 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
926 940 assert change.added == [4, 5]
927 941 assert change.common == [1, 3]
928 942 assert change.removed == [2]
929 943 assert change.total == [1, 3, 4, 5]
930 944
931 945
932 946 def assert_inline_comments(pull_request, visible=None, outdated=None):
933 947 if visible is not None:
934 948 inline_comments = CommentsModel().get_inline_comments(
935 949 pull_request.target_repo.repo_id, pull_request=pull_request)
936 950 inline_cnt = CommentsModel().get_inline_comments_count(
937 951 inline_comments)
938 952 assert inline_cnt == visible
939 953 if outdated is not None:
940 954 outdated_comments = CommentsModel().get_outdated_comments(
941 955 pull_request.target_repo.repo_id, pull_request)
942 956 assert len(outdated_comments) == outdated
943 957
944 958
945 959 def assert_pr_file_changes(
946 960 pull_request, added=None, modified=None, removed=None):
947 961 pr_versions = PullRequestModel().get_versions(pull_request)
948 962 # always use the first version, i.e. the original PR, to calculate changes
949 963 pull_request_version = pr_versions[0]
950 964 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
951 965 pull_request, pull_request_version)
952 966 file_changes = PullRequestModel()._calculate_file_changes(
953 967 old_diff_data, new_diff_data)
954 968
955 969 assert added == file_changes.added, \
956 970 'expected added:%s vs value:%s' % (added, file_changes.added)
957 971 assert modified == file_changes.modified, \
958 972 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
959 973 assert removed == file_changes.removed, \
960 974 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
961 975
962 976
963 977 def outdated_comments_patcher(use_outdated=True):
964 978 return mock.patch.object(
965 979 CommentsModel, 'use_outdated_comments',
966 980 return_value=use_outdated)
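# A possible follow-up test (sketch only, not part of this changeset) exercising
# the new WIP_CHECK path end to end through MergeCheck.validate. It reuses the
# pr_util, user_admin and config_stub fixtures from above; passing user_admin
# directly as auth_user is an assumption, but with fail_early=True the WIP check
# returns before the user is inspected.
def test_merge_check_fails_early_on_wip_title(pr_util, user_admin, config_stub):
    from rhodecode.model.pull_request import MergeCheck

    pull_request = pr_util.create_pull_request(approved=True, mergeable=True)
    pull_request.title = 'wip: not ready for review yet'

    merge_check = MergeCheck.validate(
        pull_request, auth_user=user_admin,
        translator=lambda msg: msg, fail_early=True)

    # the WIP marker is reported as a blocking error under the WIP_CHECK key
    assert merge_check.failed
    assert MergeCheck.WIP_CHECK in merge_check.error_details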