Commit: pull-requests: when creating a new version set the created_date to now instead of...
Author: marcink
Revision: r1207:b4498430 (branch: stable)
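The change sits in _create_version_from_snapshot (old/new line 708 of the first hunk): when a pull request is updated, the stored PullRequestVersion snapshot now records the time the snapshot was taken instead of inheriting the pull request's original created_on. Below is a minimal standalone sketch of the difference; the _PullRequest and _Version classes are hypothetical stand-ins, and only the final assignment mirrors the diff.

    import datetime

    class _PullRequest(object):
        # hypothetical stand-in for rhodecode.model.db.PullRequest
        created_on = datetime.datetime(2016, 1, 1)

    class _Version(object):
        # hypothetical stand-in for PullRequestVersion
        created_on = None

    def snapshot(pull_request):
        version = _Version()
        # before this commit: version.created_on = pull_request.created_on
        # after this commit: record when the snapshot itself was created
        version.created_on = datetime.datetime.now()
        return version

    # the snapshot timestamp no longer equals the pull request's creation date
    assert snapshot(_PullRequest()).created_on != _PullRequest.created_on

As a result, successive versions produced by update_commits() carry timestamps ordered by when each update actually happened, rather than all sharing the pull request's original creation date.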
@@ -1,1317 +1,1317 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31
32 32 from pylons.i18n.translation import _
33 33 from pylons.i18n.translation import lazy_ugettext
34 34 from sqlalchemy import or_
35 35
36 36 from rhodecode.lib import helpers as h, hooks_utils, diffs
37 37 from rhodecode.lib.compat import OrderedDict
38 38 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
39 39 from rhodecode.lib.markup_renderer import (
40 40 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
41 41 from rhodecode.lib.utils import action_logger
42 42 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
43 43 from rhodecode.lib.vcs.backends.base import (
44 44 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
45 45 from rhodecode.lib.vcs.conf import settings as vcs_settings
46 46 from rhodecode.lib.vcs.exceptions import (
47 47 CommitDoesNotExistError, EmptyRepositoryError)
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.changeset_status import ChangesetStatusModel
50 50 from rhodecode.model.comment import ChangesetCommentsModel
51 51 from rhodecode.model.db import (
52 52 PullRequest, PullRequestReviewers, ChangesetStatus,
53 53 PullRequestVersion, ChangesetComment)
54 54 from rhodecode.model.meta import Session
55 55 from rhodecode.model.notification import NotificationModel, \
56 56 EmailNotificationModel
57 57 from rhodecode.model.scm import ScmModel
58 58 from rhodecode.model.settings import VcsSettingsModel
59 59
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63
64 64 # Data structure to hold the response data when updating commits during a pull
65 65 # request update.
66 66 UpdateResponse = namedtuple(
67 67 'UpdateResponse', 'executed, reason, new, old, changes')
68 68
69 69
70 70 class PullRequestModel(BaseModel):
71 71
72 72 cls = PullRequest
73 73
74 74 DIFF_CONTEXT = 3
75 75
76 76 MERGE_STATUS_MESSAGES = {
77 77 MergeFailureReason.NONE: lazy_ugettext(
78 78 'This pull request can be automatically merged.'),
79 79 MergeFailureReason.UNKNOWN: lazy_ugettext(
80 80 'This pull request cannot be merged because of an unhandled'
81 81 ' exception.'),
82 82 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
83 83 'This pull request cannot be merged because of conflicts.'),
84 84 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
85 85 'This pull request could not be merged because push to target'
86 86 ' failed.'),
87 87 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
88 88 'This pull request cannot be merged because the target is not a'
89 89 ' head.'),
90 90 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
91 91 'This pull request cannot be merged because the source contains'
92 92 ' more branches than the target.'),
93 93 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
94 94 'This pull request cannot be merged because the target has'
95 95 ' multiple heads.'),
96 96 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
97 97 'This pull request cannot be merged because the target repository'
98 98 ' is locked.'),
99 99 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
100 100 'This pull request cannot be merged because the target or the '
101 101 'source reference is missing.'),
102 102 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
103 103 'This pull request cannot be merged because the target '
104 104 'reference is missing.'),
105 105 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
106 106 'This pull request cannot be merged because the source '
107 107 'reference is missing.'),
108 108 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
109 109 'This pull request cannot be merged because of conflicts related '
110 110 'to sub repositories.'),
111 111 }
112 112
113 113 UPDATE_STATUS_MESSAGES = {
114 114 UpdateFailureReason.NONE: lazy_ugettext(
115 115 'Pull request update successful.'),
116 116 UpdateFailureReason.UNKNOWN: lazy_ugettext(
117 117 'Pull request update failed because of an unknown error.'),
118 118 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
119 119 'No update needed because the source reference is already '
120 120 'up to date.'),
121 121 UpdateFailureReason.WRONG_REF_TPYE: lazy_ugettext(
122 122 'Pull request cannot be updated because the reference type is '
123 123 'not supported for an update.'),
124 124 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
125 125 'This pull request cannot be updated because the target '
126 126 'reference is missing.'),
127 127 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
128 128 'This pull request cannot be updated because the source '
129 129 'reference is missing.'),
130 130 }
131 131
132 132 def __get_pull_request(self, pull_request):
133 133 return self._get_instance(PullRequest, pull_request)
134 134
135 135 def _check_perms(self, perms, pull_request, user, api=False):
136 136 if not api:
137 137 return h.HasRepoPermissionAny(*perms)(
138 138 user=user, repo_name=pull_request.target_repo.repo_name)
139 139 else:
140 140 return h.HasRepoPermissionAnyApi(*perms)(
141 141 user=user, repo_name=pull_request.target_repo.repo_name)
142 142
143 143 def check_user_read(self, pull_request, user, api=False):
144 144 _perms = ('repository.admin', 'repository.write', 'repository.read',)
145 145 return self._check_perms(_perms, pull_request, user, api)
146 146
147 147 def check_user_merge(self, pull_request, user, api=False):
148 148 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
149 149 return self._check_perms(_perms, pull_request, user, api)
150 150
151 151 def check_user_update(self, pull_request, user, api=False):
152 152 owner = user.user_id == pull_request.user_id
153 153 return self.check_user_merge(pull_request, user, api) or owner
154 154
155 155 def check_user_delete(self, pull_request, user):
156 156 owner = user.user_id == pull_request.user_id
157 157 _perms = ('repository.admin')
158 158 return self._check_perms(_perms, pull_request, user) or owner
159 159
160 160 def check_user_change_status(self, pull_request, user, api=False):
161 161 reviewer = user.user_id in [x.user_id for x in
162 162 pull_request.reviewers]
163 163 return self.check_user_update(pull_request, user, api) or reviewer
164 164
165 165 def get(self, pull_request):
166 166 return self.__get_pull_request(pull_request)
167 167
168 168 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
169 169 opened_by=None, order_by=None,
170 170 order_dir='desc'):
171 171 repo = None
172 172 if repo_name:
173 173 repo = self._get_repo(repo_name)
174 174
175 175 q = PullRequest.query()
176 176
177 177 # source or target
178 178 if repo and source:
179 179 q = q.filter(PullRequest.source_repo == repo)
180 180 elif repo:
181 181 q = q.filter(PullRequest.target_repo == repo)
182 182
183 183 # closed,opened
184 184 if statuses:
185 185 q = q.filter(PullRequest.status.in_(statuses))
186 186
187 187 # opened by filter
188 188 if opened_by:
189 189 q = q.filter(PullRequest.user_id.in_(opened_by))
190 190
191 191 if order_by:
192 192 order_map = {
193 193 'name_raw': PullRequest.pull_request_id,
194 194 'title': PullRequest.title,
195 195 'updated_on_raw': PullRequest.updated_on,
196 196 'target_repo': PullRequest.target_repo_id
197 197 }
198 198 if order_dir == 'asc':
199 199 q = q.order_by(order_map[order_by].asc())
200 200 else:
201 201 q = q.order_by(order_map[order_by].desc())
202 202
203 203 return q
204 204
205 205 def count_all(self, repo_name, source=False, statuses=None,
206 206 opened_by=None):
207 207 """
208 208 Count the number of pull requests for a specific repository.
209 209
210 210 :param repo_name: target or source repo
211 211 :param source: boolean flag to specify if repo_name refers to source
212 212 :param statuses: list of pull request statuses
213 213 :param opened_by: author user of the pull request
214 214 :returns: int number of pull requests
215 215 """
216 216 q = self._prepare_get_all_query(
217 217 repo_name, source=source, statuses=statuses, opened_by=opened_by)
218 218
219 219 return q.count()
220 220
221 221 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
222 222 offset=0, length=None, order_by=None, order_dir='desc'):
223 223 """
224 224 Get all pull requests for a specific repository.
225 225
226 226 :param repo_name: target or source repo
227 227 :param source: boolean flag to specify if repo_name refers to source
228 228 :param statuses: list of pull request statuses
229 229 :param opened_by: author user of the pull request
230 230 :param offset: pagination offset
231 231 :param length: length of returned list
232 232 :param order_by: order of the returned list
233 233 :param order_dir: 'asc' or 'desc' ordering direction
234 234 :returns: list of pull requests
235 235 """
236 236 q = self._prepare_get_all_query(
237 237 repo_name, source=source, statuses=statuses, opened_by=opened_by,
238 238 order_by=order_by, order_dir=order_dir)
239 239
240 240 if length:
241 241 pull_requests = q.limit(length).offset(offset).all()
242 242 else:
243 243 pull_requests = q.all()
244 244
245 245 return pull_requests
246 246
247 247 def count_awaiting_review(self, repo_name, source=False, statuses=None,
248 248 opened_by=None):
249 249 """
250 250 Count the number of pull requests for a specific repository that are
251 251 awaiting review.
252 252
253 253 :param repo_name: target or source repo
254 254 :param source: boolean flag to specify if repo_name refers to source
255 255 :param statuses: list of pull request statuses
256 256 :param opened_by: author user of the pull request
257 257 :returns: int number of pull requests
258 258 """
259 259 pull_requests = self.get_awaiting_review(
260 260 repo_name, source=source, statuses=statuses, opened_by=opened_by)
261 261
262 262 return len(pull_requests)
263 263
264 264 def get_awaiting_review(self, repo_name, source=False, statuses=None,
265 265 opened_by=None, offset=0, length=None,
266 266 order_by=None, order_dir='desc'):
267 267 """
268 268 Get all pull requests for a specific repository that are awaiting
269 269 review.
270 270
271 271 :param repo_name: target or source repo
272 272 :param source: boolean flag to specify if repo_name refers to source
273 273 :param statuses: list of pull request statuses
274 274 :param opened_by: author user of the pull request
275 275 :param offset: pagination offset
276 276 :param length: length of returned list
277 277 :param order_by: order of the returned list
278 278 :param order_dir: 'asc' or 'desc' ordering direction
279 279 :returns: list of pull requests
280 280 """
281 281 pull_requests = self.get_all(
282 282 repo_name, source=source, statuses=statuses, opened_by=opened_by,
283 283 order_by=order_by, order_dir=order_dir)
284 284
285 285 _filtered_pull_requests = []
286 286 for pr in pull_requests:
287 287 status = pr.calculated_review_status()
288 288 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
289 289 ChangesetStatus.STATUS_UNDER_REVIEW]:
290 290 _filtered_pull_requests.append(pr)
291 291 if length:
292 292 return _filtered_pull_requests[offset:offset+length]
293 293 else:
294 294 return _filtered_pull_requests
295 295
296 296 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
297 297 opened_by=None, user_id=None):
298 298 """
299 299 Count the number of pull requests for a specific repository that are
300 300 awaiting review from a specific user.
301 301
302 302 :param repo_name: target or source repo
303 303 :param source: boolean flag to specify if repo_name refers to source
304 304 :param statuses: list of pull request statuses
305 305 :param opened_by: author user of the pull request
306 306 :param user_id: reviewer user of the pull request
307 307 :returns: int number of pull requests
308 308 """
309 309 pull_requests = self.get_awaiting_my_review(
310 310 repo_name, source=source, statuses=statuses, opened_by=opened_by,
311 311 user_id=user_id)
312 312
313 313 return len(pull_requests)
314 314
315 315 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
316 316 opened_by=None, user_id=None, offset=0,
317 317 length=None, order_by=None, order_dir='desc'):
318 318 """
319 319 Get all pull requests for a specific repository that are awaiting
320 320 review from a specific user.
321 321
322 322 :param repo_name: target or source repo
323 323 :param source: boolean flag to specify if repo_name refers to source
324 324 :param statuses: list of pull request statuses
325 325 :param opened_by: author user of the pull request
326 326 :param user_id: reviewer user of the pull request
327 327 :param offset: pagination offset
328 328 :param length: length of returned list
329 329 :param order_by: order of the returned list
330 330 :param order_dir: 'asc' or 'desc' ordering direction
331 331 :returns: list of pull requests
332 332 """
333 333 pull_requests = self.get_all(
334 334 repo_name, source=source, statuses=statuses, opened_by=opened_by,
335 335 order_by=order_by, order_dir=order_dir)
336 336
337 337 _my = PullRequestModel().get_not_reviewed(user_id)
338 338 my_participation = []
339 339 for pr in pull_requests:
340 340 if pr in _my:
341 341 my_participation.append(pr)
342 342 _filtered_pull_requests = my_participation
343 343 if length:
344 344 return _filtered_pull_requests[offset:offset+length]
345 345 else:
346 346 return _filtered_pull_requests
347 347
348 348 def get_not_reviewed(self, user_id):
349 349 return [
350 350 x.pull_request for x in PullRequestReviewers.query().filter(
351 351 PullRequestReviewers.user_id == user_id).all()
352 352 ]
353 353
354 354 def _prepare_participating_query(self, user_id=None, statuses=None,
355 355 order_by=None, order_dir='desc'):
356 356 q = PullRequest.query()
357 357 if user_id:
358 358 reviewers_subquery = Session().query(
359 359 PullRequestReviewers.pull_request_id).filter(
360 360 PullRequestReviewers.user_id == user_id).subquery()
361 361 user_filter= or_(
362 362 PullRequest.user_id == user_id,
363 363 PullRequest.pull_request_id.in_(reviewers_subquery)
364 364 )
365 365 q = PullRequest.query().filter(user_filter)
366 366
367 367 # closed,opened
368 368 if statuses:
369 369 q = q.filter(PullRequest.status.in_(statuses))
370 370
371 371 if order_by:
372 372 order_map = {
373 373 'name_raw': PullRequest.pull_request_id,
374 374 'title': PullRequest.title,
375 375 'updated_on_raw': PullRequest.updated_on,
376 376 'target_repo': PullRequest.target_repo_id
377 377 }
378 378 if order_dir == 'asc':
379 379 q = q.order_by(order_map[order_by].asc())
380 380 else:
381 381 q = q.order_by(order_map[order_by].desc())
382 382
383 383 return q
384 384
385 385 def count_im_participating_in(self, user_id=None, statuses=None):
386 386 q = self._prepare_participating_query(user_id, statuses=statuses)
387 387 return q.count()
388 388
389 389 def get_im_participating_in(
390 390 self, user_id=None, statuses=None, offset=0,
391 391 length=None, order_by=None, order_dir='desc'):
392 392 """
393 393 Get all Pull requests that i'm participating in, or i have opened
394 394 """
395 395
396 396 q = self._prepare_participating_query(
397 397 user_id, statuses=statuses, order_by=order_by,
398 398 order_dir=order_dir)
399 399
400 400 if length:
401 401 pull_requests = q.limit(length).offset(offset).all()
402 402 else:
403 403 pull_requests = q.all()
404 404
405 405 return pull_requests
406 406
407 407 def get_versions(self, pull_request):
408 408 """
409 409 returns version of pull request sorted by ID descending
410 410 """
411 411 return PullRequestVersion.query()\
412 412 .filter(PullRequestVersion.pull_request == pull_request)\
413 413 .order_by(PullRequestVersion.pull_request_version_id.asc())\
414 414 .all()
415 415
416 416 def create(self, created_by, source_repo, source_ref, target_repo,
417 417 target_ref, revisions, reviewers, title, description=None):
418 418 created_by_user = self._get_user(created_by)
419 419 source_repo = self._get_repo(source_repo)
420 420 target_repo = self._get_repo(target_repo)
421 421
422 422 pull_request = PullRequest()
423 423 pull_request.source_repo = source_repo
424 424 pull_request.source_ref = source_ref
425 425 pull_request.target_repo = target_repo
426 426 pull_request.target_ref = target_ref
427 427 pull_request.revisions = revisions
428 428 pull_request.title = title
429 429 pull_request.description = description
430 430 pull_request.author = created_by_user
431 431
432 432 Session().add(pull_request)
433 433 Session().flush()
434 434
435 435 reviewer_ids = set()
436 436 # members / reviewers
437 437 for reviewer_object in reviewers:
438 438 if isinstance(reviewer_object, tuple):
439 439 user_id, reasons = reviewer_object
440 440 else:
441 441 user_id, reasons = reviewer_object, []
442 442
443 443 user = self._get_user(user_id)
444 444 reviewer_ids.add(user.user_id)
445 445
446 446 reviewer = PullRequestReviewers(user, pull_request, reasons)
447 447 Session().add(reviewer)
448 448
449 449 # Set approval status to "Under Review" for all commits which are
450 450 # part of this pull request.
451 451 ChangesetStatusModel().set_status(
452 452 repo=target_repo,
453 453 status=ChangesetStatus.STATUS_UNDER_REVIEW,
454 454 user=created_by_user,
455 455 pull_request=pull_request
456 456 )
457 457
458 458 self.notify_reviewers(pull_request, reviewer_ids)
459 459 self._trigger_pull_request_hook(
460 460 pull_request, created_by_user, 'create')
461 461
462 462 return pull_request
463 463
464 464 def _trigger_pull_request_hook(self, pull_request, user, action):
465 465 pull_request = self.__get_pull_request(pull_request)
466 466 target_scm = pull_request.target_repo.scm_instance()
467 467 if action == 'create':
468 468 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
469 469 elif action == 'merge':
470 470 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
471 471 elif action == 'close':
472 472 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
473 473 elif action == 'review_status_change':
474 474 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
475 475 elif action == 'update':
476 476 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
477 477 else:
478 478 return
479 479
480 480 trigger_hook(
481 481 username=user.username,
482 482 repo_name=pull_request.target_repo.repo_name,
483 483 repo_alias=target_scm.alias,
484 484 pull_request=pull_request)
485 485
486 486 def _get_commit_ids(self, pull_request):
487 487 """
488 488 Return the commit ids of the merged pull request.
489 489
490 490 This method is not dealing correctly yet with the lack of autoupdates
491 491 nor with the implicit target updates.
492 492 For example: if a commit in the source repo is already in the target it
493 493 will be reported anyways.
494 494 """
495 495 merge_rev = pull_request.merge_rev
496 496 if merge_rev is None:
497 497 raise ValueError('This pull request was not merged yet')
498 498
499 499 commit_ids = list(pull_request.revisions)
500 500 if merge_rev not in commit_ids:
501 501 commit_ids.append(merge_rev)
502 502
503 503 return commit_ids
504 504
505 505 def merge(self, pull_request, user, extras):
506 506 log.debug("Merging pull request %s", pull_request.pull_request_id)
507 507 merge_state = self._merge_pull_request(pull_request, user, extras)
508 508 if merge_state.executed:
509 509 log.debug(
510 510 "Merge was successful, updating the pull request comments.")
511 511 self._comment_and_close_pr(pull_request, user, merge_state)
512 512 self._log_action('user_merged_pull_request', user, pull_request)
513 513 else:
514 514 log.warn("Merge failed, not updating the pull request.")
515 515 return merge_state
516 516
517 517 def _merge_pull_request(self, pull_request, user, extras):
518 518 target_vcs = pull_request.target_repo.scm_instance()
519 519 source_vcs = pull_request.source_repo.scm_instance()
520 520 target_ref = self._refresh_reference(
521 521 pull_request.target_ref_parts, target_vcs)
522 522
523 523 message = _(
524 524 'Merge pull request #%(pr_id)s from '
525 525 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
526 526 'pr_id': pull_request.pull_request_id,
527 527 'source_repo': source_vcs.name,
528 528 'source_ref_name': pull_request.source_ref_parts.name,
529 529 'pr_title': pull_request.title
530 530 }
531 531
532 532 workspace_id = self._workspace_id(pull_request)
533 533 use_rebase = self._use_rebase_for_merging(pull_request)
534 534
535 535 callback_daemon, extras = prepare_callback_daemon(
536 536 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
537 537 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
538 538
539 539 with callback_daemon:
540 540 # TODO: johbo: Implement a clean way to run a config_override
541 541 # for a single call.
542 542 target_vcs.config.set(
543 543 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
544 544 merge_state = target_vcs.merge(
545 545 target_ref, source_vcs, pull_request.source_ref_parts,
546 546 workspace_id, user_name=user.username,
547 547 user_email=user.email, message=message, use_rebase=use_rebase)
548 548 return merge_state
549 549
550 550 def _comment_and_close_pr(self, pull_request, user, merge_state):
551 551 pull_request.merge_rev = merge_state.merge_ref.commit_id
552 552 pull_request.updated_on = datetime.datetime.now()
553 553
554 554 ChangesetCommentsModel().create(
555 555 text=unicode(_('Pull request merged and closed')),
556 556 repo=pull_request.target_repo.repo_id,
557 557 user=user.user_id,
558 558 pull_request=pull_request.pull_request_id,
559 559 f_path=None,
560 560 line_no=None,
561 561 closing_pr=True
562 562 )
563 563
564 564 Session().add(pull_request)
565 565 Session().flush()
566 566 # TODO: paris: replace invalidation with less radical solution
567 567 ScmModel().mark_for_invalidation(
568 568 pull_request.target_repo.repo_name)
569 569 self._trigger_pull_request_hook(pull_request, user, 'merge')
570 570
571 571 def has_valid_update_type(self, pull_request):
572 572 source_ref_type = pull_request.source_ref_parts.type
573 573 return source_ref_type in ['book', 'branch', 'tag']
574 574
575 575 def update_commits(self, pull_request):
576 576 """
577 577 Get the updated list of commits for the pull request
578 578 and return the new pull request version and the list
579 579 of commits processed by this update action
580 580 """
581 581 pull_request = self.__get_pull_request(pull_request)
582 582 source_ref_type = pull_request.source_ref_parts.type
583 583 source_ref_name = pull_request.source_ref_parts.name
584 584 source_ref_id = pull_request.source_ref_parts.commit_id
585 585
586 586 if not self.has_valid_update_type(pull_request):
587 587 log.debug(
588 588 "Skipping update of pull request %s due to ref type: %s",
589 589 pull_request, source_ref_type)
590 590 return UpdateResponse(
591 591 executed=False,
592 592 reason=UpdateFailureReason.WRONG_REF_TPYE,
593 593 old=pull_request, new=None, changes=None)
594 594
595 595 source_repo = pull_request.source_repo.scm_instance()
596 596 try:
597 597 source_commit = source_repo.get_commit(commit_id=source_ref_name)
598 598 except CommitDoesNotExistError:
599 599 return UpdateResponse(
600 600 executed=False,
601 601 reason=UpdateFailureReason.MISSING_SOURCE_REF,
602 602 old=pull_request, new=None, changes=None)
603 603
604 604 if source_ref_id == source_commit.raw_id:
605 605 log.debug("Nothing changed in pull request %s", pull_request)
606 606 return UpdateResponse(
607 607 executed=False,
608 608 reason=UpdateFailureReason.NO_CHANGE,
609 609 old=pull_request, new=None, changes=None)
610 610
611 611 # Finally there is a need for an update
612 612 pull_request_version = self._create_version_from_snapshot(pull_request)
613 613 self._link_comments_to_version(pull_request_version)
614 614
615 615 target_ref_type = pull_request.target_ref_parts.type
616 616 target_ref_name = pull_request.target_ref_parts.name
617 617 target_ref_id = pull_request.target_ref_parts.commit_id
618 618 target_repo = pull_request.target_repo.scm_instance()
619 619
620 620 try:
621 621 if target_ref_type in ('tag', 'branch', 'book'):
622 622 target_commit = target_repo.get_commit(target_ref_name)
623 623 else:
624 624 target_commit = target_repo.get_commit(target_ref_id)
625 625 except CommitDoesNotExistError:
626 626 return UpdateResponse(
627 627 executed=False,
628 628 reason=UpdateFailureReason.MISSING_TARGET_REF,
629 629 old=pull_request, new=None, changes=None)
630 630
631 631 # re-compute commit ids
632 632 old_commit_ids = set(pull_request.revisions)
633 633 pre_load = ["author", "branch", "date", "message"]
634 634 commit_ranges = target_repo.compare(
635 635 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
636 636 pre_load=pre_load)
637 637
638 638 ancestor = target_repo.get_common_ancestor(
639 639 target_commit.raw_id, source_commit.raw_id, source_repo)
640 640
641 641 pull_request.source_ref = '%s:%s:%s' % (
642 642 source_ref_type, source_ref_name, source_commit.raw_id)
643 643 pull_request.target_ref = '%s:%s:%s' % (
644 644 target_ref_type, target_ref_name, ancestor)
645 645 pull_request.revisions = [
646 646 commit.raw_id for commit in reversed(commit_ranges)]
647 647 pull_request.updated_on = datetime.datetime.now()
648 648 Session().add(pull_request)
649 649 new_commit_ids = set(pull_request.revisions)
650 650
651 651 changes = self._calculate_commit_id_changes(
652 652 old_commit_ids, new_commit_ids)
653 653
654 654 old_diff_data, new_diff_data = self._generate_update_diffs(
655 655 pull_request, pull_request_version)
656 656
657 657 ChangesetCommentsModel().outdate_comments(
658 658 pull_request, old_diff_data=old_diff_data,
659 659 new_diff_data=new_diff_data)
660 660
661 661 file_changes = self._calculate_file_changes(
662 662 old_diff_data, new_diff_data)
663 663
664 664 # Add an automatic comment to the pull request
665 665 update_comment = ChangesetCommentsModel().create(
666 666 text=self._render_update_message(changes, file_changes),
667 667 repo=pull_request.target_repo,
668 668 user=pull_request.author,
669 669 pull_request=pull_request,
670 670 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
671 671
672 672 # Update status to "Under Review" for added commits
673 673 for commit_id in changes.added:
674 674 ChangesetStatusModel().set_status(
675 675 repo=pull_request.source_repo,
676 676 status=ChangesetStatus.STATUS_UNDER_REVIEW,
677 677 comment=update_comment,
678 678 user=pull_request.author,
679 679 pull_request=pull_request,
680 680 revision=commit_id)
681 681
682 682 log.debug(
683 683 'Updated pull request %s, added_ids: %s, common_ids: %s, '
684 684 'removed_ids: %s', pull_request.pull_request_id,
685 685 changes.added, changes.common, changes.removed)
686 686 log.debug('Updated pull request with the following file changes: %s',
687 687 file_changes)
688 688
689 689 log.info(
690 690 "Updated pull request %s from commit %s to commit %s, "
691 691 "stored new version %s of this pull request.",
692 692 pull_request.pull_request_id, source_ref_id,
693 693 pull_request.source_ref_parts.commit_id,
694 694 pull_request_version.pull_request_version_id)
695 695 Session().commit()
696 696 self._trigger_pull_request_hook(pull_request, pull_request.author,
697 697 'update')
698 698
699 699 return UpdateResponse(
700 700 executed=True, reason=UpdateFailureReason.NONE,
701 701 old=pull_request, new=pull_request_version, changes=changes)
702 702
703 703 def _create_version_from_snapshot(self, pull_request):
704 704 version = PullRequestVersion()
705 705 version.title = pull_request.title
706 706 version.description = pull_request.description
707 707 version.status = pull_request.status
708     -    version.created_on = pull_request.created_on
    708 +    version.created_on = datetime.datetime.now()
709 709 version.updated_on = pull_request.updated_on
710 710 version.user_id = pull_request.user_id
711 711 version.source_repo = pull_request.source_repo
712 712 version.source_ref = pull_request.source_ref
713 713 version.target_repo = pull_request.target_repo
714 714 version.target_ref = pull_request.target_ref
715 715
716 716 version._last_merge_source_rev = pull_request._last_merge_source_rev
717 717 version._last_merge_target_rev = pull_request._last_merge_target_rev
718 718 version._last_merge_status = pull_request._last_merge_status
719 719 version.shadow_merge_ref = pull_request.shadow_merge_ref
720 720 version.merge_rev = pull_request.merge_rev
721 721
722 722 version.revisions = pull_request.revisions
723 723 version.pull_request = pull_request
724 724 Session().add(version)
725 725 Session().flush()
726 726
727 727 return version
728 728
729 729 def _generate_update_diffs(self, pull_request, pull_request_version):
730 730 diff_context = (
731 731 self.DIFF_CONTEXT +
732 732 ChangesetCommentsModel.needed_extra_diff_context())
733 733 old_diff = self._get_diff_from_pr_or_version(
734 734 pull_request_version, context=diff_context)
735 735 new_diff = self._get_diff_from_pr_or_version(
736 736 pull_request, context=diff_context)
737 737
738 738 old_diff_data = diffs.DiffProcessor(old_diff)
739 739 old_diff_data.prepare()
740 740 new_diff_data = diffs.DiffProcessor(new_diff)
741 741 new_diff_data.prepare()
742 742
743 743 return old_diff_data, new_diff_data
744 744
745 745 def _link_comments_to_version(self, pull_request_version):
746 746 """
747 747 Link all unlinked comments of this pull request to the given version.
748 748
749 749 :param pull_request_version: The `PullRequestVersion` to which
750 750 the comments shall be linked.
751 751
752 752 """
753 753 pull_request = pull_request_version.pull_request
754 754 comments = ChangesetComment.query().filter(
755 755 # TODO: johbo: Should we query for the repo at all here?
756 756 # Pending decision on how comments of PRs are to be related
757 757 # to either the source repo, the target repo or no repo at all.
758 758 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
759 759 ChangesetComment.pull_request == pull_request,
760 760 ChangesetComment.pull_request_version == None)
761 761
762 762 # TODO: johbo: Find out why this breaks if it is done in a bulk
763 763 # operation.
764 764 for comment in comments:
765 765 comment.pull_request_version_id = (
766 766 pull_request_version.pull_request_version_id)
767 767 Session().add(comment)
768 768
769 769 def _calculate_commit_id_changes(self, old_ids, new_ids):
770 770 added = new_ids.difference(old_ids)
771 771 common = old_ids.intersection(new_ids)
772 772 removed = old_ids.difference(new_ids)
773 773 return ChangeTuple(added, common, removed)
774 774
775 775 def _calculate_file_changes(self, old_diff_data, new_diff_data):
776 776
777 777 old_files = OrderedDict()
778 778 for diff_data in old_diff_data.parsed_diff:
779 779 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
780 780
781 781 added_files = []
782 782 modified_files = []
783 783 removed_files = []
784 784 for diff_data in new_diff_data.parsed_diff:
785 785 new_filename = diff_data['filename']
786 786 new_hash = md5_safe(diff_data['raw_diff'])
787 787
788 788 old_hash = old_files.get(new_filename)
789 789 if not old_hash:
790 790 # file is not present in old diff, means it's added
791 791 added_files.append(new_filename)
792 792 else:
793 793 if new_hash != old_hash:
794 794 modified_files.append(new_filename)
795 795 # now remove a file from old, since we have seen it already
796 796 del old_files[new_filename]
797 797
798 798 # removed files is when there are present in old, but not in NEW,
799 799 # since we remove old files that are present in new diff, left-overs
800 800 # if any should be the removed files
801 801 removed_files.extend(old_files.keys())
802 802
803 803 return FileChangeTuple(added_files, modified_files, removed_files)
804 804
805 805 def _render_update_message(self, changes, file_changes):
806 806 """
807 807 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
808 808 so it's always looking the same disregarding on which default
809 809 renderer system is using.
810 810
811 811 :param changes: changes named tuple
812 812 :param file_changes: file changes named tuple
813 813
814 814 """
815 815 new_status = ChangesetStatus.get_status_lbl(
816 816 ChangesetStatus.STATUS_UNDER_REVIEW)
817 817
818 818 changed_files = (
819 819 file_changes.added + file_changes.modified + file_changes.removed)
820 820
821 821 params = {
822 822 'under_review_label': new_status,
823 823 'added_commits': changes.added,
824 824 'removed_commits': changes.removed,
825 825 'changed_files': changed_files,
826 826 'added_files': file_changes.added,
827 827 'modified_files': file_changes.modified,
828 828 'removed_files': file_changes.removed,
829 829 }
830 830 renderer = RstTemplateRenderer()
831 831 return renderer.render('pull_request_update.mako', **params)
832 832
833 833 def edit(self, pull_request, title, description):
834 834 pull_request = self.__get_pull_request(pull_request)
835 835 if pull_request.is_closed():
836 836 raise ValueError('This pull request is closed')
837 837 if title:
838 838 pull_request.title = title
839 839 pull_request.description = description
840 840 pull_request.updated_on = datetime.datetime.now()
841 841 Session().add(pull_request)
842 842
843 843 def update_reviewers(self, pull_request, reviewer_data):
844 844 """
845 845 Update the reviewers in the pull request
846 846
847 847 :param pull_request: the pr to update
848 848 :param reviewer_data: list of tuples [(user, ['reason1', 'reason2'])]
849 849 """
850 850
851 851 reviewers_reasons = {}
852 852 for user_id, reasons in reviewer_data:
853 853 if isinstance(user_id, (int, basestring)):
854 854 user_id = self._get_user(user_id).user_id
855 855 reviewers_reasons[user_id] = reasons
856 856
857 857 reviewers_ids = set(reviewers_reasons.keys())
858 858 pull_request = self.__get_pull_request(pull_request)
859 859 current_reviewers = PullRequestReviewers.query()\
860 860 .filter(PullRequestReviewers.pull_request ==
861 861 pull_request).all()
862 862 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
863 863
864 864 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
865 865 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
866 866
867 867 log.debug("Adding %s reviewers", ids_to_add)
868 868 log.debug("Removing %s reviewers", ids_to_remove)
869 869 changed = False
870 870 for uid in ids_to_add:
871 871 changed = True
872 872 _usr = self._get_user(uid)
873 873 reasons = reviewers_reasons[uid]
874 874 reviewer = PullRequestReviewers(_usr, pull_request, reasons)
875 875 Session().add(reviewer)
876 876
877 877 self.notify_reviewers(pull_request, ids_to_add)
878 878
879 879 for uid in ids_to_remove:
880 880 changed = True
881 881 reviewer = PullRequestReviewers.query()\
882 882 .filter(PullRequestReviewers.user_id == uid,
883 883 PullRequestReviewers.pull_request == pull_request)\
884 884 .scalar()
885 885 if reviewer:
886 886 Session().delete(reviewer)
887 887 if changed:
888 888 pull_request.updated_on = datetime.datetime.now()
889 889 Session().add(pull_request)
890 890
891 891 return ids_to_add, ids_to_remove
892 892
893 893 def get_url(self, pull_request):
894 894 return h.url('pullrequest_show',
895 895 repo_name=safe_str(pull_request.target_repo.repo_name),
896 896 pull_request_id=pull_request.pull_request_id,
897 897 qualified=True)
898 898
899 899 def get_shadow_clone_url(self, pull_request):
900 900 """
901 901 Returns qualified url pointing to the shadow repository. If this pull
902 902 request is closed there is no shadow repository and ``None`` will be
903 903 returned.
904 904 """
905 905 if pull_request.is_closed():
906 906 return None
907 907 else:
908 908 pr_url = urllib.unquote(self.get_url(pull_request))
909 909 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
910 910
911 911 def notify_reviewers(self, pull_request, reviewers_ids):
912 912 # notification to reviewers
913 913 if not reviewers_ids:
914 914 return
915 915
916 916 pull_request_obj = pull_request
917 917 # get the current participants of this pull request
918 918 recipients = reviewers_ids
919 919 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
920 920
921 921 pr_source_repo = pull_request_obj.source_repo
922 922 pr_target_repo = pull_request_obj.target_repo
923 923
924 924 pr_url = h.url(
925 925 'pullrequest_show',
926 926 repo_name=pr_target_repo.repo_name,
927 927 pull_request_id=pull_request_obj.pull_request_id,
928 928 qualified=True,)
929 929
930 930 # set some variables for email notification
931 931 pr_target_repo_url = h.url(
932 932 'summary_home',
933 933 repo_name=pr_target_repo.repo_name,
934 934 qualified=True)
935 935
936 936 pr_source_repo_url = h.url(
937 937 'summary_home',
938 938 repo_name=pr_source_repo.repo_name,
939 939 qualified=True)
940 940
941 941 # pull request specifics
942 942 pull_request_commits = [
943 943 (x.raw_id, x.message)
944 944 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
945 945
946 946 kwargs = {
947 947 'user': pull_request.author,
948 948 'pull_request': pull_request_obj,
949 949 'pull_request_commits': pull_request_commits,
950 950
951 951 'pull_request_target_repo': pr_target_repo,
952 952 'pull_request_target_repo_url': pr_target_repo_url,
953 953
954 954 'pull_request_source_repo': pr_source_repo,
955 955 'pull_request_source_repo_url': pr_source_repo_url,
956 956
957 957 'pull_request_url': pr_url,
958 958 }
959 959
960 960 # pre-generate the subject for notification itself
961 961 (subject,
962 962 _h, _e, # we don't care about those
963 963 body_plaintext) = EmailNotificationModel().render_email(
964 964 notification_type, **kwargs)
965 965
966 966 # create notification objects, and emails
967 967 NotificationModel().create(
968 968 created_by=pull_request.author,
969 969 notification_subject=subject,
970 970 notification_body=body_plaintext,
971 971 notification_type=notification_type,
972 972 recipients=recipients,
973 973 email_kwargs=kwargs,
974 974 )
975 975
976 976 def delete(self, pull_request):
977 977 pull_request = self.__get_pull_request(pull_request)
978 978 self._cleanup_merge_workspace(pull_request)
979 979 Session().delete(pull_request)
980 980
981 981 def close_pull_request(self, pull_request, user):
982 982 pull_request = self.__get_pull_request(pull_request)
983 983 self._cleanup_merge_workspace(pull_request)
984 984 pull_request.status = PullRequest.STATUS_CLOSED
985 985 pull_request.updated_on = datetime.datetime.now()
986 986 Session().add(pull_request)
987 987 self._trigger_pull_request_hook(
988 988 pull_request, pull_request.author, 'close')
989 989 self._log_action('user_closed_pull_request', user, pull_request)
990 990
991 991 def close_pull_request_with_comment(self, pull_request, user, repo,
992 992 message=None):
993 993 status = ChangesetStatus.STATUS_REJECTED
994 994
995 995 if not message:
996 996 message = (
997 997 _('Status change %(transition_icon)s %(status)s') % {
998 998 'transition_icon': '>',
999 999 'status': ChangesetStatus.get_status_lbl(status)})
1000 1000
1001 1001 internal_message = _('Closing with') + ' ' + message
1002 1002
1003 1003 comm = ChangesetCommentsModel().create(
1004 1004 text=internal_message,
1005 1005 repo=repo.repo_id,
1006 1006 user=user.user_id,
1007 1007 pull_request=pull_request.pull_request_id,
1008 1008 f_path=None,
1009 1009 line_no=None,
1010 1010 status_change=ChangesetStatus.get_status_lbl(status),
1011 1011 status_change_type=status,
1012 1012 closing_pr=True
1013 1013 )
1014 1014
1015 1015 ChangesetStatusModel().set_status(
1016 1016 repo.repo_id,
1017 1017 status,
1018 1018 user.user_id,
1019 1019 comm,
1020 1020 pull_request=pull_request.pull_request_id
1021 1021 )
1022 1022 Session().flush()
1023 1023
1024 1024 PullRequestModel().close_pull_request(
1025 1025 pull_request.pull_request_id, user)
1026 1026
1027 1027 def merge_status(self, pull_request):
1028 1028 if not self._is_merge_enabled(pull_request):
1029 1029 return False, _('Server-side pull request merging is disabled.')
1030 1030 if pull_request.is_closed():
1031 1031 return False, _('This pull request is closed.')
1032 1032 merge_possible, msg = self._check_repo_requirements(
1033 1033 target=pull_request.target_repo, source=pull_request.source_repo)
1034 1034 if not merge_possible:
1035 1035 return merge_possible, msg
1036 1036
1037 1037 try:
1038 1038 resp = self._try_merge(pull_request)
1039 1039 log.debug("Merge response: %s", resp)
1040 1040 status = resp.possible, self.merge_status_message(
1041 1041 resp.failure_reason)
1042 1042 except NotImplementedError:
1043 1043 status = False, _('Pull request merging is not supported.')
1044 1044
1045 1045 return status
1046 1046
1047 1047 def _check_repo_requirements(self, target, source):
1048 1048 """
1049 1049 Check if `target` and `source` have compatible requirements.
1050 1050
1051 1051 Currently this is just checking for largefiles.
1052 1052 """
1053 1053 target_has_largefiles = self._has_largefiles(target)
1054 1054 source_has_largefiles = self._has_largefiles(source)
1055 1055 merge_possible = True
1056 1056 message = u''
1057 1057
1058 1058 if target_has_largefiles != source_has_largefiles:
1059 1059 merge_possible = False
1060 1060 if source_has_largefiles:
1061 1061 message = _(
1062 1062 'Target repository large files support is disabled.')
1063 1063 else:
1064 1064 message = _(
1065 1065 'Source repository large files support is disabled.')
1066 1066
1067 1067 return merge_possible, message
1068 1068
1069 1069 def _has_largefiles(self, repo):
1070 1070 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1071 1071 'extensions', 'largefiles')
1072 1072 return largefiles_ui and largefiles_ui[0].active
1073 1073
1074 1074 def _try_merge(self, pull_request):
1075 1075 """
1076 1076 Try to merge the pull request and return the merge status.
1077 1077 """
1078 1078 log.debug(
1079 1079 "Trying out if the pull request %s can be merged.",
1080 1080 pull_request.pull_request_id)
1081 1081 target_vcs = pull_request.target_repo.scm_instance()
1082 1082
1083 1083 # Refresh the target reference.
1084 1084 try:
1085 1085 target_ref = self._refresh_reference(
1086 1086 pull_request.target_ref_parts, target_vcs)
1087 1087 except CommitDoesNotExistError:
1088 1088 merge_state = MergeResponse(
1089 1089 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1090 1090 return merge_state
1091 1091
1092 1092 target_locked = pull_request.target_repo.locked
1093 1093 if target_locked and target_locked[0]:
1094 1094 log.debug("The target repository is locked.")
1095 1095 merge_state = MergeResponse(
1096 1096 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1097 1097 elif self._needs_merge_state_refresh(pull_request, target_ref):
1098 1098 log.debug("Refreshing the merge status of the repository.")
1099 1099 merge_state = self._refresh_merge_state(
1100 1100 pull_request, target_vcs, target_ref)
1101 1101 else:
1102 1102 possible = pull_request.\
1103 1103 _last_merge_status == MergeFailureReason.NONE
1104 1104 merge_state = MergeResponse(
1105 1105 possible, False, None, pull_request._last_merge_status)
1106 1106
1107 1107 return merge_state
1108 1108
1109 1109 def _refresh_reference(self, reference, vcs_repository):
1110 1110 if reference.type in ('branch', 'book'):
1111 1111 name_or_id = reference.name
1112 1112 else:
1113 1113 name_or_id = reference.commit_id
1114 1114 refreshed_commit = vcs_repository.get_commit(name_or_id)
1115 1115 refreshed_reference = Reference(
1116 1116 reference.type, reference.name, refreshed_commit.raw_id)
1117 1117 return refreshed_reference
1118 1118
1119 1119 def _needs_merge_state_refresh(self, pull_request, target_reference):
1120 1120 return not(
1121 1121 pull_request.revisions and
1122 1122 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1123 1123 target_reference.commit_id == pull_request._last_merge_target_rev)
1124 1124
1125 1125 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1126 1126 workspace_id = self._workspace_id(pull_request)
1127 1127 source_vcs = pull_request.source_repo.scm_instance()
1128 1128 use_rebase = self._use_rebase_for_merging(pull_request)
1129 1129 merge_state = target_vcs.merge(
1130 1130 target_reference, source_vcs, pull_request.source_ref_parts,
1131 1131 workspace_id, dry_run=True, use_rebase=use_rebase)
1132 1132
1133 1133 # Do not store the response if there was an unknown error.
1134 1134 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1135 1135 pull_request._last_merge_source_rev = \
1136 1136 pull_request.source_ref_parts.commit_id
1137 1137 pull_request._last_merge_target_rev = target_reference.commit_id
1138 1138 pull_request._last_merge_status = merge_state.failure_reason
1139 1139 pull_request.shadow_merge_ref = merge_state.merge_ref
1140 1140 Session().add(pull_request)
1141 1141 Session().commit()
1142 1142
1143 1143 return merge_state
1144 1144
1145 1145 def _workspace_id(self, pull_request):
1146 1146 workspace_id = 'pr-%s' % pull_request.pull_request_id
1147 1147 return workspace_id
1148 1148
1149 1149 def merge_status_message(self, status_code):
1150 1150 """
1151 1151 Return a human friendly error message for the given merge status code.
1152 1152 """
1153 1153 return self.MERGE_STATUS_MESSAGES[status_code]
1154 1154
1155 1155 def generate_repo_data(self, repo, commit_id=None, branch=None,
1156 1156 bookmark=None):
1157 1157 all_refs, selected_ref = \
1158 1158 self._get_repo_pullrequest_sources(
1159 1159 repo.scm_instance(), commit_id=commit_id,
1160 1160 branch=branch, bookmark=bookmark)
1161 1161
1162 1162 refs_select2 = []
1163 1163 for element in all_refs:
1164 1164 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1165 1165 refs_select2.append({'text': element[1], 'children': children})
1166 1166
1167 1167 return {
1168 1168 'user': {
1169 1169 'user_id': repo.user.user_id,
1170 1170 'username': repo.user.username,
1171 1171 'firstname': repo.user.firstname,
1172 1172 'lastname': repo.user.lastname,
1173 1173 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1174 1174 },
1175 1175 'description': h.chop_at_smart(repo.description, '\n'),
1176 1176 'refs': {
1177 1177 'all_refs': all_refs,
1178 1178 'selected_ref': selected_ref,
1179 1179 'select2_refs': refs_select2
1180 1180 }
1181 1181 }
1182 1182
1183 1183 def generate_pullrequest_title(self, source, source_ref, target):
1184 1184 return u'{source}#{at_ref} to {target}'.format(
1185 1185 source=source,
1186 1186 at_ref=source_ref,
1187 1187 target=target,
1188 1188 )
1189 1189
1190 1190 def _cleanup_merge_workspace(self, pull_request):
1191 1191 # Merging related cleanup
1192 1192 target_scm = pull_request.target_repo.scm_instance()
1193 1193 workspace_id = 'pr-%s' % pull_request.pull_request_id
1194 1194
1195 1195 try:
1196 1196 target_scm.cleanup_merge_workspace(workspace_id)
1197 1197 except NotImplementedError:
1198 1198 pass
1199 1199
1200 1200 def _get_repo_pullrequest_sources(
1201 1201 self, repo, commit_id=None, branch=None, bookmark=None):
1202 1202 """
1203 1203 Return a structure with repo's interesting commits, suitable for
1204 1204 the selectors in pullrequest controller
1205 1205
1206 1206 :param commit_id: a commit that must be in the list somehow
1207 1207 and selected by default
1208 1208 :param branch: a branch that must be in the list and selected
1209 1209 by default - even if closed
1210 1210 :param bookmark: a bookmark that must be in the list and selected
1211 1211 """
1212 1212
1213 1213 commit_id = safe_str(commit_id) if commit_id else None
1214 1214 branch = safe_str(branch) if branch else None
1215 1215 bookmark = safe_str(bookmark) if bookmark else None
1216 1216
1217 1217 selected = None
1218 1218
1219 1219 # order matters: first source that has commit_id in it will be selected
1220 1220 sources = []
1221 1221 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1222 1222 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1223 1223
1224 1224 if commit_id:
1225 1225 ref_commit = (h.short_id(commit_id), commit_id)
1226 1226 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1227 1227
1228 1228 sources.append(
1229 1229 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1230 1230 )
1231 1231
1232 1232 groups = []
1233 1233 for group_key, ref_list, group_name, match in sources:
1234 1234 group_refs = []
1235 1235 for ref_name, ref_id in ref_list:
1236 1236 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1237 1237 group_refs.append((ref_key, ref_name))
1238 1238
1239 1239 if not selected:
1240 1240 if set([commit_id, match]) & set([ref_id, ref_name]):
1241 1241 selected = ref_key
1242 1242
1243 1243 if group_refs:
1244 1244 groups.append((group_refs, group_name))
1245 1245
1246 1246 if not selected:
1247 1247 ref = commit_id or branch or bookmark
1248 1248 if ref:
1249 1249 raise CommitDoesNotExistError(
1250 1250 'No commit refs could be found matching: %s' % ref)
1251 1251 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1252 1252 selected = 'branch:%s:%s' % (
1253 1253 repo.DEFAULT_BRANCH_NAME,
1254 1254 repo.branches[repo.DEFAULT_BRANCH_NAME]
1255 1255 )
1256 1256 elif repo.commit_ids:
1257 1257 rev = repo.commit_ids[0]
1258 1258 selected = 'rev:%s:%s' % (rev, rev)
1259 1259 else:
1260 1260 raise EmptyRepositoryError()
1261 1261 return groups, selected
1262 1262
1263 1263 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1264 1264 pull_request = self.__get_pull_request(pull_request)
1265 1265 return self._get_diff_from_pr_or_version(pull_request, context=context)
1266 1266
1267 1267 def _get_diff_from_pr_or_version(self, pr_or_version, context):
1268 1268 source_repo = pr_or_version.source_repo
1269 1269
1270 1270 # we swap org/other ref since we run a simple diff on one repo
1271 1271 target_ref_id = pr_or_version.target_ref_parts.commit_id
1272 1272 source_ref_id = pr_or_version.source_ref_parts.commit_id
1273 1273 target_commit = source_repo.get_commit(
1274 1274 commit_id=safe_str(target_ref_id))
1275 1275 source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
1276 1276 vcs_repo = source_repo.scm_instance()
1277 1277
1278 1278 # TODO: johbo: In the context of an update, we cannot reach
1279 1279 # the old commit anymore with our normal mechanisms. It needs
1280 1280 # some sort of special support in the vcs layer to avoid this
1281 1281 # workaround.
1282 1282 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1283 1283 vcs_repo.alias == 'git'):
1284 1284 source_commit.raw_id = safe_str(source_ref_id)
1285 1285
1286 1286 log.debug('calculating diff between '
1287 1287 'source_ref:%s and target_ref:%s for repo `%s`',
1288 1288 target_ref_id, source_ref_id,
1289 1289 safe_unicode(vcs_repo.path))
1290 1290
1291 1291 vcs_diff = vcs_repo.get_diff(
1292 1292 commit1=target_commit, commit2=source_commit, context=context)
1293 1293 return vcs_diff
1294 1294
1295 1295 def _is_merge_enabled(self, pull_request):
1296 1296 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1297 1297 settings = settings_model.get_general_settings()
1298 1298 return settings.get('rhodecode_pr_merge_enabled', False)
1299 1299
1300 1300 def _use_rebase_for_merging(self, pull_request):
1301 1301 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1302 1302 settings = settings_model.get_general_settings()
1303 1303 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1304 1304
1305 1305 def _log_action(self, action, user, pull_request):
1306 1306 action_logger(
1307 1307 user,
1308 1308 '{action}:{pr_id}'.format(
1309 1309 action=action, pr_id=pull_request.pull_request_id),
1310 1310 pull_request.target_repo)
1311 1311
1312 1312
1313 1313 ChangeTuple = namedtuple('ChangeTuple',
1314 1314 ['added', 'common', 'removed'])
1315 1315
1316 1316 FileChangeTuple = namedtuple('FileChangeTuple',
1317 1317 ['added', 'modified', 'removed'])
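For orientation before the test hunk below, a hedged usage sketch of the update flow defined above: update_commits() returns the UpdateResponse namedtuple (executed, reason, new, old, changes), and the PullRequestVersion it stores now carries the update time as its created_on. The helper name and return values are illustrative only and assume a configured RhodeCode environment with an open pull request.

    from rhodecode.model.pull_request import PullRequestModel
    from rhodecode.lib.vcs.backends.base import UpdateFailureReason

    def refresh_pull_request(pull_request):
        # hypothetical helper; pull_request is an open rhodecode.model.db.PullRequest
        resp = PullRequestModel().update_commits(pull_request)
        if resp.executed:
            # a new PullRequestVersion snapshot was stored; after this commit its
            # created_on is the time of this update, not the PR's original created_on
            return resp.new, resp.changes.added
        if resp.reason == UpdateFailureReason.NO_CHANGE:
            # source reference already up to date, nothing stored
            return None, []
        # other reasons include WRONG_REF_TPYE, MISSING_SOURCE_REF,
        # MISSING_TARGET_REF and UNKNOWN (see UPDATE_STATUS_MESSAGES above)
        return None, None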
@@ -1,843 +1,846 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import textwrap
24 24
25 25 import rhodecode
26 26 from rhodecode.lib.utils2 import safe_unicode
27 27 from rhodecode.lib.vcs.backends import get_backend
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 MergeResponse, MergeFailureReason, Reference)
30 30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 31 from rhodecode.lib.vcs.nodes import FileNode
32 32 from rhodecode.model.comment import ChangesetCommentsModel
33 33 from rhodecode.model.db import PullRequest, Session
34 34 from rhodecode.model.pull_request import PullRequestModel
35 35 from rhodecode.model.user import UserModel
36 36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37 37
38 38
39 39 pytestmark = [
40 40 pytest.mark.backends("git", "hg"),
41 41 ]
42 42
43 43
44 44 class TestPullRequestModel:
45 45
46 46 @pytest.fixture
47 47 def pull_request(self, request, backend, pr_util):
48 48 """
49 49 A pull request combined with multiples patches.
50 50 """
51 51 BackendClass = get_backend(backend.alias)
52 52 self.merge_patcher = mock.patch.object(BackendClass, 'merge')
53 53 self.workspace_remove_patcher = mock.patch.object(
54 54 BackendClass, 'cleanup_merge_workspace')
55 55
56 56 self.workspace_remove_mock = self.workspace_remove_patcher.start()
57 57 self.merge_mock = self.merge_patcher.start()
58 58 self.comment_patcher = mock.patch(
59 59 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
60 60 self.comment_patcher.start()
61 61 self.notification_patcher = mock.patch(
62 62 'rhodecode.model.notification.NotificationModel.create')
63 63 self.notification_patcher.start()
64 64 self.helper_patcher = mock.patch(
65 65 'rhodecode.lib.helpers.url')
66 66 self.helper_patcher.start()
67 67
68 68 self.hook_patcher = mock.patch.object(PullRequestModel,
69 69 '_trigger_pull_request_hook')
70 70 self.hook_mock = self.hook_patcher.start()
71 71
72 72 self.invalidation_patcher = mock.patch(
73 73 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
74 74 self.invalidation_mock = self.invalidation_patcher.start()
75 75
76 76 self.pull_request = pr_util.create_pull_request(
77 77 mergeable=True, name_suffix=u'Δ…Δ‡')
78 78 self.source_commit = self.pull_request.source_ref_parts.commit_id
79 79 self.target_commit = self.pull_request.target_ref_parts.commit_id
80 80 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
81 81
82 82 @request.addfinalizer
83 83 def cleanup_pull_request():
84 84 calls = [mock.call(
85 85 self.pull_request, self.pull_request.author, 'create')]
86 86 self.hook_mock.assert_has_calls(calls)
87 87
88 88 self.workspace_remove_patcher.stop()
89 89 self.merge_patcher.stop()
90 90 self.comment_patcher.stop()
91 91 self.notification_patcher.stop()
92 92 self.helper_patcher.stop()
93 93 self.hook_patcher.stop()
94 94 self.invalidation_patcher.stop()
95 95
96 96 return self.pull_request
97 97
98 98 def test_get_all(self, pull_request):
99 99 prs = PullRequestModel().get_all(pull_request.target_repo)
100 100 assert isinstance(prs, list)
101 101 assert len(prs) == 1
102 102
103 103 def test_count_all(self, pull_request):
104 104 pr_count = PullRequestModel().count_all(pull_request.target_repo)
105 105 assert pr_count == 1
106 106
107 107 def test_get_awaiting_review(self, pull_request):
108 108 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
109 109 assert isinstance(prs, list)
110 110 assert len(prs) == 1
111 111
112 112 def test_count_awaiting_review(self, pull_request):
113 113 pr_count = PullRequestModel().count_awaiting_review(
114 114 pull_request.target_repo)
115 115 assert pr_count == 1
116 116
117 117 def test_get_awaiting_my_review(self, pull_request):
118 118 PullRequestModel().update_reviewers(
119 119 pull_request, [(pull_request.author, ['author'])])
120 120 prs = PullRequestModel().get_awaiting_my_review(
121 121 pull_request.target_repo, user_id=pull_request.author.user_id)
122 122 assert isinstance(prs, list)
123 123 assert len(prs) == 1
124 124
125 125 def test_count_awaiting_my_review(self, pull_request):
126 126 PullRequestModel().update_reviewers(
127 127 pull_request, [(pull_request.author, ['author'])])
128 128 pr_count = PullRequestModel().count_awaiting_my_review(
129 129 pull_request.target_repo, user_id=pull_request.author.user_id)
130 130 assert pr_count == 1
131 131
132 132 def test_delete_calls_cleanup_merge(self, pull_request):
133 133 PullRequestModel().delete(pull_request)
134 134
135 135 self.workspace_remove_mock.assert_called_once_with(
136 136 self.workspace_id)
137 137
138 138 def test_close_calls_cleanup_and_hook(self, pull_request):
139 139 PullRequestModel().close_pull_request(
140 140 pull_request, pull_request.author)
141 141
142 142 self.workspace_remove_mock.assert_called_once_with(
143 143 self.workspace_id)
144 144 self.hook_mock.assert_called_with(
145 145 self.pull_request, self.pull_request.author, 'close')
146 146
147 147 def test_merge_status(self, pull_request):
148 148 self.merge_mock.return_value = MergeResponse(
149 149 True, False, None, MergeFailureReason.NONE)
150 150
151 151 assert pull_request._last_merge_source_rev is None
152 152 assert pull_request._last_merge_target_rev is None
153 153 assert pull_request._last_merge_status is None
154 154
155 155 status, msg = PullRequestModel().merge_status(pull_request)
156 156 assert status is True
157 157 assert msg.eval() == 'This pull request can be automatically merged.'
158 158 self.merge_mock.assert_called_once_with(
159 159 pull_request.target_ref_parts,
160 160 pull_request.source_repo.scm_instance(),
161 161 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
162 162 use_rebase=False)
163 163
164 164 assert pull_request._last_merge_source_rev == self.source_commit
165 165 assert pull_request._last_merge_target_rev == self.target_commit
166 166 assert pull_request._last_merge_status is MergeFailureReason.NONE
167 167
168 168 self.merge_mock.reset_mock()
169 169 status, msg = PullRequestModel().merge_status(pull_request)
170 170 assert status is True
171 171 assert msg.eval() == 'This pull request can be automatically merged.'
172 172 assert self.merge_mock.called is False
173 173
174 174 def test_merge_status_known_failure(self, pull_request):
175 175 self.merge_mock.return_value = MergeResponse(
176 176 False, False, None, MergeFailureReason.MERGE_FAILED)
177 177
178 178 assert pull_request._last_merge_source_rev is None
179 179 assert pull_request._last_merge_target_rev is None
180 180 assert pull_request._last_merge_status is None
181 181
182 182 status, msg = PullRequestModel().merge_status(pull_request)
183 183 assert status is False
184 184 assert (
185 185 msg.eval() ==
186 186 'This pull request cannot be merged because of conflicts.')
187 187 self.merge_mock.assert_called_once_with(
188 188 pull_request.target_ref_parts,
189 189 pull_request.source_repo.scm_instance(),
190 190 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
191 191 use_rebase=False)
192 192
193 193 assert pull_request._last_merge_source_rev == self.source_commit
194 194 assert pull_request._last_merge_target_rev == self.target_commit
195 195 assert (
196 196 pull_request._last_merge_status is MergeFailureReason.MERGE_FAILED)
197 197
198 198 self.merge_mock.reset_mock()
199 199 status, msg = PullRequestModel().merge_status(pull_request)
200 200 assert status is False
201 201 assert (
202 202 msg.eval() ==
203 203 'This pull request cannot be merged because of conflicts.')
204 204 assert self.merge_mock.called is False
205 205
206 206 def test_merge_status_unknown_failure(self, pull_request):
207 207 self.merge_mock.return_value = MergeResponse(
208 208 False, False, None, MergeFailureReason.UNKNOWN)
209 209
210 210 assert pull_request._last_merge_source_rev is None
211 211 assert pull_request._last_merge_target_rev is None
212 212 assert pull_request._last_merge_status is None
213 213
214 214 status, msg = PullRequestModel().merge_status(pull_request)
215 215 assert status is False
216 216 assert msg.eval() == (
217 217 'This pull request cannot be merged because of an unhandled'
218 218 ' exception.')
219 219 self.merge_mock.assert_called_once_with(
220 220 pull_request.target_ref_parts,
221 221 pull_request.source_repo.scm_instance(),
222 222 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
223 223 use_rebase=False)
224 224
225 225 assert pull_request._last_merge_source_rev is None
226 226 assert pull_request._last_merge_target_rev is None
227 227 assert pull_request._last_merge_status is None
228 228
229 229 self.merge_mock.reset_mock()
230 230 status, msg = PullRequestModel().merge_status(pull_request)
231 231 assert status is False
232 232 assert msg.eval() == (
233 233 'This pull request cannot be merged because of an unhandled'
234 234 ' exception.')
235 235 assert self.merge_mock.called is True
236 236
237 237 def test_merge_status_when_target_is_locked(self, pull_request):
238 238 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
239 239 status, msg = PullRequestModel().merge_status(pull_request)
240 240 assert status is False
241 241 assert msg.eval() == (
242 242 'This pull request cannot be merged because the target repository'
243 243 ' is locked.')
244 244
245 245 def test_merge_status_requirements_check_target(self, pull_request):
246 246
247 247 def has_largefiles(self, repo):
248 248 return repo == pull_request.source_repo
249 249
250 250 patcher = mock.patch.object(
251 251 PullRequestModel, '_has_largefiles', has_largefiles)
252 252 with patcher:
253 253 status, msg = PullRequestModel().merge_status(pull_request)
254 254
255 255 assert status is False
256 256 assert msg == 'Target repository large files support is disabled.'
257 257
258 258 def test_merge_status_requirements_check_source(self, pull_request):
259 259
260 260 def has_largefiles(self, repo):
261 261 return repo == pull_request.target_repo
262 262
263 263 patcher = mock.patch.object(
264 264 PullRequestModel, '_has_largefiles', has_largefiles)
265 265 with patcher:
266 266 status, msg = PullRequestModel().merge_status(pull_request)
267 267
268 268 assert status is False
269 269 assert msg == 'Source repository large files support is disabled.'
270 270
271 271 def test_merge(self, pull_request, merge_extras):
272 272 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
273 273 merge_ref = Reference(
274 274 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
275 275 self.merge_mock.return_value = MergeResponse(
276 276 True, True, merge_ref, MergeFailureReason.NONE)
277 277
278 278 merge_extras['repository'] = pull_request.target_repo.repo_name
279 279 PullRequestModel().merge(
280 280 pull_request, pull_request.author, extras=merge_extras)
281 281
282 282 message = (
283 283 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
284 284 u'\n\n {pr_title}'.format(
285 285 pr_id=pull_request.pull_request_id,
286 286 source_repo=safe_unicode(
287 287 pull_request.source_repo.scm_instance().name),
288 288 source_ref_name=pull_request.source_ref_parts.name,
289 289 pr_title=safe_unicode(pull_request.title)
290 290 )
291 291 )
292 292 self.merge_mock.assert_called_once_with(
293 293 pull_request.target_ref_parts,
294 294 pull_request.source_repo.scm_instance(),
295 295 pull_request.source_ref_parts, self.workspace_id,
296 296 user_name=user.username, user_email=user.email, message=message,
297 297 use_rebase=False
298 298 )
299 299 self.invalidation_mock.assert_called_once_with(
300 300 pull_request.target_repo.repo_name)
301 301
302 302 self.hook_mock.assert_called_with(
303 303 self.pull_request, self.pull_request.author, 'merge')
304 304
305 305 pull_request = PullRequest.get(pull_request.pull_request_id)
306 306 assert (
307 307 pull_request.merge_rev ==
308 308 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
309 309
310 310 def test_merge_failed(self, pull_request, merge_extras):
311 311 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
312 312 merge_ref = Reference(
313 313 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
314 314 self.merge_mock.return_value = MergeResponse(
315 315 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
316 316
317 317 merge_extras['repository'] = pull_request.target_repo.repo_name
318 318 PullRequestModel().merge(
319 319 pull_request, pull_request.author, extras=merge_extras)
320 320
321 321 message = (
322 322 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
323 323 u'\n\n {pr_title}'.format(
324 324 pr_id=pull_request.pull_request_id,
325 325 source_repo=safe_unicode(
326 326 pull_request.source_repo.scm_instance().name),
327 327 source_ref_name=pull_request.source_ref_parts.name,
328 328 pr_title=safe_unicode(pull_request.title)
329 329 )
330 330 )
331 331 self.merge_mock.assert_called_once_with(
332 332 pull_request.target_ref_parts,
333 333 pull_request.source_repo.scm_instance(),
334 334 pull_request.source_ref_parts, self.workspace_id,
335 335 user_name=user.username, user_email=user.email, message=message,
336 336 use_rebase=False
337 337 )
338 338
339 339 pull_request = PullRequest.get(pull_request.pull_request_id)
340 340 assert self.invalidation_mock.called is False
341 341 assert pull_request.merge_rev is None
342 342
343 343 def test_get_commit_ids(self, pull_request):
344 344 # The PR has not been merged yet, so expect an exception
345 345 with pytest.raises(ValueError):
346 346 PullRequestModel()._get_commit_ids(pull_request)
347 347
348 348 # Merge revision is in the revisions list
349 349 pull_request.merge_rev = pull_request.revisions[0]
350 350 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
351 351 assert commit_ids == pull_request.revisions
352 352
353 353 # Merge revision is not in the revisions list
354 354 pull_request.merge_rev = 'f000' * 10
355 355 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
356 356 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
357 357
358 358 def test_get_diff_from_pr_version(self, pull_request):
359 359 diff = PullRequestModel()._get_diff_from_pr_or_version(
360 360 pull_request, context=6)
361 361 assert 'file_1' in diff.raw
362 362
363 363 def test_generate_title_returns_unicode(self):
364 364 title = PullRequestModel().generate_pullrequest_title(
365 365 source='source-dummy',
366 366 source_ref='source-ref-dummy',
367 367 target='target-dummy',
368 368 )
369 369 assert type(title) == unicode
370 370
371 371
372 372 class TestIntegrationMerge(object):
373 373 @pytest.mark.parametrize('extra_config', (
374 374 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
375 375 {'vcs.hooks.protocol': 'Pyro4', 'vcs.hooks.direct_calls': False},
376 376 ))
377 377 def test_merge_triggers_push_hooks(
378 378 self, pr_util, user_admin, capture_rcextensions, merge_extras,
379 379 extra_config):
380 380 pull_request = pr_util.create_pull_request(
381 381 approved=True, mergeable=True)
382 382 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
383 383 merge_extras['repository'] = pull_request.target_repo.repo_name
384 384 Session().commit()
385 385
386 386 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
387 387 merge_state = PullRequestModel().merge(
388 388 pull_request, user_admin, extras=merge_extras)
389 389
390 390 assert merge_state.executed
391 391 assert 'pre_push' in capture_rcextensions
392 392 assert 'post_push' in capture_rcextensions
393 393
394 394 def test_merge_can_be_rejected_by_pre_push_hook(
395 395 self, pr_util, user_admin, capture_rcextensions, merge_extras):
396 396 pull_request = pr_util.create_pull_request(
397 397 approved=True, mergeable=True)
398 398 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
399 399 merge_extras['repository'] = pull_request.target_repo.repo_name
400 400 Session().commit()
401 401
402 402 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
403 403 pre_pull.side_effect = RepositoryError("Disallow push!")
404 404 merge_status = PullRequestModel().merge(
405 405 pull_request, user_admin, extras=merge_extras)
406 406
407 407 assert not merge_status.executed
408 408 assert 'pre_push' not in capture_rcextensions
409 409 assert 'post_push' not in capture_rcextensions
410 410
411 411 def test_merge_fails_if_target_is_locked(
412 412 self, pr_util, user_regular, merge_extras):
413 413 pull_request = pr_util.create_pull_request(
414 414 approved=True, mergeable=True)
415 415 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
416 416 pull_request.target_repo.locked = locked_by
417 417 # TODO: johbo: Check if this can work based on the database; currently
418 418 # all data is pre-computed, so just updating the DB is not
419 419 # enough.
420 420 merge_extras['locked_by'] = locked_by
421 421 merge_extras['repository'] = pull_request.target_repo.repo_name
422 422 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
423 423 Session().commit()
424 424 merge_status = PullRequestModel().merge(
425 425 pull_request, user_regular, extras=merge_extras)
426 426 assert not merge_status.executed
427 427
428 428
429 429 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
430 430 (False, 1, 0),
431 431 (True, 0, 1),
432 432 ])
433 433 def test_outdated_comments(
434 434 pr_util, use_outdated, inlines_count, outdated_count):
435 435 pull_request = pr_util.create_pull_request()
436 436 pr_util.create_inline_comment(file_path='not_in_updated_diff')
437 437
438 438 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
439 439 pr_util.add_one_commit()
440 440 assert_inline_comments(
441 441 pull_request, visible=inlines_count, outdated=outdated_count)
442 442 outdated_comment_mock.assert_called_with(pull_request)
443 443
444 444
445 445 @pytest.fixture
446 446 def merge_extras(user_regular):
447 447 """
448 448 Context for the vcs operation when running a merge.
449 449 """
450 450 extras = {
451 451 'ip': '127.0.0.1',
452 452 'username': user_regular.username,
453 453 'action': 'push',
454 454 'repository': 'fake_target_repo_name',
455 455 'scm': 'git',
456 456 'config': 'fake_config_ini_path',
457 457 'make_lock': None,
458 458 'locked_by': [None, None, None],
459 459 'server_url': 'http://test.example.com:5000',
460 460 'hooks': ['push', 'pull'],
461 461 'is_shadow_repo': False,
462 462 }
463 463 return extras
464 464
465 465
466 466 class TestUpdateCommentHandling(object):
467 467
468 468 @pytest.fixture(autouse=True, scope='class')
469 469 def enable_outdated_comments(self, request, pylonsapp):
470 470 config_patch = mock.patch.dict(
471 471 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
472 472 config_patch.start()
473 473
474 474 @request.addfinalizer
475 475 def cleanup():
476 476 config_patch.stop()
477 477
478 478 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
479 479 commits = [
480 480 {'message': 'a'},
481 481 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
482 482 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
483 483 ]
484 484 pull_request = pr_util.create_pull_request(
485 485 commits=commits, target_head='a', source_head='b', revisions=['b'])
486 486 pr_util.create_inline_comment(file_path='file_b')
487 487 pr_util.add_one_commit(head='c')
488 488
489 489 assert_inline_comments(pull_request, visible=1, outdated=0)
490 490
491 491 def test_comment_stays_unflagged_on_change_above(self, pr_util):
492 492 original_content = ''.join(
493 493 ['line {}\n'.format(x) for x in range(1, 11)])
494 494 updated_content = 'new_line_at_top\n' + original_content
495 495 commits = [
496 496 {'message': 'a'},
497 497 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
498 498 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
499 499 ]
500 500 pull_request = pr_util.create_pull_request(
501 501 commits=commits, target_head='a', source_head='b', revisions=['b'])
502 502
503 503 with outdated_comments_patcher():
504 504 comment = pr_util.create_inline_comment(
505 505 line_no=u'n8', file_path='file_b')
506 506 pr_util.add_one_commit(head='c')
507 507
508 508 assert_inline_comments(pull_request, visible=1, outdated=0)
509 509 assert comment.line_no == u'n9'
510 510
511 511 def test_comment_stays_unflagged_on_change_below(self, pr_util):
512 512 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
513 513 updated_content = original_content + 'new_line_at_end\n'
514 514 commits = [
515 515 {'message': 'a'},
516 516 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
517 517 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
518 518 ]
519 519 pull_request = pr_util.create_pull_request(
520 520 commits=commits, target_head='a', source_head='b', revisions=['b'])
521 521 pr_util.create_inline_comment(file_path='file_b')
522 522 pr_util.add_one_commit(head='c')
523 523
524 524 assert_inline_comments(pull_request, visible=1, outdated=0)
525 525
526 526 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
527 527 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
528 528 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
529 529 change_lines = list(base_lines)
530 530 change_lines.insert(6, 'line 6a added\n')
531 531
532 532 # Changes on the first and last lines of the visible context
533 533 update_lines = list(change_lines)
534 534 update_lines[0] = 'line 1 changed\n'
535 535 update_lines[-1] = 'line 12 changed\n'
536 536
537 537 def file_b(lines):
538 538 return FileNode('file_b', ''.join(lines))
539 539
540 540 commits = [
541 541 {'message': 'a', 'added': [file_b(base_lines)]},
542 542 {'message': 'b', 'changed': [file_b(change_lines)]},
543 543 {'message': 'c', 'changed': [file_b(update_lines)]},
544 544 ]
545 545
546 546 pull_request = pr_util.create_pull_request(
547 547 commits=commits, target_head='a', source_head='b', revisions=['b'])
548 548 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
549 549
550 550 with outdated_comments_patcher():
551 551 pr_util.add_one_commit(head='c')
552 552 assert_inline_comments(pull_request, visible=0, outdated=1)
553 553
554 554 @pytest.mark.parametrize("change, content", [
555 555 ('changed', 'changed\n'),
556 556 ('removed', ''),
557 557 ], ids=['changed', 'removed'])
558 558 def test_comment_flagged_on_change(self, pr_util, change, content):
559 559 commits = [
560 560 {'message': 'a'},
561 561 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
562 562 {'message': 'c', change: [FileNode('file_b', content)]},
563 563 ]
564 564 pull_request = pr_util.create_pull_request(
565 565 commits=commits, target_head='a', source_head='b', revisions=['b'])
566 566 pr_util.create_inline_comment(file_path='file_b')
567 567
568 568 with outdated_comments_patcher():
569 569 pr_util.add_one_commit(head='c')
570 570 assert_inline_comments(pull_request, visible=0, outdated=1)
571 571
572 572
573 573 class TestUpdateChangedFiles(object):
574 574
575 575 def test_no_changes_on_unchanged_diff(self, pr_util):
576 576 commits = [
577 577 {'message': 'a'},
578 578 {'message': 'b',
579 579 'added': [FileNode('file_b', 'test_content b\n')]},
580 580 {'message': 'c',
581 581 'added': [FileNode('file_c', 'test_content c\n')]},
582 582 ]
583 583 # open a PR from a to b, adding file_b
584 584 pull_request = pr_util.create_pull_request(
585 585 commits=commits, target_head='a', source_head='b', revisions=['b'],
586 586 name_suffix='per-file-review')
587 587
588 588 # modify PR adding new file file_c
589 589 pr_util.add_one_commit(head='c')
590 590
591 591 assert_pr_file_changes(
592 592 pull_request,
593 593 added=['file_c'],
594 594 modified=[],
595 595 removed=[])
596 596
597 597 def test_modify_and_undo_modification_diff(self, pr_util):
598 598 commits = [
599 599 {'message': 'a'},
600 600 {'message': 'b',
601 601 'added': [FileNode('file_b', 'test_content b\n')]},
602 602 {'message': 'c',
603 603 'changed': [FileNode('file_b', 'test_content b modified\n')]},
604 604 {'message': 'd',
605 605 'changed': [FileNode('file_b', 'test_content b\n')]},
606 606 ]
607 607 # open a PR from a to b, adding file_b
608 608 pull_request = pr_util.create_pull_request(
609 609 commits=commits, target_head='a', source_head='b', revisions=['b'],
610 610 name_suffix='per-file-review')
611 611
612 612 # modify PR modifying file file_b
613 613 pr_util.add_one_commit(head='c')
614 614
615 615 assert_pr_file_changes(
616 616 pull_request,
617 617 added=[],
618 618 modified=['file_b'],
619 619 removed=[])
620 620
621 621 # move the head again to d, which rolls back the change,
622 622 # meaning we should indicate no changes
623 623 pr_util.add_one_commit(head='d')
624 624
625 625 assert_pr_file_changes(
626 626 pull_request,
627 627 added=[],
628 628 modified=[],
629 629 removed=[])
630 630
631 631 def test_updated_all_files_in_pr(self, pr_util):
632 632 commits = [
633 633 {'message': 'a'},
634 634 {'message': 'b', 'added': [
635 635 FileNode('file_a', 'test_content a\n'),
636 636 FileNode('file_b', 'test_content b\n'),
637 637 FileNode('file_c', 'test_content c\n')]},
638 638 {'message': 'c', 'changed': [
639 639 FileNode('file_a', 'test_content a changed\n'),
640 640 FileNode('file_b', 'test_content b changed\n'),
641 641 FileNode('file_c', 'test_content c changed\n')]},
642 642 ]
643 643 # open a PR from a to b, changing 3 files
644 644 pull_request = pr_util.create_pull_request(
645 645 commits=commits, target_head='a', source_head='b', revisions=['b'],
646 646 name_suffix='per-file-review')
647 647
648 648 pr_util.add_one_commit(head='c')
649 649
650 650 assert_pr_file_changes(
651 651 pull_request,
652 652 added=[],
653 653 modified=['file_a', 'file_b', 'file_c'],
654 654 removed=[])
655 655
656 656 def test_updated_and_removed_all_files_in_pr(self, pr_util):
657 657 commits = [
658 658 {'message': 'a'},
659 659 {'message': 'b', 'added': [
660 660 FileNode('file_a', 'test_content a\n'),
661 661 FileNode('file_b', 'test_content b\n'),
662 662 FileNode('file_c', 'test_content c\n')]},
663 663 {'message': 'c', 'removed': [
664 664 FileNode('file_a', 'test_content a changed\n'),
665 665 FileNode('file_b', 'test_content b changed\n'),
666 666 FileNode('file_c', 'test_content c changed\n')]},
667 667 ]
668 668 # open a PR from a to b, removing 3 files
669 669 pull_request = pr_util.create_pull_request(
670 670 commits=commits, target_head='a', source_head='b', revisions=['b'],
671 671 name_suffix='per-file-review')
672 672
673 673 pr_util.add_one_commit(head='c')
674 674
675 675 assert_pr_file_changes(
676 676 pull_request,
677 677 added=[],
678 678 modified=[],
679 679 removed=['file_a', 'file_b', 'file_c'])
680 680
681 681
682 682 def test_update_writes_snapshot_into_pull_request_version(pr_util):
683 683 model = PullRequestModel()
684 684 pull_request = pr_util.create_pull_request()
685 685 pr_util.update_source_repository()
686 686
687 687 model.update_commits(pull_request)
688 688
689 689 # Expect that it has a version entry now
690 690 assert len(model.get_versions(pull_request)) == 1
691 691
692 692
693 693 def test_update_skips_new_version_if_unchanged(pr_util):
694 694 pull_request = pr_util.create_pull_request()
695 695 model = PullRequestModel()
696 696 model.update_commits(pull_request)
697 697
698 698 # Expect that it still has no versions
699 699 assert len(model.get_versions(pull_request)) == 0
700 700
701 701
702 702 def test_update_assigns_comments_to_the_new_version(pr_util):
703 703 model = PullRequestModel()
704 704 pull_request = pr_util.create_pull_request()
705 705 comment = pr_util.create_comment()
706 706 pr_util.update_source_repository()
707 707
708 708 model.update_commits(pull_request)
709 709
710 710 # Expect that the comment is linked to the pr version now
711 711 assert comment.pull_request_version == model.get_versions(pull_request)[0]
712 712
713 713
714 714 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util):
715 715 model = PullRequestModel()
716 716 pull_request = pr_util.create_pull_request()
717 717 pr_util.update_source_repository()
718 718 pr_util.update_source_repository()
719 719
720 720 model.update_commits(pull_request)
721 721
722 722 # Expect to find a new comment about the change
723 723 expected_message = textwrap.dedent(
724 724 """\
725 725 Auto status change to |under_review|
726 726
727 727 .. role:: added
728 728 .. role:: removed
729 729 .. parsed-literal::
730 730
731 731 Changed commits:
732 732 * :added:`1 added`
733 733 * :removed:`0 removed`
734 734
735 735 Changed files:
736 736 * `A file_2 <#a_c--92ed3b5f07b4>`_
737 737
738 738 .. |under_review| replace:: *"Under Review"*"""
739 739 )
740 740 pull_request_comments = sorted(
741 741 pull_request.comments, key=lambda c: c.modified_at)
742 742 update_comment = pull_request_comments[-1]
743 743 assert update_comment.text == expected_message
744 744
745 745
746 746 def test_create_version_from_snapshot_updates_attributes(pr_util):
747 747 pull_request = pr_util.create_pull_request()
748 748
749 749 # Avoiding default values
750 750 pull_request.status = PullRequest.STATUS_CLOSED
751 751 pull_request._last_merge_source_rev = "0" * 40
752 752 pull_request._last_merge_target_rev = "1" * 40
753 753 pull_request._last_merge_status = 1
754 754 pull_request.merge_rev = "2" * 40
755 755
756 756 # Remember automatic values
757 757 created_on = pull_request.created_on
758 758 updated_on = pull_request.updated_on
759 759
760 760 # Create a new version of the pull request
761 761 version = PullRequestModel()._create_version_from_snapshot(pull_request)
762 762
763 763 # Check attributes
764 764 assert version.title == pr_util.create_parameters['title']
765 765 assert version.description == pr_util.create_parameters['description']
766 766 assert version.status == PullRequest.STATUS_CLOSED
767 assert version.created_on == created_on
767
768 # a new version gets a fresh created_on (set to now), not the PR's original one
769 assert version.created_on != created_on
770
768 771 assert version.updated_on == updated_on
769 772 assert version.user_id == pull_request.user_id
770 773 assert version.revisions == pr_util.create_parameters['revisions']
771 774 assert version.source_repo == pr_util.source_repository
772 775 assert version.source_ref == pr_util.create_parameters['source_ref']
773 776 assert version.target_repo == pr_util.target_repository
774 777 assert version.target_ref == pr_util.create_parameters['target_ref']
775 778 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
776 779 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
777 780 assert version._last_merge_status == pull_request._last_merge_status
778 781 assert version.merge_rev == pull_request.merge_rev
779 782 assert version.pull_request == pull_request
780 783
781 784
782 785 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util):
783 786 version1 = pr_util.create_version_of_pull_request()
784 787 comment_linked = pr_util.create_comment(linked_to=version1)
785 788 comment_unlinked = pr_util.create_comment()
786 789 version2 = pr_util.create_version_of_pull_request()
787 790
788 791 PullRequestModel()._link_comments_to_version(version2)
789 792
790 793 # Expect that only the new comment is linked to version2
791 794 assert (
792 795 comment_unlinked.pull_request_version_id ==
793 796 version2.pull_request_version_id)
794 797 assert (
795 798 comment_linked.pull_request_version_id ==
796 799 version1.pull_request_version_id)
797 800 assert (
798 801 comment_unlinked.pull_request_version_id !=
799 802 comment_linked.pull_request_version_id)
800 803
801 804
802 805 def test_calculate_commits():
803 806 change = PullRequestModel()._calculate_commit_id_changes(
804 807 set([1, 2, 3]), set([1, 3, 4, 5]))
805 808 assert (set([4, 5]), set([1, 3]), set([2])) == (
806 809 change.added, change.common, change.removed)
807 810
808 811
809 812 def assert_inline_comments(pull_request, visible=None, outdated=None):
810 813 if visible is not None:
811 814 inline_comments = ChangesetCommentsModel().get_inline_comments(
812 815 pull_request.target_repo.repo_id, pull_request=pull_request)
813 816 inline_cnt = ChangesetCommentsModel().get_inline_comments_count(
814 817 inline_comments)
815 818 assert inline_cnt == visible
816 819 if outdated is not None:
817 820 outdated_comments = ChangesetCommentsModel().get_outdated_comments(
818 821 pull_request.target_repo.repo_id, pull_request)
819 822 assert len(outdated_comments) == outdated
820 823
821 824
822 825 def assert_pr_file_changes(
823 826 pull_request, added=None, modified=None, removed=None):
824 827 pr_versions = PullRequestModel().get_versions(pull_request)
825 828 # always use the first version, i.e. the original PR, to calculate changes
826 829 pull_request_version = pr_versions[0]
827 830 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
828 831 pull_request, pull_request_version)
829 832 file_changes = PullRequestModel()._calculate_file_changes(
830 833 old_diff_data, new_diff_data)
831 834
832 835 assert added == file_changes.added, \
833 836 'expected added:%s vs value:%s' % (added, file_changes.added)
834 837 assert modified == file_changes.modified, \
835 838 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
836 839 assert removed == file_changes.removed, \
837 840 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
838 841
839 842
840 843 def outdated_comments_patcher(use_outdated=True):
841 844 return mock.patch.object(
842 845 ChangesetCommentsModel, 'use_outdated_comments',
843 846 return_value=use_outdated)