pull-requests: properly save merge failure metadata. Before this change...
marcink
r4471:0186d5e2 default

The requested changes are too big and content was truncated.

1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated.
@@ -1,2072 +1,2072 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.translation import lazy_ugettext
40 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 41 from rhodecode.lib import audit_logger
42 42 from rhodecode.lib.compat import OrderedDict
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.markup_renderer import (
45 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 46 from rhodecode.lib.utils2 import (
47 47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 48 get_current_rhodecode_user)
49 49 from rhodecode.lib.vcs.backends.base import (
50 50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 51 TargetRefMissing, SourceRefMissing)
52 52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 CommitDoesNotExistError, EmptyRepositoryError)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 57 from rhodecode.model.comment import CommentsModel
58 58 from rhodecode.model.db import (
59 59 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 61 from rhodecode.model.meta import Session
62 62 from rhodecode.model.notification import NotificationModel, \
63 63 EmailNotificationModel
64 64 from rhodecode.model.scm import ScmModel
65 65 from rhodecode.model.settings import VcsSettingsModel
66 66
67 67
68 68 log = logging.getLogger(__name__)
69 69
70 70
71 71 # Data structure to hold the response data when updating commits during a pull
72 72 # request update.
73 73 class UpdateResponse(object):
74 74
75 75 def __init__(self, executed, reason, new, old, common_ancestor_id,
76 76 commit_changes, source_changed, target_changed):
77 77
78 78 self.executed = executed
79 79 self.reason = reason
80 80 self.new = new
81 81 self.old = old
82 82 self.common_ancestor_id = common_ancestor_id
83 83 self.changes = commit_changes
84 84 self.source_changed = source_changed
85 85 self.target_changed = target_changed
86 86
87 87
88 88 def get_diff_info(
89 89 source_repo, source_ref, target_repo, target_ref, get_authors=False,
90 90 get_commit_authors=True):
91 91 """
92 92 Calculates detailed diff information for use when previewing the creation of a pull request.
93 93 This is also used for the default reviewers logic.
94 94 """
95 95
96 96 source_scm = source_repo.scm_instance()
97 97 target_scm = target_repo.scm_instance()
98 98
99 99 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
100 100 if not ancestor_id:
101 101 raise ValueError(
102 102 'cannot calculate diff info without a common ancestor. '
103 103 'Make sure both repositories are related, and have a common forking commit.')
104 104
105 105 # the case here is that we want a simple diff without incoming commits,
106 106 # previewing what will be merged based only on commits in the source.
107 107 log.debug('Using ancestor %s as source_ref instead of %s',
108 108 ancestor_id, source_ref)
109 109
110 110 # source of changes now is the common ancestor
111 111 source_commit = source_scm.get_commit(commit_id=ancestor_id)
112 112 # the target commit becomes the source ref, as it is the last commit;
113 113 # for diff generation this logic gives the proper diff
114 114 target_commit = source_scm.get_commit(commit_id=source_ref)
115 115
116 116 vcs_diff = \
117 117 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
118 118 ignore_whitespace=False, context=3)
119 119
120 120 diff_processor = diffs.DiffProcessor(
121 121 vcs_diff, format='newdiff', diff_limit=None,
122 122 file_limit=None, show_full_diff=True)
123 123
124 124 _parsed = diff_processor.prepare()
125 125
126 126 all_files = []
127 127 all_files_changes = []
128 128 changed_lines = {}
129 129 stats = [0, 0]
130 130 for f in _parsed:
131 131 all_files.append(f['filename'])
132 132 all_files_changes.append({
133 133 'filename': f['filename'],
134 134 'stats': f['stats']
135 135 })
136 136 stats[0] += f['stats']['added']
137 137 stats[1] += f['stats']['deleted']
138 138
139 139 changed_lines[f['filename']] = []
140 140 if len(f['chunks']) < 2:
141 141 continue
142 142 # first line is "context" information
143 143 for chunks in f['chunks'][1:]:
144 144 for chunk in chunks['lines']:
145 145 if chunk['action'] not in ('del', 'mod'):
146 146 continue
147 147 changed_lines[f['filename']].append(chunk['old_lineno'])
148 148
149 149 commit_authors = []
150 150 user_counts = {}
151 151 email_counts = {}
152 152 author_counts = {}
153 153 _commit_cache = {}
154 154
155 155 commits = []
156 156 if get_commit_authors:
157 157 commits = target_scm.compare(
158 158 target_ref, source_ref, source_scm, merge=True,
159 159 pre_load=["author"])
160 160
161 161 for commit in commits:
162 162 user = User.get_from_cs_author(commit.author)
163 163 if user and user not in commit_authors:
164 164 commit_authors.append(user)
165 165
166 166 # lines
167 167 if get_authors:
168 168 target_commit = source_repo.get_commit(ancestor_id)
169 169
170 170 for fname, lines in changed_lines.items():
171 171 try:
172 172 node = target_commit.get_node(fname)
173 173 except Exception:
174 174 continue
175 175
176 176 if not isinstance(node, FileNode):
177 177 continue
178 178
179 179 for annotation in node.annotate:
180 180 line_no, commit_id, get_commit_func, line_text = annotation
181 181 if line_no in lines:
182 182 if commit_id not in _commit_cache:
183 183 _commit_cache[commit_id] = get_commit_func()
184 184 commit = _commit_cache[commit_id]
185 185 author = commit.author
186 186 email = commit.author_email
187 187 user = User.get_from_cs_author(author)
188 188 if user:
189 189 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
190 190 author_counts[author] = author_counts.get(author, 0) + 1
191 191 email_counts[email] = email_counts.get(email, 0) + 1
192 192
193 193 return {
194 194 'commits': commits,
195 195 'files': all_files_changes,
196 196 'stats': stats,
197 197 'ancestor': ancestor_id,
198 198 # original authors of modified files
199 199 'original_authors': {
200 200 'users': user_counts,
201 201 'authors': author_counts,
202 202 'emails': email_counts,
203 203 },
204 204 'commit_authors': commit_authors
205 205 }
206 206
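# Illustrative sketch (not part of RhodeCode): one way the dictionary returned
# by get_diff_info() above could be consumed. The helper name and the limit
# are hypothetical; it relies only on the 'original_authors' structure built
# above, where each count is the number of changed lines annotated to a user.
def _example_suggest_reviewers(diff_info, limit=3):
    """Return the user ids that authored the most of the changed lines."""
    user_counts = diff_info['original_authors']['users']  # {user_id: line count}
    ranked = sorted(user_counts.items(), key=lambda item: item[1], reverse=True)
    return [user_id for user_id, _count in ranked[:limit]]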
207 207
208 208 class PullRequestModel(BaseModel):
209 209
210 210 cls = PullRequest
211 211
212 212 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
213 213
214 214 UPDATE_STATUS_MESSAGES = {
215 215 UpdateFailureReason.NONE: lazy_ugettext(
216 216 'Pull request update successful.'),
217 217 UpdateFailureReason.UNKNOWN: lazy_ugettext(
218 218 'Pull request update failed because of an unknown error.'),
219 219 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
220 220 'No update needed because the source and target have not changed.'),
221 221 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
222 222 'Pull request cannot be updated because the reference type is '
223 223 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
224 224 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
225 225 'This pull request cannot be updated because the target '
226 226 'reference is missing.'),
227 227 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
228 228 'This pull request cannot be updated because the source '
229 229 'reference is missing.'),
230 230 }
231 231 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
232 232 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
233 233
234 234 def __get_pull_request(self, pull_request):
235 235 return self._get_instance((
236 236 PullRequest, PullRequestVersion), pull_request)
237 237
238 238 def _check_perms(self, perms, pull_request, user, api=False):
239 239 if not api:
240 240 return h.HasRepoPermissionAny(*perms)(
241 241 user=user, repo_name=pull_request.target_repo.repo_name)
242 242 else:
243 243 return h.HasRepoPermissionAnyApi(*perms)(
244 244 user=user, repo_name=pull_request.target_repo.repo_name)
245 245
246 246 def check_user_read(self, pull_request, user, api=False):
247 247 _perms = ('repository.admin', 'repository.write', 'repository.read',)
248 248 return self._check_perms(_perms, pull_request, user, api)
249 249
250 250 def check_user_merge(self, pull_request, user, api=False):
251 251 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
252 252 return self._check_perms(_perms, pull_request, user, api)
253 253
254 254 def check_user_update(self, pull_request, user, api=False):
255 255 owner = user.user_id == pull_request.user_id
256 256 return self.check_user_merge(pull_request, user, api) or owner
257 257
258 258 def check_user_delete(self, pull_request, user):
259 259 owner = user.user_id == pull_request.user_id
260 260 _perms = ('repository.admin',)
261 261 return self._check_perms(_perms, pull_request, user) or owner
262 262
263 263 def check_user_change_status(self, pull_request, user, api=False):
264 264 reviewer = user.user_id in [x.user_id for x in
265 265 pull_request.reviewers]
266 266 return self.check_user_update(pull_request, user, api) or reviewer
267 267
268 268 def check_user_comment(self, pull_request, user):
269 269 owner = user.user_id == pull_request.user_id
270 270 return self.check_user_read(pull_request, user) or owner
271 271
272 272 def get(self, pull_request):
273 273 return self.__get_pull_request(pull_request)
274 274
275 275 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
276 276 statuses=None, opened_by=None, order_by=None,
277 277 order_dir='desc', only_created=False):
278 278 repo = None
279 279 if repo_name:
280 280 repo = self._get_repo(repo_name)
281 281
282 282 q = PullRequest.query()
283 283
284 284 if search_q:
285 285 like_expression = u'%{}%'.format(safe_unicode(search_q))
286 286 q = q.join(User)
287 287 q = q.filter(or_(
288 288 cast(PullRequest.pull_request_id, String).ilike(like_expression),
289 289 User.username.ilike(like_expression),
290 290 PullRequest.title.ilike(like_expression),
291 291 PullRequest.description.ilike(like_expression),
292 292 ))
293 293
294 294 # source or target
295 295 if repo and source:
296 296 q = q.filter(PullRequest.source_repo == repo)
297 297 elif repo:
298 298 q = q.filter(PullRequest.target_repo == repo)
299 299
300 300 # closed,opened
301 301 if statuses:
302 302 q = q.filter(PullRequest.status.in_(statuses))
303 303
304 304 # opened by filter
305 305 if opened_by:
306 306 q = q.filter(PullRequest.user_id.in_(opened_by))
307 307
308 308 # only get those that are in "created" state
309 309 if only_created:
310 310 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
311 311
312 312 if order_by:
313 313 order_map = {
314 314 'name_raw': PullRequest.pull_request_id,
315 315 'id': PullRequest.pull_request_id,
316 316 'title': PullRequest.title,
317 317 'updated_on_raw': PullRequest.updated_on,
318 318 'target_repo': PullRequest.target_repo_id
319 319 }
320 320 if order_dir == 'asc':
321 321 q = q.order_by(order_map[order_by].asc())
322 322 else:
323 323 q = q.order_by(order_map[order_by].desc())
324 324
325 325 return q
326 326
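    # Illustrative usage sketch (repo name and values are hypothetical); the
    # query built above backs the public helpers below, for example:
    #
    #   model = PullRequestModel()
    #   closed = model.get_all('acme/backend',
    #                          statuses=[PullRequest.STATUS_CLOSED],
    #                          order_by='updated_on_raw', order_dir='desc')
    #
    # A search_q of 'fix' becomes the '%fix%' ILIKE filter applied to the
    # pull request id, author username, title and description.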
327 327 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
328 328 opened_by=None):
329 329 """
330 330 Count the number of pull requests for a specific repository.
331 331
332 332 :param repo_name: target or source repo
333 333 :param search_q: filter by text
334 334 :param source: boolean flag to specify if repo_name refers to source
335 335 :param statuses: list of pull request statuses
336 336 :param opened_by: author user of the pull request
337 337 :returns: int number of pull requests
338 338 """
339 339 q = self._prepare_get_all_query(
340 340 repo_name, search_q=search_q, source=source, statuses=statuses,
341 341 opened_by=opened_by)
342 342
343 343 return q.count()
344 344
345 345 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
346 346 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
347 347 """
348 348 Get all pull requests for a specific repository.
349 349
350 350 :param repo_name: target or source repo
351 351 :param search_q: filter by text
352 352 :param source: boolean flag to specify if repo_name refers to source
353 353 :param statuses: list of pull request statuses
354 354 :param opened_by: author user of the pull request
355 355 :param offset: pagination offset
356 356 :param length: length of returned list
357 357 :param order_by: order of the returned list
358 358 :param order_dir: 'asc' or 'desc' ordering direction
359 359 :returns: list of pull requests
360 360 """
361 361 q = self._prepare_get_all_query(
362 362 repo_name, search_q=search_q, source=source, statuses=statuses,
363 363 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
364 364
365 365 if length:
366 366 pull_requests = q.limit(length).offset(offset).all()
367 367 else:
368 368 pull_requests = q.all()
369 369
370 370 return pull_requests
371 371
372 372 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
373 373 opened_by=None):
374 374 """
375 375 Count the number of pull requests for a specific repository that are
376 376 awaiting review.
377 377
378 378 :param repo_name: target or source repo
379 379 :param search_q: filter by text
380 380 :param source: boolean flag to specify if repo_name refers to source
381 381 :param statuses: list of pull request statuses
382 382 :param opened_by: author user of the pull request
383 383 :returns: int number of pull requests
384 384 """
385 385 pull_requests = self.get_awaiting_review(
386 386 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
387 387
388 388 return len(pull_requests)
389 389
390 390 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
391 391 opened_by=None, offset=0, length=None,
392 392 order_by=None, order_dir='desc'):
393 393 """
394 394 Get all pull requests for a specific repository that are awaiting
395 395 review.
396 396
397 397 :param repo_name: target or source repo
398 398 :param search_q: filter by text
399 399 :param source: boolean flag to specify if repo_name refers to source
400 400 :param statuses: list of pull request statuses
401 401 :param opened_by: author user of the pull request
402 402 :param offset: pagination offset
403 403 :param length: length of returned list
404 404 :param order_by: order of the returned list
405 405 :param order_dir: 'asc' or 'desc' ordering direction
406 406 :returns: list of pull requests
407 407 """
408 408 pull_requests = self.get_all(
409 409 repo_name, search_q=search_q, source=source, statuses=statuses,
410 410 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
411 411
412 412 _filtered_pull_requests = []
413 413 for pr in pull_requests:
414 414 status = pr.calculated_review_status()
415 415 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
416 416 ChangesetStatus.STATUS_UNDER_REVIEW]:
417 417 _filtered_pull_requests.append(pr)
418 418 if length:
419 419 return _filtered_pull_requests[offset:offset+length]
420 420 else:
421 421 return _filtered_pull_requests
422 422
423 423 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
424 424 opened_by=None, user_id=None):
425 425 """
426 426 Count the number of pull requests for a specific repository that are
427 427 awaiting review from a specific user.
428 428
429 429 :param repo_name: target or source repo
430 430 :param search_q: filter by text
431 431 :param source: boolean flag to specify if repo_name refers to source
432 432 :param statuses: list of pull request statuses
433 433 :param opened_by: author user of the pull request
434 434 :param user_id: reviewer user of the pull request
435 435 :returns: int number of pull requests
436 436 """
437 437 pull_requests = self.get_awaiting_my_review(
438 438 repo_name, search_q=search_q, source=source, statuses=statuses,
439 439 opened_by=opened_by, user_id=user_id)
440 440
441 441 return len(pull_requests)
442 442
443 443 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
444 444 opened_by=None, user_id=None, offset=0,
445 445 length=None, order_by=None, order_dir='desc'):
446 446 """
447 447 Get all pull requests for a specific repository that are awaiting
448 448 review from a specific user.
449 449
450 450 :param repo_name: target or source repo
451 451 :param search_q: filter by text
452 452 :param source: boolean flag to specify if repo_name refers to source
453 453 :param statuses: list of pull request statuses
454 454 :param opened_by: author user of the pull request
455 455 :param user_id: reviewer user of the pull request
456 456 :param offset: pagination offset
457 457 :param length: length of returned list
458 458 :param order_by: order of the returned list
459 459 :param order_dir: 'asc' or 'desc' ordering direction
460 460 :returns: list of pull requests
461 461 """
462 462 pull_requests = self.get_all(
463 463 repo_name, search_q=search_q, source=source, statuses=statuses,
464 464 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
465 465
466 466 _my = PullRequestModel().get_not_reviewed(user_id)
467 467 my_participation = []
468 468 for pr in pull_requests:
469 469 if pr in _my:
470 470 my_participation.append(pr)
471 471 _filtered_pull_requests = my_participation
472 472 if length:
473 473 return _filtered_pull_requests[offset:offset+length]
474 474 else:
475 475 return _filtered_pull_requests
476 476
477 477 def get_not_reviewed(self, user_id):
478 478 return [
479 479 x.pull_request for x in PullRequestReviewers.query().filter(
480 480 PullRequestReviewers.user_id == user_id).all()
481 481 ]
482 482
483 483 def _prepare_participating_query(self, user_id=None, statuses=None, query='',
484 484 order_by=None, order_dir='desc'):
485 485 q = PullRequest.query()
486 486 if user_id:
487 487 reviewers_subquery = Session().query(
488 488 PullRequestReviewers.pull_request_id).filter(
489 489 PullRequestReviewers.user_id == user_id).subquery()
490 490 user_filter = or_(
491 491 PullRequest.user_id == user_id,
492 492 PullRequest.pull_request_id.in_(reviewers_subquery)
493 493 )
494 494 q = PullRequest.query().filter(user_filter)
495 495
496 496 # closed,opened
497 497 if statuses:
498 498 q = q.filter(PullRequest.status.in_(statuses))
499 499
500 500 if query:
501 501 like_expression = u'%{}%'.format(safe_unicode(query))
502 502 q = q.join(User)
503 503 q = q.filter(or_(
504 504 cast(PullRequest.pull_request_id, String).ilike(like_expression),
505 505 User.username.ilike(like_expression),
506 506 PullRequest.title.ilike(like_expression),
507 507 PullRequest.description.ilike(like_expression),
508 508 ))
509 509 if order_by:
510 510 order_map = {
511 511 'name_raw': PullRequest.pull_request_id,
512 512 'title': PullRequest.title,
513 513 'updated_on_raw': PullRequest.updated_on,
514 514 'target_repo': PullRequest.target_repo_id
515 515 }
516 516 if order_dir == 'asc':
517 517 q = q.order_by(order_map[order_by].asc())
518 518 else:
519 519 q = q.order_by(order_map[order_by].desc())
520 520
521 521 return q
522 522
523 523 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
524 524 q = self._prepare_participating_query(user_id, statuses=statuses, query=query)
525 525 return q.count()
526 526
527 527 def get_im_participating_in(
528 528 self, user_id=None, statuses=None, query='', offset=0,
529 529 length=None, order_by=None, order_dir='desc'):
530 530 """
531 531 Get all pull requests that I'm participating in, or have opened.
532 532 """
533 533
534 534 q = self._prepare_participating_query(
535 535 user_id, statuses=statuses, query=query, order_by=order_by,
536 536 order_dir=order_dir)
537 537
538 538 if length:
539 539 pull_requests = q.limit(length).offset(offset).all()
540 540 else:
541 541 pull_requests = q.all()
542 542
543 543 return pull_requests
544 544
545 545 def get_versions(self, pull_request):
546 546 """
547 547 returns versions of a pull request sorted by version ID ascending
548 548 """
549 549 return PullRequestVersion.query()\
550 550 .filter(PullRequestVersion.pull_request == pull_request)\
551 551 .order_by(PullRequestVersion.pull_request_version_id.asc())\
552 552 .all()
553 553
554 554 def get_pr_version(self, pull_request_id, version=None):
555 555 at_version = None
556 556
557 557 if version and version == 'latest':
558 558 pull_request_ver = PullRequest.get(pull_request_id)
559 559 pull_request_obj = pull_request_ver
560 560 _org_pull_request_obj = pull_request_obj
561 561 at_version = 'latest'
562 562 elif version:
563 563 pull_request_ver = PullRequestVersion.get_or_404(version)
564 564 pull_request_obj = pull_request_ver
565 565 _org_pull_request_obj = pull_request_ver.pull_request
566 566 at_version = pull_request_ver.pull_request_version_id
567 567 else:
568 568 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
569 569 pull_request_id)
570 570
571 571 pull_request_display_obj = PullRequest.get_pr_display_object(
572 572 pull_request_obj, _org_pull_request_obj)
573 573
574 574 return _org_pull_request_obj, pull_request_obj, \
575 575 pull_request_display_obj, at_version
576 576
577 577 def create(self, created_by, source_repo, source_ref, target_repo,
578 578 target_ref, revisions, reviewers, title, description=None,
579 579 common_ancestor_id=None,
580 580 description_renderer=None,
581 581 reviewer_data=None, translator=None, auth_user=None):
582 582 translator = translator or get_current_request().translate
583 583
584 584 created_by_user = self._get_user(created_by)
585 585 auth_user = auth_user or created_by_user.AuthUser()
586 586 source_repo = self._get_repo(source_repo)
587 587 target_repo = self._get_repo(target_repo)
588 588
589 589 pull_request = PullRequest()
590 590 pull_request.source_repo = source_repo
591 591 pull_request.source_ref = source_ref
592 592 pull_request.target_repo = target_repo
593 593 pull_request.target_ref = target_ref
594 594 pull_request.revisions = revisions
595 595 pull_request.title = title
596 596 pull_request.description = description
597 597 pull_request.description_renderer = description_renderer
598 598 pull_request.author = created_by_user
599 599 pull_request.reviewer_data = reviewer_data
600 600 pull_request.pull_request_state = pull_request.STATE_CREATING
601 601 pull_request.common_ancestor_id = common_ancestor_id
602 602
603 603 Session().add(pull_request)
604 604 Session().flush()
605 605
606 606 reviewer_ids = set()
607 607 # members / reviewers
608 608 for reviewer_object in reviewers:
609 609 user_id, reasons, mandatory, rules = reviewer_object
610 610 user = self._get_user(user_id)
611 611
612 612 # skip duplicates
613 613 if user.user_id in reviewer_ids:
614 614 continue
615 615
616 616 reviewer_ids.add(user.user_id)
617 617
618 618 reviewer = PullRequestReviewers()
619 619 reviewer.user = user
620 620 reviewer.pull_request = pull_request
621 621 reviewer.reasons = reasons
622 622 reviewer.mandatory = mandatory
623 623
624 624 # NOTE(marcink): pick only first rule for now
625 625 rule_id = list(rules)[0] if rules else None
626 626 rule = RepoReviewRule.get(rule_id) if rule_id else None
627 627 if rule:
628 628 review_group = rule.user_group_vote_rule(user_id)
629 629 # we check if this particular reviewer is a member of a voting group
630 630 if review_group:
631 631 # NOTE(marcink):
632 632 # the user can be a member of more than one group, but we pick the first,
633 633 # the same as the default reviewers algorithm does
634 634 review_group = review_group[0]
635 635
636 636 rule_data = {
637 637 'rule_name':
638 638 rule.review_rule_name,
639 639 'rule_user_group_entry_id':
640 640 review_group.repo_review_rule_users_group_id,
641 641 'rule_user_group_name':
642 642 review_group.users_group.users_group_name,
643 643 'rule_user_group_members':
644 644 [x.user.username for x in review_group.users_group.members],
645 645 'rule_user_group_members_id':
646 646 [x.user.user_id for x in review_group.users_group.members],
647 647 }
648 648 # e.g {'vote_rule': -1, 'mandatory': True}
649 649 rule_data.update(review_group.rule_data())
650 650
651 651 reviewer.rule_data = rule_data
652 652
653 653 Session().add(reviewer)
654 654 Session().flush()
655 655
656 656 # Set approval status to "Under Review" for all commits which are
657 657 # part of this pull request.
658 658 ChangesetStatusModel().set_status(
659 659 repo=target_repo,
660 660 status=ChangesetStatus.STATUS_UNDER_REVIEW,
661 661 user=created_by_user,
662 662 pull_request=pull_request
663 663 )
664 664 # we commit early at this point. This has to do with the fact that
665 665 # the queries above do some row-locking, and because of that we need to
666 666 # commit and finish the transaction before the validate call below,
667 667 # which for large repos could take long and result in long row locks
668 668 Session().commit()
669 669
670 670 # prepare workspace, and run initial merge simulation. Set state during that
671 671 # operation
672 672 pull_request = PullRequest.get(pull_request.pull_request_id)
673 673
674 674 # set the state to merging for the merge simulation; once finished, set it
675 675 # to created to mark that the simulation worked fine
676 676 with pull_request.set_state(PullRequest.STATE_MERGING,
677 677 final_state=PullRequest.STATE_CREATED) as state_obj:
678 678 MergeCheck.validate(
679 679 pull_request, auth_user=auth_user, translator=translator)
680 680
681 681 self.notify_reviewers(pull_request, reviewer_ids)
682 682 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
683 683
684 684 creation_data = pull_request.get_api_data(with_merge_state=False)
685 685 self._log_audit_action(
686 686 'repo.pull_request.create', {'data': creation_data},
687 687 auth_user, pull_request)
688 688
689 689 return pull_request
690 690
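    # Illustrative sketch of the ``reviewers`` argument consumed by create()
    # above (values are hypothetical; the tuple shape matches the one
    # documented in update_reviewers() further below):
    #
    #   reviewers = [
    #       # (user_id, reasons, mandatory, rule ids)
    #       (2, ['Default reviewer'], False, []),
    #       (5, ['Member of a voting group'], True, [41]),
    #   ]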
691 691 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
692 692 pull_request = self.__get_pull_request(pull_request)
693 693 target_scm = pull_request.target_repo.scm_instance()
694 694 if action == 'create':
695 695 trigger_hook = hooks_utils.trigger_create_pull_request_hook
696 696 elif action == 'merge':
697 697 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
698 698 elif action == 'close':
699 699 trigger_hook = hooks_utils.trigger_close_pull_request_hook
700 700 elif action == 'review_status_change':
701 701 trigger_hook = hooks_utils.trigger_review_pull_request_hook
702 702 elif action == 'update':
703 703 trigger_hook = hooks_utils.trigger_update_pull_request_hook
704 704 elif action == 'comment':
705 705 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
706 706 elif action == 'comment_edit':
707 707 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
708 708 else:
709 709 return
710 710
711 711 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
712 712 pull_request, action, trigger_hook)
713 713 trigger_hook(
714 714 username=user.username,
715 715 repo_name=pull_request.target_repo.repo_name,
716 716 repo_type=target_scm.alias,
717 717 pull_request=pull_request,
718 718 data=data)
719 719
720 720 def _get_commit_ids(self, pull_request):
721 721 """
722 722 Return the commit ids of the merged pull request.
723 723
724 724 This method does not yet deal correctly with the lack of auto-updates
725 725 or with implicit target updates.
726 726 For example: if a commit in the source repo is already in the target, it
727 727 will still be reported.
728 728 """
729 729 merge_rev = pull_request.merge_rev
730 730 if merge_rev is None:
731 731 raise ValueError('This pull request was not merged yet')
732 732
733 733 commit_ids = list(pull_request.revisions)
734 734 if merge_rev not in commit_ids:
735 735 commit_ids.append(merge_rev)
736 736
737 737 return commit_ids
738 738
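    # Illustrative example (hypothetical ids): for a merged pull request with
    # revisions == ['c1', 'c2'] and merge_rev == 'm1' (a merge commit created
    # on the target), this returns ['c1', 'c2', 'm1']; if the merge was a
    # fast-forward so that merge_rev == 'c2', it returns just ['c1', 'c2'].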
739 739 def merge_repo(self, pull_request, user, extras):
740 740 log.debug("Merging pull request %s", pull_request.pull_request_id)
741 741 extras['user_agent'] = 'internal-merge'
742 742 merge_state = self._merge_pull_request(pull_request, user, extras)
743 743 if merge_state.executed:
744 744 log.debug("Merge was successful, updating the pull request comments.")
745 745 self._comment_and_close_pr(pull_request, user, merge_state)
746 746
747 747 self._log_audit_action(
748 748 'repo.pull_request.merge',
749 749 {'merge_state': merge_state.__dict__},
750 750 user, pull_request)
751 751
752 752 else:
753 753 log.warn("Merge failed, not updating the pull request.")
754 754 return merge_state
755 755
756 756 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
757 757 target_vcs = pull_request.target_repo.scm_instance()
758 758 source_vcs = pull_request.source_repo.scm_instance()
759 759
760 760 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
761 761 pr_id=pull_request.pull_request_id,
762 762 pr_title=pull_request.title,
763 763 source_repo=source_vcs.name,
764 764 source_ref_name=pull_request.source_ref_parts.name,
765 765 target_repo=target_vcs.name,
766 766 target_ref_name=pull_request.target_ref_parts.name,
767 767 )
768 768
769 769 workspace_id = self._workspace_id(pull_request)
770 770 repo_id = pull_request.target_repo.repo_id
771 771 use_rebase = self._use_rebase_for_merging(pull_request)
772 772 close_branch = self._close_branch_before_merging(pull_request)
773 773 user_name = self._user_name_for_merging(pull_request, user)
774 774
775 775 target_ref = self._refresh_reference(
776 776 pull_request.target_ref_parts, target_vcs)
777 777
778 778 callback_daemon, extras = prepare_callback_daemon(
779 779 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
780 780 host=vcs_settings.HOOKS_HOST,
781 781 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
782 782
783 783 with callback_daemon:
784 784 # TODO: johbo: Implement a clean way to run a config_override
785 785 # for a single call.
786 786 target_vcs.config.set(
787 787 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
788 788
789 789 merge_state = target_vcs.merge(
790 790 repo_id, workspace_id, target_ref, source_vcs,
791 791 pull_request.source_ref_parts,
792 792 user_name=user_name, user_email=user.email,
793 793 message=message, use_rebase=use_rebase,
794 794 close_branch=close_branch)
795 795 return merge_state
796 796
797 797 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
798 798 pull_request.merge_rev = merge_state.merge_ref.commit_id
799 799 pull_request.updated_on = datetime.datetime.now()
800 800 close_msg = close_msg or 'Pull request merged and closed'
801 801
802 802 CommentsModel().create(
803 803 text=safe_unicode(close_msg),
804 804 repo=pull_request.target_repo.repo_id,
805 805 user=user.user_id,
806 806 pull_request=pull_request.pull_request_id,
807 807 f_path=None,
808 808 line_no=None,
809 809 closing_pr=True
810 810 )
811 811
812 812 Session().add(pull_request)
813 813 Session().flush()
814 814 # TODO: paris: replace invalidation with less radical solution
815 815 ScmModel().mark_for_invalidation(
816 816 pull_request.target_repo.repo_name)
817 817 self.trigger_pull_request_hook(pull_request, user, 'merge')
818 818
819 819 def has_valid_update_type(self, pull_request):
820 820 source_ref_type = pull_request.source_ref_parts.type
821 821 return source_ref_type in self.REF_TYPES
822 822
823 823 def get_flow_commits(self, pull_request):
824 824
825 825 # source repo
826 826 source_ref_name = pull_request.source_ref_parts.name
827 827 source_ref_type = pull_request.source_ref_parts.type
828 828 source_ref_id = pull_request.source_ref_parts.commit_id
829 829 source_repo = pull_request.source_repo.scm_instance()
830 830
831 831 try:
832 832 if source_ref_type in self.REF_TYPES:
833 833 source_commit = source_repo.get_commit(source_ref_name)
834 834 else:
835 835 source_commit = source_repo.get_commit(source_ref_id)
836 836 except CommitDoesNotExistError:
837 837 raise SourceRefMissing()
838 838
839 839 # target repo
840 840 target_ref_name = pull_request.target_ref_parts.name
841 841 target_ref_type = pull_request.target_ref_parts.type
842 842 target_ref_id = pull_request.target_ref_parts.commit_id
843 843 target_repo = pull_request.target_repo.scm_instance()
844 844
845 845 try:
846 846 if target_ref_type in self.REF_TYPES:
847 847 target_commit = target_repo.get_commit(target_ref_name)
848 848 else:
849 849 target_commit = target_repo.get_commit(target_ref_id)
850 850 except CommitDoesNotExistError:
851 851 raise TargetRefMissing()
852 852
853 853 return source_commit, target_commit
854 854
855 855 def update_commits(self, pull_request, updating_user):
856 856 """
857 857 Get the updated list of commits for the pull request
858 858 and return the new pull request version and the list
859 859 of commits processed by this update action
860 860
861 861 updating_user is the user_object who triggered the update
862 862 """
863 863 pull_request = self.__get_pull_request(pull_request)
864 864 source_ref_type = pull_request.source_ref_parts.type
865 865 source_ref_name = pull_request.source_ref_parts.name
866 866 source_ref_id = pull_request.source_ref_parts.commit_id
867 867
868 868 target_ref_type = pull_request.target_ref_parts.type
869 869 target_ref_name = pull_request.target_ref_parts.name
870 870 target_ref_id = pull_request.target_ref_parts.commit_id
871 871
872 872 if not self.has_valid_update_type(pull_request):
873 873 log.debug("Skipping update of pull request %s due to ref type: %s",
874 874 pull_request, source_ref_type)
875 875 return UpdateResponse(
876 876 executed=False,
877 877 reason=UpdateFailureReason.WRONG_REF_TYPE,
878 878 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
879 879 source_changed=False, target_changed=False)
880 880
881 881 try:
882 882 source_commit, target_commit = self.get_flow_commits(pull_request)
883 883 except SourceRefMissing:
884 884 return UpdateResponse(
885 885 executed=False,
886 886 reason=UpdateFailureReason.MISSING_SOURCE_REF,
887 887 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
888 888 source_changed=False, target_changed=False)
889 889 except TargetRefMissing:
890 890 return UpdateResponse(
891 891 executed=False,
892 892 reason=UpdateFailureReason.MISSING_TARGET_REF,
893 893 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
894 894 source_changed=False, target_changed=False)
895 895
896 896 source_changed = source_ref_id != source_commit.raw_id
897 897 target_changed = target_ref_id != target_commit.raw_id
898 898
899 899 if not (source_changed or target_changed):
900 900 log.debug("Nothing changed in pull request %s", pull_request)
901 901 return UpdateResponse(
902 902 executed=False,
903 903 reason=UpdateFailureReason.NO_CHANGE,
904 904 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
905 905 source_changed=source_changed, target_changed=target_changed)
906 906
907 907 change_in_found = 'target repo' if target_changed else 'source repo'
908 908 log.debug('Updating pull request because of change in %s detected',
909 909 change_in_found)
910 910
911 911 # Finally, an update is needed; in case of a source change we create
912 912 # a new version, otherwise we just update in place
913 913 if source_changed:
914 914 pull_request_version = self._create_version_from_snapshot(pull_request)
915 915 self._link_comments_to_version(pull_request_version)
916 916 else:
917 917 try:
918 918 ver = pull_request.versions[-1]
919 919 except IndexError:
920 920 ver = None
921 921
922 922 pull_request.pull_request_version_id = \
923 923 ver.pull_request_version_id if ver else None
924 924 pull_request_version = pull_request
925 925
926 926 source_repo = pull_request.source_repo.scm_instance()
927 927 target_repo = pull_request.target_repo.scm_instance()
928 928
929 929 # re-compute commit ids
930 930 old_commit_ids = pull_request.revisions
931 931 pre_load = ["author", "date", "message", "branch"]
932 932 commit_ranges = target_repo.compare(
933 933 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
934 934 pre_load=pre_load)
935 935
936 936 target_ref = target_commit.raw_id
937 937 source_ref = source_commit.raw_id
938 938 ancestor_commit_id = target_repo.get_common_ancestor(
939 939 target_ref, source_ref, source_repo)
940 940
941 941 if not ancestor_commit_id:
942 942 raise ValueError(
943 943 'cannot calculate diff info without a common ancestor. '
944 944 'Make sure both repositories are related, and have a common forking commit.')
945 945
946 946 pull_request.common_ancestor_id = ancestor_commit_id
947 947
948 948 pull_request.source_ref = '%s:%s:%s' % (
949 949 source_ref_type, source_ref_name, source_commit.raw_id)
950 950 pull_request.target_ref = '%s:%s:%s' % (
951 951 target_ref_type, target_ref_name, ancestor_commit_id)
952 952
953 953 pull_request.revisions = [
954 954 commit.raw_id for commit in reversed(commit_ranges)]
955 955 pull_request.updated_on = datetime.datetime.now()
956 956 Session().add(pull_request)
957 957 new_commit_ids = pull_request.revisions
958 958
959 959 old_diff_data, new_diff_data = self._generate_update_diffs(
960 960 pull_request, pull_request_version)
961 961
962 962 # calculate commit and file changes
963 963 commit_changes = self._calculate_commit_id_changes(
964 964 old_commit_ids, new_commit_ids)
965 965 file_changes = self._calculate_file_changes(
966 966 old_diff_data, new_diff_data)
967 967
968 968 # set comments as outdated if DIFFS changed
969 969 CommentsModel().outdate_comments(
970 970 pull_request, old_diff_data=old_diff_data,
971 971 new_diff_data=new_diff_data)
972 972
973 973 valid_commit_changes = (commit_changes.added or commit_changes.removed)
974 974 file_node_changes = (
975 975 file_changes.added or file_changes.modified or file_changes.removed)
976 976 pr_has_changes = valid_commit_changes or file_node_changes
977 977
978 978 # Add an automatic comment to the pull request, in case
979 979 # anything has changed
980 980 if pr_has_changes:
981 981 update_comment = CommentsModel().create(
982 982 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
983 983 repo=pull_request.target_repo,
984 984 user=pull_request.author,
985 985 pull_request=pull_request,
986 986 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
987 987
988 988 # Update status to "Under Review" for added commits
989 989 for commit_id in commit_changes.added:
990 990 ChangesetStatusModel().set_status(
991 991 repo=pull_request.source_repo,
992 992 status=ChangesetStatus.STATUS_UNDER_REVIEW,
993 993 comment=update_comment,
994 994 user=pull_request.author,
995 995 pull_request=pull_request,
996 996 revision=commit_id)
997 997
998 998 # send update email to users
999 999 try:
1000 1000 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1001 1001 ancestor_commit_id=ancestor_commit_id,
1002 1002 commit_changes=commit_changes,
1003 1003 file_changes=file_changes)
1004 1004 except Exception:
1005 1005 log.exception('Failed to send email notification to users')
1006 1006
1007 1007 log.debug(
1008 1008 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1009 1009 'removed_ids: %s', pull_request.pull_request_id,
1010 1010 commit_changes.added, commit_changes.common, commit_changes.removed)
1011 1011 log.debug(
1012 1012 'Updated pull request with the following file changes: %s',
1013 1013 file_changes)
1014 1014
1015 1015 log.info(
1016 1016 "Updated pull request %s from commit %s to commit %s, "
1017 1017 "stored new version %s of this pull request.",
1018 1018 pull_request.pull_request_id, source_ref_id,
1019 1019 pull_request.source_ref_parts.commit_id,
1020 1020 pull_request_version.pull_request_version_id)
1021 1021 Session().commit()
1022 1022 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1023 1023
1024 1024 return UpdateResponse(
1025 1025 executed=True, reason=UpdateFailureReason.NONE,
1026 1026 old=pull_request, new=pull_request_version,
1027 1027 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1028 1028 source_changed=source_changed, target_changed=target_changed)
1029 1029
1030 1030 def _create_version_from_snapshot(self, pull_request):
1031 1031 version = PullRequestVersion()
1032 1032 version.title = pull_request.title
1033 1033 version.description = pull_request.description
1034 1034 version.status = pull_request.status
1035 1035 version.pull_request_state = pull_request.pull_request_state
1036 1036 version.created_on = datetime.datetime.now()
1037 1037 version.updated_on = pull_request.updated_on
1038 1038 version.user_id = pull_request.user_id
1039 1039 version.source_repo = pull_request.source_repo
1040 1040 version.source_ref = pull_request.source_ref
1041 1041 version.target_repo = pull_request.target_repo
1042 1042 version.target_ref = pull_request.target_ref
1043 1043
1044 1044 version._last_merge_source_rev = pull_request._last_merge_source_rev
1045 1045 version._last_merge_target_rev = pull_request._last_merge_target_rev
1046 1046 version.last_merge_status = pull_request.last_merge_status
1047 1047 version.last_merge_metadata = pull_request.last_merge_metadata
1048 1048 version.shadow_merge_ref = pull_request.shadow_merge_ref
1049 1049 version.merge_rev = pull_request.merge_rev
1050 1050 version.reviewer_data = pull_request.reviewer_data
1051 1051
1052 1052 version.revisions = pull_request.revisions
1053 1053 version.common_ancestor_id = pull_request.common_ancestor_id
1054 1054 version.pull_request = pull_request
1055 1055 Session().add(version)
1056 1056 Session().flush()
1057 1057
1058 1058 return version
1059 1059
1060 1060 def _generate_update_diffs(self, pull_request, pull_request_version):
1061 1061
1062 1062 diff_context = (
1063 1063 self.DIFF_CONTEXT +
1064 1064 CommentsModel.needed_extra_diff_context())
1065 1065 hide_whitespace_changes = False
1066 1066 source_repo = pull_request_version.source_repo
1067 1067 source_ref_id = pull_request_version.source_ref_parts.commit_id
1068 1068 target_ref_id = pull_request_version.target_ref_parts.commit_id
1069 1069 old_diff = self._get_diff_from_pr_or_version(
1070 1070 source_repo, source_ref_id, target_ref_id,
1071 1071 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1072 1072
1073 1073 source_repo = pull_request.source_repo
1074 1074 source_ref_id = pull_request.source_ref_parts.commit_id
1075 1075 target_ref_id = pull_request.target_ref_parts.commit_id
1076 1076
1077 1077 new_diff = self._get_diff_from_pr_or_version(
1078 1078 source_repo, source_ref_id, target_ref_id,
1079 1079 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1080 1080
1081 1081 old_diff_data = diffs.DiffProcessor(old_diff)
1082 1082 old_diff_data.prepare()
1083 1083 new_diff_data = diffs.DiffProcessor(new_diff)
1084 1084 new_diff_data.prepare()
1085 1085
1086 1086 return old_diff_data, new_diff_data
1087 1087
1088 1088 def _link_comments_to_version(self, pull_request_version):
1089 1089 """
1090 1090 Link all unlinked comments of this pull request to the given version.
1091 1091
1092 1092 :param pull_request_version: The `PullRequestVersion` to which
1093 1093 the comments shall be linked.
1094 1094
1095 1095 """
1096 1096 pull_request = pull_request_version.pull_request
1097 1097 comments = ChangesetComment.query()\
1098 1098 .filter(
1099 1099 # TODO: johbo: Should we query for the repo at all here?
1100 1100 # Pending decision on how comments of PRs are to be related
1101 1101 # to either the source repo, the target repo or no repo at all.
1102 1102 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1103 1103 ChangesetComment.pull_request == pull_request,
1104 1104 ChangesetComment.pull_request_version == None)\
1105 1105 .order_by(ChangesetComment.comment_id.asc())
1106 1106
1107 1107 # TODO: johbo: Find out why this breaks if it is done in a bulk
1108 1108 # operation.
1109 1109 for comment in comments:
1110 1110 comment.pull_request_version_id = (
1111 1111 pull_request_version.pull_request_version_id)
1112 1112 Session().add(comment)
1113 1113
1114 1114 def _calculate_commit_id_changes(self, old_ids, new_ids):
1115 1115 added = [x for x in new_ids if x not in old_ids]
1116 1116 common = [x for x in new_ids if x in old_ids]
1117 1117 removed = [x for x in old_ids if x not in new_ids]
1118 1118 total = new_ids
1119 1119 return ChangeTuple(added, common, removed, total)
1120 1120
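    # Worked example (hypothetical commit ids), tracing the logic above:
    #   old_ids = ['aaa', 'bbb', 'ccc']
    #   new_ids = ['bbb', 'ccc', 'ddd']
    # gives added=['ddd'], common=['bbb', 'ccc'], removed=['aaa'] and
    # total=new_ids, i.e. one commit was stripped and one new commit was added.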
1121 1121 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1122 1122
1123 1123 old_files = OrderedDict()
1124 1124 for diff_data in old_diff_data.parsed_diff:
1125 1125 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1126 1126
1127 1127 added_files = []
1128 1128 modified_files = []
1129 1129 removed_files = []
1130 1130 for diff_data in new_diff_data.parsed_diff:
1131 1131 new_filename = diff_data['filename']
1132 1132 new_hash = md5_safe(diff_data['raw_diff'])
1133 1133
1134 1134 old_hash = old_files.get(new_filename)
1135 1135 if not old_hash:
1136 1136 # file is not present in the old diff, we have to figure out the
1137 1137 # ADD/REMOVE operation from the parsed diff
1138 1138 operations_dict = diff_data['stats']['ops']
1139 1139 if diffs.DEL_FILENODE in operations_dict:
1140 1140 removed_files.append(new_filename)
1141 1141 else:
1142 1142 added_files.append(new_filename)
1143 1143 else:
1144 1144 if new_hash != old_hash:
1145 1145 modified_files.append(new_filename)
1146 1146 # now remove the file from old_files, since we have already seen it
1147 1147 del old_files[new_filename]
1148 1148
1149 1149 # removed files are those present in the old diff but not in the NEW one;
1150 1150 # since we drop old files that are also present in the new diff, any
1151 1151 # left-overs are the removed files
1152 1152 removed_files.extend(old_files.keys())
1153 1153
1154 1154 return FileChangeTuple(added_files, modified_files, removed_files)
1155 1155
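    # Worked example (hypothetical file names), tracing the logic above:
    #   old diff: {'setup.py': <hash1>, 'README.rst': <hash2>}
    #   new diff: 'setup.py' with a different hash, plus 'docs/index.rst'
    #             whose ops do not contain DEL_FILENODE
    # gives modified_files=['setup.py'], added_files=['docs/index.rst'] and
    # removed_files=['README.rst'] (the left-over entry in old_files).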
1156 1156 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1157 1157 """
1158 1158 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1159 1159 so it's always looking the same disregarding on which default
1160 1160 renderer system is using.
1161 1161
1162 1162 :param ancestor_commit_id: ancestor raw_id
1163 1163 :param changes: changes named tuple
1164 1164 :param file_changes: file changes named tuple
1165 1165
1166 1166 """
1167 1167 new_status = ChangesetStatus.get_status_lbl(
1168 1168 ChangesetStatus.STATUS_UNDER_REVIEW)
1169 1169
1170 1170 changed_files = (
1171 1171 file_changes.added + file_changes.modified + file_changes.removed)
1172 1172
1173 1173 params = {
1174 1174 'under_review_label': new_status,
1175 1175 'added_commits': changes.added,
1176 1176 'removed_commits': changes.removed,
1177 1177 'changed_files': changed_files,
1178 1178 'added_files': file_changes.added,
1179 1179 'modified_files': file_changes.modified,
1180 1180 'removed_files': file_changes.removed,
1181 1181 'ancestor_commit_id': ancestor_commit_id
1182 1182 }
1183 1183 renderer = RstTemplateRenderer()
1184 1184 return renderer.render('pull_request_update.mako', **params)
1185 1185
1186 1186 def edit(self, pull_request, title, description, description_renderer, user):
1187 1187 pull_request = self.__get_pull_request(pull_request)
1188 1188 old_data = pull_request.get_api_data(with_merge_state=False)
1189 1189 if pull_request.is_closed():
1190 1190 raise ValueError('This pull request is closed')
1191 1191 if title:
1192 1192 pull_request.title = title
1193 1193 pull_request.description = description
1194 1194 pull_request.updated_on = datetime.datetime.now()
1195 1195 pull_request.description_renderer = description_renderer
1196 1196 Session().add(pull_request)
1197 1197 self._log_audit_action(
1198 1198 'repo.pull_request.edit', {'old_data': old_data},
1199 1199 user, pull_request)
1200 1200
1201 1201 def update_reviewers(self, pull_request, reviewer_data, user):
1202 1202 """
1203 1203 Update the reviewers in the pull request
1204 1204
1205 1205 :param pull_request: the pr to update
1206 1206 :param reviewer_data: list of tuples
1207 1207 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1208 1208 """
1209 1209 pull_request = self.__get_pull_request(pull_request)
1210 1210 if pull_request.is_closed():
1211 1211 raise ValueError('This pull request is closed')
1212 1212
1213 1213 reviewers = {}
1214 1214 for user_id, reasons, mandatory, rules in reviewer_data:
1215 1215 if isinstance(user_id, (int, compat.string_types)):
1216 1216 user_id = self._get_user(user_id).user_id
1217 1217 reviewers[user_id] = {
1218 1218 'reasons': reasons, 'mandatory': mandatory}
1219 1219
1220 1220 reviewers_ids = set(reviewers.keys())
1221 1221 current_reviewers = PullRequestReviewers.query()\
1222 1222 .filter(PullRequestReviewers.pull_request ==
1223 1223 pull_request).all()
1224 1224 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1225 1225
1226 1226 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1227 1227 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1228 1228
1229 1229 log.debug("Adding %s reviewers", ids_to_add)
1230 1230 log.debug("Removing %s reviewers", ids_to_remove)
1231 1231 changed = False
1232 1232 added_audit_reviewers = []
1233 1233 removed_audit_reviewers = []
1234 1234
1235 1235 for uid in ids_to_add:
1236 1236 changed = True
1237 1237 _usr = self._get_user(uid)
1238 1238 reviewer = PullRequestReviewers()
1239 1239 reviewer.user = _usr
1240 1240 reviewer.pull_request = pull_request
1241 1241 reviewer.reasons = reviewers[uid]['reasons']
1242 1242 # NOTE(marcink): mandatory shouldn't be changed now
1243 1243 # reviewer.mandatory = reviewers[uid]['reasons']
1244 1244 Session().add(reviewer)
1245 1245 added_audit_reviewers.append(reviewer.get_dict())
1246 1246
1247 1247 for uid in ids_to_remove:
1248 1248 changed = True
1249 1249 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1250 1250 # that prevents and fixes cases where the same reviewer was added twice.
1251 1251 # this CAN happen due to the lack of DB checks
1252 1252 reviewers = PullRequestReviewers.query()\
1253 1253 .filter(PullRequestReviewers.user_id == uid,
1254 1254 PullRequestReviewers.pull_request == pull_request)\
1255 1255 .all()
1256 1256
1257 1257 for obj in reviewers:
1258 1258 removed_audit_reviewers.append(obj.get_dict())
1259 1259 Session().delete(obj)
1260 1260
1261 1261 if changed:
1262 1262 Session().expire_all()
1263 1263 pull_request.updated_on = datetime.datetime.now()
1264 1264 Session().add(pull_request)
1265 1265
1266 1266 # finally store audit logs
1267 1267 for user_data in added_audit_reviewers:
1268 1268 self._log_audit_action(
1269 1269 'repo.pull_request.reviewer.add', {'data': user_data},
1270 1270 user, pull_request)
1271 1271 for user_data in removed_audit_reviewers:
1272 1272 self._log_audit_action(
1273 1273 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1274 1274 user, pull_request)
1275 1275
1276 1276 self.notify_reviewers(pull_request, ids_to_add)
1277 1277 return ids_to_add, ids_to_remove
1278 1278
1279 1279 def get_url(self, pull_request, request=None, permalink=False):
1280 1280 if not request:
1281 1281 request = get_current_request()
1282 1282
1283 1283 if permalink:
1284 1284 return request.route_url(
1285 1285 'pull_requests_global',
1286 1286 pull_request_id=pull_request.pull_request_id,)
1287 1287 else:
1288 1288 return request.route_url('pullrequest_show',
1289 1289 repo_name=safe_str(pull_request.target_repo.repo_name),
1290 1290 pull_request_id=pull_request.pull_request_id,)
1291 1291
1292 1292 def get_shadow_clone_url(self, pull_request, request=None):
1293 1293 """
1294 1294 Returns qualified url pointing to the shadow repository. If this pull
1295 1295 request is closed there is no shadow repository and ``None`` will be
1296 1296 returned.
1297 1297 """
1298 1298 if pull_request.is_closed():
1299 1299 return None
1300 1300 else:
1301 1301 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1302 1302 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1303 1303
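    # Illustrative example (hypothetical host and repo name): for an open pull
    # request #123 targeting 'acme/backend', get_url() resolves the
    # 'pullrequest_show' route to something like
    #   https://code.example.com/acme/backend/pull-request/123
    # and the shadow clone URL is that value with '/repository' appended.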
1304 1304 def notify_reviewers(self, pull_request, reviewers_ids):
1305 1305 # notification to reviewers
1306 1306 if not reviewers_ids:
1307 1307 return
1308 1308
1309 1309 log.debug('Notify following reviewers about pull-request %s', reviewers_ids)
1310 1310
1311 1311 pull_request_obj = pull_request
1312 1312 # get the current participants of this pull request
1313 1313 recipients = reviewers_ids
1314 1314 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1315 1315
1316 1316 pr_source_repo = pull_request_obj.source_repo
1317 1317 pr_target_repo = pull_request_obj.target_repo
1318 1318
1319 1319 pr_url = h.route_url('pullrequest_show',
1320 1320 repo_name=pr_target_repo.repo_name,
1321 1321 pull_request_id=pull_request_obj.pull_request_id,)
1322 1322
1323 1323 # set some variables for email notification
1324 1324 pr_target_repo_url = h.route_url(
1325 1325 'repo_summary', repo_name=pr_target_repo.repo_name)
1326 1326
1327 1327 pr_source_repo_url = h.route_url(
1328 1328 'repo_summary', repo_name=pr_source_repo.repo_name)
1329 1329
1330 1330 # pull request specifics
1331 1331 pull_request_commits = [
1332 1332 (x.raw_id, x.message)
1333 1333 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1334 1334
1335 1335 kwargs = {
1336 1336 'user': pull_request.author,
1337 1337 'pull_request': pull_request_obj,
1338 1338 'pull_request_commits': pull_request_commits,
1339 1339
1340 1340 'pull_request_target_repo': pr_target_repo,
1341 1341 'pull_request_target_repo_url': pr_target_repo_url,
1342 1342
1343 1343 'pull_request_source_repo': pr_source_repo,
1344 1344 'pull_request_source_repo_url': pr_source_repo_url,
1345 1345
1346 1346 'pull_request_url': pr_url,
1347 1347 'thread_ids': [pr_url],
1348 1348 }
1349 1349
1350 1350 # pre-generate the subject for notification itself
1351 1351 (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
1352 1352 notification_type, **kwargs)
1353 1353
1354 1354 # create notification objects, and emails
1355 1355 NotificationModel().create(
1356 1356 created_by=pull_request.author,
1357 1357 notification_subject=subject,
1358 1358 notification_body=body_plaintext,
1359 1359 notification_type=notification_type,
1360 1360 recipients=recipients,
1361 1361 email_kwargs=kwargs,
1362 1362 )
1363 1363
1364 1364 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1365 1365 commit_changes, file_changes):
1366 1366
1367 1367 updating_user_id = updating_user.user_id
1368 1368 reviewers = set([x.user.user_id for x in pull_request.reviewers])
1369 1369 # NOTE(marcink): send notification to all other users except the
1370 1370 # person who updated the PR
1371 1371 recipients = reviewers.difference(set([updating_user_id]))
1372 1372
1373 1373 log.debug('Notify following recipients about pull-request update %s', recipients)
1374 1374
1375 1375 pull_request_obj = pull_request
1376 1376
1377 1377 # send email about the update
1378 1378 changed_files = (
1379 1379 file_changes.added + file_changes.modified + file_changes.removed)
1380 1380
1381 1381 pr_source_repo = pull_request_obj.source_repo
1382 1382 pr_target_repo = pull_request_obj.target_repo
1383 1383
1384 1384 pr_url = h.route_url('pullrequest_show',
1385 1385 repo_name=pr_target_repo.repo_name,
1386 1386 pull_request_id=pull_request_obj.pull_request_id,)
1387 1387
1388 1388 # set some variables for email notification
1389 1389 pr_target_repo_url = h.route_url(
1390 1390 'repo_summary', repo_name=pr_target_repo.repo_name)
1391 1391
1392 1392 pr_source_repo_url = h.route_url(
1393 1393 'repo_summary', repo_name=pr_source_repo.repo_name)
1394 1394
1395 1395 email_kwargs = {
1396 1396 'date': datetime.datetime.now(),
1397 1397 'updating_user': updating_user,
1398 1398
1399 1399 'pull_request': pull_request_obj,
1400 1400
1401 1401 'pull_request_target_repo': pr_target_repo,
1402 1402 'pull_request_target_repo_url': pr_target_repo_url,
1403 1403
1404 1404 'pull_request_source_repo': pr_source_repo,
1405 1405 'pull_request_source_repo_url': pr_source_repo_url,
1406 1406
1407 1407 'pull_request_url': pr_url,
1408 1408
1409 1409 'ancestor_commit_id': ancestor_commit_id,
1410 1410 'added_commits': commit_changes.added,
1411 1411 'removed_commits': commit_changes.removed,
1412 1412 'changed_files': changed_files,
1413 1413 'added_files': file_changes.added,
1414 1414 'modified_files': file_changes.modified,
1415 1415 'removed_files': file_changes.removed,
1416 1416 'thread_ids': [pr_url],
1417 1417 }
1418 1418
1419 1419 (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
1420 1420 EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)
1421 1421
1422 1422 # create notification objects, and emails
1423 1423 NotificationModel().create(
1424 1424 created_by=updating_user,
1425 1425 notification_subject=subject,
1426 1426 notification_body=body_plaintext,
1427 1427 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1428 1428 recipients=recipients,
1429 1429 email_kwargs=email_kwargs,
1430 1430 )
1431 1431
1432 1432 def delete(self, pull_request, user=None):
1433 1433 if not user:
1434 1434 user = getattr(get_current_rhodecode_user(), 'username', None)
1435 1435
1436 1436 pull_request = self.__get_pull_request(pull_request)
1437 1437 old_data = pull_request.get_api_data(with_merge_state=False)
1438 1438 self._cleanup_merge_workspace(pull_request)
1439 1439 self._log_audit_action(
1440 1440 'repo.pull_request.delete', {'old_data': old_data},
1441 1441 user, pull_request)
1442 1442 Session().delete(pull_request)
1443 1443
1444 1444 def close_pull_request(self, pull_request, user):
1445 1445 pull_request = self.__get_pull_request(pull_request)
1446 1446 self._cleanup_merge_workspace(pull_request)
1447 1447 pull_request.status = PullRequest.STATUS_CLOSED
1448 1448 pull_request.updated_on = datetime.datetime.now()
1449 1449 Session().add(pull_request)
1450 1450 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1451 1451
1452 1452 pr_data = pull_request.get_api_data(with_merge_state=False)
1453 1453 self._log_audit_action(
1454 1454 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1455 1455
1456 1456 def close_pull_request_with_comment(
1457 1457 self, pull_request, user, repo, message=None, auth_user=None):
1458 1458
1459 1459 pull_request_review_status = pull_request.calculated_review_status()
1460 1460
1461 1461 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1462 1462 # approved only if we have voting consent
1463 1463 status = ChangesetStatus.STATUS_APPROVED
1464 1464 else:
1465 1465 status = ChangesetStatus.STATUS_REJECTED
1466 1466 status_lbl = ChangesetStatus.get_status_lbl(status)
1467 1467
1468 1468 default_message = (
1469 1469 'Closing with status change {transition_icon} {status}.'
1470 1470 ).format(transition_icon='>', status=status_lbl)
1471 1471 text = message or default_message
1472 1472
1473 1473 # create a comment, and link it to new status
1474 1474 comment = CommentsModel().create(
1475 1475 text=text,
1476 1476 repo=repo.repo_id,
1477 1477 user=user.user_id,
1478 1478 pull_request=pull_request.pull_request_id,
1479 1479 status_change=status_lbl,
1480 1480 status_change_type=status,
1481 1481 closing_pr=True,
1482 1482 auth_user=auth_user,
1483 1483 )
1484 1484
1485 1485 # calculate old status before we change it
1486 1486 old_calculated_status = pull_request.calculated_review_status()
1487 1487 ChangesetStatusModel().set_status(
1488 1488 repo.repo_id,
1489 1489 status,
1490 1490 user.user_id,
1491 1491 comment=comment,
1492 1492 pull_request=pull_request.pull_request_id
1493 1493 )
1494 1494
1495 1495 Session().flush()
1496 1496
1497 1497 self.trigger_pull_request_hook(pull_request, user, 'comment',
1498 1498 data={'comment': comment})
1499 1499
1500 1500 # we now calculate the status of the pull request again and, based on
1501 1501 # that calculation, trigger a status change. This can happen when a
1502 1502 # non-reviewer admin closes a PR: their vote doesn't change the status,
1503 1503 # while a reviewer's vote might.
1504 1504 calculated_status = pull_request.calculated_review_status()
1505 1505 if old_calculated_status != calculated_status:
1506 1506 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1507 1507 data={'status': calculated_status})
1508 1508
1509 1509 # finally close the PR
1510 1510 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1511 1511
1512 1512 return comment, status
1513 1513
1514 1514 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1515 1515 _ = translator or get_current_request().translate
1516 1516
1517 1517 if not self._is_merge_enabled(pull_request):
1518 1518 return None, False, _('Server-side pull request merging is disabled.')
1519 1519
1520 1520 if pull_request.is_closed():
1521 1521 return None, False, _('This pull request is closed.')
1522 1522
1523 1523 merge_possible, msg = self._check_repo_requirements(
1524 1524 target=pull_request.target_repo, source=pull_request.source_repo,
1525 1525 translator=_)
1526 1526 if not merge_possible:
1527 1527 return None, merge_possible, msg
1528 1528
1529 1529 try:
1530 1530 merge_response = self._try_merge(
1531 1531 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1532 1532 log.debug("Merge response: %s", merge_response)
1533 1533 return merge_response, merge_response.possible, merge_response.merge_status_message
1534 1534 except NotImplementedError:
1535 1535 return None, False, _('Pull request merging is not supported.')
1536 1536
1537 1537 def _check_repo_requirements(self, target, source, translator):
1538 1538 """
1539 1539 Check if `target` and `source` have compatible requirements.
1540 1540
1541 1541 Currently this is just checking for largefiles.
1542 1542 """
1543 1543 _ = translator
1544 1544 target_has_largefiles = self._has_largefiles(target)
1545 1545 source_has_largefiles = self._has_largefiles(source)
1546 1546 merge_possible = True
1547 1547 message = u''
1548 1548
1549 1549 if target_has_largefiles != source_has_largefiles:
1550 1550 merge_possible = False
1551 1551 if source_has_largefiles:
1552 1552 message = _(
1553 1553 'Target repository large files support is disabled.')
1554 1554 else:
1555 1555 message = _(
1556 1556 'Source repository large files support is disabled.')
1557 1557
1558 1558 return merge_possible, message
1559 1559
1560 1560 def _has_largefiles(self, repo):
1561 1561 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1562 1562 'extensions', 'largefiles')
1563 1563 return largefiles_ui and largefiles_ui[0].active
1564 1564
1565 1565 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1566 1566 """
1567 1567 Try to merge the pull request and return the merge status.
1568 1568 """
1569 1569 log.debug(
1570 1570 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1571 1571 pull_request.pull_request_id, force_shadow_repo_refresh)
1572 1572 target_vcs = pull_request.target_repo.scm_instance()
1573 1573 # Refresh the target reference.
1574 1574 try:
1575 1575 target_ref = self._refresh_reference(
1576 1576 pull_request.target_ref_parts, target_vcs)
1577 1577 except CommitDoesNotExistError:
1578 1578 merge_state = MergeResponse(
1579 1579 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1580 1580 metadata={'target_ref': pull_request.target_ref_parts})
1581 1581 return merge_state
1582 1582
1583 1583 target_locked = pull_request.target_repo.locked
1584 1584 if target_locked and target_locked[0]:
1585 1585 locked_by = 'user:{}'.format(target_locked[0])
1586 1586 log.debug("The target repository is locked by %s.", locked_by)
1587 1587 merge_state = MergeResponse(
1588 1588 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1589 1589 metadata={'locked_by': locked_by})
1590 1590 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1591 1591 pull_request, target_ref):
1592 1592 log.debug("Refreshing the merge status of the repository.")
1593 1593 merge_state = self._refresh_merge_state(
1594 1594 pull_request, target_vcs, target_ref)
1595 1595 else:
1596 1596 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1597 1597 metadata = {
1598 1598 'unresolved_files': '',
1599 1599 'target_ref': pull_request.target_ref_parts,
1600 1600 'source_ref': pull_request.source_ref_parts,
1601 1601 }
1602 1602 if pull_request.last_merge_metadata:
1603 metadata.update(pull_request.last_merge_metadata)
1603 metadata.update(pull_request.last_merge_metadata_parsed)
1604 1604
1605 1605 if not possible and target_ref.type == 'branch':
1606 1606 # NOTE(marcink): case for mercurial multiple heads on branch
1607 1607 heads = target_vcs._heads(target_ref.name)
1608 1608 if len(heads) != 1:
1609 1609 heads = ',\n'.join(target_vcs._heads(target_ref.name))
1610 1610 metadata.update({
1611 1611 'heads': heads
1612 1612 })
1613 1613
1614 1614 merge_state = MergeResponse(
1615 1615 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1616 1616
1617 1617 return merge_state
1618 1618
1619 1619 def _refresh_reference(self, reference, vcs_repository):
1620 1620 if reference.type in self.UPDATABLE_REF_TYPES:
1621 1621 name_or_id = reference.name
1622 1622 else:
1623 1623 name_or_id = reference.commit_id
1624 1624
1625 1625 refreshed_commit = vcs_repository.get_commit(name_or_id)
1626 1626 refreshed_reference = Reference(
1627 1627 reference.type, reference.name, refreshed_commit.raw_id)
1628 1628 return refreshed_reference
1629 1629
1630 1630 def _needs_merge_state_refresh(self, pull_request, target_reference):
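# Descriptive note (added for clarity): a refresh is skipped only when both the
# cached source tip (_last_merge_source_rev) and the cached target tip
# (_last_merge_target_rev) still match the current revisions; any drift on
# either side forces a new dry-run merge.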
1631 1631 return not (
1632 1632 pull_request.revisions and
1633 1633 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1634 1634 target_reference.commit_id == pull_request._last_merge_target_rev)
1635 1635
1636 1636 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
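# Descriptive note (added for clarity): this runs a dry-run merge in the shadow
# repository and, unless the failure reason is UNKNOWN, caches the result
# (last merge revisions, status and metadata) on the pull request so later
# status checks can skip the expensive merge simulation.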
1637 1637 workspace_id = self._workspace_id(pull_request)
1638 1638 source_vcs = pull_request.source_repo.scm_instance()
1639 1639 repo_id = pull_request.target_repo.repo_id
1640 1640 use_rebase = self._use_rebase_for_merging(pull_request)
1641 1641 close_branch = self._close_branch_before_merging(pull_request)
1642 1642 merge_state = target_vcs.merge(
1643 1643 repo_id, workspace_id,
1644 1644 target_reference, source_vcs, pull_request.source_ref_parts,
1645 1645 dry_run=True, use_rebase=use_rebase,
1646 1646 close_branch=close_branch)
1647 1647
1648 1648 # Do not store the response if there was an unknown error.
1649 1649 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1650 1650 pull_request._last_merge_source_rev = \
1651 1651 pull_request.source_ref_parts.commit_id
1652 1652 pull_request._last_merge_target_rev = target_reference.commit_id
1653 1653 pull_request.last_merge_status = merge_state.failure_reason
1654 1654 pull_request.last_merge_metadata = merge_state.metadata
1655 1655
1656 1656 pull_request.shadow_merge_ref = merge_state.merge_ref
1657 1657 Session().add(pull_request)
1658 1658 Session().commit()
1659 1659
1660 1660 return merge_state
1661 1661
1662 1662 def _workspace_id(self, pull_request):
1663 1663 workspace_id = 'pr-%s' % pull_request.pull_request_id
1664 1664 return workspace_id
1665 1665
1666 1666 def generate_repo_data(self, repo, commit_id=None, branch=None,
1667 1667 bookmark=None, translator=None):
1668 1668 from rhodecode.model.repo import RepoModel
1669 1669
1670 1670 all_refs, selected_ref = \
1671 1671 self._get_repo_pullrequest_sources(
1672 1672 repo.scm_instance(), commit_id=commit_id,
1673 1673 branch=branch, bookmark=bookmark, translator=translator)
1674 1674
1675 1675 refs_select2 = []
1676 1676 for element in all_refs:
1677 1677 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1678 1678 refs_select2.append({'text': element[1], 'children': children})
1679 1679
1680 1680 return {
1681 1681 'user': {
1682 1682 'user_id': repo.user.user_id,
1683 1683 'username': repo.user.username,
1684 1684 'firstname': repo.user.first_name,
1685 1685 'lastname': repo.user.last_name,
1686 1686 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1687 1687 },
1688 1688 'name': repo.repo_name,
1689 1689 'link': RepoModel().get_url(repo),
1690 1690 'description': h.chop_at_smart(repo.description_safe, '\n'),
1691 1691 'refs': {
1692 1692 'all_refs': all_refs,
1693 1693 'selected_ref': selected_ref,
1694 1694 'select2_refs': refs_select2
1695 1695 }
1696 1696 }
1697 1697
1698 1698 def generate_pullrequest_title(self, source, source_ref, target):
1699 1699 return u'{source}#{at_ref} to {target}'.format(
1700 1700 source=source,
1701 1701 at_ref=source_ref,
1702 1702 target=target,
1703 1703 )
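# Illustrative only (hypothetical names): generate_pullrequest_title(
# 'my-fork', 'feature-x', 'upstream-repo') yields u'my-fork#feature-x to upstream-repo'.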
1704 1704
1705 1705 def _cleanup_merge_workspace(self, pull_request):
1706 1706 # Merging related cleanup
1707 1707 repo_id = pull_request.target_repo.repo_id
1708 1708 target_scm = pull_request.target_repo.scm_instance()
1709 1709 workspace_id = self._workspace_id(pull_request)
1710 1710
1711 1711 try:
1712 1712 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1713 1713 except NotImplementedError:
1714 1714 pass
1715 1715
1716 1716 def _get_repo_pullrequest_sources(
1717 1717 self, repo, commit_id=None, branch=None, bookmark=None,
1718 1718 translator=None):
1719 1719 """
1720 1720 Return a structure with the repo's interesting commits, suitable for
1721 1721 the selectors in the pullrequest controller
1722 1722
1723 1723 :param commit_id: a commit that must be in the list somehow
1724 1724 and selected by default
1725 1725 :param branch: a branch that must be in the list and selected
1726 1726 by default - even if closed
1727 1727 :param bookmark: a bookmark that must be in the list and selected
1728 1728 """
1729 1729 _ = translator or get_current_request().translate
1730 1730
1731 1731 commit_id = safe_str(commit_id) if commit_id else None
1732 1732 branch = safe_unicode(branch) if branch else None
1733 1733 bookmark = safe_unicode(bookmark) if bookmark else None
1734 1734
1735 1735 selected = None
1736 1736
1737 1737 # order matters: the first source that contains commit_id will be selected
1738 1738 sources = []
1739 1739 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1740 1740 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1741 1741
1742 1742 if commit_id:
1743 1743 ref_commit = (h.short_id(commit_id), commit_id)
1744 1744 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1745 1745
1746 1746 sources.append(
1747 1747 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1748 1748 )
1749 1749
1750 1750 groups = []
1751 1751
1752 1752 for group_key, ref_list, group_name, match in sources:
1753 1753 group_refs = []
1754 1754 for ref_name, ref_id in ref_list:
1755 1755 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1756 1756 group_refs.append((ref_key, ref_name))
1757 1757
1758 1758 if not selected:
1759 1759 if set([commit_id, match]) & set([ref_id, ref_name]):
1760 1760 selected = ref_key
1761 1761
1762 1762 if group_refs:
1763 1763 groups.append((group_refs, group_name))
1764 1764
1765 1765 if not selected:
1766 1766 ref = commit_id or branch or bookmark
1767 1767 if ref:
1768 1768 raise CommitDoesNotExistError(
1769 1769 u'No commit refs could be found matching: {}'.format(ref))
1770 1770 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1771 1771 selected = u'branch:{}:{}'.format(
1772 1772 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1773 1773 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1774 1774 )
1775 1775 elif repo.commit_ids:
1776 1776 # make the user select in this case
1777 1777 selected = None
1778 1778 else:
1779 1779 raise EmptyRepositoryError()
1780 1780 return groups, selected
1781 1781
1782 1782 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1783 1783 hide_whitespace_changes, diff_context):
1784 1784
1785 1785 return self._get_diff_from_pr_or_version(
1786 1786 source_repo, source_ref_id, target_ref_id,
1787 1787 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1788 1788
1789 1789 def _get_diff_from_pr_or_version(
1790 1790 self, source_repo, source_ref_id, target_ref_id,
1791 1791 hide_whitespace_changes, diff_context):
1792 1792
1793 1793 target_commit = source_repo.get_commit(
1794 1794 commit_id=safe_str(target_ref_id))
1795 1795 source_commit = source_repo.get_commit(
1796 1796 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1797 1797 if isinstance(source_repo, Repository):
1798 1798 vcs_repo = source_repo.scm_instance()
1799 1799 else:
1800 1800 vcs_repo = source_repo
1801 1801
1802 1802 # TODO: johbo: In the context of an update, we cannot reach
1803 1803 # the old commit anymore with our normal mechanisms. It needs
1804 1804 # some sort of special support in the vcs layer to avoid this
1805 1805 # workaround.
1806 1806 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1807 1807 vcs_repo.alias == 'git'):
1808 1808 source_commit.raw_id = safe_str(source_ref_id)
1809 1809
1810 1810 log.debug('calculating diff between '
1811 1811 'source_ref:%s and target_ref:%s for repo `%s`',
1812 1812 target_ref_id, source_ref_id,
1813 1813 safe_unicode(vcs_repo.path))
1814 1814
1815 1815 vcs_diff = vcs_repo.get_diff(
1816 1816 commit1=target_commit, commit2=source_commit,
1817 1817 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1818 1818 return vcs_diff
1819 1819
1820 1820 def _is_merge_enabled(self, pull_request):
1821 1821 return self._get_general_setting(
1822 1822 pull_request, 'rhodecode_pr_merge_enabled')
1823 1823
1824 1824 def _use_rebase_for_merging(self, pull_request):
1825 1825 repo_type = pull_request.target_repo.repo_type
1826 1826 if repo_type == 'hg':
1827 1827 return self._get_general_setting(
1828 1828 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1829 1829 elif repo_type == 'git':
1830 1830 return self._get_general_setting(
1831 1831 pull_request, 'rhodecode_git_use_rebase_for_merging')
1832 1832
1833 1833 return False
1834 1834
1835 1835 def _user_name_for_merging(self, pull_request, user):
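# Descriptive note (added for clarity): the user attribute used for the merge
# author name can be overridden via the RC_MERGE_USER_NAME_ATTR environment
# variable; e.g. (illustrative) RC_MERGE_USER_NAME_ATTR=username picks
# user.username when that attribute exists, otherwise 'short_contact' is used.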
1836 1836 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
1837 1837 if env_user_name_attr and hasattr(user, env_user_name_attr):
1838 1838 user_name_attr = env_user_name_attr
1839 1839 else:
1840 1840 user_name_attr = 'short_contact'
1841 1841
1842 1842 user_name = getattr(user, user_name_attr)
1843 1843 return user_name
1844 1844
1845 1845 def _close_branch_before_merging(self, pull_request):
1846 1846 repo_type = pull_request.target_repo.repo_type
1847 1847 if repo_type == 'hg':
1848 1848 return self._get_general_setting(
1849 1849 pull_request, 'rhodecode_hg_close_branch_before_merging')
1850 1850 elif repo_type == 'git':
1851 1851 return self._get_general_setting(
1852 1852 pull_request, 'rhodecode_git_close_branch_before_merging')
1853 1853
1854 1854 return False
1855 1855
1856 1856 def _get_general_setting(self, pull_request, settings_key, default=False):
1857 1857 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1858 1858 settings = settings_model.get_general_settings()
1859 1859 return settings.get(settings_key, default)
1860 1860
1861 1861 def _log_audit_action(self, action, action_data, user, pull_request):
1862 1862 audit_logger.store(
1863 1863 action=action,
1864 1864 action_data=action_data,
1865 1865 user=user,
1866 1866 repo=pull_request.target_repo)
1867 1867
1868 1868 def get_reviewer_functions(self):
1869 1869 """
1870 1870 Fetches functions for validation and fetching default reviewers.
1871 1871 If available we use the EE package, else we fall back to the CE
1872 1872 package functions.
1873 1873 """
1874 1874 try:
1875 1875 from rc_reviewers.utils import get_default_reviewers_data
1876 1876 from rc_reviewers.utils import validate_default_reviewers
1877 1877 except ImportError:
1878 1878 from rhodecode.apps.repository.utils import get_default_reviewers_data
1879 1879 from rhodecode.apps.repository.utils import validate_default_reviewers
1880 1880
1881 1881 return get_default_reviewers_data, validate_default_reviewers
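# Illustrative use of the returned pair (model = PullRequestModel()):
#   get_default_reviewers_data, validate_default_reviewers = model.get_reviewer_functions()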
1882 1882
1883 1883
1884 1884 class MergeCheck(object):
1885 1885 """
1886 1886 Perform merge checks and return a check object which stores information
1887 1887 about merge errors and merge conditions.
1888 1888 """
1889 1889 TODO_CHECK = 'todo'
1890 1890 PERM_CHECK = 'perm'
1891 1891 REVIEW_CHECK = 'review'
1892 1892 MERGE_CHECK = 'merge'
1893 1893 WIP_CHECK = 'wip'
1894 1894
1895 1895 def __init__(self):
1896 1896 self.review_status = None
1897 1897 self.merge_possible = None
1898 1898 self.merge_msg = ''
1899 1899 self.merge_response = None
1900 1900 self.failed = None
1901 1901 self.errors = []
1902 1902 self.error_details = OrderedDict()
1903 1903 self.source_commit = AttributeDict()
1904 1904 self.target_commit = AttributeDict()
1905 1905
1906 1906 def __repr__(self):
1907 1907 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
1908 1908 self.merge_possible, self.failed, self.errors)
1909 1909
1910 1910 def push_error(self, error_type, message, error_key, details):
1911 1911 self.failed = True
1912 1912 self.errors.append([error_type, message])
1913 1913 self.error_details[error_key] = dict(
1914 1914 details=details,
1915 1915 error_type=error_type,
1916 1916 message=message
1917 1917 )
1918 1918
1919 1919 @classmethod
1920 1920 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1921 1921 force_shadow_repo_refresh=False):
1922 1922 _ = translator
1923 1923 merge_check = cls()
1924 1924
1925 1925 # title has WIP:
1926 1926 if pull_request.work_in_progress:
1927 1927 log.debug("MergeCheck: cannot merge, title has wip: marker.")
1928 1928
1929 1929 msg = _('WIP marker in title prevents an accidental merge.')
1930 1930 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
1931 1931 if fail_early:
1932 1932 return merge_check
1933 1933
1934 1934 # permissions to merge
1935 1935 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
1936 1936 if not user_allowed_to_merge:
1937 1937 log.debug("MergeCheck: cannot merge, approval is pending.")
1938 1938
1939 1939 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1940 1940 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1941 1941 if fail_early:
1942 1942 return merge_check
1943 1943
1944 1944 # permission to merge into the target branch
1945 1945 target_commit_id = pull_request.target_ref_parts.commit_id
1946 1946 if pull_request.target_ref_parts.type == 'branch':
1947 1947 branch_name = pull_request.target_ref_parts.name
1948 1948 else:
1949 1949 # for mercurial we can always figure out the branch from the commit
1950 1950 # in the case of a bookmark
1951 1951 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1952 1952 branch_name = target_commit.branch
1953 1953
1954 1954 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1955 1955 pull_request.target_repo.repo_name, branch_name)
1956 1956 if branch_perm and branch_perm == 'branch.none':
1957 1957 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1958 1958 branch_name, rule)
1959 1959 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1960 1960 if fail_early:
1961 1961 return merge_check
1962 1962
1963 1963 # review status, must be always present
1964 1964 review_status = pull_request.calculated_review_status()
1965 1965 merge_check.review_status = review_status
1966 1966
1967 1967 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1968 1968 if not status_approved:
1969 1969 log.debug("MergeCheck: cannot merge, approval is pending.")
1970 1970
1971 1971 msg = _('Pull request reviewer approval is pending.')
1972 1972
1973 1973 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1974 1974
1975 1975 if fail_early:
1976 1976 return merge_check
1977 1977
1978 1978 # left over TODOs
1979 1979 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1980 1980 if todos:
1981 1981 log.debug("MergeCheck: cannot merge, {} "
1982 1982 "unresolved TODOs left.".format(len(todos)))
1983 1983
1984 1984 if len(todos) == 1:
1985 1985 msg = _('Cannot merge, {} TODO still not resolved.').format(
1986 1986 len(todos))
1987 1987 else:
1988 1988 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1989 1989 len(todos))
1990 1990
1991 1991 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1992 1992
1993 1993 if fail_early:
1994 1994 return merge_check
1995 1995
1996 1996 # merge possible, here is the filesystem simulation + shadow repo
1997 1997 merge_response, merge_status, msg = PullRequestModel().merge_status(
1998 1998 pull_request, translator=translator,
1999 1999 force_shadow_repo_refresh=force_shadow_repo_refresh)
2000 2000
2001 2001 merge_check.merge_possible = merge_status
2002 2002 merge_check.merge_msg = msg
2003 2003 merge_check.merge_response = merge_response
2004 2004
2005 2005 source_ref_id = pull_request.source_ref_parts.commit_id
2006 2006 target_ref_id = pull_request.target_ref_parts.commit_id
2007 2007
2008 2008 try:
2009 2009 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2010 2010 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2011 2011 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2012 2012 merge_check.source_commit.current_raw_id = source_commit.raw_id
2013 2013 merge_check.source_commit.previous_raw_id = source_ref_id
2014 2014
2015 2015 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2016 2016 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2017 2017 merge_check.target_commit.current_raw_id = target_commit.raw_id
2018 2018 merge_check.target_commit.previous_raw_id = target_ref_id
2019 2019 except (SourceRefMissing, TargetRefMissing):
2020 2020 pass
2021 2021
2022 2022 if not merge_status:
2023 2023 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2024 2024 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2025 2025
2026 2026 if fail_early:
2027 2027 return merge_check
2028 2028
2029 2029 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2030 2030 return merge_check
2031 2031
2032 2032 @classmethod
2033 2033 def get_merge_conditions(cls, pull_request, translator):
2034 2034 _ = translator
2035 2035 merge_details = {}
2036 2036
2037 2037 model = PullRequestModel()
2038 2038 use_rebase = model._use_rebase_for_merging(pull_request)
2039 2039
2040 2040 if use_rebase:
2041 2041 merge_details['merge_strategy'] = dict(
2042 2042 details={},
2043 2043 message=_('Merge strategy: rebase')
2044 2044 )
2045 2045 else:
2046 2046 merge_details['merge_strategy'] = dict(
2047 2047 details={},
2048 2048 message=_('Merge strategy: explicit merge commit')
2049 2049 )
2050 2050
2051 2051 close_branch = model._close_branch_before_merging(pull_request)
2052 2052 if close_branch:
2053 2053 repo_type = pull_request.target_repo.repo_type
2054 2054 close_msg = ''
2055 2055 if repo_type == 'hg':
2056 2056 close_msg = _('Source branch will be closed before the merge.')
2057 2057 elif repo_type == 'git':
2058 2058 close_msg = _('Source branch will be deleted after the merge.')
2059 2059
2060 2060 merge_details['close_branch'] = dict(
2061 2061 details={},
2062 2062 message=close_msg
2063 2063 )
2064 2064
2065 2065 return merge_details
2066 2066
2067 2067
2068 2068 ChangeTuple = collections.namedtuple(
2069 2069 'ChangeTuple', ['added', 'common', 'removed', 'total'])
2070 2070
2071 2071 FileChangeTuple = collections.namedtuple(
2072 2072 'FileChangeTuple', ['added', 'modified', 'removed'])
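# Illustrative (hypothetical values): notify_users() above receives instances of
# these tuples, e.g. FileChangeTuple(added=['docs/intro.rst'], modified=[], removed=[]).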